{
"cells": [
{
"cell_type": "code",
   "execution_count": 1,
   "metadata": {},
"outputs": [
{
"ename": "ModuleNotFoundError",
"evalue": "No module named 'pytesseract'",
"output_type": "error",
"traceback": [
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[1;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)",
"Cell \u001b[1;32mIn[1], line 7\u001b[0m\n\u001b[0;32m 5\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mnumpy\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m \u001b[38;5;21;01mnp\u001b[39;00m\n\u001b[0;32m 6\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mmatplotlib\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mpyplot\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m \u001b[38;5;21;01mplt\u001b[39;00m\n\u001b[1;32m----> 7\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mpytesseract\u001b[39;00m\n\u001b[0;32m 8\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mre\u001b[39;00m\n\u001b[0;32m 9\u001b[0m pytesseract\u001b[38;5;241m.\u001b[39mpytesseract\u001b[38;5;241m.\u001b[39mtesseract_cmd \u001b[38;5;241m=\u001b[39m \u001b[38;5;124mr\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mC:\u001b[39m\u001b[38;5;124m\\\u001b[39m\u001b[38;5;124mProgram Files\u001b[39m\u001b[38;5;124m\\\u001b[39m\u001b[38;5;124mTesseract-OCR\u001b[39m\u001b[38;5;124m\\\u001b[39m\u001b[38;5;124mtesseract.exe\u001b[39m\u001b[38;5;124m'\u001b[39m\n",
"\u001b[1;31mModuleNotFoundError\u001b[0m: No module named 'pytesseract'"
]
}
],
"source": [
"# Working perfectly 3 8 12 14\n",
"\n",
"# Import dependencies\n",
"import cv2 \n",
"import numpy as np\n",
"import matplotlib.pyplot as plt\n",
"import pytesseract\n",
"import re\n",
"pytesseract.pytesseract.tesseract_cmd = r'C:\\Program Files\\Tesseract-OCR\\tesseract.exe'\n",
"# Setup function to detect car plate\n",
"def carplate_detect(image, carplate_haar_cascade):\n",
" carplate_overlay = image.copy() # Create overlay to display red rectangle of detected car plate\n",
" carplate_rects = carplate_haar_cascade.detectMultiScale(carplate_overlay,scaleFactor=1.1, minNeighbors=5) \n",
"\n",
" for x,y,w,h in carplate_rects: \n",
" cv2.rectangle(carplate_overlay, (x,y), (x+w,y+h), (255,0,0), 5) \n",
" \n",
" return carplate_overlay\n",
"\n",
"def enlarge_plt_display(image, scale_factor):\n",
" width = int(image.shape[1] * scale_factor / 100)\n",
" height = int(image.shape[0] * scale_factor / 100)\n",
" dim = (width, height)\n",
" plt.figure(figsize = dim)\n",
" plt.axis('off') \n",
" plt.imshow(image)\n",
"\n",
" cv2.imwrite(\"result.png\", image)\n",
"\n",
"def carplate_extract(image, carplate_haar_cascade):\n",
" \n",
" carplate_rects = carplate_haar_cascade.detectMultiScale(image,scaleFactor=1.1, minNeighbors=5) \n",
"\n",
" for x,y,w,h in carplate_rects: \n",
" carplate_img_extract = image[y+15:y+h-10 ,x+15:x+w-20] \n",
" \n",
" return carplate_img_extract\n",
"\n",
"def enlarge_img(image, scale_percent):\n",
" width = int(image.shape[1] * scale_percent / 100)\n",
" height = int(image.shape[0] * scale_percent / 100)\n",
" dim = (width, height)\n",
" resized_image = cv2.resize(image, dim, interpolation = cv2.INTER_AREA)\n",
" return resized_image\n",
"\n",
"\n",
"def convertToText(carplate_img):\n",
" #Detection \n",
" carplate_img_rgb = cv2.cvtColor(carplate_img, cv2.COLOR_BGR2RGB)\n",
"\n",
" carplate_haar_cascade = cv2.CascadeClassifier('haarcascade_russian_plate_number.xml')\n",
" detected_carplate_img = carplate_detect(carplate_img_rgb, carplate_haar_cascade)\n",
" enlarge_plt_display(detected_carplate_img, 1.2) \n",
"\n",
"\n",
" #OCR\n",
" carplate_extract_img = carplate_extract(carplate_img_rgb, carplate_haar_cascade)\n",
" carplate_extract_img = enlarge_img(carplate_extract_img, 150)\n",
" carplate_extract_img_gray = cv2.cvtColor(carplate_extract_img, cv2.COLOR_RGB2GRAY)\n",
" carplate_extract_img_gray_blur = cv2.medianBlur(carplate_extract_img_gray,3) # Kernel size 3\n",
"\n",
" text = pytesseract.image_to_string(carplate_extract_img_gray_blur, \n",
" config = f'--psm 8 --oem 3 -c tessedit_char_whitelist=ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789')\n",
"\n",
"\n",
" print(\"text : \", text)\n",
" text = re.sub('[^A-Za-z0-9]+', '', text)\n",
" return text\n",
"\n",
"# carplate_img = cv2.imread('Cars19.png')\n",
"# print(convertToText(carplate_img))"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"text : 0200163\n",
"\n",
"0200163\n"
]
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAABOQAAAOwCAYAAACeeatzAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOz9SZcsubbnh/02ADNz92hOl81tXleNSEoqNjOJkyJXzcQJ1+Kn4EDfQR+AA4454EwacCRpyJlYEiUNuCiSYoksktW89+rdJk+eJjp3NzMAW4MNMzf38DgnTt7Me18V8V95MiLczWAwGAzY+OO/9xZVVSoqKioqKioqKioqKioqKioqKip+L3B/6ApUVFRUVFRUVFRUVFRUVFRUVFT8zwmVkKuoqKioqKioqKioqKioqKioqPg9ohJyFRUVFRUVFRUVFRUVFRUVFRUVv0dUQq6ioqKioqKioqKioqKioqKiouL3iErIVVRUVFRUVFRUVFRUVFRUVFRU/B5RCbmKioqKioqKioqKioqKioqKiorfIyohV1FRUVFRUVFRUVFRUVFRUVFR8XtEJeQqKioqKioqKioqKioqKioqKip+j6iEXEVFRUVFRUVFRUVFRUVFRUVFxe8R4acrWkEzkO1PsY9A7Hek/CsfL/6ekOfvHmM62qHlWlPZpUw9HKgn5Zxe6fiqFRUVFRUVFRUVFRUVFRUVFRUVPx1+QkJugjz6VT8rzDP6zDHReY8pMzdRbLqk22T+VY4/XZT6I+BXv4J/+A9/rNIqKj4PEfi3/q2ZcK6oqKioqKioqKioqKioqPjnF6KqPxpPdQpdaNOUiRh7rIQ7PQvyglBbkndLak0R8kEKpwDhEWGhp6eewbMpju0W/uP/GP7z/xz+k//kuWdVVPzuEIH/8D+EP/5j+Pf+vT90bSoqKioqKioqKioqKioqKn4H/GSEnHLsKnog5E4q8OhzRUh2hpZv5ERRp9P/JqfW6Vh/RkFk1J7inmTenk3IffcdfPvtc4+uqPjx8Xf/Lvxn/9kfuhYVFRUVFRUVFRUVFRUVFRW/A37ypA6npNyXsX/yxL9zxy0u8qgGIGREFdHPl3YWOcO/++8+v+oVFT8F/qv/Cv6D/+APXYuKioqKioqKioqKioqKiorfAb+HGHLHOJtQYSbRFEFR8fOXh4QPB5WdzEkhSpQ5VRBBdCp9wcrpIqkE8NgF9gticv0X/8Xzj62o+Clwewv/+B//oWtRUVFRUVFRUVFRUVFRUVHxO+AnJ+SOyTdTqMFxTLklKaciZIVcXE/zE6W6UorgZmrukMR1yrSaS/k/WZi8ioqKioqKioqKioqKioqKioqKL8JPRshNmjYj27QQcIJRbCZ9m0i58gmCkDOMQK/KOEZiSliYOyniN/M5dSIE5wg+EILQiJ3buMP1j9R3P2Vyyr/392r2y4ofH3/1V/Df//d/6FpUVFRUVFRUVFRUVFRUVFT8yPgJFXKKqAJLQu3gLioiIELOiqrinJCysh8zt0PiZpe5ub3jYbslpgQ4RIScEs6B95511/LyxTUvX67ZNELrIQI5gXcQxCOiFv/tuGr2Y8oZ8bvcpnPwn/6nEH7v3r8V/6LjP/qP4N//9//QtaioqKioqKioqKioqKioqPiR8ZOySEqGnBAnBzdScajKIQurE2KCh4fE/f2W24c9N/uRfRJiTCQVoLFjswIep5CHzG7ck11D9oE7L1y0nk3nWbVWdlRwKnjnjJibKyaTdq+ioqKioqKioqKioqKioqKiouL3ip9e1iVThDeIWdGSqCELpAT3O+X9zR3vP96z2/XEDMl5U7qpmoxNMCWamJrO48iarEz19KOjHxLj6Nj1ifVKuFg5Vt4un1QIjzJJVFRUVFRUVFRUVFRUVFRUVFRU/P7xkxJyUtxUFfMazThEQFF2Pby92fPdu1tuH7aMScA1uBBw3hFQYs6oKqrm3oqAZItHFxpH17asNxvaLp
CSZ8wQh8x+TGx3cLnyvFg7Wj9HszvNwVpRUVFRUVFRUVFRUVFRUVFRUfF7xRcQcp/JjrD4ekl4Ta6qOIcgRIWPD5HffH/H9x8f2A4JXAPdiowjIeRhh9OI+AbnPDjQbC6r4oSUM4IQQkBEGGImxQzOE5wHhIf9jqGP5HHNi6uGdTNlZj2t8Pl7+tJcEE+RfI/O15OS5Ynr6ckvR0kjzl1NTr7WJyr/+Tt69r1UVFRUVFRUVFRUVFRUVFRUVHwxnk/IaUmMIA6QE85nkTJ1OhwWcjSHiqNPyve3e/7yVx+4edihEhAfEN8QNRNzRsQRQoNTTwZyzua6WrKsCgIpggTatsF7j3cOvMWFy4VNCqFF1HPfJ3bDwNevL+iCo3Hg1O5AmJI9+FJ3OSKjPkN7PTrmc8q7Qxi7jLnxlmyzcmg+PTpOF8cuPpufxaFWWn4e7kAX5N+y8ouy9AmK7bQ+Z+7v+MzfRyrbioqKioqKioqKioqKioqKin8x8AUKOct0ag6nJ8SLAiWmG2I0l+KMWCqMTh+V397s+fNffc/DPkFYoZpRzeQ0IHiCCEq2pA/iEEDEIaqHq6rinSN4T9d15doWW07E9HiCkAvhlTQDwq8+jry8bLnuhM5B0IzlZJ1YRf/4lhZ/f45qmqizJeTsebr4J0d/nZ67PFZ1IiSnz3IpYkpQ4csZh3Y6PIDlHTyPPFueeXrG764nrKioqKioqKioqKioqKioqPifL74whtw5DVihboRCAMms01J1qCpRHb9+d8ef/+Yd21EgdGjRqCkZZMrCWhRuCnm6lipZMbLNCWhGxeN8axlbUXJRz0ERfaklkkiigCWDyBne3w3kHHi98XjxYGc+iS+hlw6pK843kTw68uCy+oj4mpWFduwRAXqkahMeX1lOxYqLYw/fs2izc96tX0atVSKuoqKioqKioqKioqKioqKi4rn4AkJOFmqsM99Nmq5CoKkIKpAQ3t3s+Kvfvuf2YcRvrlA8Y4w4EUQcTqQkbyglCaZ6YyLUhCzmvmpX8vjQHJxOC3k10YFHKj6xhA7iYD9EbreJNjhcC400+CMV2fEdPbNVCif5KXLvQJjpUjf3iYsciMVCbsqkTxSk/HYg62SmFidqc9GQRxc6+/wWpODTKrhPoRJyFRUVFRUVFRUVFRUVFRUVFc/FFxByB/LnmLyRQ9Az1cKNGQGVgdtd5C9/e8PNPuPWF6hvGWMiiyACXjCX1GyaOOccrrBcWRVxDhFTzsVsTqHihKZtSp4DMfptii93EjdNMTLPATQt+5T4+JDwznPZSLmzWdM3E2yK+wJWLs/3f/LFImlDaRM5pciW7qn2V1588cgZVKZa+sWZS8JtIu9+mObNYtgVN9lZqXdyvlQCrqKioqKioqKioqKioqKiouKH4tmEnMpCrVU+O41zBiXmW3FH3Y2ZX3+/5f19D77FhRXbfkDE0YYGyRFyBE04igIsRTTb39550GiqMHGEQvQ5EVZtIIiW3AUZ52SR66AQSgc2jIjgPaQsPPSRtnG0wVxhHfpIJCfzZ3JEjD1qFwB1C5pyotOm8w5JMFQo8fUOTqsHzeF0nivlylFKB3fU6Mc6tmWsunzy85iY05NfP63UOyYKp5+TOk8etUml6SoqKioqKioqKioqKioqKio+jy9UyD1FTJVPZTpOGLNyc9/z7uMtSYXsPJoswYKIgiZy3KNjz7oLvLy+Zr0KoIpopnGWmOHufsvN3QNDzIS2I2fF4eiIhCSoZiP01JPFFZfOQy2zehCHKuQsiHOk5HnYZ9rgCWsIcww7OFBZBwZskZvi/N2XpBBykhFVJ3fbhertOFHEFEkvc0x8HUg5Tn5DpVzvOEmDlEy0Tg5k3rlndCAan7yRc18sfs+zK21l4CoqKioqKioqKioqKioqKiq+HF+Y1OEERy6NlOyoRmntY+bt+1v2YwIXLEZcNqINMsSetYdV2/H65SW/+PY1q5VJ0ZxCIxCz8v2Hhs4p/RhR8S
iCD46rFrxXcsyIZsQ71AlRi/pMhJiVfhxRPE48mh3OWZKHYRzZ7h0rD21XCCxdkFYoiCPnDO6pRAkGyz/rSpbXA32
"text/plain": [
"<Figure size 1700x1200 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
   "source": [
    "# Run the full detect + OCR pipeline on a sample image.\n",
    "carplate_img = cv2.imread('img/26417135.jpg')\n",
    "print(convertToText(carplate_img))"
   ]
},
{
"cell_type": "code",
"execution_count": 31,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAnwAAAHiCAYAAAByaRH4AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOz915dlSXbeCf5MHHGVy9CZkZGisrI0gAKKkOxiL7Ib3WiySQ67Z9Y8jPizZvU8cdZMvwwXV/c0yQWABBUEgQIKQKFQKisrMyNFKPdwcfU5x8Q8bLNzr3tEZkUBILsH41YrKyLc7z3SbNtn3/72ZyrGGLlqV+2qXbWrdtWu2lW7an9tm/5f+wKu2lW7alftql21q3bVrtp/3HYF+K7aVbtqV+2qXbWrdtX+mrcrwHfVrtpVu2pX7apdtav217xdAb6rdtWu2lW7alftql21v+btCvBdtat21a7aVbtqV+2q/TVvV4Dvql21q3bVrtpVu2pX7a95uwJ8V+2qXbWrdtWu2lW7an/N2xXgu2pX7apdtat21a7aVftr3q4A31W7alftql21q3bVrtpf82b/cl+PECMQ0r8UKJV+p4j9pzZ/10RUjKhLnw/97/O3r9pVu2pX7apdtat21a7aX0VTf5mt1WL6/+cdQPWfiOlfCcLFgCJAVKDM1nEgKoW68P2rdtWu2lW7alftql21q/aXbX9Jhk9YuszOZebuAlhT8n+x/6ei/1SM/b+iAhUz43cF967aVbtqV+2qXbWrdtX+qtpfGvApIrqn+CI9RxfBo1h2kda1ZB7RakVVWAqt0EgKV6U0bwaHV+2qXbWrdtWu2lW7alftr679pQCfyhq+6EBpwBAA5yPLdcfTWcPRvGGxaghRPltYzc3rBxxMasalojQKGxVKhcT+5WNftat21a7aVbtqV+2qXbW/ivYXAnzbmj0h5RQhKhwwXTkeHC04Pl/S+IiLkYASQBjBopg2ii4EZqVip4bdWlMpTVTbZ7gM+Z6FgllDuPmJeq6ecPsYfbHIc74RnwMznw88n3d9f0Xtwm1evhv1nJ99UruCzP+/0j7pjf6neYM/rj9d9aO/Xm3rfeeAq+InfUJ+/al9IKYvbH3m0vE+6TxXfeuq/f9n+1+P1nphwCcQJxdoKHIGVn6iaaPi4cmK+4+nzJqIowAFBiffj3KUqqwobIFDM2+haVuaVnFtUlIZhb5Q6JHOvP1cYr4S+UgGfRev9VlAl4+4KTS5cNCtu9v+7mVwGy99Z/sD6uLPP+ld9u96m81MVxUvX9PmT3X5auLWnSdwKIyrJMmfDa7bh/txHS0+5yM/BmzGTz7mp57yeae6fNqtD78I3FWfdMIfM2/95dqzN5Lf3At++0L7SaD9p1zC5tjPvYx+NH/C4S6OhUtfe+bTmy4gf9nugv2c/9wzXbW/svbManz7R/FCDO9/FgEV2FZgx624E7elOp94snjhr1Fd+MvmI32neF6s4zm/e9HfXDV47uv/9A+9SLtAxPxF23+qN/e8xcsnxLHLX/lJL/FTH8ePm9ief+JPHwmfdsIXu/gXZ/gujmcCEZNSuj4oPjpa8u7jBSuviUSiCgIFk+2KTliqLEtUKsyIgKPgpImsoufmxLBjQwpA2+eOF+JGDkkhIT6FJ6LTES8/opg+q9FIhXBEE9FsT2nbQXH7b0pdfBnqGUAKz3/YP+nL2a553gTKXBITVQJyl4DgBvXKW5Fm+iNdCOyfeO7LV/JpYeOTeNDL13Xp059YDP7prKzq3/bmiD++/UWCyyfd84/jjDef3+hQ8y8Um3fyKWf8hDnx2TM/n/V+obuNzzvf897l84/842Li8xdSVxP0/6badpxQADotEHPsUCBR/UIMUKjnvsjnLba3f65iPiZITGITB9Tz+/JVj/mLtedFqb88PHjRM/249p/ynX4SZHrxRcV/vGvJP77U19XlOe4nPN72sX5M+wuldBVRhm+MhKh4eL
7i/cdnrENJIGLx6BCISiVgpSWcKIMqSpzSGEDHBGaiYtlGjmeOcsdSm4u3kX38BGYFwAOgo5KApewWLEgFIPnbafY0/YynUM/EL5VJMiDQewsqjVjHqP5zz0yxz1Ax+Qef8AKex3T0l+vSdaSVdpQAfPHD+U631twxpE5TXDqmSj/fQNUXoo6eCcaXz/8cYK3Yem5sfUacFVV8PvDJVd7bTEK+PP3cwfoMbfGcYz7z6RdsW9/cAkXx0q8uts0yI6qLkFru7JPYs+df26cvI54HRJ+3ov2kK33eUfWnwL7nHf/H9Y2L9/aX4QSu2l91k3ipFMToAS8L2qggmn7sSfTRW0P9k995SGBwm7iLEXFciBqUA1w6kJVzPfdwV0DvJ21/4bH1InPA9mc//Qf/G2ufNv/GF/jMX/W1/PiffzIV8qLHe/H24oBPxZRy3NgoRzTTpuPdJ+csosFFKIkUsWM8KNBGoOGycaxdwOiC2iosAdNP9eDRBDTz1nG28hyODTZBxR7mRYF5Ck0OTSp2EA0OK5yWUqgEhPIrzVergbA1Iee/bYBkRKnEXW6BMhXTlL81SGIOcvkHG7R4sX0qURZ6iNODtygAScUMzrZYu3gJ+Km0Jo8RpRwoRYzZttpffAqXr+OyZmf7NvrryF/Pv3weRbT9XPL5zBaTsLlgr7ZQ/IWTb//xPBrBb127Ypvq3bB/l25Qxf66N7dyiUmMKYGvVP+IVA+Ot9mNrft4TvDLluP9RBm3r0hf5PfUc/964bo+dX6N2997Fpx+Ygy/8D114Q51fO5tbZ1j66iX0/aXc7Yx94Kw9ePtxdSnBDd1+V1uBtynaciyjegFJv4Sm9wf+zmf/XHH+rTPXTz+p8+gn0hsPeczm+MCqBe+rnSU5x9TbQ1TNCKeCUg2ZisaphiPyuPOPPfactPbryudftMtIkQLKi3dlO/Pf+lOt84hcfjy87o8Ii9M3peez7OX+uzkLne56V+Xb+PSGZ457oXPfcL7ed47f5F+8LzPflKffuZin7dofd7NqYv96tnjXz72ZinwzLH4lLHzzMt4cZh64bJ/ouel+x9c7BFxq+88G4wvvtNnfv3pneN519Jfz8XneyGeqcsxdoNNtq+4z4r+RLHg2fbiGr7gegimooMIXbR8/HTO2bLFqQqNZ2Ajrxzucuf6hLJQxBB5er5ivmqI2jIeRXTsQBsCCudh1jjaYPEozhYdo1IxKvI5PWCYrzpWzkNUVEVJVRfUBNqu4cG54+ZuyaC2fd9MUj8pIlY5UVoQUWg84NJzFoCVIB+giVonyBSxmaWJgc55YgRrSwB8cHjvKcoiDZDtXqAuDpQL/dynFTZ453FeUxQVMYI2S5RaEKmIYURQOnXcVhgvBTEBQulIMtiU1qkzeJSaohUQKwGKKXWjEhjKusvtSfXiMMwDZmvyzgAqblRA28AqP0v5S2a21AbBfIKmUPq76j/O5o80IQUupKCUZcvM5xI4Sf8IsQdxMUa2vR3j1qcz6ENt4DHP+XNz7ATN8ySUWUuVWeU0gcbt1Po2GFBcuM6LN0t/6E+afdSlb2/9I9/P5rjbEO9SzL4QckI/Vvoj5Qgb03tUWyePIodQ6X1G1Q+yrdce08cVXoHu48ale+I5k82F6Jrf4ydpyC627WCYj7sN9rZ/diEAK0UI4ScKos9OkvHSn5dB4Pa73zyIHtBsx/z+PjSXJ+V8jZ90L3KmuOkXveY5EpVDqw6tPDFqQizxlHR4fJQnbJRkb3reNwZUuPgM833KuT0+yqI9vyVDROuIVg5oIXYIu1cAZbr/KNcW071A8mG9/Cy3R2Hs40nudpuBkcf5s4PnUzWIW2Pi4q8im4yDfEhtf+4Tnn0+t9q6pM1HLt7PZSC43RfkWefzPR+Y5diWY5LanvTyc1QQQ0TpC2iPvHiLbI2Lracnn8pgJD+nPGc890nmI198pHHrmeWJOV9bnhueM0z667oce7fPtQkR/e8jajPmQ0gxNZ
8rfzKNid73V36v9aafPzt+nwOy+sedxkV+H0pwT3prz8wAF/uEXFc/ztO7kXmE9N62F0DPA+cKa18Myr0w4PuNf/r
"text/plain": [
"<Figure size 1000x600 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAxoAAACoCAYAAABnqpixAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAWjElEQVR4nO3dwXLjPHYGUEqye5JK3jPLbPMMWWabx5xKVaZtUVn8yWb03Q7uAJTZnnOWNAWCIEgJVt1Pl8fj8dgAAAAWun51BwAAgO/HQgMAAFjOQgMAAFjOQgMAAFjOQgMAAFjOQgMAAFjOQgMAAFjOQgMAAFjOQgMAAFjubXTHf/33/zyyH383/BD7Hx6Py1d3oXS5nLdv31V1X3SuRdp39vXfxavHYbaNMzwertf8f7jOPJvd99XSfbjv+9TrV6jGJvWt6u/seVSvf6Q+3It9Q7udefb5+Zn33cI8i3vOPyevJ/gIs1/z+M7eQ2e4BzvO8HnyP/7tX/7ffXyjAQAALGehAQAALGehAQAALGehAQAALDdcDF6XFtHxuxUbHeXVRUzG/dyOKlZeUWR+lNSHFffFGc5tVmccDnuSVHNncNuKfatC4Y7WfEjFysXr0zWqjjQ7r2+3W9x+CYXY1+JYqZi76lc8t0Yx+LUoVo7tNsIB7vs97ptvguK6NbamNpYENYTDPRbcyZ3xHX39kb7Ds3qEbzQAAIDlLDQAAIDlLDQAAIDlLDQAAIDlFIPzRV77C7Kc26uv2yt/wfh3m5Nn+LXZqjA0bT1DMXi1byrmLmdDKmRt/Gp0pWpjtA+dcSjnzmwx+Fv+qJIKv6tf8E7F3EcVg6dtZbud50NZOJ4OlpvI51yW8Yct88+zTuF3/DX1zrEWPM+mr9tBzvCsHuEbDQAAYDkLDQAAYDkLDQAAYDkLDQAAYDkLDQAAYLlG6hSsc4bEhsor+/a7pEYcrTPmR6WIVF45H858X3TE8S1OLV2Je+P6HHUty/SiRrJYaqPTh/v9Pt2Ho/ZNDktzuxXpW2nbI++bEqoqMWWokeJUjUOcD0Uf0va3In0rtVvN34+Pj6dtj71I3wpt3B/j90Up7HopRuIa2819OEo6t1e/DyW/y/uFbzQAAIDlLDQAAIDlLDQAAIDlLDQAAIDlFIPzRV5bxHTWoqmz9uv18jjMltuVr+8UuIa+1a8ORYPF3me98ofFEyxoOBdPdoqV8/Z8H3bKdMf37fRhRW11p43Z59Fxz7Pi+ZAKsVc8SxrncUlV4tU1Tn1rXKC39x9xe6cYPI1PKvretm173DvtNrQm9nixfbJiTs6GJFTOUAz+ikAa32gAAADLWWgAAADLWWgAAADLWWgAAADLWWgAAADLSZ16sVdU+P8ejlnjSnH6PVVJMXnfo/qQpaSYug8poeqYRK2jnOEZ1RnfJcd74SlXz6jUh33PHbte55O2Rh31TO20W51Dem7s5QnPJmqVfxneN24u+vsI1/79/T3uO506FdKltm3b9sv9eeM9bNuKBLDq3OIdXuzbaDcnt43v25mTPmuM840GAACwnIUGAACwnIUGAACwnIUGAACw3MuKwRXO/OEMhZZncOb58Mq+mQ99dTHt14/lbB9WzL0zjMOsMzwfqj7MFoy++hrPzodOMW0lFumWO8cGho91XfB8iAXIB83Jqnj9vj8XXf+4/Ij7vr2Nf5T78eO5jWpsUkH5/edH3PceisTTtm3bts/Pz+c+bEXx+uN53D/+ktu9xPOoAhXm/s++4r6Y1QtUmOvvzHPENxoAAMByFhoAAMByFhoAAMByFhoAAMByFhoAAMByL0ud4g9nSFP5e2Tcv7+zXuMzpJP8bs4wNrOpSivaWJEgdlQKWWq3d92qfoWkrkarnZ3LoYnJV51OFMcL26rUqbS9cyWv19zhlLZUzZHrI/wvuhq0cLxLleyU+lZejJBGtRf7PlJy1TEJbSvuq98poWqmr77RAAAAlrPQAAAAlrPQAA
AAlrPQAAAAllMMzrdyhiJS+n63wroVjirS/Q5WXMujxne2b6+ep0cVuHaK19O+l6JYOb6+8YfqDOL2ouGjbs1U4F0Vg9/358LmR3F2cU4V8+x6u/2ih391vFSQXhRip8Lv+/WeG06F49W5he37Z9FuKAZ/xALxbdvD+FbiOJzg+X3Us2R1MIVvNAAAgOUsNAAAgOUsNAAAgOUsNAAAgOUsNAAAgOWkTr3YqZNxGgkel8nAhRWBDaceS9aIE62aPM9pKuUUSe0+xv/vUiVwnDXp6HdzhvOt+jDbt3KGpOCgKhYpbN6Llq9hc5V0VKUazbqGRKLLozGORUJQTK4qrs9s+lZnPlT73u/PaUlp26+2Z+ncijGLfWtkdaUJteVhv4Xrvm3b9uPH80fPNEeq7X/603vcd78/n/NejOPPnz+ftlVj/vn5+bTtEba1NebO6OsrZSLc8KH+9ueebzQAAIDlLDQAAIDlLDQAAIDlLDQAAIDlxovBY/Fk51BFIUmjjdn6wK//wfhVxXbHFErGWqGjajJb1eRVQeTsWH59wSl/6FyJVq1c6wFTbI+FocWu04XC40W6ZaHwKUyOwyMXsuZmxwumK+my1Zey8aAMz6j6PeC5jVYBczkMod1mG7P2dD3Hh2G7Vf8TDW1U1y0Vs9bj2yiYDm086odJOFRRxB8K3e+PXKz8SBeuCLfIbYwXgz+K/qZ5Xc71xn3xCEXt1XP2cns+5zSO27Ztb+merwrwQ7uPot1HCC4o51nYXgU1JEUXloTt/LWZR4NvNAAAgOUsNAAAgOUsNAAAgOUsNAAAgOUsNAAAgOWGU6di6kkjWKH18+dlxfxkJMYJQlo6iQKV2fStSupZ1d2jVqj5Z+6rNIrZY002sMQZstC+XhUg1rtGjWdUnu1FJ8Z7MDul7p2UoRPPnct2m3p9mTqVRriYPOlZkp8v44eqjT+jqmSneD0b41DnJ41Hah01o2LqVCV04nZ7n3r9tjWvfUpQKs4hHq4KGQoJSmVKVniTve9FKlLaufrMFdroPCc7yXidfbcwNtu2bXu4YcqEtWtInSpO7i0kSXVSp/bi3PaQOnUv2n18Pm8vxyxdzyJZrCUmt40/X0b4RgMAAFjOQgMAAFjOQgMAAFjOQgMAAFhuuBg8Fta1DlX8ZHyrjdcpfzJ+to0TnHCjLO+wNm4HVWL3Cv6+/ngr5hn/J83KzvWZvzMeqUqxLP49xqvvgSjVLTbGoSrgjFWr5a6TxeCF6TYaL3+EItRus4+D5kMOD6mqoBshB2HfVGC7bdt2DcW/r9aZD6m/nfeAahxyF4qC/0cqTF5xX8SjxX33y/N5lNcyZhnksInZsIfq5bfr8x/+oehvZ/5+fnwM7xu334uAgtCHap7FoIbFn0u+/i4FAAC+HQsNAABgOQsNAABgOQsNAABgOQsNAABgufHUqVSi3yhML9MAUhtlcMAxOS2d1Ifix+EbW+d9i5yiyzFr3Fcn+cwe73GGGLKXex61enzH900Pjur6dMJJWgEcjcdk3L7gvmh197CEqvF2Y5BUq19VouH4xeik9qQ2Wt0tJ2XadOLnQ0y2Ka5FSiErBuKS2qjGbA9/CAlBv24k9KGztZE0dw3/292LtLuYvFY9jNI4dD5zVe2On1q+L4r/ZecU09xyGrPO+2Z5D6VzK+bONT6Xc/JVJ/HpFs7tfk+pYNt235+371veNyVU1YlwYVPc9W//tOMbDQAAYDkLDQAAYDkLDQAAYDkLDQAAYLnhYvCtKPKadlRRZnz9fKlwVcA2uu+K4r7vUAx+5iLoVKh2XGH/ecdhXictYvwPdYH3+Fi2rmesyZwNkNi2PbRxOSgk4agwg/qAxzTQucatZ22spZ17faVs9jdLD4m3QKPgtLo+8d5Kxc7blgu/y33z5lFVf+M8aYz5tRqH9Oyrzi01kWvM8/ZyTqbPMJXx+yUVeJfjG7d39h3vQ/nyGFgxfKjy/eIeGr5fc4H35+fn07aP4hqn95
FUIL5t2/a4xxv5ecvEc8Q3GgAAwHIWGgAAwHIWGgAAwHIWGgAAwHIWGgAAwHLjqVOTViQ+zWql0izobzpeK8mkcNR
"text/plain": [
"<Figure size 1000x600 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"# Import dependencies\n",
"import cv2 \n",
"import numpy as np\n",
"import matplotlib.pyplot as plt\n",
"\n",
"# Resize image for display\n",
"def resize_for_display(image, max_width=1000, max_height=1000):\n",
" height, width = image.shape[:2]\n",
" if width > max_width or height > max_height:\n",
" scale = min(max_width / width, max_height / height)\n",
" new_width = int(width * scale)\n",
" new_height = int(height * scale)\n",
" image = cv2.resize(image, (new_width, new_height), interpolation=cv2.INTER_AREA)\n",
" return image\n",
"\n",
"# Setup function to detect car plate\n",
"def carplate_detect(image, carplate_haar_cascade):\n",
" carplate_overlay = image.copy() # Create overlay to display red rectangle of detected car plate\n",
" carplate_rects = carplate_haar_cascade.detectMultiScale(carplate_overlay, scaleFactor=1.1, minNeighbors=5)\n",
"\n",
" for x, y, w, h in carplate_rects: \n",
" cv2.rectangle(carplate_overlay, (x, y), (x + w, y + h), (255, 0, 0), 5)\n",
" \n",
" return carplate_overlay\n",
"\n",
"# Display image with resizing to avoid oversized figures\n",
"def display_image(image, max_width=1000, max_height=1000):\n",
" resized_image = resize_for_display(image, max_width, max_height)\n",
" plt.figure(figsize=(10, 6))\n",
" plt.axis('off')\n",
" plt.imshow(cv2.cvtColor(resized_image, cv2.COLOR_BGR2RGB))\n",
" plt.show()\n",
"\n",
"# Extract car plate region\n",
"def carplate_extract(image, carplate_haar_cascade):\n",
" carplate_rects = carplate_haar_cascade.detectMultiScale(image, scaleFactor=1.1, minNeighbors=1, minSize=(40, 40))\n",
"\n",
" for x, y, w, h in carplate_rects: \n",
" carplate_img_extract = image[y + 15:y + h - 10, x + 15:x + w - 20]\n",
" \n",
" return carplate_img_extract\n",
"\n",
"# Resize image for further processing\n",
"def enlarge_img(image, scale_percent):\n",
" width = int(image.shape[1] * scale_percent / 100)\n",
" height = int(image.shape[0] * scale_percent / 100)\n",
" dim = (width, height)\n",
" resized_image = cv2.resize(image, dim, interpolation=cv2.INTER_AREA)\n",
" return resized_image\n",
"\n",
"# Process and display car plate image\n",
"def process_carplate_image(carplate_img_path, haar_cascade_path):\n",
" carplate_img = cv2.imread(carplate_img_path)\n",
" if carplate_img is None:\n",
" raise IOError(f\"Image at {carplate_img_path} not found.\")\n",
" \n",
" carplate_haar_cascade = cv2.CascadeClassifier(haar_cascade_path)\n",
" if carplate_haar_cascade.empty():\n",
" raise IOError(\"Error loading cascade classifier. Please check the path.\")\n",
"\n",
" # Detect car plate\n",
" detected_carplate_img = carplate_detect(carplate_img, carplate_haar_cascade)\n",
" display_image(detected_carplate_img)\n",
"\n",
" # Extract and display car plate region\n",
" carplate_extract_img = carplate_extract(carplate_img, carplate_haar_cascade)\n",
" carplate_extract_img = enlarge_img(carplate_extract_img, 150)\n",
" display_image(carplate_extract_img)\n",
"\n",
"# Example usage\n",
"process_carplate_image('img/car3.jpg', 'data/haarcascade_russian_plate_number.xml')\n"
]
},
{
"cell_type": "code",
"execution_count": 25,
"metadata": {},
"outputs": [],
"source": [
"import cv2\n",
"\n",
"# Загрузка каскада для распознавания номерных знаков\n",
"cascade_path = 'data/haarcascade_russian_plate_number1.xml'\n",
"plate_cascade = cv2.CascadeClassifier(cascade_path)\n",
"\n",
"# Проверка корректности загрузки каскада\n",
"if plate_cascade.empty():\n",
" result = \"Ошибка загрузки каскада!\"\n",
"else:\n",
" # Загрузка изображения\n",
" image_path = 'img/car2.jpg'\n",
" image = cv2.imread(image_path)\n",
"\n",
" # Преобразование изображения в оттенки серого\n",
" gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)\n",
"\n",
" # Обнаружение номерных знаков\n",
" plates = plate_cascade.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=1, minSize=(40, 40))\n",
"\n",
" # Выделение найденных номерных знаков на изображении\n",
" if len(plates) > 0:\n",
" for (x, y, w, h) in plates:\n",
" # Обрезка области номерного знака\n",
" cropped_plate = image[y:y+h, x:x+w]\n",
" cropped_image_path = 'cropped_plate.jpg'\n",
" cv2.imwrite(cropped_image_path, cropped_plate)\n",
" cv2.imshow('Обнаружение номерного знака', cropped_plate)\n",
" cv2.waitKey(0)\n",
" cv2.destroyAllWindows()"
]
},
{
"cell_type": "code",
"execution_count": 22,
"metadata": {},
"outputs": [],
   "source": [
    "# Reset the plate crop left over from the previous cell.\n",
    "cropped_plate = None"
   ]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": 22,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Ultralytics 8.3.39 Python-3.9.13 torch-2.0.1+cu117 CUDA:0 (NVIDIA GeForce RTX 3070 Ti, 8191MiB)\n",
"engine\\trainer: task=obb, mode=train, model=yolo11n-obb1.pt, data=dataset-yolo8obb/data.yaml, epochs=40, time=None, patience=100, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=train2, exist_ok=False, pretrained=True, optimizer=auto, verbose=True, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, freeze=None, multi_scale=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, vid_stride=1, stream_buffer=False, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, embed=None, show=False, save_frames=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, show_boxes=True, line_width=None, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=True, opset=None, workspace=None, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, bgr=0.0, mosaic=1.0, mixup=0.0, copy_paste=0.0, copy_paste_mode=flip, auto_augment=randaugment, erasing=0.4, crop_fraction=1.0, cfg=None, tracker=botsort.yaml, save_dir=runs\\obb\\train2\n",
"\n",
" from n params module arguments \n",
" 0 -1 1 464 ultralytics.nn.modules.conv.Conv [3, 16, 3, 2] \n",
" 1 -1 1 4672 ultralytics.nn.modules.conv.Conv [16, 32, 3, 2] \n",
" 2 -1 1 6640 ultralytics.nn.modules.block.C3k2 [32, 64, 1, False, 0.25] \n",
" 3 -1 1 36992 ultralytics.nn.modules.conv.Conv [64, 64, 3, 2] \n",
" 4 -1 1 26080 ultralytics.nn.modules.block.C3k2 [64, 128, 1, False, 0.25] \n",
" 5 -1 1 147712 ultralytics.nn.modules.conv.Conv [128, 128, 3, 2] \n",
" 6 -1 1 87040 ultralytics.nn.modules.block.C3k2 [128, 128, 1, True] \n",
" 7 -1 1 295424 ultralytics.nn.modules.conv.Conv [128, 256, 3, 2] \n",
" 8 -1 1 346112 ultralytics.nn.modules.block.C3k2 [256, 256, 1, True] \n",
" 9 -1 1 164608 ultralytics.nn.modules.block.SPPF [256, 256, 5] \n",
" 10 -1 1 249728 ultralytics.nn.modules.block.C2PSA [256, 256, 1] \n",
" 11 -1 1 0 torch.nn.modules.upsampling.Upsample [None, 2, 'nearest'] \n",
" 12 [-1, 6] 1 0 ultralytics.nn.modules.conv.Concat [1] \n",
" 13 -1 1 111296 ultralytics.nn.modules.block.C3k2 [384, 128, 1, False] \n",
" 14 -1 1 0 torch.nn.modules.upsampling.Upsample [None, 2, 'nearest'] \n",
" 15 [-1, 4] 1 0 ultralytics.nn.modules.conv.Concat [1] \n",
" 16 -1 1 32096 ultralytics.nn.modules.block.C3k2 [256, 64, 1, False] \n",
" 17 -1 1 36992 ultralytics.nn.modules.conv.Conv [64, 64, 3, 2] \n",
" 18 [-1, 13] 1 0 ultralytics.nn.modules.conv.Concat [1] \n",
" 19 -1 1 86720 ultralytics.nn.modules.block.C3k2 [192, 128, 1, False] \n",
" 20 -1 1 147712 ultralytics.nn.modules.conv.Conv [128, 128, 3, 2] \n",
" 21 [-1, 10] 1 0 ultralytics.nn.modules.conv.Concat [1] \n",
" 22 -1 1 378880 ultralytics.nn.modules.block.C3k2 [384, 256, 1, True] \n",
" 23 [16, 19, 22] 1 502534 ultralytics.nn.modules.head.OBB [1, 1, [64, 128, 256]] \n",
"YOLO11n-obb summary: 344 layers, 2,661,702 parameters, 2,661,686 gradients, 6.7 GFLOPs\n",
"\n",
"Transferred 541/541 items from pretrained weights\n",
"Freezing layer 'model.23.dfl.conv.weight'\n",
"AMP: running Automatic Mixed Precision (AMP) checks...\n",
"AMP: checks passed \n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"train: Scanning C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo8obb\\train\\labels...: 0%| | 0/978 [00:00<?, ?it/s]Scanning C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo8obb\\train\\labels... 46 images, 0 backgrounds, 0 corrupt: 5%|▍ | 46/978 [00:00<00:02, 446.19it/s]Scanning C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo8obb\\train\\labels... 115 images, 0 backgrounds, 0 corrupt: 12%|█▏ | 115/978 [00:00<00:01, 584.10it/s]Scanning C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo8obb\\train\\labels... 174 images, 3 backgrounds, 0 corrupt: 18%|█▊ | 174/978 [00:00<00:01, 549.27it/s]Scanning C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo8obb\\train\\labels... 230 images, 6 backgrounds, 0 corrupt: 24%|██▎ | 230/978 [00:00<00:01, 538.91it/s]Scanning C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo8obb\\train\\labels... 304 images, 27 backgrounds, 0 corrupt: 31%|███ | 304/978 [00:00<00:01, 597.63it/s]Scanning C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo8obb\\train\\labels... 364 images, 27 backgrounds, 0 corrupt: 37%|███▋ | 364/978 [00:00<00:01, 588.63it/s]Scanning C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo8obb\\train\\labels... 427 images, 30 backgrounds, 0 corrupt: 44%|████▎ | 427/978 [00:00<00:00, 595.83it/s]Scanning C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo8obb\\train\\labels... 497 images, 41 backgrounds, 0 corrupt: 51%|█████ | 497/978 [00:00<00:00, 627.69it/s]Scanning C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo8obb\\train\\labels... 565 images, 57 backgrounds, 0 corrupt: 58%|█████▊ | 565/978 [00:00<00:00, 639.57it/s]Scanning C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo8obb\\train\\labels... 
634 images, 66 backgrounds, 0 corrupt: 65%|██████▍ | 634/978 [00:01<00:00, 644.88it/s]Scanning C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo8obb\\train\\labels... 709 images, 81 backgrounds, 0 corrupt: 72%|███████▏ | 709/978 [00:01<00:00, 665.35it/s]Scanning C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo8obb\\train\\labels... 787 images, 108 backgrounds, 0 corrupt: 80%|████████ | 787/978 [00:01<00:00, 692.89it/s]Scanning C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo8obb\\train\\labels... 857 images, 108 backgrounds, 0 corrupt: 88%|████████▊ | 857/978 [00:01<00:00, 684.70it/s]Scanning C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo8obb\\train\\labels... 927 images, 123 backgrounds, 0 corrupt: 95%|█████████▍| 927/978 [00:01<00:00, 689.00it/s]Scanning C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo8obb\\train\\labels... 978 images, 129 backgrounds, 0 corrupt: 100%|██████████| 978/978 [00:01<00:00, 636.17it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"train: New cache created: C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo8obb\\train\\labels.cache\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"val: Scanning C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo8obb\\valid\\labels...: 0%| | 0/86 [00:00<?, ?it/s]Scanning C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo8obb\\valid\\labels... 25 images, 1 backgrounds, 0 corrupt: 29%|██▉ | 25/86 [00:00<00:00, 225.08it/s]Scanning C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo8obb\\valid\\labels... 53 images, 2 backgrounds, 0 corrupt: 62%|██████▏ | 53/86 [00:00<00:00, 253.92it/s]Scanning C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo8obb\\valid\\labels... 82 images, 6 backgrounds, 0 corrupt: 95%|█████████▌| 82/86 [00:00<00:00, 268.62it/s]Scanning C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo8obb\\valid\\labels... 86 images, 7 backgrounds, 0 corrupt: 100%|██████████| 86/86 [00:00<00:00, 260.91it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"val: New cache created: C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo8obb\\valid\\labels.cache\n",
"Plotting labels to runs\\obb\\train2\\labels.jpg... \n",
"optimizer: 'optimizer=auto' found, ignoring 'lr0=0.01' and 'momentum=0.937' and determining best 'optimizer', 'lr0' and 'momentum' automatically... \n",
"optimizer: AdamW(lr=0.002, momentum=0.9) with parameter groups 87 weight(decay=0.0), 97 weight(decay=0.0005), 96 bias(decay=0.0)\n",
"Image sizes 640 train, 640 val\n",
"Using 8 dataloader workers\n",
"Logging results to runs\\obb\\train2\n",
"Starting training for 40 epochs...\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 1/40 2.76G 1.417 0.9024 2.795 3 640: 100%|██████████| 62/62 [00:09<00:00, 6.27it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 8.71it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.922 0.977 0.983 0.747\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 2/40 2.5G 0.9859 0.5919 2.483 3 640: 100%|██████████| 62/62 [00:07<00:00, 8.75it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 11.13it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.966 0.998 0.993 0.794\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 3/40 2.47G 0.8825 0.5739 2.466 3 640: 100%|██████████| 62/62 [00:06<00:00, 9.20it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 11.08it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.973 0.976 0.99 0.804\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 4/40 2.47G 0.8594 0.5557 2.402 1 640: 100%|██████████| 62/62 [00:06<00:00, 9.38it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 11.08it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.987 1 0.992 0.828\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 5/40 2.46G 0.8171 0.532 2.323 3 640: 100%|██████████| 62/62 [00:06<00:00, 9.26it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 10.06it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.988 0.987 0.987 0.833\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 6/40 2.47G 0.8318 0.5435 2.304 4 640: 100%|██████████| 62/62 [00:06<00:00, 9.34it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 11.61it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.988 0.976 0.987 0.82\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 7/40 2.46G 0.8094 0.5355 2.214 2 640: 100%|██████████| 62/62 [00:06<00:00, 9.06it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 10.39it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.988 0.988 0.994 0.824\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 8/40 2.47G 0.8011 0.5398 2.212 6 640: 100%|██████████| 62/62 [00:06<00:00, 9.30it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 10.57it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.988 0.963 0.986 0.835\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 9/40 2.46G 0.7838 0.5157 2.145 1 640: 100%|██████████| 62/62 [00:06<00:00, 9.19it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 11.33it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.986 0.976 0.989 0.829\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 10/40 2.47G 0.777 0.5086 2.139 3 640: 100%|██████████| 62/62 [00:06<00:00, 9.35it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 10.27it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.988 0.973 0.988 0.828\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 11/40 2.46G 0.7478 0.4966 2.13 4 640: 100%|██████████| 62/62 [00:06<00:00, 9.20it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 10.61it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.974 0.976 0.988 0.849\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 12/40 2.47G 0.7394 0.4889 2.084 1 640: 100%|██████████| 62/62 [00:06<00:00, 9.29it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 11.02it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.986 0.976 0.988 0.833\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 13/40 2.46G 0.7649 0.585 2.102 1 640: 100%|██████████| 62/62 [00:06<00:00, 9.34it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 11.33it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.988 0.995 0.994 0.842\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 14/40 2.47G 0.7371 0.4955 2.118 2 640: 100%|██████████| 62/62 [00:06<00:00, 9.23it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 10.42it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.986 0.976 0.987 0.831\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 15/40 2.46G 0.707 0.476 2.091 2 640: 100%|██████████| 62/62 [00:06<00:00, 9.33it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 10.38it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.988 0.986 0.989 0.839\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 16/40 2.47G 0.7257 0.4896 2.082 3 640: 100%|██████████| 62/62 [00:06<00:00, 9.24it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 11.05it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.988 0.988 0.988 0.863\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 17/40 2.46G 0.6905 0.4736 2.054 6 640: 100%|██████████| 62/62 [00:06<00:00, 9.17it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 10.94it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.985 0.988 0.989 0.865\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 18/40 2.47G 0.6737 0.4596 2.047 2 640: 100%|██████████| 62/62 [00:06<00:00, 9.26it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 11.01it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.988 0.975 0.989 0.869\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 19/40 2.46G 0.6846 0.4535 2.027 1 640: 100%|██████████| 62/62 [00:06<00:00, 9.36it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 10.63it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.988 0.976 0.989 0.849\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 20/40 2.47G 0.6712 0.4394 2.033 4 640: 100%|██████████| 62/62 [00:06<00:00, 9.37it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 11.64it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.987 0.976 0.987 0.851\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 21/40 2.46G 0.7038 0.5231 1.992 1 640: 100%|██████████| 62/62 [00:06<00:00, 9.12it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 10.84it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.986 0.976 0.989 0.861\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 22/40 2.47G 0.649 0.4409 2.018 3 640: 100%|██████████| 62/62 [00:06<00:00, 9.28it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 10.74it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.975 0.976 0.989 0.87\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 23/40 2.46G 0.6147 0.4151 2.06 1 640: 100%|██████████| 62/62 [00:06<00:00, 9.17it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 10.82it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.988 0.988 0.989 0.873\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 24/40 2.47G 0.6579 0.438 1.984 3 640: 100%|██████████| 62/62 [00:06<00:00, 9.46it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 10.44it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.987 0.976 0.989 0.867\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 25/40 2.46G 0.6414 0.43 1.98 2 640: 100%|██████████| 62/62 [00:06<00:00, 9.27it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 10.78it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.988 0.974 0.989 0.869\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 26/40 2.47G 0.6217 0.4282 2.012 2 640: 100%|██████████| 62/62 [00:06<00:00, 9.20it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 10.66it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.987 0.988 0.991 0.871\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 27/40 2.46G 0.6309 0.4295 2.007 3 640: 100%|██████████| 62/62 [00:06<00:00, 9.14it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 11.68it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.988 0.988 0.991 0.876\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 28/40 2.47G 0.6248 0.4343 1.992 2 640: 100%|██████████| 62/62 [00:06<00:00, 9.20it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 10.26it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.985 0.988 0.99 0.869\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 29/40 2.46G 0.6181 0.4243 1.995 1 640: 100%|██████████| 62/62 [00:06<00:00, 9.23it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 11.13it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.988 0.987 0.989 0.887\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 30/40 2.47G 0.6217 0.422 1.931 4 640: 100%|██████████| 62/62 [00:06<00:00, 9.42it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 10.31it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.987 0.976 0.984 0.855\n",
"Closing dataloader mosaic\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 31/40 2.59G 0.5429 0.3744 1.989 2 640: 100%|██████████| 62/62 [00:06<00:00, 9.05it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 11.01it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.987 0.988 0.989 0.877\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 32/40 2.47G 0.5562 0.3759 1.988 2 640: 100%|██████████| 62/62 [00:06<00:00, 9.89it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 11.27it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.987 0.988 0.99 0.874\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 33/40 2.46G 0.5417 0.3629 1.986 2 640: 100%|██████████| 62/62 [00:06<00:00, 9.90it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 11.80it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.987 0.988 0.99 0.876\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 34/40 2.47G 0.5255 0.3612 1.996 2 640: 100%|██████████| 62/62 [00:06<00:00, 9.91it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 11.33it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.987 0.988 0.991 0.882\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 35/40 2.46G 0.5199 0.3626 1.99 2 640: 100%|██████████| 62/62 [00:06<00:00, 9.78it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 11.13it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.987 0.988 0.99 0.892\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 36/40 2.47G 0.52 0.3623 1.878 2 640: 100%|██████████| 62/62 [00:06<00:00, 9.80it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 11.33it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.987 0.988 0.99 0.887\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 37/40 2.46G 0.5158 0.3514 2.01 2 640: 100%|██████████| 62/62 [00:06<00:00, 9.83it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 11.50it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.987 0.988 0.99 0.899\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 38/40 2.47G 0.5037 0.3461 1.922 2 640: 100%|██████████| 62/62 [00:06<00:00, 9.93it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 11.66it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.986 0.988 0.99 0.898\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 39/40 2.46G 0.4891 0.3438 1.953 2 640: 100%|██████████| 62/62 [00:06<00:00, 9.97it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 11.02it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.987 0.988 0.99 0.898\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 40/40 2.47G 0.4885 0.3452 1.989 2 640: 100%|██████████| 62/62 [00:06<00:00, 9.96it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:00<00:00, 11.64it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.987 0.988 0.99 0.895\n",
"\n",
"40 epochs completed in 0.135 hours.\n",
"Optimizer stripped from runs\\obb\\train2\\weights\\last.pt, 5.7MB\n",
"Optimizer stripped from runs\\obb\\train2\\weights\\best.pt, 5.7MB\n",
"\n",
"Validating runs\\obb\\train2\\weights\\best.pt...\n",
"Ultralytics 8.3.39 Python-3.9.13 torch-2.0.1+cu117 CUDA:0 (NVIDIA GeForce RTX 3070 Ti, 8191MiB)\n",
"YOLO11n-obb summary (fused): 257 layers, 2,653,918 parameters, 0 gradients, 6.6 GFLOPs\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 6/6 [00:01<00:00, 4.72it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 86 85 0.987 0.988 0.99 0.899\n",
"Speed: 1.2ms preprocess, 2.5ms inference, 0.0ms loss, 3.8ms postprocess per image\n",
"Results saved to runs\\obb\\train2\n"
]
}
],
"source": [
"# Fine-tune a YOLO11-nano oriented-bounding-box (OBB) model on the plate dataset.\n",
"# NOTE(review): weights file is 'yolo11n-obb1.pt' (with a trailing '1') — presumably a\n",
"# local copy/checkpoint rather than the stock 'yolo11n-obb.pt'; confirm the path exists.\n",
"from ultralytics import YOLO\n",
"\n",
"model1 = YOLO(\"yolo11n-obb1.pt\")\n",
"\n",
"# Train for 40 epochs at 640x640; per the captured output this reaches ~0.99 mAP50\n",
"# and ~0.90 mAP50-95 on the 86-image validation split, saved to runs\\obb\\train2.\n",
"results1 = model1.train(data=\"dataset-yolo8obb/data.yaml\", epochs=40, imgsz=640)"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Ultralytics 8.3.39 Python-3.9.13 torch-2.0.1+cu117 CUDA:0 (NVIDIA GeForce RTX 3070 Ti, 8191MiB)\n",
"engine\\trainer: task=detect, mode=train, model=runs/detect/train4/weights/best.pt, data=dataset-yolo/data.yaml, epochs=40, time=None, patience=100, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=train, exist_ok=False, pretrained=True, optimizer=auto, verbose=True, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, freeze=None, multi_scale=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, vid_stride=1, stream_buffer=False, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, embed=None, show=False, save_frames=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, show_boxes=True, line_width=None, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=True, opset=None, workspace=None, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, bgr=0.0, mosaic=1.0, mixup=0.0, copy_paste=0.0, copy_paste_mode=flip, auto_augment=randaugment, erasing=0.4, crop_fraction=1.0, cfg=None, tracker=botsort.yaml, save_dir=runs\\detect\\train\n",
"\n",
" from n params module arguments \n",
" 0 -1 1 464 ultralytics.nn.modules.conv.Conv [3, 16, 3, 2] \n",
" 1 -1 1 4672 ultralytics.nn.modules.conv.Conv [16, 32, 3, 2] \n",
" 2 -1 1 6640 ultralytics.nn.modules.block.C3k2 [32, 64, 1, False, 0.25] \n",
" 3 -1 1 36992 ultralytics.nn.modules.conv.Conv [64, 64, 3, 2] \n",
" 4 -1 1 26080 ultralytics.nn.modules.block.C3k2 [64, 128, 1, False, 0.25] \n",
" 5 -1 1 147712 ultralytics.nn.modules.conv.Conv [128, 128, 3, 2] \n",
" 6 -1 1 87040 ultralytics.nn.modules.block.C3k2 [128, 128, 1, True] \n",
" 7 -1 1 295424 ultralytics.nn.modules.conv.Conv [128, 256, 3, 2] \n",
" 8 -1 1 346112 ultralytics.nn.modules.block.C3k2 [256, 256, 1, True] \n",
" 9 -1 1 164608 ultralytics.nn.modules.block.SPPF [256, 256, 5] \n",
" 10 -1 1 249728 ultralytics.nn.modules.block.C2PSA [256, 256, 1] \n",
" 11 -1 1 0 torch.nn.modules.upsampling.Upsample [None, 2, 'nearest'] \n",
" 12 [-1, 6] 1 0 ultralytics.nn.modules.conv.Concat [1] \n",
" 13 -1 1 111296 ultralytics.nn.modules.block.C3k2 [384, 128, 1, False] \n",
" 14 -1 1 0 torch.nn.modules.upsampling.Upsample [None, 2, 'nearest'] \n",
" 15 [-1, 4] 1 0 ultralytics.nn.modules.conv.Concat [1] \n",
" 16 -1 1 32096 ultralytics.nn.modules.block.C3k2 [256, 64, 1, False] \n",
" 17 -1 1 36992 ultralytics.nn.modules.conv.Conv [64, 64, 3, 2] \n",
" 18 [-1, 13] 1 0 ultralytics.nn.modules.conv.Concat [1] \n",
" 19 -1 1 86720 ultralytics.nn.modules.block.C3k2 [192, 128, 1, False] \n",
" 20 -1 1 147712 ultralytics.nn.modules.conv.Conv [128, 128, 3, 2] \n",
" 21 [-1, 10] 1 0 ultralytics.nn.modules.conv.Concat [1] \n",
" 22 -1 1 378880 ultralytics.nn.modules.block.C3k2 [384, 256, 1, True] \n",
" 23 [16, 19, 22] 1 430867 ultralytics.nn.modules.head.Detect [1, [64, 128, 256]] \n",
"YOLO11n summary: 319 layers, 2,590,035 parameters, 2,590,019 gradients, 6.4 GFLOPs\n",
"\n",
"Transferred 94/499 items from pretrained weights\n",
"Freezing layer 'model.23.dfl.conv.weight'\n",
"AMP: running Automatic Mixed Precision (AMP) checks...\n",
"AMP: checks passed \n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"train: Scanning C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo\\train\\labels.cache... 7057 images, 5 backgrounds, 0 corrupt: 100%|██████████| 7057/7057 [00:00<?, ?it/s]Scanning C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo\\train\\labels.cache... 7057 images, 5 backgrounds, 0 corrupt: 100%|██████████| 7057/7057 [00:00<?, ?it/s]\n",
"val: Scanning C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo\\valid\\labels.cache... 2048 images, 3 backgrounds, 0 corrupt: 100%|██████████| 2048/2048 [00:00<?, ?it/s]Scanning C:\\Users\\leonk\\Documents\\code\\number-plate-study\\dataset-yolo\\valid\\labels.cache... 2048 images, 3 backgrounds, 0 corrupt: 100%|██████████| 2048/2048 [00:00<?, ?it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Plotting labels to runs\\detect\\train\\labels.jpg... \n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"Exception ignored in: <function _MultiProcessingDataLoaderIter.__del__ at 0x0000022F6296E1F0>\n",
"Traceback (most recent call last):\n",
" File \"c:\\Users\\leonk\\Documents\\code\\number-plate-study\\.venv\\lib\\site-packages\\torch\\utils\\data\\dataloader.py\", line 1478, in __del__\n",
" self._shutdown_workers()\n",
" File \"c:\\Users\\leonk\\Documents\\code\\number-plate-study\\.venv\\lib\\site-packages\\torch\\utils\\data\\dataloader.py\", line 1436, in _shutdown_workers\n",
" if self._persistent_workers or self._workers_status[worker_id]:\n",
"AttributeError: '_MultiProcessingDataLoaderIter' object has no attribute '_workers_status'\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"optimizer: 'optimizer=auto' found, ignoring 'lr0=0.01' and 'momentum=0.937' and determining best 'optimizer', 'lr0' and 'momentum' automatically... \n",
"optimizer: AdamW(lr=0.002, momentum=0.9) with parameter groups 81 weight(decay=0.0), 88 weight(decay=0.0005), 87 bias(decay=0.0)\n",
"Image sizes 640 train, 640 val\n",
"Using 8 dataloader workers\n",
"Logging results to runs\\detect\\train\n",
"Starting training for 40 epochs...\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 1/40 2.44G 1.26 0.7447 1.155 2 640: 100%|██████████| 442/442 [00:48<00:00, 9.20it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.05it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.961 0.903 0.943 0.644\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 2/40 2.44G 1.168 0.6194 1.088 1 640: 100%|██████████| 442/442 [00:45<00:00, 9.65it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.18it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.954 0.924 0.948 0.642\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 3/40 2.44G 1.178 0.6252 1.095 1 640: 100%|██████████| 442/442 [00:45<00:00, 9.69it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.32it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.971 0.912 0.947 0.628\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 4/40 2.44G 1.171 0.61 1.094 1 640: 100%|██████████| 442/442 [00:42<00:00, 10.49it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.35it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.972 0.921 0.955 0.65\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 5/40 2.44G 1.15 0.5874 1.08 1 640: 100%|██████████| 442/442 [00:41<00:00, 10.68it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.52it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.978 0.92 0.958 0.649\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 6/40 2.44G 1.133 0.5735 1.077 2 640: 100%|██████████| 442/442 [00:41<00:00, 10.69it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.49it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.963 0.924 0.951 0.652\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 7/40 2.44G 1.134 0.561 1.072 6 640: 100%|██████████| 442/442 [00:41<00:00, 10.64it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:08<00:00, 7.54it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.977 0.926 0.96 0.661\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 8/40 2.44G 1.123 0.5541 1.069 2 640: 100%|██████████| 442/442 [00:49<00:00, 8.93it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.19it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.975 0.918 0.958 0.662\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 9/40 2.44G 1.126 0.5547 1.069 1 640: 100%|██████████| 442/442 [00:43<00:00, 10.11it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.70it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.978 0.927 0.96 0.66\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 10/40 2.44G 1.114 0.5393 1.063 1 640: 100%|██████████| 442/442 [00:45<00:00, 9.75it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.73it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.979 0.923 0.96 0.67\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 11/40 2.44G 1.11 0.536 1.06 1 640: 100%|██████████| 442/442 [00:42<00:00, 10.28it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.70it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.977 0.927 0.962 0.669\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 12/40 2.44G 1.1 0.5293 1.057 0 640: 100%|██████████| 442/442 [00:41<00:00, 10.66it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.26it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.982 0.918 0.961 0.676\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 13/40 2.44G 1.109 0.5245 1.06 3 640: 100%|██████████| 442/442 [00:44<00:00, 9.95it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.72it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.976 0.926 0.962 0.683\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 14/40 2.44G 1.094 0.5136 1.055 1 640: 100%|██████████| 442/442 [00:41<00:00, 10.77it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.55it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.979 0.93 0.961 0.675\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 15/40 2.44G 1.098 0.5153 1.06 3 640: 100%|██████████| 442/442 [00:47<00:00, 9.35it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.27it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.982 0.928 0.963 0.68\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 16/40 2.44G 1.087 0.5037 1.048 1 640: 100%|██████████| 442/442 [00:46<00:00, 9.44it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.73it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.988 0.928 0.964 0.678\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 17/40 2.44G 1.075 0.4926 1.047 1 640: 100%|██████████| 442/442 [00:41<00:00, 10.65it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.59it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.977 0.936 0.962 0.675\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 18/40 2.44G 1.077 0.4942 1.049 2 640: 100%|██████████| 442/442 [00:41<00:00, 10.57it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.63it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.981 0.931 0.965 0.683\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 19/40 2.44G 1.08 0.4941 1.046 2 640: 100%|██████████| 442/442 [00:45<00:00, 9.73it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:08<00:00, 7.84it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.978 0.931 0.963 0.685\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 20/40 2.44G 1.069 0.4849 1.046 1 640: 100%|██████████| 442/442 [00:43<00:00, 10.18it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.62it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.983 0.933 0.966 0.689\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 21/40 2.44G 1.061 0.48 1.04 1 640: 100%|██████████| 442/442 [00:43<00:00, 10.12it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.79it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.979 0.934 0.965 0.68\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 22/40 2.44G 1.065 0.4761 1.039 1 640: 100%|██████████| 442/442 [00:44<00:00, 9.86it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.73it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.981 0.934 0.963 0.689\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 23/40 2.44G 1.057 0.473 1.035 0 640: 100%|██████████| 442/442 [00:41<00:00, 10.61it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.59it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.984 0.931 0.963 0.686\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 24/40 2.44G 1.047 0.4681 1.034 1 640: 100%|██████████| 442/442 [00:41<00:00, 10.71it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.58it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.982 0.932 0.966 0.691\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 25/40 2.44G 1.05 0.4699 1.036 2 640: 100%|██████████| 442/442 [00:45<00:00, 9.77it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.04it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.98 0.938 0.967 0.69\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 26/40 2.44G 1.048 0.4649 1.035 2 640: 100%|██████████| 442/442 [00:45<00:00, 9.78it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.51it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.975 0.942 0.969 0.694\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 27/40 2.44G 1.042 0.4581 1.026 1 640: 100%|██████████| 442/442 [00:45<00:00, 9.69it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.11it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.986 0.933 0.967 0.691\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 28/40 2.44G 1.04 0.4596 1.026 2 640: 100%|██████████| 442/442 [00:42<00:00, 10.34it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.69it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.986 0.936 0.967 0.696\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 29/40 2.44G 1.024 0.4472 1.025 2 640: 100%|██████████| 442/442 [00:42<00:00, 10.51it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.62it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.979 0.941 0.968 0.695\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 30/40 2.44G 1.033 0.4506 1.027 1 640: 100%|██████████| 442/442 [00:40<00:00, 10.86it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.60it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.979 0.937 0.969 0.698\n",
"Closing dataloader mosaic\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 31/40 2.44G 1.026 0.4073 1.038 1 640: 100%|██████████| 442/442 [00:39<00:00, 11.26it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.76it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.979 0.945 0.97 0.702\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 32/40 2.44G 1.016 0.401 1.029 1 640: 100%|██████████| 442/442 [00:41<00:00, 10.54it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.41it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.979 0.942 0.97 0.701\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 33/40 2.44G 1.017 0.3978 1.029 1 640: 100%|██████████| 442/442 [00:39<00:00, 11.31it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.68it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.984 0.937 0.968 0.692\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 34/40 2.44G 1.005 0.3928 1.028 1 640: 100%|██████████| 442/442 [00:38<00:00, 11.46it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.76it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.981 0.939 0.969 0.703\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 35/40 2.44G 1.002 0.3908 1.021 1 640: 100%|██████████| 442/442 [00:39<00:00, 11.32it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.47it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.981 0.942 0.97 0.703\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 36/40 2.44G 0.9966 0.3847 1.017 1 640: 100%|██████████| 442/442 [00:39<00:00, 11.20it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.28it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.989 0.936 0.969 0.703\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 37/40 2.44G 0.988 0.3819 1.017 1 640: 100%|██████████| 442/442 [00:41<00:00, 10.54it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:08<00:00, 8.00it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.979 0.943 0.969 0.704\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 38/40 2.44G 0.9849 0.379 1.017 1 640: 100%|██████████| 442/442 [00:42<00:00, 10.52it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.64it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.98 0.942 0.968 0.704\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 39/40 2.44G 0.9778 0.3728 1.017 1 640: 100%|██████████| 442/442 [00:40<00:00, 10.98it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.68it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.987 0.94 0.969 0.705\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" 40/40 2.44G 0.9761 0.3699 1.014 1 640: 100%|██████████| 442/442 [00:39<00:00, 11.27it/s]\n",
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:07<00:00, 8.67it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.988 0.941 0.969 0.704\n",
"\n",
"40 epochs completed in 0.570 hours.\n",
"Optimizer stripped from runs\\detect\\train\\weights\\last.pt, 5.5MB\n",
"Optimizer stripped from runs\\detect\\train\\weights\\best.pt, 5.5MB\n",
"\n",
"Validating runs\\detect\\train\\weights\\best.pt...\n",
"Ultralytics 8.3.39 Python-3.9.13 torch-2.0.1+cu117 CUDA:0 (NVIDIA GeForce RTX 3070 Ti, 8191MiB)\n",
"YOLO11n summary (fused): 238 layers, 2,582,347 parameters, 0 gradients, 6.3 GFLOPs\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
" Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 64/64 [00:08<00:00, 7.83it/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
" all 2048 2195 0.987 0.939 0.969 0.705\n",
"Speed: 0.1ms preprocess, 0.8ms inference, 0.0ms loss, 0.7ms postprocess per image\n",
"Results saved to runs\\detect\\train\n"
]
}
],
"source": [
"from ultralytics import YOLO\n",
"\n",
"#model = YOLO(\"yolo11n.pt\")\n",
"\n",
"results = model.train(data=\"dataset-yolo/data.yaml\", epochs=40, imgsz=640)"
]
},
{
"cell_type": "code",
"execution_count": 28,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"image 1/1 c:\\Users\\leonk\\Documents\\code\\number-plate-study\\img\\2.jpg: 480x640 9.0ms\n",
"Speed: 2.0ms preprocess, 9.0ms inference, 2.0ms postprocess per image at shape (1, 3, 480, 640)\n"
]
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAA0kAAAJ8CAYAAAAruerqAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOz9S49t25LnCf1sPOZay30/zuM+IzKiMpKsKPLBI4sqVVUfQQtQIXogIfENkKCD+DIIIYFEEz4AiFSCRKeyqrIq65UZETfujbiPc/bZ232tNed4GA2zMeb0fW9EnqADDR/n+t17uy9fa84xx7Bh9re//U1UVXkdr+N1vI7X8Tpex+t4Ha/jdbyO1wFA+P/1BbyO1/E6XsfreB2v43W8jtfxOl7H/z+N1yDpdbyO1/E6XsfreB2v43W8jtfxOg7jNUh6Ha/jdbyO1/E6XsfreB2v43W8jsN4DZJex+t4Ha/jdbyO1/E6XsfreB2v4zBeg6TX8Tpex+t4Ha/jdbyO1/E6XsfrOIzXIOl1vI7X8Tpex+t4Ha/jdbyO1/E6DuM1SHodr+N1vI7X8Tpex+t4Ha/jdbyOw3gNkl7H63gdr+N1vI7X8Tpex+t4Ha/jMF6DpNfxOl7H63gdr+N1vI7X8Tpex+s4jPR9X/h1fKD0yorQCDSAEEGF/9rf+wf89/8H/0P+3X/33yEH4cOvf831wzf85Kuv+ertA+X5ym19QgKoKB0QEUKMxJQIMSIxgAitd7be6KooIDGyLAspJVIItK0hCto7rTVaa/TeESCGgIhQawUF8WsXEUQEVaX3Tk6RGO21MUaC/974eWuN+/0OgKrOORCR+VoCSLB/x5jss7q9NklERHjz8GDv0dSutVb/6mhTosJJItE/u/Zu779kYox0ERAovdOw+SDYPCFCF0VEuZwzMQZijMQYAeZ9jD9LKdRaERFyzpxOJ0IIlFJoW+WST2hVaq12zyGwLAvL+URKCUKg1sq6bdTWSCmRzydijP7+KwKcTpnn243L5cJ9XblcLtTeqa3aXIjQVSnV/n1588jbt28JIdBL5frxI20rAJxOJ5Zlmc9GVeezDCGQc55fMdqcd4Ft21j9fnvvaO90XysxRk6nEzlEtm2ztQLzPQC7n22bn5FSQlX3tSZi6zHZ9qm1zvlVVUII80tV0d6JCilGzstCDAHq/mxOpxMPDw/kZQERSquUWunaaXRaaLTe2LbKtm22xlJCu92rKuS8cFpOpJQRwpwvEHIQHi4LaGPbNv8dW6sNe+Zj7ajIvK5aK613FEghsJB5uFwAuK8rpTVyznSBtRRijKSc0eDr09d0rZXSKtI678IZauPh4WGux/P5POdaVSmlcL5cWC4XiBHdNm7Pz1yvV9ayUXun+97svc+9GWMkpYSIzGd13Ae9d5sXGmgnxUBwO9B7o2ub75NzBuyZ17bNz5n7XyKld0oD7QqqiARisHkMIqjaz1QV9WuAQIrJ13YiRkA627ZyvV6JMfL09MTDwwPPz89zj441N9b//AqCiq3ZqnVeYw7Znm+1+U+SfN+fSTGhrVO2wrqutk+0IzFyvV65XC7knDmfz5zPZ67XKyJi+2pdaa0RQuB8PvPw8EBKiVIK9/udst5prb2wm6pKbZXW6vw8EVDtXK9X/vIv/5Kf//znlLqRHy784Pd/TIuBN2/e8/HTJ969/4o3b96ynM58++EDv/n1r/n44Rv+7E//JdD5yQ++5P2bR/6tf+u/zQ/ef8Hf+wf/kA/frPzf/u//D372539K75W//w//mH/tj/4WnY1f/vrn/ObDX3BvV/75f/4fsrUbP/rxlxA6/+v/xf+S/+v/6//Cv/Pf+ff4evkjEl/QVNl6RVhoDW7bxuX8lm8/fsu7h7e0dYO6kQUyQtRGADvXeqK38GItzvVYbY4eH99wv9+p6+a2G7PLrSFRkCxUNZsQgPV+5+2bN9xuN84ps64r52UxW1b2NRBskt
HaCKpcYkJUuV6v5GVB3XalZTGbIFBbs3M5RkqtVLeZw5Y9PDxwL9s8I7oq27bR0HkWqCqCEHBb7SdxUoiqcL8TRcah6vtJ6EAX28v3srHVynKyM4Zgc/j8/Mzp8QICjfZi/x/P8PHvMdfDvojbt1orpddpp+17jVYbwfeo7eNodqMpvY81LSQJEGCTjU3LtJOM91OldjvzRSGIkEIgIwQRpCvSO6gSSKjCw+VCDIGnpyeWZeGbb77h6y+/QlVJwfDsDx8+mJ3zaz6dTgBEgYD7L34tMSU0BpRu/hqdKmbzW++UZvu+2y0RSUSN3K53crZ19dWXX/L09MTpdLJ9q+ZnjTWB29RpGzXQSiXGPP0Qapv3kFIiiVC3QnZ71n19tta4PFy4b3dSTlTt3O933n39FU/3G2FZuF6vEISYMzGleU4tOVPvK4+Pj2z3OykltlbcPzPfoLofJQIMu9k7pTdKrW5D25xDmhJWOEkmiFC2De2NFO0+gggxCA+nk69zACXYdNrnJuHWG2uvbLXQVW1/5UzK5vt2EbZS6JhvbO/i55u/13q7gSrJ96aI0JvSWqc3tV/oNoeC3fMSl+krxRwp2llbobRGHXskRlSVbbM9oc1sD9V9agmUdeO7Dx/4+JtvuUjgD77+MffnZyQJ/+9/+h/w7uuv+V/9b/83fP13/wg5Lfyf/0//R/7xP/kn/OxXf8n7H3yF+Jn57ssvOS0L1A1uT/zsT/4l/+K/+K8oW+OXP/8Lfv/3/oC3D28JEnj/7ktbg2Xjtq603khL4suvvuRf+6M/4gdf/ZgvvvgR/+E//Wf8s//kP2ZdV4jwB3/wt2jaOZ9P/Ef/yX/Ef/yf/jM+fvpI7dXij247QbKvT7e5f9343kFSp9FoZvgQC2AQVIRLjLwPgXfukNUo9BgIVLQ3qm4giiKAEIQ5cWMjeQhgo3U6Cr7whsM3NqP6gfO5gYSXQc3xZ+MrhkAMMn/+u5zv4ZAcnbDxmhe/19UP++EI7Ut8BGsiNl/jvTUENNqalv7irm0uBKJduF1/jOZUa7fAcQRJQfx1+uJgmO/zmXMITMOWUpqHQ86ZRAAVJAaWEGhjnse94QGnH2YIVO2IBxj73Ch6mMMZtAVB/QkrIKoQIx0Iag6edHPWQwjgRiD4wXA0wikllmXZ14Lugd1xDdhBbanSzss11HtHJbx4vq21ffm19mJ9jvc+ronmgeKY17HWhpMwnscIPAIQowMCIqjsTjeYs9KxILJ1c6Rab3Tp9Kh0+ot1bh9on60qbjjtmoOMefMJp3O93Yiy3y9YoCU+j8PwjoN+BHv4NUVs0bZmAEatld1l8H0WAoTDXvxs3lElL5HazCAf57C19mKv9dagVgNDPAAdX6U1++zjXPiaH+9RSpnPoR8O8hgjEgKtFDjuXd+zY19MB0v7b32GPT9zZtXPp+hrM6fF5k3tAAsx+Hoz5wzd9585dZ1azSHOOdNa4927d5RSyDnPoHis/bHWjvYQhLhkaAfnust0bI9rprU2D9TjHkKZ9z7u8wiuDEdyBKLD+Tg+l8+Do7Enxp9H+9p7o7U6gYxxraUUm2MPDmNIBCyQvlwe6LVx/fTEB1UDcWKwwFyV5+dn/uAnPzWAKxjY8OHjJ96+f6ApdISO0BRCzNR7I+UL58uCkvnJj77iZ9tf8v79l9QmdIV7vRLjGVTs/IsLOSdK23i4XOy6xc5BN43MjdGV3iuqY/4N1BJRoBPEgYp6p9cyXaQQIATbL6pKKw2lE0UQVR4uF7Z1JfocJV9vHNZ/wM5Y3A6iylYLMQhNlUj3e+qE3im9klI28M/Xtx/X+99RAxL8LLK3NddOVFEVUrBf6F1Buxt8mfZh+A7q57uI2l7C79WdyPnZdKqC9E4d11krxP13xpi23df82CvH9Tz2QK2VEF/6BkECBJ3ntZiBQPyRjmf7wgSrEjG754YQxYMiCYSY0N73f4uf2+gL22OPr09bGg/gcPNzFphnajycsa
KAtpd2Ydhy1P+nKP2F3QxBQIMf64FIQCsvfKDjOQxm5z4/l4/7HNRtRQME1Wb+i9h11LpBSLReid2vP0CQaOvfz5i
"text/plain": [
"<Figure size 1200x800 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"%matplotlib inline\n",
"from ultralytics import YOLO\n",
"import cv2\n",
"import matplotlib.pyplot as plt\n",
"\n",
"\n",
"# Загрузка модели\n",
"model = YOLO(\"runs/obb/train2/weights/best.pt\")\n",
"\n",
"# Путь к изображению\n",
"image_path = \"img/2.jpg\"\n",
"\n",
"# Выполнение инференса\n",
"results = model(image_path)\n",
"\n",
"# Получение первого результата\n",
"result = results[0]\n",
"\n",
"# Отрисовка детекций\n",
"annotated_frame = result.plot()\n",
"\n",
"# Отображение изображения\n",
"plt.figure(figsize=(12, 8))\n",
"plt.imshow(annotated_frame)\n",
"plt.axis('off')\n",
"plt.show()"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"ename": "RoboflowAPINotAuthorizedError",
"evalue": "Unauthorized access to roboflow API - check API key. Visit https://docs.roboflow.com/api-reference/authentication#retrieve-an-api-key to learn how to retrieve one.",
"output_type": "error",
"traceback": [
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[1;31mHTTPError\u001b[0m Traceback (most recent call last)",
"File \u001b[1;32mc:\\Users\\leonk\\Documents\\code\\number-plate-study\\.venv\\lib\\site-packages\\inference\\core\\roboflow_api.py:87\u001b[0m, in \u001b[0;36mwrap_roboflow_api_errors.<locals>.decorator.<locals>.wrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 86\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m---> 87\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m function(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m 88\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m (requests\u001b[38;5;241m.\u001b[39mexceptions\u001b[38;5;241m.\u001b[39mConnectionError, \u001b[38;5;167;01mConnectionError\u001b[39;00m) \u001b[38;5;28;01mas\u001b[39;00m error:\n",
"File \u001b[1;32mc:\\Users\\leonk\\Documents\\code\\number-plate-study\\.venv\\lib\\site-packages\\inference\\core\\roboflow_api.py:234\u001b[0m, in \u001b[0;36mget_roboflow_model_data\u001b[1;34m(api_key, model_id, endpoint_type, device_id)\u001b[0m\n\u001b[0;32m 230\u001b[0m api_url \u001b[38;5;241m=\u001b[39m _add_params_to_url(\n\u001b[0;32m 231\u001b[0m url\u001b[38;5;241m=\u001b[39m\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mAPI_BASE_URL\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m/\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mendpoint_type\u001b[38;5;241m.\u001b[39mvalue\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m/\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mmodel_id\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m,\n\u001b[0;32m 232\u001b[0m params\u001b[38;5;241m=\u001b[39mparams,\n\u001b[0;32m 233\u001b[0m )\n\u001b[1;32m--> 234\u001b[0m api_data \u001b[38;5;241m=\u001b[39m \u001b[43m_get_from_url\u001b[49m\u001b[43m(\u001b[49m\u001b[43murl\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mapi_url\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 235\u001b[0m cache\u001b[38;5;241m.\u001b[39mset(\n\u001b[0;32m 236\u001b[0m api_data_cache_key,\n\u001b[0;32m 237\u001b[0m api_data,\n\u001b[0;32m 238\u001b[0m expire\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m10\u001b[39m,\n\u001b[0;32m 239\u001b[0m )\n",
"File \u001b[1;32mc:\\Users\\leonk\\Documents\\code\\number-plate-study\\.venv\\lib\\site-packages\\inference\\core\\roboflow_api.py:587\u001b[0m, in \u001b[0;36m_get_from_url\u001b[1;34m(url, json_response)\u001b[0m\n\u001b[0;32m 586\u001b[0m response \u001b[38;5;241m=\u001b[39m requests\u001b[38;5;241m.\u001b[39mget(wrap_url(url))\n\u001b[1;32m--> 587\u001b[0m \u001b[43mapi_key_safe_raise_for_status\u001b[49m\u001b[43m(\u001b[49m\u001b[43mresponse\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mresponse\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 588\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m json_response:\n",
"File \u001b[1;32mc:\\Users\\leonk\\Documents\\code\\number-plate-study\\.venv\\lib\\site-packages\\inference\\core\\utils\\requests.py:15\u001b[0m, in \u001b[0;36mapi_key_safe_raise_for_status\u001b[1;34m(response)\u001b[0m\n\u001b[0;32m 14\u001b[0m response\u001b[38;5;241m.\u001b[39murl \u001b[38;5;241m=\u001b[39m API_KEY_PATTERN\u001b[38;5;241m.\u001b[39msub(deduct_api_key, response\u001b[38;5;241m.\u001b[39murl)\n\u001b[1;32m---> 15\u001b[0m \u001b[43mresponse\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mraise_for_status\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n",
"File \u001b[1;32mc:\\Users\\leonk\\Documents\\code\\number-plate-study\\.venv\\lib\\site-packages\\requests\\models.py:1024\u001b[0m, in \u001b[0;36mResponse.raise_for_status\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 1023\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m http_error_msg:\n\u001b[1;32m-> 1024\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m HTTPError(http_error_msg, response\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m)\n",
"\u001b[1;31mHTTPError\u001b[0m: 401 Client Error: Unauthorized for url: https://api.roboflow.com/ort/license-plate-recognition-rxg4e/6?nocache=true&device=DESKTOP-SAN4KOQ&dynamic=true",
"\nThe above exception was the direct cause of the following exception:\n",
"\u001b[1;31mRoboflowAPINotAuthorizedError\u001b[0m Traceback (most recent call last)",
"Cell \u001b[1;32mIn[1], line 10\u001b[0m\n\u001b[0;32m 7\u001b[0m image \u001b[38;5;241m=\u001b[39m cv2\u001b[38;5;241m.\u001b[39mimread(image_file)\n\u001b[0;32m 9\u001b[0m \u001b[38;5;66;03m# Загружаем предварительно обученную модель для распознавания номеров\u001b[39;00m\n\u001b[1;32m---> 10\u001b[0m model \u001b[38;5;241m=\u001b[39m \u001b[43mget_model\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmodel_id\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mlicense-plate-recognition-rxg4e/6\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[0;32m 12\u001b[0m \u001b[38;5;66;03m# Запускаем инференс на выбранном изображении\u001b[39;00m\n\u001b[0;32m 13\u001b[0m results \u001b[38;5;241m=\u001b[39m model\u001b[38;5;241m.\u001b[39minfer(image)[\u001b[38;5;241m0\u001b[39m]\n",
"File \u001b[1;32mc:\\Users\\leonk\\Documents\\code\\number-plate-study\\.venv\\lib\\site-packages\\inference\\models\\utils.py:361\u001b[0m, in \u001b[0;36mget_model\u001b[1;34m(model_id, api_key, **kwargs)\u001b[0m\n\u001b[0;32m 360\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mget_model\u001b[39m(model_id, api_key\u001b[38;5;241m=\u001b[39mAPI_KEY, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Model:\n\u001b[1;32m--> 361\u001b[0m task, model \u001b[38;5;241m=\u001b[39m \u001b[43mget_model_type\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmodel_id\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mapi_key\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mapi_key\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 362\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m ROBOFLOW_MODEL_TYPES[(task, model)](model_id, api_key\u001b[38;5;241m=\u001b[39mapi_key, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n",
"File \u001b[1;32mc:\\Users\\leonk\\Documents\\code\\number-plate-study\\.venv\\lib\\site-packages\\inference\\core\\registries\\roboflow.py:118\u001b[0m, in \u001b[0;36mget_model_type\u001b[1;34m(model_id, api_key)\u001b[0m\n\u001b[0;32m 111\u001b[0m save_model_metadata_in_cache(\n\u001b[0;32m 112\u001b[0m dataset_id\u001b[38;5;241m=\u001b[39mdataset_id,\n\u001b[0;32m 113\u001b[0m version_id\u001b[38;5;241m=\u001b[39mversion_id,\n\u001b[0;32m 114\u001b[0m project_task_type\u001b[38;5;241m=\u001b[39mproject_task_type,\n\u001b[0;32m 115\u001b[0m model_type\u001b[38;5;241m=\u001b[39mmodel_type,\n\u001b[0;32m 116\u001b[0m )\n\u001b[0;32m 117\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m project_task_type, model_type\n\u001b[1;32m--> 118\u001b[0m api_data \u001b[38;5;241m=\u001b[39m \u001b[43mget_roboflow_model_data\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 119\u001b[0m \u001b[43m \u001b[49m\u001b[43mapi_key\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mapi_key\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 120\u001b[0m \u001b[43m \u001b[49m\u001b[43mmodel_id\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmodel_id\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 121\u001b[0m \u001b[43m \u001b[49m\u001b[43mendpoint_type\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mModelEndpointType\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mORT\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 122\u001b[0m \u001b[43m \u001b[49m\u001b[43mdevice_id\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mGLOBAL_DEVICE_ID\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 123\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mort\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 124\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m api_data \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m 125\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m 
ModelArtefactError(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mError loading model artifacts from Roboflow API.\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n",
"File \u001b[1;32mc:\\Users\\leonk\\Documents\\code\\number-plate-study\\.venv\\lib\\site-packages\\inference\\core\\roboflow_api.py:100\u001b[0m, in \u001b[0;36mwrap_roboflow_api_errors.<locals>.decorator.<locals>.wrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 98\u001b[0m error_handler \u001b[38;5;241m=\u001b[39m user_handler_override\u001b[38;5;241m.\u001b[39mget(status_code, default_handler)\n\u001b[0;32m 99\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m error_handler \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m--> 100\u001b[0m \u001b[43merror_handler\u001b[49m\u001b[43m(\u001b[49m\u001b[43merror\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 101\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m RoboflowAPIUnsuccessfulRequestError(\n\u001b[0;32m 102\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mUnsuccessful request to Roboflow API with response code: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mstatus_code\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 103\u001b[0m ) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01merror\u001b[39;00m\n\u001b[0;32m 104\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m requests\u001b[38;5;241m.\u001b[39mexceptions\u001b[38;5;241m.\u001b[39mInvalidJSONError \u001b[38;5;28;01mas\u001b[39;00m error:\n",
"File \u001b[1;32mc:\\Users\\leonk\\Documents\\code\\number-plate-study\\.venv\\lib\\site-packages\\inference\\core\\roboflow_api.py:67\u001b[0m, in \u001b[0;36m<lambda>\u001b[1;34m(e)\u001b[0m\n\u001b[0;32m 60\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mraise_from_lambda\u001b[39m(\n\u001b[0;32m 61\u001b[0m inner_error: \u001b[38;5;167;01mException\u001b[39;00m, exception_type: Type[\u001b[38;5;167;01mException\u001b[39;00m], message: \u001b[38;5;28mstr\u001b[39m\n\u001b[0;32m 62\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m 63\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m exception_type(message) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01minner_error\u001b[39;00m\n\u001b[0;32m 66\u001b[0m DEFAULT_ERROR_HANDLERS \u001b[38;5;241m=\u001b[39m {\n\u001b[1;32m---> 67\u001b[0m \u001b[38;5;241m401\u001b[39m: \u001b[38;5;28;01mlambda\u001b[39;00m e: \u001b[43mraise_from_lambda\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 68\u001b[0m \u001b[43m \u001b[49m\u001b[43me\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 69\u001b[0m \u001b[43m \u001b[49m\u001b[43mRoboflowAPINotAuthorizedError\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 70\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mUnauthorized access to roboflow API - check API key. 
Visit \u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\n\u001b[0;32m 71\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mhttps://docs.roboflow.com/api-reference/authentication#retrieve-an-api-key to learn how to retrieve one.\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[0;32m 72\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m,\n\u001b[0;32m 73\u001b[0m \u001b[38;5;241m404\u001b[39m: \u001b[38;5;28;01mlambda\u001b[39;00m e: raise_from_lambda(\n\u001b[0;32m 74\u001b[0m e, RoboflowAPINotNotFoundError, NOT_FOUND_ERROR_MESSAGE\n\u001b[0;32m 75\u001b[0m ),\n\u001b[0;32m 76\u001b[0m }\n\u001b[0;32m 79\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mwrap_roboflow_api_errors\u001b[39m(\n\u001b[0;32m 80\u001b[0m http_errors_handlers: Optional[\n\u001b[0;32m 81\u001b[0m Dict[\u001b[38;5;28mint\u001b[39m, Callable[[Union[requests\u001b[38;5;241m.\u001b[39mexceptions\u001b[38;5;241m.\u001b[39mHTTPError]], \u001b[38;5;28;01mNone\u001b[39;00m]]\n\u001b[0;32m 82\u001b[0m ] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[0;32m 83\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m \u001b[38;5;28mcallable\u001b[39m:\n\u001b[0;32m 84\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mdecorator\u001b[39m(function: \u001b[38;5;28mcallable\u001b[39m) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m \u001b[38;5;28mcallable\u001b[39m:\n",
"File \u001b[1;32mc:\\Users\\leonk\\Documents\\code\\number-plate-study\\.venv\\lib\\site-packages\\inference\\core\\roboflow_api.py:63\u001b[0m, in \u001b[0;36mraise_from_lambda\u001b[1;34m(inner_error, exception_type, message)\u001b[0m\n\u001b[0;32m 60\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mraise_from_lambda\u001b[39m(\n\u001b[0;32m 61\u001b[0m inner_error: \u001b[38;5;167;01mException\u001b[39;00m, exception_type: Type[\u001b[38;5;167;01mException\u001b[39;00m], message: \u001b[38;5;28mstr\u001b[39m\n\u001b[0;32m 62\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m---> 63\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m exception_type(message) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01minner_error\u001b[39;00m\n",
"\u001b[1;31mRoboflowAPINotAuthorizedError\u001b[0m: Unauthorized access to roboflow API - check API key. Visit https://docs.roboflow.com/api-reference/authentication#retrieve-an-api-key to learn how to retrieve one."
]
}
],
"source": [
"from inference import get_model\n",
"import supervision as sv\n",
"import cv2\n",
"\n",
"# Задаем путь к изображению для обработки\n",
"image_file = \"img/car1.jpg\"\n",
"image = cv2.imread(image_file)\n",
"\n",
"# Загружаем предварительно обученную модель для распознавания номеров\n",
"model = get_model(model_id=\"license-plate-recognition-rxg4e/6\")\n",
"\n",
"# Запускаем инференс на выбранном изображении\n",
"results = model.infer(image)[0]\n",
"\n",
"# Загружаем результаты инференса в API Supervision Detections\n",
"detections = sv.Detections.from_inference(results)\n",
"\n",
"# Создаем аннотаторы для визуализации\n",
"bounding_box_annotator = sv.BoxAnnotator()\n",
"label_annotator = sv.LabelAnnotator()\n",
"\n",
"# Аннотируем изображение с использованием результатов инференса\n",
"annotated_image = bounding_box_annotator.annotate(\n",
" scene=image, detections=detections\n",
")\n",
"annotated_image = label_annotator.annotate(\n",
" scene=annotated_image, detections=detections\n",
")\n",
"\n",
"# Отображаем изображение\n",
"sv.plot_image(annotated_image)\n"
]
2024-11-28 18:44:43 +01:00
}
],
"metadata": {
"kernelspec": {
"display_name": ".venv",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.13"
}
},
"nbformat": 4,
"nbformat_minor": 2
}