Commit 9d1e08e0 authored by Paranagama R.P.S.D.

fix: Recorded video display error

parent 4832b594
@@ -76,6 +76,44 @@
"cv2.destroyAllWindows()"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "93d1ae9d",
"metadata": {},
"outputs": [],
"source": [
"import cv2\n",
"import numpy as np\n",
"\n",
"\n",
"IMG_SIZE = 224\n",
"\n",
"frame = cv2.imread('D:/RP/project/2023-029/Project/Backend/ML_Models/sign_language_to_text/test_image.png')\n",
"\n",
"def extract_hand_shape(image):\n",
" gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)\n",
" blurred = cv2.GaussianBlur(gray, (5, 5), 0)\n",
" _, thresholded = cv2.threshold(blurred, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)\n",
" contours, _ = cv2.findContours(thresholded, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n",
" contours = sorted(contours, key=cv2.contourArea, reverse=True)\n",
" hand_contour = contours[0]\n",
" hand_shape = np.zeros_like(image)\n",
" cv2.drawContours(hand_shape, [hand_contour], 0, (255, 255, 255), thickness=cv2.FILLED)\n",
" return hand_shape\n",
"\n",
"frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)\n",
"frame = cv2.resize(frame, (IMG_SIZE, IMG_SIZE))\n",
"# frame = extract_hand_shape(frame)\n",
"# frame = np.array([frame], dtype=np.float32) / 255.0\n",
"\n",
"\n",
"# Display the hand shape\n",
"cv2.imshow('Hand Shape', frame)\n",
"cv2.waitKey(0)\n",
"cv2.destroyAllWindows()"
]
},
{
"cell_type": "code",
"execution_count": 4,
@@ -95,14 +133,6 @@
}
],
"source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "93d1ae9d",
- "metadata": {},
- "outputs": [],
- "source": []
}
],
"metadata": {
...
@@ -52,7 +52,7 @@
},
{
"cell_type": "code",
- "execution_count": 62,
+ "execution_count": 19,
"id": "e7e05125",
"metadata": {},
"outputs": [],
@@ -83,7 +83,7 @@
},
{
"cell_type": "markdown",
- "id": "02a9f711",
+ "id": "ae0a7cb5",
"metadata": {},
"source": [
"### Capture Hand"
@@ -91,8 +91,8 @@
},
{
"cell_type": "code",
- "execution_count": 66,
- "id": "7aa4a681",
+ "execution_count": 2,
+ "id": "1c04daef",
"metadata": {},
"outputs": [
{
@@ -102,7 +102,7 @@
"traceback": [
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[1;31merror\u001b[0m Traceback (most recent call last)",
"\u001b[1;32m~\\AppData\\Local\\Temp\\ipykernel_16084\\2889311055.py\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[0;32m 29\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 30\u001b[0m \u001b[1;31m# Display the result\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 31\u001b[1;33m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mimshow\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m\"Area Above Elbow\"\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0marea_above_elbow\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 32\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mwaitKey\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;36m0\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 33\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mdestroyAllWindows\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", "\u001b[1;32m~\\AppData\\Local\\Temp\\ipykernel_3024\\2889311055.py\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[0;32m 29\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 30\u001b[0m \u001b[1;31m# Display the result\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 31\u001b[1;33m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mimshow\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m\"Area Above Elbow\"\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0marea_above_elbow\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 32\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mwaitKey\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;36m0\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 33\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mdestroyAllWindows\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
"\u001b[1;31merror\u001b[0m: OpenCV(4.7.0) D:\\a\\opencv-python\\opencv-python\\opencv\\modules\\highgui\\src\\window.cpp:971: error: (-215:Assertion failed) size.width>0 && size.height>0 in function 'cv::imshow'\n" "\u001b[1;31merror\u001b[0m: OpenCV(4.7.0) D:\\a\\opencv-python\\opencv-python\\opencv\\modules\\highgui\\src\\window.cpp:971: error: (-215:Assertion failed) size.width>0 && size.height>0 in function 'cv::imshow'\n"
] ]
} }
...@@ -112,8 +112,8 @@ ...@@ -112,8 +112,8 @@
"import numpy as np\n", "import numpy as np\n",
"import mediapipe as mp\n", "import mediapipe as mp\n",
"# Load the image\n", "# Load the image\n",
"# image = cv2.imread('C:/Users/HP Pavilion/Downloads/test_sign.jpeg')\n", "image = cv2.imread('C:/Users/HP Pavilion/Downloads/test_sign.jpeg')\n",
"image = cv2.imread('D:/RP/project/2023-029/Project/Backend\\ML_Models/sign_language_to_text/scene00001.png')\n", "\n",
"image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)\n", "image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)\n",
"\n", "\n",
"# Initialize Mediapipe solutions\n", "# Initialize Mediapipe solutions\n",
...@@ -142,6 +142,52 @@ ...@@ -142,6 +142,52 @@
"cv2.waitKey(0)\n", "cv2.waitKey(0)\n",
"cv2.destroyAllWindows()" "cv2.destroyAllWindows()"
] ]
},
{
"cell_type": "code",
"execution_count": null,
"id": "3ead61b7",
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
"# This works for any background, but does not hapture the fist area\n",
"import cv2\n",
"import numpy as np\n",
"\n",
"IMG_SIZE = 224 # image size\n",
"\n",
"# image = cv2.imread('C:/Users/HP Pavilion/Downloads/images (1).jpg')\n",
"\n",
"\n",
"image = cv2.imread('D:/RP/project/2023-029/Project/Backend/ML_Models/sign_language_to_text/test_image.png')\n",
"\n",
"def extract_hand_shape(image):\n",
" gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)\n",
" blurred = cv2.GaussianBlur(gray, (5, 5), 0)\n",
" _, thresholded = cv2.threshold(blurred, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)\n",
" contours, _ = cv2.findContours(thresholded, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n",
" contours = sorted(contours, key=cv2.contourArea, reverse=True)\n",
" hand_contour = contours[0]\n",
" hand_shape = np.zeros_like(image)\n",
" cv2.drawContours(hand_shape, [hand_contour], 0, (255, 255, 255), thickness=cv2.FILLED)\n",
" return hand_shape\n",
"\n",
"\n",
"frame = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)\n",
"frame = cv2.resize(frame, (IMG_SIZE, IMG_SIZE))\n",
"frame = extract_hand_shape(frame)\n",
"frame = np.array([frame], dtype=np.float32) / 255.0\n",
"\n",
"\n",
"\n",
"\n",
"# Display the hand shape\n",
"cv2.imshow('Hand Shape', hand_shape)\n",
"cv2.waitKey(0)\n",
"cv2.destroyAllWindows()"
]
}
],
"metadata": {
...
@@ -78,31 +78,53 @@
},
{
"cell_type": "code",
- "execution_count": 4,
- "id": "e7e05125",
+ "execution_count": 1,
+ "id": "93d1ae9d",
"metadata": {},
"outputs": [
{
"ename": "error",
- "evalue": "OpenCV(4.7.0) D:/a/opencv-python/opencv-python/opencv/modules/highgui/src/precomp.hpp:155: error: (-215:Assertion failed) src_depth != CV_16F && src_depth != CV_32S in function 'convertToShow'\n",
+ "evalue": "OpenCV(4.7.0) D:\\a\\opencv-python\\opencv-python\\opencv\\modules\\highgui\\src\\window_w32.cpp:124: error: (-215:Assertion failed) bmi && width >= 0 && height >= 0 && (bpp == 8 || bpp == 24 || bpp == 32) in function 'FillBitmapInfo'\n",
"output_type": "error",
"traceback": [
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[1;31merror\u001b[0m Traceback (most recent call last)",
"\u001b[1;32m~\\AppData\\Local\\Temp\\ipykernel_19904\\1480809324.py\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[0;32m 31\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 32\u001b[0m \u001b[1;31m# Display the hand shape with white background\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 33\u001b[1;33m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mimshow\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m'Hand Shape with White Background'\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mhand_shape_with_white_bg\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 34\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mwaitKey\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;36m0\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 35\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mdestroyAllWindows\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", "\u001b[1;32m~\\AppData\\Local\\Temp\\ipykernel_16572\\1170496581.py\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[0;32m 25\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 26\u001b[0m \u001b[1;31m# Display the hand shape\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 27\u001b[1;33m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mimshow\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m'Hand Shape'\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mframe\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 28\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mwaitKey\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;36m0\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 29\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mdestroyAllWindows\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
"\u001b[1;31merror\u001b[0m: OpenCV(4.7.0) D:/a/opencv-python/opencv-python/opencv/modules/highgui/src/precomp.hpp:155: error: (-215:Assertion failed) src_depth != CV_16F && src_depth != CV_32S in function 'convertToShow'\n" "\u001b[1;31merror\u001b[0m: OpenCV(4.7.0) D:\\a\\opencv-python\\opencv-python\\opencv\\modules\\highgui\\src\\window_w32.cpp:124: error: (-215:Assertion failed) bmi && width >= 0 && height >= 0 && (bpp == 8 || bpp == 24 || bpp == 32) in function 'FillBitmapInfo'\n"
] ]
} }
], ],
"source": [] "source": [
}, "import cv2\n",
{ "import numpy as np\n",
"cell_type": "code", "\n",
"execution_count": null, "\n",
"id": "93d1ae9d", "IMG_SIZE = 224\n",
"metadata": {}, "\n",
"outputs": [], "frame = cv2.imread('D:/RP/project/2023-029/Project/Backend/ML_Models/sign_language_to_text/test_image.png')\n",
"source": [] "\n",
"def extract_hand_shape(image):\n",
" gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)\n",
" blurred = cv2.GaussianBlur(gray, (5, 5), 0)\n",
" _, thresholded = cv2.threshold(blurred, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)\n",
" contours, _ = cv2.findContours(thresholded, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n",
" contours = sorted(contours, key=cv2.contourArea, reverse=True)\n",
" hand_contour = contours[0]\n",
" hand_shape = np.zeros_like(image)\n",
" cv2.drawContours(hand_shape, [hand_contour], 0, (255, 255, 255), thickness=cv2.FILLED)\n",
" return hand_shape\n",
"\n",
"frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)\n",
"frame = cv2.resize(frame, (IMG_SIZE, IMG_SIZE))\n",
"frame = extract_hand_shape(frame)\n",
"frame = np.array([frame], dtype=np.float32) / 255.0\n",
"\n",
"\n",
"# Display the hand shape\n",
"cv2.imshow('Hand Shape', frame)\n",
"cv2.waitKey(0)\n",
"cv2.destroyAllWindows()"
]
} }
], ],
"metadata": { "metadata": {
......
@@ -52,7 +52,7 @@
},
{
"cell_type": "code",
- "execution_count": 62,
+ "execution_count": 19,
"id": "e7e05125",
"metadata": {},
"outputs": [],
@@ -91,7 +91,7 @@
},
{
"cell_type": "code",
- "execution_count": 68,
+ "execution_count": 2,
"id": "1c04daef",
"metadata": {},
"outputs": [
@@ -102,7 +102,7 @@
"traceback": [
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[1;31merror\u001b[0m Traceback (most recent call last)",
"\u001b[1;32m~\\AppData\\Local\\Temp\\ipykernel_16084\\10235205.py\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[0;32m 29\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 30\u001b[0m \u001b[1;31m# Display the result\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 31\u001b[1;33m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mimshow\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m\"Area Above Elbow\"\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0marea_above_elbow\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 32\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mwaitKey\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;36m0\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 33\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mdestroyAllWindows\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", "\u001b[1;32m~\\AppData\\Local\\Temp\\ipykernel_3024\\2889311055.py\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[0;32m 29\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 30\u001b[0m \u001b[1;31m# Display the result\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 31\u001b[1;33m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mimshow\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m\"Area Above Elbow\"\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0marea_above_elbow\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 32\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mwaitKey\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;36m0\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 33\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mdestroyAllWindows\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
"\u001b[1;31merror\u001b[0m: OpenCV(4.7.0) D:\\a\\opencv-python\\opencv-python\\opencv\\modules\\highgui\\src\\window.cpp:971: error: (-215:Assertion failed) size.width>0 && size.height>0 in function 'cv::imshow'\n" "\u001b[1;31merror\u001b[0m: OpenCV(4.7.0) D:\\a\\opencv-python\\opencv-python\\opencv\\modules\\highgui\\src\\window.cpp:971: error: (-215:Assertion failed) size.width>0 && size.height>0 in function 'cv::imshow'\n"
] ]
} }
...@@ -142,6 +142,52 @@ ...@@ -142,6 +142,52 @@
"cv2.waitKey(0)\n", "cv2.waitKey(0)\n",
"cv2.destroyAllWindows()" "cv2.destroyAllWindows()"
] ]
},
{
"cell_type": "code",
"execution_count": null,
"id": "3ead61b7",
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
"# This works for any background, but does not hapture the fist area\n",
"import cv2\n",
"import numpy as np\n",
"\n",
"IMG_SIZE = 224 # image size\n",
"\n",
"# image = cv2.imread('C:/Users/HP Pavilion/Downloads/images (1).jpg')\n",
"\n",
"\n",
"image = cv2.imread('D:/RP/project/2023-029/Project/Backend/ML_Models/sign_language_to_text/test_image.png')\n",
"\n",
"def extract_hand_shape(image):\n",
" gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)\n",
" blurred = cv2.GaussianBlur(gray, (5, 5), 0)\n",
" _, thresholded = cv2.threshold(blurred, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)\n",
" contours, _ = cv2.findContours(thresholded, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n",
" contours = sorted(contours, key=cv2.contourArea, reverse=True)\n",
" hand_contour = contours[0]\n",
" hand_shape = np.zeros_like(image)\n",
" cv2.drawContours(hand_shape, [hand_contour], 0, (255, 255, 255), thickness=cv2.FILLED)\n",
" return hand_shape\n",
"\n",
"\n",
"frame = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)\n",
"frame = cv2.resize(frame, (IMG_SIZE, IMG_SIZE))\n",
"frame = extract_hand_shape(frame)\n",
"frame = np.array([frame], dtype=np.float32) / 255.0\n",
"\n",
"\n",
"\n",
"\n",
"# Display the hand shape\n",
"cv2.imshow('Hand Shape', hand_shape)\n",
"cv2.waitKey(0)\n",
"cv2.destroyAllWindows()"
]
}
],
"metadata": {
...
@@ -174,8 +174,8 @@ class SignLanguagePredictionService:
if not success:
break
- # TODO Add to COnfig
- if frame_count >= 250:
+ # TODO Add to Config
+ if frame_count >= 500:
break
# Preprocess the frame
@@ -195,7 +195,7 @@ class SignLanguagePredictionService:
predictions.append(sinhala_letter)
frame_count += 1
- # TODO Add to COnfig
+ # TODO Add to Config
threshold_percentage = 60
# Check if the required number of frames per sign has been reached
...
@@ -63,6 +63,7 @@ export default function AboutPage() {
const [loading, setLoading] = useState(false);
const [value, setValue] = useState('');
const [speed, setSpeed] = useState(0);
const [recordedVideoUrl, setRecordedVideoUrl] = useState(null);
const handleDropSingleFile = useCallback(async (acceptedFiles: File[]) => {
const file = acceptedFiles[0];
@@ -114,7 +115,8 @@ export default function AboutPage() {
if (file) {
setLoading(true);
const formData = new FormData();
- formData.append('video_request', file, 'test_video.mp4');
+ formData.append('video_request', file, file.name);
try {
const response = await SignLanguageToTextService.predictSignLanguageVideo(speed, formData);
@@ -135,11 +137,42 @@ export default function AboutPage() {
}
};
const translateSignLanguageToTextRecord = async () => {
console.log('TEST TES ');
console.log(recordedVideoUrl);
if (recordedVideoUrl) {
setLoading(true);
const formData = new FormData();
formData.append('video_request', recordedVideoUrl, recordedVideoUrl.name);
try {
const response = await SignLanguageToTextService.predictSignLanguageVideo(speed, formData);
if (response.status == 200) {
console.log(response.data);
setValue(response.data.predictions);
} else {
enqueueSnackbar('Something went Wrong!', { variant: 'error' });
}
setLoading(false);
} catch (error) {
console.log(error);
setLoading(false);
enqueueSnackbar('Something went Wrong!', { variant: 'error' });
}
} else {
enqueueSnackbar('Please record a video.', { variant: 'warning' });
}
};
function valuetext(value: number) {
setSpeed(value);
return `$${value}°C`;
}
const handleVideoRecorded = (url) => {
setRecordedVideoUrl(url);
};
return (
<>
<Head>
@@ -290,8 +323,15 @@ export default function AboutPage() {
<Grid item xs={12} md={6}>
<Card>
<CardContent>
- <WebcamStreamCapture />
+ <WebcamStreamCapture onVideoRecorded={handleVideoRecorded} />
</CardContent>
{recordedVideoUrl && (
<div>
<h2>Recorded Video</h2>
<video src={recordedVideoUrl} controls autoPlay />
</div>
)}
</Card>
</Grid>
<Grid item xs={12} md={6}>
@@ -330,7 +370,7 @@ export default function AboutPage() {
}}
disabled={loading}
onClick={() => {
- translateSignLanguageToText();
+ translateSignLanguageToTextRecord();
}}
>
Translate
...
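A note on the new translateSignLanguageToTextRecord handler above: recordedVideoUrl is the object URL string produced by the webcam component, and FormData.append(name, value, filename) only accepts a filename when value is a Blob or File, so appending the URL string (and reading .name from it) is unlikely to send the actual recording to the backend. The following is a minimal sketch of one way to rebuild a File from the object URL before calling the existing service; the helper name buildRecordedVideoFormData and the recorded_video.webm filename are illustrative assumptions, not part of this commit.

// Hypothetical helper (not part of this commit): recover the recorded Blob from the
// object URL and wrap it as a File so FormData carries binary data, not a "blob:..." string.
async function buildRecordedVideoFormData(recordedVideoUrl: string): Promise<FormData> {
  const blob = await fetch(recordedVideoUrl).then((res) => res.blob()); // fetch can read blob: URLs
  const file = new File([blob], 'recorded_video.webm', { type: 'video/webm' }); // assumed name/type
  const formData = new FormData();
  formData.append('video_request', file, file.name); // same field name the page already uses
  return formData;
}

Usage would mirror the existing upload path, for example const formData = await buildRecordedVideoFormData(recordedVideoUrl); followed by SignLanguageToTextService.predictSignLanguageVideo(speed, formData).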
- import React, { useEffect, useState } from 'react';
- import Webcam from 'react-webcam';
- import { Box, Button, Container, Grid, Stack } from '@mui/material';
+ import React, { useEffect, useRef, useState } from 'react';
+ import { Button, Grid } from '@mui/material';
import StopIcon from '@mui/icons-material/Stop';
import RadioButtonCheckedIcon from '@mui/icons-material/RadioButtonChecked';
- import styles from './WebcamStreamCapture.module.css';
+ import Webcam from 'react-webcam';
- const WebcamStreamCapture = () => {
- const webcamRef = React.useRef(null);
- const mediaRecorderRef = React.useRef(null);
- const [capturing, setCapturing] = React.useState(false);
- const [recordedChunks, setRecordedChunks] = React.useState([]);
- const [mediaBlobUrl, setMediaBlobUrl] = React.useState([]);
- const handleStartCaptureClick = React.useCallback(() => {
+ const WebcamStreamCapture = ({ onVideoRecorded }) => {
+ const webcamRef = useRef(null);
+ const mediaRecorderRef = useRef(null);
+ const [capturing, setCapturing] = useState(false);
+ const [recordedChunks, setRecordedChunks] = useState([]);
+ const handleStartCaptureClick = () => {
setRecordedChunks([]);
- setMediaBlobUrl([]);
setCapturing(true);
mediaRecorderRef.current = new MediaRecorder(webcamRef.current.stream, {
@@ -21,112 +19,81 @@ const WebcamStreamCapture = () => {
});
mediaRecorderRef.current.addEventListener('dataavailable', handleDataAvailable);
mediaRecorderRef.current.start();
- }, [webcamRef, setCapturing, mediaRecorderRef]);
- const handleDataAvailable = React.useCallback(
- ({ data }) => {
- if (data.size > 0) {
- setRecordedChunks((prev) => prev.concat(data));
- }
- },
- [setRecordedChunks]
- );
+ };
+ const handleDataAvailable = ({ data }) => {
+ if (data.size > 0) {
+ setRecordedChunks((prev) => prev.concat(data));
+ }
+ };
- const handleStopCaptureClick = React.useCallback(async () => {
+ const handleStopCaptureClick = () => {
mediaRecorderRef.current.stop();
- const blob = new Blob(recordedChunks, {
- type: 'video/mp4',
- });
- const url = await URL.createObjectURL(blob);
- console.log(url);
- await setMediaBlobUrl(url);
setCapturing(false);
- }, [mediaRecorderRef, webcamRef, setCapturing]);
+ };
- const handleDownload = React.useCallback(() => {
+ const handleDownload = () => {
if (recordedChunks.length) {
const blob = new Blob(recordedChunks, {
- type: 'video/mp4',
+ type: 'video/webm', // Use 'video/webm' to match MediaRecorder mimeType
});
const url = URL.createObjectURL(blob);
- const a = document.createElement('a');
- document.body.appendChild(a);
- a.style = 'display: none';
- a.href = url;
- a.download = 'user-recording.mp4';
- a.click();
- window.URL.revokeObjectURL(url);
+ onVideoRecorded(url); // Pass the blob URL to the parent component
setRecordedChunks([]);
}
- }, [recordedChunks]);
- // Styles for camera
- const [width, setWidth] = useState(window.innerWidth);
- const handleResize = () => {
- setWidth(window.innerWidth);
};
- useEffect(() => {
- window.addEventListener('resize', handleResize);
- return () => window.removeEventListener('resize', handleResize);
- }, []);
return (
- <>
- <Grid container spacing={2}>
- <Grid item xs={12}>
- <center>
- <Webcam audio={false} ref={webcamRef} style={{ width: '100%', maxWidth: '500px' }} />
- </center>
- </Grid>
- <Grid item xs={12}>
- <center>
- {capturing ? (
- <Button
- onClick={handleStopCaptureClick}
- startIcon={<StopIcon />}
- color="error"
- variant="contained"
- >
- Stop Capture
- </Button>
- ) : (
- <Button
- onClick={handleStartCaptureClick}
- startIcon={<RadioButtonCheckedIcon />}
- color="error"
- variant="contained"
- >
- Start Capture
- </Button>
- )}
- {recordedChunks.length > 0 && (
- <Button
- onClick={handleDownload}
- variant="contained"
- sx={{
- ml: 1,
- }}
- >
- Download
- </Button>
- )}
- </center>
- </Grid>
- <Grid item xs={12}>
+ <Grid container spacing={2}>
+ <Grid item xs={12}>
+ <center>
+ <Webcam audio={false} ref={webcamRef} style={{ width: '100%', maxWidth: '500px' }} />
+ </center>
+ </Grid>
+ <Grid item xs={12}>
+ <center>
+ {capturing ? (
+ <Button
+ onClick={handleStopCaptureClick}
+ startIcon={<StopIcon />}
+ color="error"
+ variant="contained"
+ >
+ Stop Capture
+ </Button>
+ ) : (
+ <Button
+ onClick={handleStartCaptureClick}
+ startIcon={<RadioButtonCheckedIcon />}
+ color="error"
+ variant="contained"
+ >
+ Start Capture
+ </Button>
+ )}
{recordedChunks.length > 0 && (
- <center>
- <video src={mediaBlobUrl} controls autoPlay />
- </center>
+ <Button onClick={handleDownload} variant="contained" sx={{ ml: 1 }}>
+ Download
+ </Button>
)}
- </Grid>
+ </center>
+ </Grid>
+ <Grid item xs={12}>
+ {recordedChunks.length > 0 && (
+ <center>
+ <video
+ src={
+ recordedChunks.length > 0
+ ? URL.createObjectURL(new Blob(recordedChunks, { type: 'video/webm' }))
+ : null
+ }
+ controls
+ autoPlay
+ />
+ </center>
+ )}
</Grid>
- </>
+ </Grid>
);
};
...
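For the rewritten component above, handleDownload is now the only place the parent is notified, and it receives just the object URL. If the parent also needs the raw bytes (for the upload path sketched earlier), one option is to build the Blob when recording stops and pass both a preview URL and the Blob through onVideoRecorded. The sketch below assumes a two-argument callback and a chunk buffer kept in a ref; neither is part of the committed code.

// Hypothetical variant (not the committed code): collect chunks in a ref and emit the Blob
// together with a preview URL once the recorder has fully stopped.
const chunksRef = useRef<Blob[]>([]);

const handleDataAvailable = ({ data }: BlobEvent) => {
  if (data.size > 0) chunksRef.current.push(data); // a ref avoids stale state in the stop handler
};

const handleStopCaptureClick = () => {
  if (!mediaRecorderRef.current) return;
  mediaRecorderRef.current.addEventListener('stop', () => {
    const blob = new Blob(chunksRef.current, { type: 'video/webm' }); // matches the recorder mimeType
    onVideoRecorded(URL.createObjectURL(blob), blob); // assumed (url, blob) signature
    chunksRef.current = [];
  });
  mediaRecorderRef.current.stop();
  setCapturing(false);
};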