Commit 699f7153 authored by Ridma Dilshan

Merge branch 'master' into IT20005276

parents 2863b5ab cbd507de
models/*
!models/
DataSet/Sn_sign_language_dataset/
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "f47e929b",
"metadata": {},
"outputs": [],
"source": [
"import nest_asyncio\n",
"import asyncio\n",
"import torch.nn.functional as F\n",
"import torch.nn as nn\n",
"import torchvision.transforms as transforms\n",
"import numpy as np\n",
"import io\n",
"import uvicorn\n",
"from fastapi import FastAPI, UploadFile\n",
"from PIL import Image\n",
"import torch"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "13b1d58b",
"metadata": {},
"outputs": [],
"source": [
"nest_asyncio.apply()"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "35f4adc0",
"metadata": {},
"outputs": [],
"source": [
"app = FastAPI()"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "a5aba4be",
"metadata": {},
"outputs": [],
"source": [
"transform = transforms.Compose([\n",
" transforms.Resize((300, 300)),\n",
" transforms.Grayscale(num_output_channels=1),\n",
" transforms.ToTensor(),\n",
" transforms.Normalize(mean=(0.5), std=(0.5))\n",
"])"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "11ec2fae",
"metadata": {},
"outputs": [],
"source": [
"class theCNN(nn.Module):\n",
" def __init__(self):\n",
" super(theCNN, self).__init__()\n",
" \n",
" self.conv01 = nn.Conv2d(\n",
" in_channels=1,\n",
" out_channels=10,\n",
" kernel_size=5,\n",
" stride=1,\n",
" padding=1\n",
" )\n",
" \n",
" self.conv02 = nn.Conv2d(\n",
" in_channels=10,\n",
" out_channels=20,\n",
" kernel_size=5,\n",
" stride=1,\n",
" padding=1\n",
" )\n",
" \n",
" expectedSize = int(np.floor((73 + 2 * 0 - 1) / 1) + 1)\n",
" expectedSize = 20 * int(expectedSize ** 2)\n",
" \n",
" self.fc01 = nn.Linear(expectedSize, 50)\n",
" self.output = nn.Linear(50, 16)\n",
"\n",
" def forward(self, x):\n",
" x = F.relu(F.max_pool2d(self.conv01(x), 2))\n",
" x = F.relu(F.max_pool2d(self.conv02(x), 2))\n",
" nUnits = x.shape.numel() / x.shape[0]\n",
" x = x.view(-1, int(nUnits))\n",
" x = F.relu(self.fc01(x))\n",
" return torch.softmax(self.output(x), axis=1)"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "353a4725",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"theCNN(\n",
" (conv01): Conv2d(1, 10, kernel_size=(5, 5), stride=(1, 1), padding=(1, 1))\n",
" (conv02): Conv2d(10, 20, kernel_size=(5, 5), stride=(1, 1), padding=(1, 1))\n",
" (fc01): Linear(in_features=106580, out_features=50, bias=True)\n",
" (output): Linear(in_features=50, out_features=16, bias=True)\n",
")"
]
},
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"model = theCNN()\n",
"model.load_state_dict(torch.load(\"model.pth\"))\n",
"model.eval()"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "bb87b7f0",
"metadata": {},
"outputs": [],
"source": [
"@app.post(\"/score\")\n",
"async def calculate_score(image_file: UploadFile):\n",
" image = Image.open(io.BytesIO(await image_file.read())).convert(\"L\")\n",
" image = transform(image).unsqueeze(0)\n",
"\n",
" with torch.no_grad():\n",
" output = model(image)\n",
"\n",
" probabilities = torch.softmax(output, dim=1)[0]\n",
" predicted_class = torch.argmax(probabilities).item()\n",
"\n",
" # Get the actual number corresponding to the hand sign\n",
" actual_number = get_actual_number_from_image(image)\n",
" \n",
" print(actual_number)\n",
"\n",
" # Compare predicted class with actual number and calculate correctness percentage\n",
" correct = int(predicted_class + 1 == actual_number)\n",
" print(correct)\n",
" correctness_percentage = correct / 1.0 * 100.0\n",
"\n",
" return {\"predicted_class\": predicted_class, \"correctness_percentage\": correctness_percentage}"
]
},
{
"cell_type": "code",
"execution_count": 8,
"id": "05646e93",
"metadata": {},
"outputs": [],
"source": [
"import cv2\n",
"import numpy as np\n",
"\n",
"def get_actual_number_from_image(image):\n",
" # Convert the image to numpy array\n",
" image_array = np.array(image)\n",
"\n",
" # Apply image processing techniques to detect and recognize digits\n",
" # Example steps: thresholding, contour detection, character segmentation, digit recognition\n",
"\n",
" # Apply thresholding\n",
" _, binary_image = cv2.threshold(image_array, 0, 255, cv2.THRESH_BINARY_INV + cv2.THRESH_OTSU)\n",
"\n",
" # Find contours\n",
" contours, _ = cv2.findContours(binary_image, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n",
"\n",
" # Sort contours based on their x-coordinate\n",
" contours = sorted(contours, key=lambda cnt: cv2.boundingRect(cnt)[0])\n",
"\n",
" # Initialize the recognized digit sequence\n",
" digit_sequence = \"\"\n",
"\n",
" # Iterate over the contours and recognize digits\n",
" for contour in contours:\n",
" # Get the bounding box of the contour\n",
" x, y, w, h = cv2.boundingRect(contour)\n",
"\n",
" # Crop the digit region from the image\n",
" digit_image = binary_image[y:y + h, x:x + w]\n",
"\n",
" # Resize the digit image to a fixed size (e.g., 28x28)\n",
" resized_digit_image = cv2.resize(digit_image, (28, 28))\n",
"\n",
" # Preprocess the resized digit image (e.g., normalize pixel values)\n",
" preprocessed_digit_image = resized_digit_image / 255.0\n",
"\n",
" # Flatten the preprocessed digit image\n",
" flattened_digit_image = preprocessed_digit_image.flatten()\n",
"\n",
" # Pass the flattened digit image to your digit recognition model\n",
" # to get the predicted digit (e.g., using a separate model or the same model you used for training)\n",
"\n",
" # Here, let's assume you have a function `predict_digit` that takes the flattened digit image\n",
" # and returns the predicted digit as an integer\n",
" predicted_digit = predict_digit(flattened_digit_image)\n",
"\n",
" # Add the predicted digit to the digit sequence\n",
" digit_sequence += str(predicted_digit)\n",
"\n",
" # Convert the digit sequence to an integer\n",
" actual_number = int(digit_sequence)\n",
"\n",
" return actual_number\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ee993fc1",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO: Started server process [36312]\n",
"INFO: Waiting for application startup.\n",
"INFO: Application startup complete.\n",
"INFO: Uvicorn running on http://127.0.0.1:8001 (Press CTRL+C to quit)\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"5\n",
"0\n",
"INFO: 127.0.0.1:60625 - \"POST /score HTTP/1.1\" 200 OK\n"
]
}
],
"source": [
"if __name__ == \"__main__\":\n",
" loop = asyncio.get_event_loop()\n",
" loop.create_task(uvicorn.run(app, host=\"127.0.0.1\", port=8001))\n",
" loop.run_forever()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "34c5efea",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.9"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
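Note: the `/score` endpoint above relies on `get_actual_number_from_image`, which in turn calls `predict_digit` — a function the notebook never defines (its comments acknowledge the assumption). A minimal placeholder sketch of such a helper, assuming a separate pre-trained digit classifier bound to a hypothetical `digit_model` variable and a flattened 28x28 input:

```python
import torch

def predict_digit(flattened_digit_image):
    # Rebuild the flat 784-element vector into a 1x1x28x28 batch for a CNN-style model
    tensor = torch.tensor(flattened_digit_image, dtype=torch.float32).reshape(1, 1, 28, 28)
    with torch.no_grad():  # inference only
        output = digit_model(tensor)  # digit_model: hypothetical pre-trained classifier
    return int(torch.argmax(output, dim=1).item())
```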
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "c898b57c",
"metadata": {},
"outputs": [],
"source": [
"pip install python-multipart"
]
},
{
"cell_type": "code",
"execution_count": 1,
"id": "a99b4156",
"metadata": {},
"outputs": [],
"source": [
"import nest_asyncio\n",
"import asyncio\n",
"import torch.nn.functional as F\n",
"\n",
"# Apply the patch\n",
"nest_asyncio.apply()"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "01b6e57c",
"metadata": {},
"outputs": [],
"source": [
"from fastapi import FastAPI, UploadFile\n",
"from PIL import Image\n",
"import torch\n",
"import torchvision.transforms as transforms\n",
"import numpy as np\n",
"import io\n",
"import torch.nn as nn\n",
"import asyncio\n",
"import uvicorn "
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "2e561f13",
"metadata": {},
"outputs": [],
"source": [
"app = FastAPI()"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "1e9e4208",
"metadata": {},
"outputs": [],
"source": [
"transform = transforms.Compose([\n",
" transforms.Resize((300, 300)),\n",
" transforms.Grayscale(num_output_channels=1),\n",
" transforms.ToTensor(),\n",
" transforms.Normalize(mean=(0.5), std=(0.5))\n",
"])"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "4d86d2b8",
"metadata": {},
"outputs": [],
"source": [
"class theCNN(nn.Module):\n",
" def __init__(self):\n",
" super(theCNN, self).__init__()\n",
" \n",
" self.conv01 = nn.Conv2d(\n",
" in_channels=1,\n",
" out_channels=10,\n",
" kernel_size=5,\n",
" stride=1,\n",
" padding=1\n",
" )\n",
" \n",
" self.conv02 = nn.Conv2d(\n",
" in_channels=10,\n",
" out_channels=20,\n",
" kernel_size=5,\n",
" stride=1,\n",
" padding=1\n",
" )\n",
" \n",
" expectedSize = int(np.floor((73 + 2 * 0 - 1) / 1) + 1)\n",
" expectedSize = 20 * int(expectedSize ** 2)\n",
" \n",
" self.fc01 = nn.Linear(expectedSize, 50)\n",
" self.output = nn.Linear(50, 16)\n",
"\n",
" def forward(self, x):\n",
" x = F.relu(F.max_pool2d(self.conv01(x), 2))\n",
" x = F.relu(F.max_pool2d(self.conv02(x), 2))\n",
" nUnits = x.shape.numel() / x.shape[0]\n",
" x = x.view(-1, int(nUnits))\n",
" x = F.relu(self.fc01(x))\n",
" return torch.softmax(self.output(x), axis=1)"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "9379bf73",
"metadata": {},
"outputs": [
{
"ename": "TypeError",
"evalue": "Expected state_dict to be dict-like, got <class '__main__.theCNN'>.",
"output_type": "error",
"traceback": [
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[1;31mTypeError\u001b[0m Traceback (most recent call last)",
"Cell \u001b[1;32mIn[6], line 2\u001b[0m\n\u001b[0;32m 1\u001b[0m model \u001b[38;5;241m=\u001b[39m theCNN()\n\u001b[1;32m----> 2\u001b[0m \u001b[43mmodel\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mload_state_dict\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mload\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmodel1.pth\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 3\u001b[0m model\u001b[38;5;241m.\u001b[39meval()\n",
"File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python310\\site-packages\\torch\\nn\\modules\\module.py:1994\u001b[0m, in \u001b[0;36mModule.load_state_dict\u001b[1;34m(self, state_dict, strict)\u001b[0m\n\u001b[0;32m 1971\u001b[0m \u001b[38;5;124mr\u001b[39m\u001b[38;5;124;03m\"\"\"Copies parameters and buffers from :attr:`state_dict` into\u001b[39;00m\n\u001b[0;32m 1972\u001b[0m \u001b[38;5;124;03mthis module and its descendants. If :attr:`strict` is ``True``, then\u001b[39;00m\n\u001b[0;32m 1973\u001b[0m \u001b[38;5;124;03mthe keys of :attr:`state_dict` must exactly match the keys returned\u001b[39;00m\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 1991\u001b[0m \u001b[38;5;124;03m ``RuntimeError``.\u001b[39;00m\n\u001b[0;32m 1992\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 1993\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(state_dict, Mapping):\n\u001b[1;32m-> 1994\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mTypeError\u001b[39;00m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mExpected state_dict to be dict-like, got \u001b[39m\u001b[38;5;132;01m{}\u001b[39;00m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;241m.\u001b[39mformat(\u001b[38;5;28mtype\u001b[39m(state_dict)))\n\u001b[0;32m 1996\u001b[0m missing_keys: List[\u001b[38;5;28mstr\u001b[39m] \u001b[38;5;241m=\u001b[39m []\n\u001b[0;32m 1997\u001b[0m unexpected_keys: List[\u001b[38;5;28mstr\u001b[39m] \u001b[38;5;241m=\u001b[39m []\n",
"\u001b[1;31mTypeError\u001b[0m: Expected state_dict to be dict-like, got <class '__main__.theCNN'>."
]
}
],
"source": [
"model = theCNN()\n",
"model.load_state_dict(torch.load(\"model.pth\"))\n",
"model.eval()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "18b869d3",
"metadata": {},
"outputs": [],
"source": [
"@app.post(\"/score\")\n",
"async def calculate_score(image_file: UploadFile):\n",
" image = Image.open(io.BytesIO(await image_file.read())).convert(\"L\")\n",
" image = transform(image).unsqueeze(0)\n",
"\n",
" with torch.no_grad():\n",
" output = model(image)\n",
"\n",
" probabilities = torch.softmax(output, dim=1)[0]\n",
" similarity_scores = probabilities.numpy()\n",
"\n",
" return {\"similarity_scores\": similarity_scores.tolist()}"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "4a99a8b6",
"metadata": {},
"outputs": [],
"source": [
"@app.get(\"/\")\n",
"async def hello_world(): \n",
"\n",
" return {\"Hello World\"}"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "7b30f5c6",
"metadata": {},
"outputs": [],
"source": [
"if __name__ == \"__main__\":\n",
" loop = asyncio.get_event_loop()\n",
" loop.create_task(uvicorn.run(app, host=\"127.0.0.1\", port=8001))\n",
" loop.run_forever()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "3e2e07d8",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.9"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
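The TypeError recorded above ("Expected state_dict to be dict-like, got &lt;class '__main__.theCNN'&gt;") means the checkpoint file held a whole pickled module, i.e. it was written with `torch.save(model)` rather than `torch.save(model.state_dict())`. A sketch of the two consistent save/load pairings (file names follow the notebook source and the traceback):

```python
import torch

# Pairing 1: save and load the state dict (the usual recommendation)
torch.save(model.state_dict(), "model.pth")
model = theCNN()
model.load_state_dict(torch.load("model.pth"))
model.eval()

# Pairing 2: if the file already holds a pickled module (as model1.pth apparently did), load it directly
model = torch.load("model1.pth")
model.eval()
```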
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "60c58fd2",
"metadata": {},
"outputs": [],
"source": [
"import nest_asyncio\n",
"import asyncio\n",
"import torch.nn.functional as F\n",
"\n",
"# Apply the patch\n",
"nest_asyncio.apply()"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "c1b7af91",
"metadata": {},
"outputs": [],
"source": [
"from fastapi import FastAPI, UploadFile\n",
"from PIL import Image\n",
"import torch\n",
"import torchvision.transforms as transforms\n",
"import numpy as np\n",
"import io\n",
"import torch.nn as nn\n",
"import asyncio\n",
"import uvicorn \n",
"from io import BytesIO"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "7f1cd8a4",
"metadata": {},
"outputs": [],
"source": [
"app = FastAPI()"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "2035acf5",
"metadata": {},
"outputs": [],
"source": [
"transform = transforms.Compose([\n",
" transforms.Resize((300, 300)),\n",
" transforms.Grayscale(num_output_channels=1),\n",
" transforms.ToTensor(),\n",
" transforms.Normalize(mean=(0.5), std=(0.5))\n",
"])"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "2cfca47d",
"metadata": {},
"outputs": [],
"source": [
"class theCNN(nn.Module):\n",
" def __init__(self):\n",
" super(theCNN, self).__init__()\n",
" \n",
" self.conv01 = nn.Conv2d(\n",
" in_channels=1,\n",
" out_channels=10,\n",
" kernel_size=5,\n",
" stride=1,\n",
" padding=1\n",
" )\n",
" \n",
" self.conv02 = nn.Conv2d(\n",
" in_channels=10,\n",
" out_channels=20,\n",
" kernel_size=5,\n",
" stride=1,\n",
" padding=1\n",
" )\n",
" \n",
" expectedSize = int(np.floor((73 + 2 * 0 - 1) / 1) + 1)\n",
" expectedSize = 20 * int(expectedSize ** 2)\n",
" \n",
" self.fc01 = nn.Linear(expectedSize, 50)\n",
" self.output = nn.Linear(50, 16)\n",
"\n",
" def forward(self, x):\n",
" x = F.relu(F.max_pool2d(self.conv01(x), 2))\n",
" x = F.relu(F.max_pool2d(self.conv02(x), 2))\n",
" nUnits = x.shape.numel() / x.shape[0]\n",
" x = x.view(-1, int(nUnits))\n",
" x = F.relu(self.fc01(x))\n",
" return torch.softmax(self.output(x), axis=1)"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "2786c4df",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"theCNN(\n",
" (conv01): Conv2d(1, 10, kernel_size=(5, 5), stride=(1, 1), padding=(1, 1))\n",
" (conv02): Conv2d(10, 20, kernel_size=(5, 5), stride=(1, 1), padding=(1, 1))\n",
" (fc01): Linear(in_features=106580, out_features=50, bias=True)\n",
" (output): Linear(in_features=50, out_features=16, bias=True)\n",
")"
]
},
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"model = theCNN()\n",
"model.load_state_dict(torch.load(\"model.pth\"))\n",
"model.eval()"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "30569105",
"metadata": {},
"outputs": [],
"source": [
"# @app.post(\"/predict-similarity\")\n",
"# async def predict_similarity(image: UploadFile):\n",
"# image_bytes = await image.read()\n",
"# img = Image.open(BytesIO(image_bytes))\n",
"# img = transform(img).unsqueeze(0)\n",
"# output = model(img)\n",
"# similarity_score = torch.max(output).item() * 100 # Get the maximum predicted probability as the similarity score\n",
"# return {\"similarity_score\": similarity_score}\n",
"\n",
"\n",
"@app.post(\"/predict-similarity\")\n",
"async def predict_similarity(sign: str, image: UploadFile):\n",
" image_bytes = await image.read()\n",
" img = Image.open(BytesIO(image_bytes))\n",
" img = transform(img).unsqueeze(0)\n",
" output = model(img)\n",
" similarity_score = torch.max(output).item() * 100 # Get the maximum predicted probability as the similarity score\n",
" return {\"sign\": sign, \"similarity_score\": similarity_score}"
]
},
{
"cell_type": "code",
"execution_count": 8,
"id": "5c19e31c",
"metadata": {},
"outputs": [],
"source": [
"@app.get(\"/\")\n",
"async def hello_world(): \n",
"\n",
" return {\"Hello World\"}"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "bea39dc9",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO: Started server process [36440]\n",
"INFO: Waiting for application startup.\n",
"INFO: Application startup complete.\n",
"INFO: Uvicorn running on http://127.0.0.1:8001 (Press CTRL+C to quit)\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"INFO: 127.0.0.1:63408 - \"POST /predict-similarity HTTP/1.1\" 422 Unprocessable Entity\n",
"INFO: 127.0.0.1:63440 - \"POST /predict-similarity HTTP/1.1\" 422 Unprocessable Entity\n",
"INFO: 127.0.0.1:63461 - \"POST /predict-similarity HTTP/1.1\" 422 Unprocessable Entity\n",
"INFO: 127.0.0.1:63484 - \"POST /predict-similarity HTTP/1.1\" 422 Unprocessable Entity\n"
]
}
],
"source": [
"if __name__ == \"__main__\":\n",
" loop = asyncio.get_event_loop()\n",
" loop.create_task(uvicorn.run(app, host=\"127.0.0.1\", port=8001))\n",
" loop.run_forever()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "52ede8b7",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"id": "3d7bb1db",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.9"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "49b49c05",
"metadata": {},
"outputs": [],
"source": [
"import nest_asyncio\n",
"import asyncio\n",
"import torch.nn.functional as F\n",
"import torch.nn as nn\n",
"import torchvision.transforms as transforms\n",
"import numpy as np\n",
"import io\n",
"import uvicorn\n",
"from fastapi import FastAPI, UploadFile\n",
"from PIL import Image\n",
"import torch"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "3f0148a4",
"metadata": {},
"outputs": [],
"source": [
"nest_asyncio.apply()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "26e5f198",
"metadata": {},
"outputs": [],
"source": [
"app = FastAPI()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d4359663",
"metadata": {},
"outputs": [],
"source": [
"transform = transforms.Compose([\n",
" transforms.Resize((300, 300)),\n",
" transforms.Grayscale(num_output_channels=1),\n",
" transforms.ToTensor(),\n",
" transforms.Normalize(mean=(0.5), std=(0.5))\n",
"])"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "308a25d3",
"metadata": {},
"outputs": [],
"source": [
"class theCNN(nn.Module):\n",
" def __init__(self):\n",
" super(theCNN, self).__init__()\n",
" \n",
" self.conv01 = nn.Conv2d(\n",
" in_channels=1,\n",
" out_channels=10,\n",
" kernel_size=5,\n",
" stride=1,\n",
" padding=1\n",
" )\n",
" \n",
" self.conv02 = nn.Conv2d(\n",
" in_channels=10,\n",
" out_channels=20,\n",
" kernel_size=5,\n",
" stride=1,\n",
" padding=1\n",
" )\n",
" \n",
" expectedSize = int(np.floor((73 + 2 * 0 - 1) / 1) + 1)\n",
" expectedSize = 20 * int(expectedSize ** 2)\n",
" \n",
" self.fc01 = nn.Linear(expectedSize, 50)\n",
" self.output = nn.Linear(50, 16)\n",
"\n",
" def forward(self, x):\n",
" x = F.relu(F.max_pool2d(self.conv01(x), 2))\n",
" x = F.relu(F.max_pool2d(self.conv02(x), 2))\n",
" nUnits = x.shape.numel() / x.shape[0]\n",
" x = x.view(-1, int(nUnits))\n",
" x = F.relu(self.fc01(x))\n",
" return torch.softmax(self.output(x), axis=1)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "4e3544d2",
"metadata": {},
"outputs": [],
"source": [
"model = theCNN()\n",
"model.load_state_dict(torch.load(\"model.pth\"))\n",
"model.eval()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d521a8ca",
"metadata": {},
"outputs": [],
"source": [
"@app.post(\"/score\")\n",
"async def calculate_score(image_file: UploadFile):\n",
" image = Image.open(io.BytesIO(await image_file.read())).convert(\"L\")\n",
" image = transform(image).unsqueeze(0)\n",
"\n",
" with torch.no_grad():\n",
" output = model(image)\n",
"\n",
" probabilities = torch.softmax(output, dim=1)[0]\n",
" predicted_class = torch.argmax(probabilities).item()\n",
"\n",
" # Get the actual number corresponding to the hand sign\n",
" actual_number = get_actual_number_from_image(image)\n",
" \n",
" print(actual_number)\n",
"\n",
" # Compare predicted class with actual number and calculate correctness percentage\n",
" correct = int(predicted_class + 1 == actual_number)\n",
" print(correct)\n",
" correctness_percentage = correct / 1.0 * 100.0\n",
"\n",
" return {\"predicted_class\": predicted_class, \"correctness_percentage\": correctness_percentage}"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "030c5fbc",
"metadata": {},
"outputs": [],
"source": [
"import cv2\n",
"import numpy as np\n",
"\n",
"def get_actual_number_from_image(image):\n",
" # Convert the image to numpy array\n",
" image_array = np.array(image)\n",
"\n",
" # Apply image processing techniques to detect and recognize digits\n",
" # Example steps: thresholding, contour detection, character segmentation, digit recognition\n",
"\n",
" # Apply thresholding\n",
" _, binary_image = cv2.threshold(image_array, 0, 255, cv2.THRESH_BINARY_INV + cv2.THRESH_OTSU)\n",
"\n",
" # Find contours\n",
" contours, _ = cv2.findContours(binary_image, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n",
"\n",
" # Sort contours based on their x-coordinate\n",
" contours = sorted(contours, key=lambda cnt: cv2.boundingRect(cnt)[0])\n",
"\n",
" # Initialize the recognized digit sequence\n",
" digit_sequence = \"\"\n",
"\n",
" # Iterate over the contours and recognize digits\n",
" for contour in contours:\n",
" # Get the bounding box of the contour\n",
" x, y, w, h = cv2.boundingRect(contour)\n",
"\n",
" # Crop the digit region from the image\n",
" digit_image = binary_image[y:y + h, x:x + w]\n",
"\n",
" # Resize the digit image to a fixed size (e.g., 28x28)\n",
" resized_digit_image = cv2.resize(digit_image, (28, 28))\n",
"\n",
" # Preprocess the resized digit image (e.g., normalize pixel values)\n",
" preprocessed_digit_image = resized_digit_image / 255.0\n",
"\n",
" # Flatten the preprocessed digit image\n",
" flattened_digit_image = preprocessed_digit_image.flatten()\n",
"\n",
" # Pass the flattened digit image to your digit recognition model\n",
" # to get the predicted digit (e.g., using a separate model or the same model you used for training)\n",
"\n",
" # Here, let's assume you have a function `predict_digit` that takes the flattened digit image\n",
" # and returns the predicted digit as an integer\n",
" predicted_digit = predict_digit(flattened_digit_image)\n",
"\n",
" # Add the predicted digit to the digit sequence\n",
" digit_sequence += str(predicted_digit)\n",
"\n",
" # Convert the digit sequence to an integer\n",
" actual_number = int(digit_sequence)\n",
"\n",
" return actual_number\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "5035c79a",
"metadata": {},
"outputs": [],
"source": [
"if __name__ == \"__main__\":\n",
" loop = asyncio.get_event_loop()\n",
" loop.create_task(uvicorn.run(app, host=\"127.0.0.1\", port=8001))\n",
" loop.run_forever()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "c0449757",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.9"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "c898b57c",
"metadata": {},
"outputs": [],
"source": [
"pip install python-multipart"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "a99b4156",
"metadata": {},
"outputs": [],
"source": [
"import nest_asyncio\n",
"import asyncio\n",
"import torch.nn.functional as F\n",
"\n",
"# Apply the patch\n",
"nest_asyncio.apply()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "01b6e57c",
"metadata": {},
"outputs": [],
"source": [
"from fastapi import FastAPI, UploadFile\n",
"from PIL import Image\n",
"import torch\n",
"import torchvision.transforms as transforms\n",
"import numpy as np\n",
"import io\n",
"import torch.nn as nn\n",
"import asyncio\n",
"import uvicorn "
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "2e561f13",
"metadata": {},
"outputs": [],
"source": [
"app = FastAPI()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "1e9e4208",
"metadata": {},
"outputs": [],
"source": [
"transform = transforms.Compose([\n",
" transforms.Resize((300, 300)),\n",
" transforms.Grayscale(num_output_channels=1),\n",
" transforms.ToTensor(),\n",
" transforms.Normalize(mean=(0.5), std=(0.5))\n",
"])"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "4d86d2b8",
"metadata": {},
"outputs": [],
"source": [
"class theCNN(nn.Module):\n",
" def __init__(self):\n",
" super(theCNN, self).__init__()\n",
" \n",
" self.conv01 = nn.Conv2d(\n",
" in_channels=1,\n",
" out_channels=10,\n",
" kernel_size=5,\n",
" stride=1,\n",
" padding=1\n",
" )\n",
" \n",
" self.conv02 = nn.Conv2d(\n",
" in_channels=10,\n",
" out_channels=20,\n",
" kernel_size=5,\n",
" stride=1,\n",
" padding=1\n",
" )\n",
" \n",
" expectedSize = int(np.floor((73 + 2 * 0 - 1) / 1) + 1)\n",
" expectedSize = 20 * int(expectedSize ** 2)\n",
" \n",
" self.fc01 = nn.Linear(expectedSize, 50)\n",
" self.output = nn.Linear(50, 16)\n",
"\n",
" def forward(self, x):\n",
" x = F.relu(F.max_pool2d(self.conv01(x), 2))\n",
" x = F.relu(F.max_pool2d(self.conv02(x), 2))\n",
" nUnits = x.shape.numel() / x.shape[0]\n",
" x = x.view(-1, int(nUnits))\n",
" x = F.relu(self.fc01(x))\n",
" return torch.softmax(self.output(x), axis=1)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "9379bf73",
"metadata": {},
"outputs": [],
"source": [
"model = theCNN()\n",
"model.load_state_dict(torch.load(\"model.pth\"))\n",
"model.eval()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "18b869d3",
"metadata": {},
"outputs": [],
"source": [
"@app.post(\"/score\")\n",
"async def calculate_score(image_file: UploadFile):\n",
" image = Image.open(io.BytesIO(await image_file.read())).convert(\"L\")\n",
" image = transform(image).unsqueeze(0)\n",
"\n",
" with torch.no_grad():\n",
" output = model(image)\n",
"\n",
" probabilities = torch.softmax(output, dim=1)[0]\n",
" similarity_scores = probabilities.numpy()\n",
"\n",
" return {\"similarity_scores\": similarity_scores.tolist()}"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "4a99a8b6",
"metadata": {},
"outputs": [],
"source": [
"@app.get(\"/\")\n",
"async def hello_world(): \n",
"\n",
" return {\"Hello World\"}"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "7b30f5c6",
"metadata": {},
"outputs": [],
"source": [
"if __name__ == \"__main__\":\n",
" loop = asyncio.get_event_loop()\n",
" loop.create_task(uvicorn.run(app, host=\"127.0.0.1\", port=8001))\n",
" loop.run_forever()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "3e2e07d8",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.9"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "3d735e52",
"metadata": {},
"outputs": [],
"source": [
"import nest_asyncio\n",
"import asyncio\n",
"import torch.nn.functional as F\n",
"\n",
"# Apply the patch\n",
"nest_asyncio.apply()"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "db756418",
"metadata": {},
"outputs": [],
"source": [
"from fastapi import FastAPI, UploadFile\n",
"from PIL import Image\n",
"import torch\n",
"import torchvision.transforms as transforms\n",
"import numpy as np\n",
"import io\n",
"import torch.nn as nn\n",
"import asyncio\n",
"import uvicorn \n",
"from io import BytesIO"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "fc83d1b8",
"metadata": {},
"outputs": [],
"source": [
"app = FastAPI()"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "63dbfc01",
"metadata": {},
"outputs": [],
"source": [
"transform = transforms.Compose([\n",
" transforms.Resize((300, 300)),\n",
" transforms.Grayscale(num_output_channels=1),\n",
" transforms.ToTensor(),\n",
" transforms.Normalize(mean=(0.5), std=(0.5))\n",
"])"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "f7e5cdba",
"metadata": {},
"outputs": [],
"source": [
"class theCNN(nn.Module):\n",
" def __init__(self):\n",
" super(theCNN, self).__init__()\n",
" \n",
" self.conv01 = nn.Conv2d(\n",
" in_channels=1,\n",
" out_channels=10,\n",
" kernel_size=5,\n",
" stride=1,\n",
" padding=1\n",
" )\n",
" \n",
" self.conv02 = nn.Conv2d(\n",
" in_channels=10,\n",
" out_channels=20,\n",
" kernel_size=5,\n",
" stride=1,\n",
" padding=1\n",
" )\n",
" \n",
" expectedSize = int(np.floor((73 + 2 * 0 - 1) / 1) + 1)\n",
" expectedSize = 20 * int(expectedSize ** 2)\n",
" \n",
" self.fc01 = nn.Linear(expectedSize, 50)\n",
" self.output = nn.Linear(50, 16)\n",
"\n",
" def forward(self, x):\n",
" x = F.relu(F.max_pool2d(self.conv01(x), 2))\n",
" x = F.relu(F.max_pool2d(self.conv02(x), 2))\n",
" nUnits = x.shape.numel() / x.shape[0]\n",
" x = x.view(-1, int(nUnits))\n",
" x = F.relu(self.fc01(x))\n",
" return torch.softmax(self.output(x), axis=1)"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "d86a9515",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"theCNN(\n",
" (conv01): Conv2d(1, 10, kernel_size=(5, 5), stride=(1, 1), padding=(1, 1))\n",
" (conv02): Conv2d(10, 20, kernel_size=(5, 5), stride=(1, 1), padding=(1, 1))\n",
" (fc01): Linear(in_features=106580, out_features=50, bias=True)\n",
" (output): Linear(in_features=50, out_features=16, bias=True)\n",
")"
]
},
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"model = theCNN()\n",
"model.load_state_dict(torch.load(\"model.pth\"))\n",
"model.eval()"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "4b527135",
"metadata": {},
"outputs": [],
"source": [
"# @app.post(\"/predict-similarity\")\n",
"# async def predict_similarity(image: UploadFile):\n",
"# image_bytes = await image.read()\n",
"# img = Image.open(BytesIO(image_bytes))\n",
"# img = transform(img).unsqueeze(0)\n",
"# output = model(img)\n",
"# similarity_score = torch.max(output).item() * 100 # Get the maximum predicted probability as the similarity score\n",
"# return {\"similarity_score\": similarity_score}\n",
"\n",
"\n",
"@app.post(\"/predict-similarity\")\n",
"async def predict_similarity(sign: str, image: UploadFile):\n",
" image_bytes = await image.read()\n",
" img = Image.open(BytesIO(image_bytes))\n",
" img = transform(img).unsqueeze(0)\n",
" output = model(img)\n",
" similarity_score = torch.max(output).item() * 100 # Get the maximum predicted probability as the similarity score\n",
" return {\"sign\": sign, \"similarity_score\": similarity_score}"
]
},
{
"cell_type": "code",
"execution_count": 8,
"id": "df08611e",
"metadata": {},
"outputs": [],
"source": [
"@app.get(\"/\")\n",
"async def hello_world(): \n",
"\n",
" return {\"Hello World\"}"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "84b9601b",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO: Started server process [36440]\n",
"INFO: Waiting for application startup.\n",
"INFO: Application startup complete.\n",
"INFO: Uvicorn running on http://127.0.0.1:8001 (Press CTRL+C to quit)\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"INFO: 127.0.0.1:63408 - \"POST /predict-similarity HTTP/1.1\" 422 Unprocessable Entity\n",
"INFO: 127.0.0.1:63440 - \"POST /predict-similarity HTTP/1.1\" 422 Unprocessable Entity\n",
"INFO: 127.0.0.1:63461 - \"POST /predict-similarity HTTP/1.1\" 422 Unprocessable Entity\n",
"INFO: 127.0.0.1:63484 - \"POST /predict-similarity HTTP/1.1\" 422 Unprocessable Entity\n",
"INFO: 127.0.0.1:63502 - \"POST /predict-similarity HTTP/1.1\" 422 Unprocessable Entity\n",
"INFO: 127.0.0.1:63517 - \"POST /predict-similarity HTTP/1.1\" 422 Unprocessable Entity\n",
"INFO: 127.0.0.1:63523 - \"POST /predict-similarity HTTP/1.1\" 422 Unprocessable Entity\n",
"INFO: 127.0.0.1:63536 - \"POST /predict-similarity HTTP/1.1\" 422 Unprocessable Entity\n",
"INFO: 127.0.0.1:63536 - \"POST /predict-similarity HTTP/1.1\" 422 Unprocessable Entity\n",
"INFO: 127.0.0.1:63536 - \"POST /predict-similarity HTTP/1.1\" 422 Unprocessable Entity\n",
"INFO: 127.0.0.1:63546 - \"POST /predict-similarity HTTP/1.1\" 422 Unprocessable Entity\n"
]
}
],
"source": [
"if __name__ == \"__main__\":\n",
" loop = asyncio.get_event_loop()\n",
" loop.create_task(uvicorn.run(app, host=\"127.0.0.1\", port=8001))\n",
" loop.run_forever()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ab358400",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"id": "504cf81d",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.9"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
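The string of 422 responses in the log above is consistent with clients omitting `sign` or sending it in the request body: because `predict_similarity` declares `sign: str` without `Form(...)`, FastAPI expects it as a query parameter. A minimal client sketch against the running server (the image file name and sign value are hypothetical):

```python
import requests

with open("sign_photo.jpg", "rb") as f:  # hypothetical test image
    response = requests.post(
        "http://127.0.0.1:8001/predict-similarity",
        params={"sign": "one"},  # query parameter, not a form field
        files={"image": ("sign_photo.jpg", f, "image/jpeg")},
    )
print(response.status_code, response.json())
```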
@@ -2,7 +2,6 @@ import bcrypt from 'bcryptjs';
 import jwt from 'jsonwebtoken';
 import mongoose from 'mongoose';
 import nodemailer from "nodemailer";
-import { v4 as uuidv4 } from 'uuid';
 import User from '../models/user.model.js';

 export const signIn = async (req, res) => {
@@ -28,73 +27,47 @@ export const signIn = async (req, res) => {
 export const signUp = async (req, res) => {
     const {
+        firstName,
+        lastName,
         email,
+        contactNumber,
         password,
         confirmPassword,
-        type,
-        userFirstName,
-        userLastName,
-        userContactNumber,
-        userAddressLine1,
-        userAddressLine2,
-        userAddressLine3,
+        type
     } = req.body;

     try {
-        if (!type) return res.status(400).json({ code: "02", message: "Type Field Required" })
-        if (!email) return res.status(400).json({ code: "02", message: "Email Field Required" })
-        if (!userFirstName) return res.status(400).json({ code: "02", message: "User First Name Field Required" })
-        if (!userLastName) return res.status(400).json({ code: "02", message: "User Last Name Field Required" })
-        if (!userContactNumber) return res.status(400).json({ code: "02", message: "User Contact Number Field Required" })
+        if (type === null || typeof type == "undefined") return res.status(400).json({ code: "02", message: "Type Field Required" })
+        if (email === null || typeof email == "undefined") return res.status(400).json({ code: "02", message: "Email Field Required" })
+        if (firstName === null || typeof firstName == "undefined") return res.status(400).json({ code: "02", message: "First Name Field Required" })
+        if (lastName === null || typeof lastName == "undefined") return res.status(400).json({ code: "02", message: "Last Name Field Required" })
+        if (contactNumber === null || typeof contactNumber == "undefined") return res.status(400).json({ code: "02", message: "Contact Number Field Required" })

-        const existingUser = await User.findOne({ email })
-        if (existingUser) return res.status(400).json({ code: "02", message: "User already exists" })
+        const existingUserByEmail = await User.findOne({ email: email })
+        if (existingUserByEmail) return res.status(400).json({ code: "02", message: `For the Email : ${email} User already exist` })

-        if (type === "buyer") {
-            if (!password) return res.status(400).json({ code: "02", message: "Password Field Required" })
-            if (password !== confirmPassword) return res.status(400).json({ code: "02", message: "Passwords do not match" })
-            const hashedPassword = await bcrypt.hash(password, 12)
-            const userDetails = new User({
-                email,
-                password: hashedPassword,
-                type,
-                userDetails: {
-                    userQNumber: uuidv4(),
-                    userEmail: email,
-                    userName: `${userFirstName} ${userLastName}`,
-                    userContactNumber,
-                    userAddress: `${userAddressLine1}, ${userAddressLine2}, ${userAddressLine3}`,
-                    userType: type,
-                }
-            })
-            const userResult = await userDetails.save()
-            const token = jwt.sign({ email: userResult.email, id: userResult._id }, 'test', { expiresIn: "1h" })
-            res.status(200).json({ code: "01", result: userResult, token })
-        } else if (type === "trader") {
-            const userDetails = new User({
-                email,
-                type,
-                userDetails: {
-                    userQNumber: uuidv4(),
-                    userEmail: email,
-                    userName: `${userFirstName} ${userLastName}`,
-                    userContactNumber,
-                    userAddress: `${userAddressLine1}, ${userAddressLine2}, ${userAddressLine3}`,
-                    userType: type,
-                },
-                states: 2
-            })
-            const userResult = await userDetails.save()
-            const token = jwt.sign({ email: userResult.email, id: userResult._id }, 'test', { expiresIn: "1h" })
-            res.status(200).json({ code: "01", result: userResult, token })
-        }
+        const existingUserByContactNo = await User.findOne({ contactNumber: contactNumber })
+        if (existingUserByContactNo) return res.status(400).json({ code: "02", message: `For the Contact No : ${contactNumber} User already exist` })
+
+        if (password === null || typeof password == "undefined") return res.status(400).json({ code: "02", message: "Password Field Required" })
+        if (password !== confirmPassword) return res.status(400).json({ code: "02", message: "Password doesn't match" })
+        const hashPassword = await bcrypt.hash(password, 12)
+
+        const userDetails = new User({
+            firstName,
+            lastName,
+            email,
+            contactNumber,
+            type,
+            password: hashPassword,
+        })
+
+        const userResult = await userDetails.save()
+
+        const token = jwt.sign({ email: userResult.email, id: userResult._id }, 'test', { expiresIn: "1h" })
+
+        res.status(200).json({ code: "01", result: userResult, token })
     } catch (error) {
         res.status(500).json({ code: "00", message: "Something went wrong" })
@@ -226,3 +199,4 @@ export const deleteUser = async (req, res) => {
         res.json({ code: "00", "message": error.message });
     }
 }
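For reference, a hedged request sketch against the reworked signUp handler. The `/rest_node/user` prefix comes from index.js below; the `/signUp` subpath is an assumption, since user.routes.js is not part of this diff, and all field values are made up:

```python
import requests

payload = {
    "firstName": "Jane",  # example values only
    "lastName": "Doe",
    "email": "jane@example.com",
    "contactNumber": "0712345678",
    "password": "secret123",
    "confirmPassword": "secret123",
    "type": "member",  # the schema comment suggests administrator/member
}
r = requests.post("http://localhost:5000/rest_node/user/signUp", json=payload)  # subpath assumed
print(r.status_code, r.json())
```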
const curriculums = [
{
"curriculumCode": "01",
"curriculumLevel": "Base Level",
"curriculumName": "Learn Sign Language",
"curriculumImage": "https://drive.google.com/uc?export=view&id=1YACBlu7X-O7-DKv5DoW3AM9kgfT7Yhdc",
"tutorials": [
{
"tutorialCode": "01",
"tutorialTitle": "Numbers and Counting in Sign Language",
"tutorialImage": "https://drive.google.com/uc?export=view&id=1YACBlu7X-O7-DKv5DoW3AM9kgfT7Yhdc",
"taskItems": [
{
"title": "Learn Number One",
"description": "Learn how to sign the number one in sign language.",
"howToDo": "- Extend your index finger straight up.\n- Keep the rest of your fingers closed.\n- Hold your hand in front of your chest.",
"referenceImage": "https://example.com/number_one.jpg",
"referenceVideo": "https://example.com/number_one_video.mp4"
},
{
"title": "Learn Number Two",
"description": "Learn how to sign the number two in sign language.",
"howToDo": "- Extend your index and middle fingers straight up.\n- Keep the rest of your fingers closed.\n- Hold your hand in front of your chest.",
"referenceImage": "https://example.com/number_two.jpg",
"referenceVideo": "https://example.com/number_two_video.mp4"
},
{
"title": "Learn Number Three",
"description": "Learn how to sign the number three in sign language.",
"howToDo": "- Extend your index, middle, and ring fingers straight up.\n- Keep the rest of your fingers closed.\n- Hold your hand in front of your chest.",
"referenceImage": "https://example.com/number_three.jpg",
"referenceVideo": "https://example.com/number_three_video.mp4"
},
{
"title": "Learn Number Four",
"description": "Learn how to sign the number four in sign language.",
"howToDo": "- Extend your thumb, index, middle, and ring fingers straight up.\n- Keep your pinky finger folded.\n- Hold your hand in front of your chest.",
"referenceImage": "https://example.com/number_four.jpg",
"referenceVideo": "https://example.com/number_four_video.mp4"
},
{
"title": "Learn Number Five",
"description": "Learn how to sign the number five in sign language.",
"howToDo": "- Extend all your fingers straight up.\n- Keep your thumb resting on the side of your palm.\n- Hold your hand in front of your chest.",
"referenceImage": "https://example.com/number_five.jpg",
"referenceVideo": "https://example.com/number_five_video.mp4"
},
{
"title": "Learn Number Six",
"description": "Learn how to sign the number six in sign language.",
"howToDo": "- Extend your thumb and pinky finger straight up.\n- Keep the rest of your fingers closed.\n- Hold your hand in front of your chest.",
"referenceImage": "https://example.com/number_six.jpg",
"referenceVideo": "https://example.com/number_six_video.mp4"
},
{
"title": "Learn Number Seven",
"description": "Learn how to sign the number seven in sign language.",
"howToDo": "- Extend your index, middle, and ring fingers straight up.\n- Keep your thumb, pinky, and pinky finger folded.\n- Hold your hand in front of your chest.",
"referenceImage": "https://example.com/number_seven.jpg",
"referenceVideo": "https://example.com/number_seven_video.mp4"
},
{
"title": "Learn Number Eight",
"description": "Learn how to sign the number eight in sign language.",
"howToDo": "- Extend all your fingers straight up.\n- Cross your index and middle fingers over your ring and pinky fingers.\n- Hold your hand in front of your chest.",
"referenceImage": "https://example.com/number_eight.jpg",
"referenceVideo": "https://example.com/number_eight_video.mp4"
},
{
"title": "Learn Number Nine",
"description": "Learn how to sign the number nine in sign language.",
"howToDo": "- Extend your thumb and all your fingers straight up.\n- Keep your pinky finger folded.\n- Hold your hand in front of your chest.",
"referenceImage": "https://example.com/number_nine.jpg",
"referenceVideo": "https://example.com/number_nine_video.mp4"
},
{
"title": "Learn Number Ten",
"description": "Learn how to sign the number ten in sign language.",
"howToDo": "- Extend your thumb, index, and middle fingers straight up.\n- Keep the rest of your fingers closed.\n- Hold your hand in front of your chest.",
"referenceImage": "https://example.com/number_ten.jpg",
"referenceVideo": "https://example.com/number_ten_video.mp4"
}
]
},
{
"tutorialCode": "02",
"tutorialTitle": "Learn the Basics of Sign Language",
"tutorialImage": "https://drive.google.com/uc?export=view&id=1YACBlu7X-O7-DKv5DoW3AM9kgfT7Yhdc",
"tutorialContent": "Introduce the concept of sign language and its importance.\nTeach basic greetings and expressions, such as hello, goodbye, thank you, and sorry.\nProvide visual demonstrations and practice exercises for learners to practice these basic signs."
},
{
"tutorialCode": "03",
"tutorialTitle": "Family Signs in Sign Language",
"tutorialImage": "https://drive.google.com/uc?export=view&id=1YACBlu7X-O7-DKv5DoW3AM9kgfT7Yhdc",
"tutorialContent": "Teach signs for family members, such as mother, father, sister, brother, etc.\nIntroduce signs for common family-related words, such as family, love, and home.\nProvide visual demonstrations and practice exercises for learners to practice these family signs."
},
{
"tutorialCode": "04",
"tutorialTitle": "Everyday Vocabulary in Sign Language",
"tutorialImage": "https://drive.google.com/uc?export=view&id=1YACBlu7X-O7-DKv5DoW3AM9kgfT7Yhdc",
"tutorialContent": "Teach signs for everyday objects and activities, such as eat, drink, sleep, book, pen, etc.\nIntroduce signs for common words used in daily life.\nProvide visual demonstrations and interactive exercises for learners to practice using these signs."
},
{
"tutorialCode": "05",
"tutorialTitle": "Basic Conversational Phrases in Sign Language",
"tutorialImage": "https://drive.google.com/uc?export=view&id=1YACBlu7X-O7-DKv5DoW3AM9kgfT7Yhdc",
"tutorialContent": "Teach simple conversational phrases, such as \"What is your name?\" or \"How are you?\"\nIntroduce signs for common question words and phrases.\nProvide visual demonstrations and practice exercises for learners to practice these conversational phrases."
}
]
},
{
"curriculumCode": "02",
"curriculumLevel": "Medium Level",
"curriculumName": "Learn Sign Language",
"curriculumImage": "https://drive.google.com/uc?export=view&id=1YACBlu7X-O7-DKv5DoW3AM9kgfT7Yhdc",
"tutorials": []
},
{
"curriculumCode": "03",
"curriculumLevel": "Advance Level",
"curriculumName": "Learn Sign Language",
"curriculumImage": "https://drive.google.com/uc?export=view&id=1YACBlu7X-O7-DKv5DoW3AM9kgfT7Yhdc",
"tutorials": []
}
]
 import mongoose from "mongoose";

 const userSchema = mongoose.Schema({
+    firstName: {
+        type: String,
+        required: true,
+    },
+    lastName: {
+        type: String,
+        required: true,
+    },
     email: {
         type: String,
         required: true,
         unique: true
     },
+    contactNumber: {
+        type: String,
+        required: true,
+    },
     password: {
-        type: String
+        type: String,
+        required: true,
     },
     type: {
-        type: String,
+        type: String, // administrator/ member
         required: true
     },
-    userDetails: {
-        userQNumber: {
-            type: String,
-            required: true,
-            unique: true
-        },
-        userEmail: {
-            type: String,
-            required: true,
-        },
-        userName: {
-            type: String,
-            required: true
-        },
-        userContactNumber: {
-            type: String,
-            required: true
-        },
-        userAddress: {
-            type: String
-        },
-        userType: {
-            type: String,
-            required: true
-        },
-    },
     states: {
         type: String,
         default: "1"
     },
     createdAt: {
-        type: Date
+        type: Date,
+        default: new Date()
     },
     updatedAt: {
-        type: Date
+        type: Date,
+        default: new Date()
     }
 });

 const User = mongoose.model("Users", userSchema);
 export default User;
@@ -5,8 +5,8 @@ import express from "express";
 import mongoose from "mongoose";

 //import routes
-import userRoutes from "./routes/user.routes.js";
 import translateRoutes from "./routes/translate.routes.js";
+import userRoutes from "./routes/user.routes.js";

 dotenv.config();
 const app = express();
@@ -20,11 +20,11 @@ app.get("/", (req, res) => {
     res.json({ message: "Welcome to Server Node" });
 });

 //implement routes
-app.use("/rest_node/user", userRoutes);
 app.use("/rest_node/ssl", translateRoutes);
+app.use("/rest_node/user", userRoutes);

-const CONNECTION_URL = `mongodb+srv://${process.env.DB_USERNAME}:${process.env.DB_PASSWORD}@cluster0.dmza8yi.mongodb.net/?retryWrites=true&w=majority`;
+const CONNECTION_URL = `mongodb+srv://${process.env.DB_USERNAME}:${process.env.DB_PASSWORD}@researchmanagement-appl.vzhn4.mongodb.net/?retryWrites=true&w=majority`;
 const PORT = process.env.PORT || 5000;

 mongoose
 ...
# TMP-23-029
SLIIT Final Year Project
\ No newline at end of file
REACT_APP_VERSION = v2.1.0
GENERATE_SOURCEMAP = false
## Backend API URL
REACT_APP_API_URL_MOCK=https://mock-data-api-nextjs.vercel.app/
REACT_APP_API_URL=http://localhost:5000/
## Google Map Key
REACT_APP_GOOGLE_MAPS_API_KEY=
## Firebase - Google Auth
REACT_APP_FIREBASE_API_KEY=
REACT_APP_FIREBASE_AUTH_DOMAIN=
REACT_APP_FIREBASE_PROJECT_ID=
REACT_APP_FIREBASE_STORAGE_BUCKET=
REACT_APP_FIREBASE_MESSAGING_SENDER_ID=
REACT_APP_FIREBASE_APP_ID=
REACT_APP_FIREBASE_MEASUREMENT_ID=
## AWS
REACT_APP_AWS_POOL_ID=
REACT_APP_AWS_APP_CLIENT_ID=
## Auth0
REACT_APP_AUTH0_CLIENT_ID=
REACT_APP_AUTH0_DOMAIN=
\ No newline at end of file
{
"plugins": ["prettier", "@typescript-eslint"],
"extends": ["react-app", "prettier"],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"project": "./tsconfig.json"
},
"settings": {
"import/resolver": {
"node": {
"moduleDirectory": ["node_modules", "src/"]
},
"typescript": {
"alwaysTryTypes": true
}
}
},
"rules": {
"react/jsx-filename-extension": "off",
"no-param-reassign": "off",
"react/prop-types": "off",
"react/require-default-props": "off",
"react/no-array-index-key": "off",
"react/react-in-jsx-scope": "off",
"react/jsx-props-no-spreading": "off",
"import/order": "off",
"no-console": "off",
"no-shadow": "off",
"@typescript-eslint/naming-convention": "off",
"@typescript-eslint/no-shadow": "off",
"import/no-cycle": "off",
"import/no-extraneous-dependencies": "off",
"jsx-a11y/label-has-associated-control": "off",
"jsx-a11y/no-autofocus": "off",
"no-restricted-imports": [
"error",
{
"patterns": ["@mui/*/*/*", "!@mui/material/test-utils/*"]
}
],
"@typescript-eslint/no-unused-vars": [
"error",
{
"vars": "all",
"args": "none"
}
],
"prettier/prettier": [
"warn",
{
"bracketSpacing": true,
"printWidth": 140,
"singleQuote": true,
"trailingComma": "none",
"tabWidth": 2,
"useTabs": false,
"endOfLine": "auto"
}
]
}
}
@@ -5,7 +5,6 @@ npm-debug.log*
 yarn-debug.log*
 yarn-error.log*
 lerna-debug.log*
-.pnpm-debug.log*

 # Diagnostic reports (https://nodejs.org/api/report.html)
 report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
@@ -37,13 +36,14 @@ bower_components
 # Compiled binary addons (https://nodejs.org/api/addons.html)
 build/Release
+build

 # Dependency directories
 node_modules/
 jspm_packages/

-# Snowpack dependency directory (https://snowpack.dev/)
-web_modules/
+# TypeScript v1 declaration files
+typings/

 # TypeScript cache
 *.tsbuildinfo
@@ -54,9 +54,6 @@ web_modules/
 # Optional eslint cache
 .eslintcache

-# Optional stylelint cache
-.stylelintcache
-
 # Microbundle cache
 .rpt2_cache/
 .rts2_cache_cjs/
@@ -72,20 +69,15 @@ web_modules/
 # Yarn Integrity file
 .yarn-integrity

-# dotenv environment variable files
-.env
-.env.development.local
-.env.test.local
-.env.production.local
-.env.local
+# dotenv environment variables file
+# .env
+.env.test

 # parcel-bundler cache (https://parceljs.org/)
 .cache
-.parcel-cache

 # Next.js build output
 .next
-out

 # Nuxt.js build / generate output
 .nuxt
@@ -93,20 +85,13 @@ dist
 # Gatsby files
 .cache/

-# Comment in the public line in if your project uses Gatsby and not Next.js
+# Comment in the public line in if your project uses Gatsby and *not* Next.js
 # https://nextjs.org/blog/next-9-1#public-directory-support
 # public

 # vuepress build output
 .vuepress/dist

-# vuepress v2.x temp and cache directory
-.temp
-.cache
-
-# Docusaurus cache and generated files
-.docusaurus
-
 # Serverless directories
 .serverless/
@@ -119,12 +104,5 @@ dist
 # TernJS port file
 .tern-port

-# Stores VSCode versions used for testing VSCode extensions
-.vscode-test
-
-# yarn v2
-.yarn/cache
-.yarn/unplugged
-.yarn/build-state.yml
-.yarn/install-state.gz
-.pnp.*
+# wincompare file
+.bak
\ No newline at end of file
{
"bracketSpacing": true,
"printWidth": 140,
"singleQuote": true,
"trailingComma": "none",
"tabWidth": 2,
"useTabs": false
}
{
"cSpell.words": [
"Appstore",
"CARB",
"Formik",
"nutritions"
]
}
\ No newline at end of file
# FitsPro ERP Platform
Welcome to the FitsPro ERP Platform repository! This project aims to revolutionize the meal delivery experience through a robust and efficient backend system for the FitsPro dieting app. By seamlessly integrating meal planning and delivery coordination, we strive to enhance customer satisfaction and streamline business operations.
## Getting started
#### Update packages
```
source-folder > yarn
```
#### Start the project
```
source-folder > yarn start
```
## Environments (Demonstration)
- [Development_Env](https://fit-pro-erp.netlify.app/)
- [Production_Env](https://fit-pro-erp.netlify.app/)
## Technology stack
- [Material UI V5](https://mui.com/core/)
- Built with React Hooks API.
- Redux & React context API for state management.
- Redux toolkit.
- React Router for navigation routing.
- Support for react-script.
- Code splitting.
- CSS-in-JS.
const webpack = require('webpack');
const WorkBoxPlugin = require('workbox-webpack-plugin');
module.exports = function override(config) {
config.resolve.fallback = {
process: require.resolve('process/browser'),
// zlib: require.resolve('browserify-zlib'),
stream: require.resolve('stream-browserify'),
crypto: require.resolve('crypto-browserify'),
util: require.resolve('util'),
buffer: require.resolve('buffer')
// assert: require.resolve('assert')
};
// https://stackoverflow.com/questions/69135310/workaround-for-cache-size-limit-in-create-react-app-pwa-service-worker
config.plugins.forEach((plugin) => {
if (plugin instanceof WorkBoxPlugin.InjectManifest) {
plugin.config.maximumFileSizeToCacheInBytes = 50 * 1024 * 1024;
}
});
config.plugins = [
...config.plugins,
new webpack.ProvidePlugin({
process: 'process/browser.js',
Buffer: ['buffer', 'Buffer']
})
];
return config;
};
<svg width="36" height="35" viewBox="0 0 36 35" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M4.64931 15.8644L6.96164 13.552L6.96405 13.5496H11.3143L9.58336 15.2806L9.13646 15.7275L7.36391 17.5L7.58344 17.7201L17.5137 27.6498L27.6634 17.5L25.8903 15.7275L25.7654 15.602L23.7131 13.5496H28.0633L28.0657 13.552L29.8781 15.3644L32.0137 17.5L17.5137 32L3.01367 17.5L4.64931 15.8644ZM17.5137 3L25.8921 11.3784H21.5419L17.5137 7.35024L13.4855 11.3784H9.13525L17.5137 3Z" fill="#096DD9"/>
<path d="M7.36453 17.4999L9.13708 15.7274L9.58398 15.2805L7.85366 13.5496H6.96467L6.96226 13.552L4.64993 15.8643L6.86938 18.0729L7.36453 17.4999Z" fill="url(#paint0_linear_112117_33940)"/>
<path d="M25.8911 15.7274L27.6643 17.4999L27.4888 17.6754L27.4894 17.676L29.8789 15.3643L28.0666 13.552L28.0641 13.5496H27.888L25.7663 15.6019L25.8911 15.7274Z" fill="url(#paint1_linear_112117_33940)"/>
<path d="M6.95946 13.5496L6.96187 13.552L9.13669 15.7274L17.5139 24.104L28.0684 13.5496H6.95946Z" fill="#1890FF"/>
<defs>
<linearGradient id="paint0_linear_112117_33940" x1="8.63954" y1="14.0887" x2="5.58137" y2="17.1469" gradientUnits="userSpaceOnUse">
<stop stop-color="#023B95"/>
<stop offset="0.9637" stop-color="#096CD9" stop-opacity="0"/>
</linearGradient>
<linearGradient id="paint1_linear_112117_33940" x1="26.282" y1="14.1278" x2="28.7548" y2="16.9379" gradientUnits="userSpaceOnUse">
<stop stop-color="#023B95"/>
<stop offset="1" stop-color="#096DD9" stop-opacity="0"/>
</linearGradient>
</defs>
</svg>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<link rel="apple-touch-icon" sizes="180x180" href="/resources/apple-touch-icon.png">
<link rel="icon" type="image/png" sizes="32x32" href="/resources/favicon-32x32.png">
<link rel="icon" type="image/png" sizes="16x16" href="/resources/favicon-16x16.png">
<link rel="manifest" href="/resources/site.webmanifest">
<link rel="mask-icon" href="/resources/safari-pinned-tab.svg" color="#5bbad5">
<meta name="msapplication-TileColor" content="#da532c">
<meta name="theme-color" content="#000000" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<meta name="description" content="Web site created using create-react-app" />
<!--
Notice the use of %PUBLIC_URL% in the tags above.
It will be replaced with the URL of the `public` folder during the build.
Only files inside the `public` folder can be referenced from the HTML.
Unlike "/favicon.ico" or "favicon.ico", "%PUBLIC_URL%/favicon.ico" will
work correctly both with client-side routing and a non-root public URL.
Learn how to configure a non-root public URL by running `npm run build`.
-->
<title>SignConnect Plus</title>
<link rel="preconnect" href="https://fonts.gstatic.com" />
<link
href="https://fonts.googleapis.com/css2?family=Inter:wght@400;500;600;700&family=Poppins:wght@400;500;600;700&family=Roboto:wght@400;500;700&display=swap&family=Public+Sans:wght@400;500;600;700"
rel="stylesheet" />
<!-- this is to resolve issue in old safari browser in tablet -->
<script src="https://cdn.jsdelivr.net/npm/resize-observer-polyfill@1.5.1/dist/ResizeObserver.min.js"></script>
</head>
<body>
<noscript>You need to enable JavaScript to run this app.</noscript>
<div id="root"></div>
<!--
This HTML file is a template.
If you open it directly in the browser, you will see an empty page.
You can add webfonts, meta tags, or analytics to this file.
The build step will place the bundled scripts into the <body> tag.
To begin the development, run `npm start` or `yarn start`.
To create a production bundle, use `npm run build` or `yarn build`.
-->
</body>
</html>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<browserconfig>
<msapplication>
<tile>
<square150x150logo src="/mstile-150x150.png"/>
<TileColor>#da532c</TileColor>
</tile>
</msapplication>
</browserconfig>