Commit c2927854 authored Jan 30, 2023 by Manukalpani G.S. IT19111698
side view model added
parent 8ff9e9dc
Showing 2 changed files with 261 additions and 1024 deletions
Backend/.ipynb_checkpoints/SF_Weed_identification_Finalized-checkpoint.ipynb  +50 -1023
Backend/SF_Weed_identification_Finalized.ipynb  +211 -1
Backend/.ipynb_checkpoints/SF_Weed_identification_Finalized-checkpoint.ipynb
This diff is collapsed.
Backend/SF_Weed_identification_Finalized.ipynb
...
@@ -26,7 +26,7 @@
"metadata": {},
"outputs": [],
"source": [
- "dataset_location = '/content/drive/MyDrive/RP_SmartFarmer/Sandhini Gamage - Weed identification /Dataset/FinalizedWeedDataSet.zip'"
+ "dataset_location = '/content/drive/MyDrive/FinalizedWeedDataSet.zip'"
]
},
{
...
@@ -55,6 +55,216 @@
"for dirpath , dirnames , filenames in os.walk(FILE_DIR):\n",
" print(f\"There are {len(dirnames)} directories and {len(filenames)} images in '{dirpath}'.\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"✅ 01- A Model for Side View"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"train_dir_side = \"/content/content/Weed Dataset/Side view/training\" #Training Directory path of the Side View Image Dataset\n",
"test_dir_side = \"/content/content/Weed Dataset/Side view/testing\"\n",
"\n",
"#lets get the class names\n",
"import pathlib\n",
"import numpy as np\n",
"\n",
"data_dir = pathlib.Path(train_dir_side)\n",
"class_names = np.array(sorted([item.name for item in data_dir.glob('*')]))\n",
"print(class_names)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"🟨 Data Visualization"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import matplotlib.pyplot as plt\n",
"import matplotlib.image as mpimg\n",
"import random\n",
"import os\n",
"\n",
"def view_random_image(target_dir , target_class):\n",
" ''' \n",
" This Function will displays a random Image from the dataset\n",
" '''\n",
" target_folder = target_dir+\"/\"+target_class\n",
" random_image = random.sample(os.listdir(target_folder) ,1)\n",
"\n",
" img = mpimg.imread(target_folder+\"/\"+random_image[0])\n",
" plt.imshow(img)\n",
" plt.title(target_class)\n",
" plt.axis(\"off\")\n",
"\n",
" print(f\"Image Shape:{img.shape}\") \n",
"\n",
" return img"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import random\n",
"img = view_random_image(target_dir=train_dir_side , \n",
" target_class = random.choice(class_names))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"🟨 DATA Preprocessing"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from tensorflow.keras.preprocessing.image import ImageDataGenerator\n",
"\n",
"#Creating Image Data Generators for Training Data with augmentation\n",
"train_data_gen = ImageDataGenerator(rescale=1/255.,\n",
" #preprocessing_function=to_grayscale_then_rgb,\n",
" rotation_range = 0.2,\n",
" shear_range = 0.2,\n",
" zoom_range = 0.2,\n",
" width_shift_range=0.3,\n",
" height_shift_range = 0.3,\n",
" horizontal_flip= True)\n",
"\n",
"#Create ImageDatagenerator for testing data\n",
"test_data_gen = ImageDataGenerator(rescale=1/255.)\n",
"\n",
"#Import and Transform/pre process the data\n",
"train_data_multi = train_data_gen.flow_from_directory(train_dir_side,\n",
" target_size = (224,224),\n",
" batch_size = 32,\n",
" class_mode = 'categorical',\n",
" shuffle = True)\n",
"test_data_multi = test_data_gen.flow_from_directory(test_dir_side,\n",
" target_size = (224, 224),\n",
" batch_size = 32,\n",
" class_mode = 'categorical', #This will gives us one-hot encoded data\n",
" shuffle = True)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"🟨 EfficientNetB0 Model"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"efficientnet_url = \"https://tfhub.dev/tensorflow/efficientnet/b0/feature-vector/1\""
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"#Import Dependencies\n",
"import tensorflow as tf\n",
"import tensorflow_hub as hub\n",
"from tensorflow.keras import layers"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"#Lets make a create_model() function to create a model from a URL\n",
"def create_model(model_url , num_classes=7):\n",
"\n",
" feature_extractor_layer = hub.KerasLayer(model_url,\n",
" trainable = False, #freeze the already learned patterns \n",
" name = \"feature_extraction_layer\",\n",
" input_shape = (224, 224,3)) \n",
" #Create our own model\n",
" model = tf.keras.Sequential([\n",
" feature_extractor_layer,\n",
" layers.Dense(num_classes , activation=\"softmax\" , name=\"output_layer\")\n",
" ])\n",
"\n",
" return model"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"#Create Resnet Model\n",
"efficientnet_model_side = create_model(efficientnet_url , \n",
" num_classes = 3)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"#Compile our resnet model\n",
"efficientnet_model_side.compile(loss='categorical_crossentropy',\n",
" optimizer = tf.keras.optimizers.Adam(),\n",
" metrics=[\"accuracy\"])\n",
"#Fitting the model\n",
"side_vw_model_hist = efficientnet_model_side.fit(train_data_multi,\n",
" epochs=8,\n",
" steps_per_epoch=len(train_data_multi),\n",
" validation_data = test_data_multi,\n",
" validation_steps = len(test_data_multi)\n",
" )"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"#Saving the h5 model back to the google drive\n",
"efficientnet_model_side.save('/content/drive/MyDrive/RP_SmartFarmer/Sandhini Gamage - Weed identification /private/saved_models/Weed_side_view2.h5')"
]
}
],
"metadata": {
...
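
For context, a minimal usage sketch (not part of this commit) of how the saved Weed_side_view2.h5 side-view model could be loaded and used for inference. The test image path is a placeholder, class_names refers to the variable defined in the notebook, and custom_objects is needed because the model wraps a TensorFlow Hub KerasLayer:

# Minimal sketch (assumption, not part of the commit): load the saved side-view
# model and classify a single image.
import numpy as np
import tensorflow as tf
import tensorflow_hub as hub

# custom_objects is required because the saved model contains a hub.KerasLayer
model = tf.keras.models.load_model(
    '/content/drive/MyDrive/RP_SmartFarmer/Sandhini Gamage - Weed identification /private/saved_models/Weed_side_view2.h5',
    custom_objects={'KerasLayer': hub.KerasLayer})

# Preprocess one image the same way as the training generator: 224x224, rescaled to [0, 1]
img = tf.io.read_file('/path/to/side_view_image.jpg')   # placeholder path
img = tf.image.decode_jpeg(img, channels=3)
img = tf.image.resize(img, (224, 224)) / 255.

pred = model.predict(tf.expand_dims(img, axis=0))        # shape (1, num_classes)
print(class_names[int(np.argmax(pred))])                 # class_names as defined in the notebook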