Commit f651dd48 authored by Senarathna B W E K

object detection app

parent c72a395f
*.iml
.gradle
/local.properties
/.idea/libraries
/.idea/modules.xml
/.idea/workspace.xml
.DS_Store
/build
/captures
.externalNativeBuild
/.gradle/
/.idea/
apply plugin: 'com.android.application'
apply plugin: 'de.undercouch.download'
android {
compileSdkVersion 28
buildToolsVersion '28.0.3'
defaultConfig {
applicationId "org.tensorflow.lite.examples.detection"
minSdkVersion 21
targetSdkVersion 28
versionCode 1
versionName "1.0"
ndk {
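// Package native TFLite libraries only for these ABIs to keep the APK small.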
abiFilters 'armeabi-v7a', 'arm64-v8a', 'x86'
}
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
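// Keep .tflite model files uncompressed in the APK so they can be
// memory-mapped straight out of the assets folder at load time.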
aaptOptions {
noCompress "tflite"
}
compileOptions {
sourceCompatibility = '1.8'
targetCompatibility = '1.8'
}
lintOptions {
abortOnError false
}
}
// import DownloadModels task
project.ext.ASSET_DIR = projectDir.toString() + '/src/main/assets'
project.ext.TMP_DIR = project.buildDir.toString() + '/downloads'
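// NOTE: TMP_DIR appears unused by the tasks below, which download into "$buildDir/zips".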
// Download default models; if you wish to use your own models,
// place them in the "assets" directory and disable the download tasks below.
// apply from: 'download_model.gradle'
dependencies {
// implementation fileTree(dir: 'libs', include: ['*.jar', '*.aar'])
// implementation files('bin/gdbserver')
implementation 'androidx.appcompat:appcompat:1.1.0'
implementation 'androidx.coordinatorlayout:coordinatorlayout:1.1.0'
implementation 'com.google.android.material:material:1.1.0'
implementation 'org.tensorflow:tensorflow-lite:2.4.0'
implementation 'org.tensorflow:tensorflow-lite-gpu:2.4.0'
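// Keep tensorflow-lite and tensorflow-lite-gpu on the same version; mixing
// versions can cause runtime incompatibilities with the GPU delegate.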
// implementation 'org.tensorflow:tensorflow-lite-select-tf-ops:2.3.0'
// implementation 'org.tensorflow:tensorflow-lite:2.2.0'
// implementation 'org.tensorflow:tensorflow-lite-gpu:2.2.0'
// implementation 'org.tensorflow:tensorflow-lite:1.14.0'
// implementation(name:'tensorflow-lite', ext:'aar')
// implementation(name:'tensorflow-lite-gpu', ext:'aar')
// implementation 'org.tensorflow:tensorflow-lite-gpu:1.14.0'
// implementation 'org.tensorflow:tensorflow-lite:0.0.0-gpu-experimental'
implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
implementation 'com.google.code.gson:gson:2.8.6'
androidTestImplementation 'androidx.test.ext:junit:1.1.1'
androidTestImplementation 'androidx.test:rules:1.2.0' // androidx replacement for com.android.support.test:rules
androidTestImplementation 'com.google.truth:truth:1.0.1'
}
task downloadZipFile(type: Download) {
src 'https://storage.googleapis.com/download.tensorflow.org/models/tflite/coco_ssd_mobilenet_v1_1.0_quant_2018_06_29.zip'
dest new File(buildDir, 'zips/')
overwrite false
}
task downloadAndUnzipFile(dependsOn: downloadZipFile, type: Copy) {
from zipTree(downloadZipFile.dest)
into project.ext.ASSET_DIR
}
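// extractModels copies nothing itself; it is an aggregate task so that both
// the debug and release assemble tasks can depend on a single model-preparation step.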
task extractModels(type: Copy) {
dependsOn downloadAndUnzipFile
}
tasks.whenTaskAdded { task ->
if (task.name == 'assembleDebug' || task.name == 'assembleRelease') {
task.dependsOn 'extractModels'
}
}
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.tensorflow.lite.examples.detection">
<uses-sdk />
</manifest>
/*
* Copyright 2020 The TensorFlow Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tensorflow.lite.examples.detection;
import static com.google.common.truth.Truth.assertThat;
import static java.lang.Math.abs;
import static java.lang.Math.max;
import static java.lang.Math.min;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.RectF;
import android.util.Size;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.platform.app.InstrumentationRegistry;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.tensorflow.lite.examples.detection.env.ImageUtils;
import org.tensorflow.lite.examples.detection.tflite.Classifier;
import org.tensorflow.lite.examples.detection.tflite.Classifier.Recognition;
import org.tensorflow.lite.examples.detection.tflite.TFLiteObjectDetectionAPIModel;
/** Golden test for Object Detection Reference app. */
@RunWith(AndroidJUnit4.class)
public class DetectorTest {
private static final int MODEL_INPUT_SIZE = 300;
private static final boolean IS_MODEL_QUANTIZED = true;
private static final String MODEL_FILE = "detect.tflite";
private static final String LABELS_FILE = "file:///android_asset/coco.txt";
private static final Size IMAGE_SIZE = new Size(640, 480);
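// The golden inputs (table.jpg, table_results.txt) are read from the
// instrumentation context's assets by the helpers below; the
// "file:///android_asset/" prefix on LABELS_FILE is assumed to be stripped by
// TFLiteObjectDetectionAPIModel.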
private Classifier detector;
private Bitmap croppedBitmap;
private Matrix frameToCropTransform;
private Matrix cropToFrameTransform;
@Before
public void setUp() throws IOException {
AssetManager assetManager =
InstrumentationRegistry.getInstrumentation().getContext().getAssets();
detector =
TFLiteObjectDetectionAPIModel.create(
assetManager,
MODEL_FILE,
LABELS_FILE,
MODEL_INPUT_SIZE,
IS_MODEL_QUANTIZED);
int cropSize = MODEL_INPUT_SIZE;
int previewWidth = IMAGE_SIZE.getWidth();
int previewHeight = IMAGE_SIZE.getHeight();
int sensorOrientation = 0;
croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Config.ARGB_8888);
frameToCropTransform =
ImageUtils.getTransformationMatrix(
previewWidth, previewHeight,
cropSize, cropSize,
sensorOrientation, false);
cropToFrameTransform = new Matrix();
frameToCropTransform.invert(cropToFrameTransform);
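// cropToFrameTransform maps detection boxes from the 300x300 model input space
// back into 640x480 frame coordinates so they can be compared with the golden
// values, which are recorded in frame coordinates.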
}
@Test
public void detectionResultsShouldNotChange() throws Exception {
Canvas canvas = new Canvas(croppedBitmap);
canvas.drawBitmap(loadImage("table.jpg"), frameToCropTransform, null);
final List<Recognition> results = detector.recognizeImage(croppedBitmap);
final List<Recognition> expected = loadRecognitions("table_results.txt");
for (Recognition target : expected) {
// Find a matching result in results
boolean matched = false;
for (Recognition item : results) {
RectF bbox = new RectF();
cropToFrameTransform.mapRect(bbox, item.getLocation());
if (item.getTitle().equals(target.getTitle())
&& matchBoundingBoxes(bbox, target.getLocation())
&& matchConfidence(item.getConfidence(), target.getConfidence())) {
matched = true;
break;
}
}
assertThat(matched).isTrue();
}
}
// Confidence tolerance: absolute 1%
private static boolean matchConfidence(float a, float b) {
return abs(a - b) < 0.01;
}
// Bounding Box tolerance: overlapped area > 95% of each one
private static boolean matchBoundingBoxes(RectF a, RectF b) {
float areaA = a.width() * a.height();
float areaB = b.width() * b.height();
RectF overlapped =
new RectF(
max(a.left, b.left), max(a.top, b.top), min(a.right, b.right), min(a.bottom, b.bottom));
// Guard against disjoint boxes: RectF.width()/height() go negative when the
// boxes do not intersect, and the product of two negatives could pass the check below.
if (overlapped.width() <= 0f || overlapped.height() <= 0f) {
return false;
}
float overlappedArea = overlapped.width() * overlapped.height();
return overlappedArea > 0.95 * areaA && overlappedArea > 0.95 * areaB;
}
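// For reference, requiring >=95% coverage of each box roughly corresponds to an
// IoU of about 0.90 for equal-sized boxes. A hypothetical IoU variant (not used
// by this test) would be:
// float iou = overlappedArea / (areaA + areaB - overlappedArea);
// return iou > 0.9f; // illustrative threshold only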
private static Bitmap loadImage(String fileName) throws Exception {
AssetManager assetManager =
InstrumentationRegistry.getInstrumentation().getContext().getAssets();
InputStream inputStream = assetManager.open(fileName);
return BitmapFactory.decodeStream(inputStream);
}
// The format of result:
// category bbox.left bbox.top bbox.right bbox.bottom confidence
// ...
// Example:
// Apple 25 30 75 80 0.99
// Banana 25 90 75 200 0.98
// ...
private static List<Recognition> loadRecognitions(String fileName) throws Exception {
AssetManager assetManager =
InstrumentationRegistry.getInstrumentation().getContext().getAssets();
InputStream inputStream = assetManager.open(fileName);
Scanner scanner = new Scanner(inputStream);
List<Recognition> result = new ArrayList<>();
while (scanner.hasNext()) {
String category = scanner.next();
category = category.replace('_', ' ');
if (!scanner.hasNextFloat()) {
break;
}
float left = scanner.nextFloat();
float top = scanner.nextFloat();
float right = scanner.nextFloat();
float bottom = scanner.nextFloat();
RectF boundingBox = new RectF(left, top, right, bottom);
float confidence = scanner.nextFloat();
Recognition recognition = new Recognition(null, category, confidence, boundingBox);
result.add(recognition);
}
return result;
}
}
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.tensorflow.lite.examples.detection">
<!-- Tell the system this app requires OpenGL ES 3.1. -->
<uses-feature android:glEsVersion="0x00030001" android:required="true" />
<uses-sdk />
<uses-permission android:name="android.permission.CAMERA" />
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.INTERNET"/>
<application
android:allowBackup="false"
android:icon="@mipmap/ic_launcher"
android:label="@string/tfe_od_app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/AppTheme.ObjectDetection"
android:hardwareAccelerated="true"
android:installLocation="internalOnly">
<activity
android:name=".DetectorActivity"
android:label="@string/tfe_od_app_name"
android:screenOrientation="portrait">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>
person
bicycle
car
motorbike
aeroplane
bus
train
truck
boat
traffic light
fire hydrant
stop sign
parking meter
bench
bird
cat
dog
horse
sheep
cow
elephant
bear
zebra
giraffe
backpack
umbrella
handbag
tie
suitcase
frisbee
skis
snowboard
sports ball
kite
baseball bat
baseball glove
skateboard
surfboard
tennis racket
bottle
wine glass
cup
fork
knife
spoon
bowl
banana
apple
sandwich
orange
broccoli
carrot
hot dog
pizza
donut
cake
chair
sofa
potted plant
bed
dining table
toilet
tvmonitor
laptop
mouse
remote
keyboard
cell phone
microwave
oven
toaster
sink
refrigerator
book
clock
vase
scissors
teddy bear
hair drier
toothbrush
package org.tensorflow.lite.examples.detection;
/*
* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import android.app.Fragment;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import java.io.IOException;
import java.util.List;
import org.tensorflow.lite.examples.detection.customview.AutoFitTextureView;
import org.tensorflow.lite.examples.detection.env.ImageUtils;
import org.tensorflow.lite.examples.detection.env.Logger;
public class LegacyCameraConnectionFragment extends Fragment {
private static final Logger LOGGER = new Logger();
/** Conversion from screen rotation to JPEG orientation. */
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
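// NOTE: ORIENTATIONS appears unused in this legacy path, which hardcodes
// setDisplayOrientation(90) when the camera is opened below.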
private Camera camera;
private Camera.PreviewCallback imageListener;
private Size desiredSize;
/** The layout identifier to inflate for this Fragment. */
private int layout;
/** An {@link AutoFitTextureView} for camera preview. */
private AutoFitTextureView textureView;
/**
* {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a {@link
* TextureView}.
*/
private final TextureView.SurfaceTextureListener surfaceTextureListener =
new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(
final SurfaceTexture texture, final int width, final int height) {
int index = getCameraId();
camera = Camera.open(index);
try {
Camera.Parameters parameters = camera.getParameters();
List<String> focusModes = parameters.getSupportedFocusModes();
if (focusModes != null
&& focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
}
List<Camera.Size> cameraSizes = parameters.getSupportedPreviewSizes();
Size[] sizes = new Size[cameraSizes.size()];
int i = 0;
for (Camera.Size size : cameraSizes) {
sizes[i++] = new Size(size.width, size.height);
}
Size previewSize =
CameraConnectionFragment.chooseOptimalSize(
sizes, desiredSize.getWidth(), desiredSize.getHeight());
parameters.setPreviewSize(previewSize.getWidth(), previewSize.getHeight());
camera.setDisplayOrientation(90);
camera.setParameters(parameters);
camera.setPreviewTexture(texture);
} catch (IOException exception) {
camera.release();
camera = null;
return; // The camera has been released; don't touch it below.
}
camera.setPreviewCallbackWithBuffer(imageListener);
Camera.Size s = camera.getParameters().getPreviewSize();
camera.addCallbackBuffer(new byte[ImageUtils.getYUVByteSize(s.height, s.width)]);
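// A single preallocated YUV buffer: with setPreviewCallbackWithBuffer(), the
// callback must hand buffers back via addCallbackBuffer() or preview frames
// stop being delivered.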
textureView.setAspectRatio(s.height, s.width);
camera.startPreview();
}
@Override
public void onSurfaceTextureSizeChanged(
final SurfaceTexture texture, final int width, final int height) {}
@Override
public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
return true;
}
@Override
public void onSurfaceTextureUpdated(final SurfaceTexture texture) {}
};
/** An additional thread for running tasks that shouldn't block the UI. */
private HandlerThread backgroundThread;
public LegacyCameraConnectionFragment(
final Camera.PreviewCallback imageListener, final int layout, final Size desiredSize) {
this.imageListener = imageListener;
this.layout = layout;
this.desiredSize = desiredSize;
}
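// NOTE: Fragments restored by the framework need a public no-argument
// constructor; this fragment assumes it is always created manually by its host
// activity and never recreated from saved state.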
@Override
public View onCreateView(
final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) {
return inflater.inflate(layout, container, false);
}
@Override
public void onViewCreated(final View view, final Bundle savedInstanceState) {
textureView = (AutoFitTextureView) view.findViewById(R.id.texture);
}
@Override
public void onActivityCreated(final Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
}
@Override
public void onResume() {
super.onResume();
startBackgroundThread();
// When the screen is turned off and turned back on, the SurfaceTexture is already
// available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
// a camera and start preview from here (otherwise, we wait until the surface is ready in
// the SurfaceTextureListener).
// stopCamera() releases and nulls out the camera in onPause(), so guard
// against a null camera here to avoid an NPE after a pause/resume cycle.
if (textureView.isAvailable() && camera != null) {
camera.startPreview();
} else {
textureView.setSurfaceTextureListener(surfaceTextureListener);
}
}
@Override
public void onPause() {
stopCamera();
stopBackgroundThread();
super.onPause();
}
/** Starts a background thread and its {@link Handler}. */
private void startBackgroundThread() {
backgroundThread = new HandlerThread("CameraBackground");
backgroundThread.start();
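// As written, no Handler is attached to this thread and nothing posts work to
// it; Camera preview callbacks arrive on the thread that opened the camera
// (the main thread here).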
}
/** Stops the background thread and its {@link Handler}. */
private void stopBackgroundThread() {
backgroundThread.quitSafely();
try {
backgroundThread.join();
backgroundThread = null;
} catch (final InterruptedException e) {
LOGGER.e(e, "Exception!");
}
}
protected void stopCamera() {
if (camera != null) {
camera.stopPreview();
camera.setPreviewCallback(null);
camera.release();
camera = null;
}
}
private int getCameraId() {
CameraInfo ci = new CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
Camera.getCameraInfo(i, ci);
if (ci.facing == CameraInfo.CAMERA_FACING_BACK) return i;
}
return -1; // No camera found
}
}