Commit 381a488f authored by samesh97's avatar samesh97

Merge branch 'master' into dinushaCombined

# Conflicts:
#	.idea/misc.xml
#	app/src/main/java/com/app/smartphotoeditor/activities/CameraView.java
parents 9c1ffa66 463c836a
@@ -16,7 +16,6 @@
</set>
</option>
<option name="resolveModulePerSourceSet" value="false" />
<option name="useQualifiedModuleNames" value="true" />
</GradleProjectSettings>
</option>
</component>
......
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" languageLevel="JDK_11" default="false" project-jdk-name="1.8" project-jdk-type="JavaSDK">
<component name="ProjectRootManager" version="2" languageLevel="JDK_11" default="true" project-jdk-name="1.8" project-jdk-type="JavaSDK">
<output url="file://$PROJECT_DIR$/build/classes" />
</component>
<component name="ProjectType">
......
package com.app.smartphotoeditor.activities;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import android.Manifest;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.os.Bundle;
import android.speech.tts.TextToSpeech;
import android.util.Log;
import android.view.SurfaceView;
import android.widget.ImageView;
import android.widget.TextView;
import com.app.smartphotoeditor.R;
import com.app.smartphotoeditor.models.EyeDetectionResultSet;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.CvException;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.MatOfRect;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.imgproc.Moments;
import org.opencv.objdetect.CascadeClassifier;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
public class CameraView extends AppCompatActivity implements CameraBridgeViewBase.CvCameraViewListener2{
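// load the bundled OpenCV native libraries as soon as the class is loaded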
static
{
OpenCVLoader.initDebug();
}
private static final String TAG = "CameraView";
private Mat mRgba;
private Mat mGray;
private CameraBridgeViewBase mOpenCvCameraView;
private CascadeClassifier cascadeClassifier;
private CascadeClassifier cascadeClassifier_eye;
private ImageView frame;
private ImageView leftV,rightV,centerV;
private TextView view_point;
private TextToSpeech tts = null;
private String lastSpeechText = "";
private List<EyeDetectionResultSet> detectionResults;
// gain applied to the eye crop; swept upward during environment calibration
private float brightnessLevel = 0.0f;
private static final float MAX_BRIGHTNESS_LEVEL = 3f;
// true while the brightness sweep in accessEnvironment() is still running
private boolean isAccessingEnvironment = true;
private final BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status)
{
if (status == LoaderCallbackInterface.SUCCESS)
{
Log.i(TAG, "OpenCV is loaded");
mOpenCvCameraView.setCameraIndex(1); // 1 = front-facing camera
mOpenCvCameraView.enableView();
}
super.onManagerConnected(status);
}
};
@Override
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_camera_view);
tts = new TextToSpeech(this, new TextToSpeech.OnInitListener() {
@Override
public void onInit(int status)
{
if(status == TextToSpeech.SUCCESS)
{
tts.setLanguage(Locale.US);
}
}
});
int MY_PERMISSIONS_REQUEST_CAMERA = 0;
if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
== PackageManager.PERMISSION_DENIED){
ActivityCompat.requestPermissions(this, new String[] {Manifest.permission.CAMERA}, MY_PERMISSIONS_REQUEST_CAMERA);
}
mOpenCvCameraView = findViewById(R.id.frame_Surface);
mOpenCvCameraView.setAlpha(0); // hide the raw preview; processed frames are shown in the ImageViews instead
mOpenCvCameraView.setCvCameraViewListener(this);
frame = findViewById(R.id.frame);
leftV = findViewById(R.id.left);
rightV = findViewById(R.id.right);
centerV = findViewById(R.id.center);
view_point = findViewById(R.id.view_point);
loadCascadeModel();
}
@Override
protected void onResume()
{
super.onResume();
if (OpenCVLoader.initDebug())
{
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
else
{
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_4_0,this,mLoaderCallback);
}
}
@Override
protected void onPause()
{
super.onPause();
if (mOpenCvCameraView !=null)
{
mOpenCvCameraView.disableView();
}
}
@Override
public void onDestroy()
{
super.onDestroy();
if(mOpenCvCameraView !=null)
{
mOpenCvCameraView.disableView();
}
}
public void onCameraViewStarted(int width, int height)
{
mRgba = new Mat(height, width, CvType.CV_8UC4); // 4-channel RGBA
mGray = new Mat(height, width, CvType.CV_8UC1); // single-channel grayscale
}
public void onCameraViewStopped()
{
mRgba.release();
mGray.release(); // release the gray buffer too, to avoid leaking native memory
}
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame)
{
//capture the input frame in RGBA and grayscale
mRgba = inputFrame.rgba();
mGray = inputFrame.gray();
//flip code -1 mirrors both axes, i.e. a 180-degree rotation
Core.flip(mRgba, mRgba, -1);
Core.flip(mGray, mGray, -1);
//detect the face and eyes, then return the annotated frame
return findFace(mRgba);
}
void loadCascadeModel()
{
try
{
InputStream is = getResources().openRawResource(R.raw.haarcascade_frontalface_alt);
File cascadeDir = getDir("cascade", Context.MODE_PRIVATE); // create a private folder for the cascade files
File mCascadeFile = new File(cascadeDir, "haarcascade_frontalface_alt.xml"); // destination file in that folder
FileOutputStream os=new FileOutputStream(mCascadeFile);
byte[] buffer=new byte[4096];
int byteRead;
// writing that file from raw folder
while((byteRead =is.read(buffer)) != -1)
{
os.write(buffer,0,byteRead);
}
is.close();
os.close();
// loading file from cascade folder created above
cascadeClassifier = new CascadeClassifier(mCascadeFile.getAbsolutePath());
// model is loaded
// load eye haarcascade classifier
InputStream is2 = getResources().openRawResource(R.raw.haarcascade_righteye_2splits);
// reuse the cascade folder created above
File mCascadeFile_eye = new File(cascadeDir, "haarcascade_righteye_2splits.xml"); // this is the right-eye (2splits) cascade, not the generic eye model
FileOutputStream os2=new FileOutputStream(mCascadeFile_eye);
byte[] buffer1=new byte[4096];
int byteRead1;
// writing that file from raw folder
while((byteRead1 =is2.read(buffer1)) != -1){
os2.write(buffer1,0,byteRead1);
}
is2.close();
os2.close();
// loading file from cascade folder created above
cascadeClassifier_eye = new CascadeClassifier(mCascadeFile_eye.getAbsolutePath());
}
catch (IOException e)
{
Log.e(TAG, "Failed to copy or load cascade file", e);
}
}
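// the two copy loops above are identical except for the resource id and file name;
// a small helper along these lines (a sketch, not part of this commit) would remove the duplication:
private File copyRawToFile(int rawResId, File dir, String fileName) throws IOException
{
File outFile = new File(dir, fileName);
try (InputStream in = getResources().openRawResource(rawResId);
FileOutputStream out = new FileOutputStream(outFile))
{
byte[] buffer = new byte[4096];
int bytesRead;
while ((bytesRead = in.read(buffer)) != -1)
{
out.write(buffer, 0, bytesRead);
}
}
return outFile;
}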
private Mat findFace(Mat mRgba)
{
// the incoming frame is rotated -90 degrees, so rotate it by 90 (transpose + flip) to get an upright face for detection
Core.flip(mRgba.t(), mRgba, 1);
// convert RGBA to RGB for the classifier
Mat mRgb = new Mat();
Imgproc.cvtColor(mRgba, mRgb, Imgproc.COLOR_RGBA2RGB);
int height = mRgb.height();
// minimum face size: 10% of the frame height
int absoluteFaceSize = (int) (height * 0.1);
MatOfRect faces = new MatOfRect();
if(cascadeClassifier != null)
{
// args: input, output, scaleFactor, minNeighbors, flags, minSize, maxSize
cascadeClassifier.detectMultiScale(mRgb, faces, 1.1, 2, 2, new Size(absoluteFaceSize, absoluteFaceSize), new Size());
}
// loop through all faces
Rect[] facesArray=faces.toArray();
for (Rect value : facesArray)
{
// draw face on original frame mRgba
// Imgproc.rectangle(mRgba, value.tl(), value.br(), new Scalar(0, 255, 0, 255), 2);
// crop the face region and pass it through the eye classifier;
// the detected Rect can be used directly as the ROI
Mat cropped = new Mat(mRgba, value);
// the classifier writes the detected eye rectangles into a MatOfRect
MatOfRect eyes = new MatOfRect();
if (cascadeClassifier_eye != null)
{ // find biggest size object
cascadeClassifier_eye.detectMultiScale(cropped, eyes, 1.15, 2, 2, new Size(35, 35), new Size());
// now create an array
Rect[] eyesArray = eyes.toArray();
// loop through each eye
for (Rect rect : eyesArray)
{
// find coordinate on original frame mRgba
// starting point
int x1 = (int) (rect.tl().x + value.tl().x);
int y1 = (int) (rect.tl().y + value.tl().y);
// width and height
int w1 = (int) (rect.br().x - rect.tl().x);
int h1 = (int) (rect.br().y - rect.tl().y);
// end point
int x2 = (int) (w1 + x1);
int y2 = (int) (h1 + y1);
// draw eye on original frame mRgba
//input starting point ending point color thickness
// Imgproc.rectangle(mRgba,new Point(x1,y1),new Point(x2,y2),new Scalar(0,255,0,255),2);
float centerX = (float) ((x1 + x2) / 2.0);
float centerY = (float) ((y1 + y2) / 2.0);
//Imgproc.circle(mRgba, new Point(centerX, centerY + 10), 2, new Scalar(0, 0, 0, 255), 2);
Rect eye_roi = new Rect(x1, y1 + 10, w1, h1); // shift the ROI down 10 px, past the brow
Mat eye_cropped = new Mat(mRgba, eye_roi);
markIrisLocation(eye_cropped);
break; // only the first detected eye is processed per frame
}
}
break; // likewise, only the first detected face
}
// rotate the frame back to its original -90-degree orientation
Core.flip(mRgba.t(), mRgba, 0);
return mRgba;
}
void markIrisLocation(Mat eye_cropped)
{
Mat gray = new Mat();
//convert to grayscale (the source is RGBA, so COLOR_RGBA2GRAY would weight the channels correctly)
Imgproc.cvtColor(eye_cropped, gray, Imgproc.COLOR_BGR2GRAY);
//apply the current gain: each pixel value is multiplied by brightnessLevel
gray.convertTo(gray, -1, brightnessLevel, 0);
// gray.convertTo(gray,-1,2,0);
//smooth with an 11x11 box blur to suppress noise before thresholding
Imgproc.blur(gray, gray, new Size(11, 11));
//Imgproc.GaussianBlur(gray,gray,new Size(11,11),11);
//gray.convertTo(gray,-1,1.1,0);
//Imgproc.Canny(gray,gray,0,120);
//binarize: pixels brighter than 60 become white; the darker iris/pupil region stays black
Mat binary = new Mat();
Imgproc.threshold(gray, binary, 60, 255, Imgproc.THRESH_BINARY);
//find contours
List<MatOfPoint> contours = new ArrayList<>();
Mat hierarchy = new Mat();
Imgproc.findContours(binary, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);
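//RETR_TREE retrieves the full contour hierarchy; only contour areas are used below, so RETR_EXTERNAL would work as well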
//Collections.sort(contours, Collections.reverseOrder());
//drop the first contour (typically the outer boundary of the whole ROI)
if(contours.size() >= 1)
{
contours.remove(0);
}
// find the largest remaining contour, which should correspond to the iris blob
double largestContourArea = 0;
int position = 0;
for (int contourIdx = 0; contourIdx < contours.size(); contourIdx++)
{
Log.d("Contour", "" + contours.size());
double contourArea = Imgproc.contourArea(contours.get(contourIdx));
if (largestContourArea < contourArea)
{
largestContourArea = contourArea;
position = contourIdx;
}
}
Log.d("ContourArea", "" + largestContourArea);
if(contours.size() > position)
{
//draw contour
Imgproc.drawContours ( eye_cropped, contours, position, new Scalar(0, 255, 0), 1);
//draw a circle on the middle point
//the centroid follows from image moments: (m10/m00, m01/m00)
Moments p = Imgproc.moments(contours.get(position));
int x = (int) (p.get_m10() / p.get_m00());
int y = (int) (p.get_m01() / p.get_m00());
Imgproc.circle(eye_cropped, new Point(x, y), 4, new Scalar(255,49,0,255));
//draw horizontal and vertical lines to create a cross
Imgproc.line(eye_cropped,new Point(x - 20,y),new Point(x + 20,y),new Scalar(0, 255, 0));
Imgproc.line(eye_cropped,new Point(x,y - 20),new Point(x,y + 20),new Scalar(0, 255, 0));
//draw circle in the gray frame
Imgproc.circle(binary, new Point(x, y), 5, new Scalar(0,0,0,255),10);
}
Bitmap grayFrame = matToBitmap(gray);
Bitmap binaryFrame = matToBitmap(binary);
Bitmap croppedOriginalFrame = matToBitmap(eye_cropped);
findTheViewPoint(binary);
if(isAccessingEnvironment)
{
accessEnvironment(binary, contours.size(), largestContourArea);
}
runOnUiThread(new Runnable() {
@Override
public void run()
{
frame.setImageBitmap(croppedOriginalFrame);
leftV.setImageBitmap(binaryFrame);
// centerV.setImageBitmap(grayFrame);
// rightV.setImageBitmap(rightViewBitmap);
}
});
}
void findTheViewPoint(Mat binary)
{
if(binary == null) return;
//binary image width and height
int width = binary.cols();
int height = binary.rows();
//calculate the equal proportion of width
int equalWidth = width / 3;
//calculate the width pixels percentages
int leftRightWidth = (int) (equalWidth * 1.2);
int centerWidth = (int) (equalWidth * 0.6);
//calculate the equal proportion of height
int equalHeight = height / 3;
//calculate the height pixels percentages
int topBottomHeight = (int) (equalHeight * 1.3);
int centerHeight = (int) (equalHeight * 0.4);
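//the bands tile the frame exactly: (1.2 + 0.6 + 1.2) * equalWidth = width,
//and likewise (1.3 + 0.4 + 1.3) * equalHeight = height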
//--variable naming convention--
//the acquired binary image is divided into a 3x3 grid of sub-images
//topLeft -> TOP in the vertical direction, LEFT in the horizontal direction
//centerCenter -> CENTER in both directions
//crop left 3 images
Mat topLeft = binary.submat(0,topBottomHeight,0,leftRightWidth);
Mat centerLeft = binary.submat(topBottomHeight,(topBottomHeight + centerHeight),0,leftRightWidth);
Mat bottomLeft = binary.submat((topBottomHeight + centerHeight),(2 * topBottomHeight + centerHeight),0,leftRightWidth);
//crop center 3 images
Mat topCenter = binary.submat(0,topBottomHeight,leftRightWidth,(centerWidth + leftRightWidth));
Mat centerCenter = binary.submat(topBottomHeight,(topBottomHeight + centerHeight),leftRightWidth,(centerWidth + leftRightWidth));
Mat bottomCenter = binary.submat((topBottomHeight + centerHeight),(2 * topBottomHeight + centerHeight),leftRightWidth,(centerWidth + leftRightWidth));
//crop right 3 images
Mat topRight = binary.submat(0,topBottomHeight,(centerWidth + leftRightWidth),(leftRightWidth * 2 + centerWidth));
Mat centerRight = binary.submat(topBottomHeight,(topBottomHeight + centerHeight),(centerWidth + leftRightWidth),(leftRightWidth * 2 + centerWidth));
Mat bottomRight = binary.submat((topBottomHeight + centerHeight),(2 * topBottomHeight + centerHeight),(centerWidth + leftRightWidth),(leftRightWidth * 2 + centerWidth));
//creating bitmaps
Bitmap bTopLeft = matToBitmap(topLeft);
Bitmap bCenterLeft = matToBitmap(centerLeft);
Bitmap bBottomLeft = matToBitmap(bottomLeft);
//creating bitmaps
Bitmap bTopCenter = matToBitmap(topCenter);
Bitmap bCenterCenter = matToBitmap(centerCenter);
Bitmap bBottomCenter = matToBitmap(bottomCenter);
//creating bitmaps
Bitmap bTopRight = matToBitmap(topRight);
Bitmap bCenterRight = matToBitmap(centerRight);
Bitmap bBottomRight = matToBitmap(bottomRight);
runOnUiThread(new Runnable() {
@Override
public void run()
{
// leftV.setImageBitmap(bCenterLeft);
// centerV.setImageBitmap(bCenterCenter);
// rightV.setImageBitmap(bCenterRight);
// rightV.setImageBitmap(rightViewBitmap);
}
});
//Log.d("ViewPoint","Width - " + width + ", Equal width" + equalWidth + " ,left - " + left.cols() + ", Center - " + center.cols());
int topLeftAllPixels = topLeft.cols() * topLeft.rows();
int centerLeftAllPixels = centerLeft.cols() * centerLeft.rows();
int bottomLeftAllPixels = bottomLeft.cols() * bottomLeft.rows();
int topCenterAllPixels = topCenter.cols() * topCenter.rows();
int centerCenterAllPixels = centerCenter.cols() * centerCenter.rows();
int bottomCenterAllPixels = bottomCenter.cols() * bottomCenter.rows();
int topRightAllPixels = topRight.cols() * topRight.rows();
int centerRightAllPixels = centerRight.cols() * centerRight.rows();
int bottomRightAllPixels = bottomRight.cols() * bottomRight.rows();
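//Core.countNonZero counts the white pixels; the black (iris) pixels are the complement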
int topLeftWhitePixels = Core.countNonZero(topLeft);
int centerLeftWhitePixels = Core.countNonZero(centerLeft);
int bottomLeftWhitePixels = Core.countNonZero(bottomLeft);
int topCenterWhitePixels = Core.countNonZero(topCenter);
int centerCenterWhitePixels = Core.countNonZero(centerCenter);
int bottomCenterWhitePixels = Core.countNonZero(bottomCenter);
int topRightWhitePixels = Core.countNonZero(topRight);
int centerRightWhitePixels = Core.countNonZero(centerRight);
int bottomRightWhitePixels = Core.countNonZero(bottomRight);
int topLeftBlackPixels = topLeftAllPixels - topLeftWhitePixels;
int centerLeftBlackPixels = centerLeftAllPixels - centerLeftWhitePixels;
int bottomLeftBlackPixels = bottomLeftAllPixels - bottomLeftWhitePixels;
int topCenterBlackPixels = topCenterAllPixels - topCenterWhitePixels;
int centerCenterBlackPixels = centerCenterAllPixels - centerCenterWhitePixels;
int bottomCenterBlackPixels = bottomCenterAllPixels - bottomCenterWhitePixels;
int topRightBlackPixels = topRightAllPixels - topRightWhitePixels;
int centerRightBlackPixels = centerRightAllPixels - centerRightWhitePixels;
int bottomRightBlackPixels = bottomRightAllPixels - bottomRightWhitePixels;
int[] arr =
{
topLeftBlackPixels,
centerLeftBlackPixels,
bottomLeftBlackPixels,
topCenterBlackPixels,
centerCenterBlackPixels,
bottomCenterBlackPixels,
topRightBlackPixels,
centerRightBlackPixels,
bottomRightBlackPixels
};
//sort ascending so the largest value ends up last
Arrays.sort(arr);
//the last element is therefore the maximum black-pixel count
int max = arr[arr.length - 1];
if(max != 0)
{
if(max == topLeftBlackPixels)
{
//left
Log.d("ViewPoint","Top left");
setViewPointText("Looking top Left");
speak("Top left");
}
else if(max == centerLeftBlackPixels)
{
//left
Log.d("ViewPoint","Center left");
setViewPointText("Looking center Left");
speak("Center left");
}
else if(max == bottomLeftBlackPixels)
{
//left
Log.d("ViewPoint","Bottom left");
setViewPointText("Looking bottom Left");
speak("Bottom left");
}
else if(max == topRightBlackPixels)
{
//right
Log.d("ViewPoint","Top Right");
setViewPointText("Looking top Right");
speak("Top right");
}
else if(max == centerRightBlackPixels)
{
//right
Log.d("ViewPoint","Center Right");
setViewPointText("Looking center Right");
speak("center right");
}
else if(max == bottomRightBlackPixels)
{
//right
Log.d("ViewPoint","Bottom Right");
setViewPointText("Looking bottom Right");
speak("bottom right");
}
else if(max == topCenterBlackPixels)
{
//center
Log.d("ViewPoint","Top Center");
setViewPointText("Looking top Center");
speak("Top center");
}
else if(max == centerCenterBlackPixels)
{
//center
Log.d("ViewPoint","Center Center");
setViewPointText("Looking center Center");
//speak("cent left");
}
else if(max == bottomCenterBlackPixels)
{
//center
Log.d("ViewPoint","Bottom Center");
setViewPointText("Looking Bottom Center");
speak("bottom center");
}
}
}
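// a more direct alternative (a sketch, not part of this commit): track the index of the
// maximum while scanning, so the region label falls out of the same pass and the
// nine-way if/else chain above becomes unnecessary; the label order mirrors arr above
private static final String[] REGION_LABELS =
{
"Top left", "Center left", "Bottom left",
"Top center", "Center center", "Bottom center",
"Top right", "Center right", "Bottom right"
};
private int indexOfMax(int[] values)
{
int best = 0;
for (int i = 1; i < values.length; i++)
{
if (values[i] > values[best]) best = i;
}
return best;
}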
public Bitmap matToBitmap(Mat mat)
{
Bitmap bitmap =
Bitmap.createBitmap(mat.cols(), mat.rows(), Bitmap.Config.RGB_565);
Utils.matToBitmap(mat, bitmap);
return bitmap;
}
public void setViewPointText(String text)
{
runOnUiThread(new Runnable() {
@Override
public void run() {
view_point.setText(text);
}
});
}
public synchronized void speak(String text)
{
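// speech output is currently disabled; the commented block below announced each view-point change once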
// if(lastSpeechText.equals(text))
// {
// return;
// }
// tts.speak("" + text, TextToSpeech.QUEUE_ADD,null,null);
// lastSpeechText = text;
}
public synchronized void accessEnvironment(Mat mat,int contourSize,double contourArea)
{
if(detectionResults == null) detectionResults = new ArrayList<>();
if(mat == null) return;
int allPixels = mat.cols() * mat.rows();
int whitePixels = Core.countNonZero(mat);
int blackPixels = allPixels - whitePixels;
// sweep the gain up by 0.1 per frame; once it passes the maximum,
// stop sweeping and pick the best level from the collected results
brightnessLevel += 0.1f;
if(brightnessLevel > MAX_BRIGHTNESS_LEVEL)
{
brightnessLevel = 0.0f;
isAccessingEnvironment = false;
findBestBrightnessLevel();
}
Log.d("ContourSize","" + contourSize);
// record this gain level only when exactly one contour of plausible iris size was found
if(contourSize == 1 && contourArea < 500 && contourArea > 10)
{
EyeDetectionResultSet result = new EyeDetectionResultSet();
result.setBlackPixels(blackPixels);
result.setBrightnessLevel(brightnessLevel);
result.setContourSize(contourSize);
detectionResults.add(result);
}
}
synchronized void findBestBrightnessLevel()
{
if(detectionResults.isEmpty())
{
isAccessingEnvironment = true;
return;
}
float [] brightnessArray = new float[detectionResults.size()];
for(int i = 0; i < detectionResults.size(); i++)
{
EyeDetectionResultSet set = detectionResults.get(i);
brightnessArray[i] = set.getBrightnessLevel();
}
// the results were collected while the gain swept upward, so the last entry holds
// the highest gain that still produced a single plausible iris contour
brightnessLevel = brightnessArray[brightnessArray.length - 1];
}
}
\ No newline at end of file
@@ -536,7 +536,7 @@ public class EditorActivity extends AppCompatActivity
public void eyeLeft(Mat mat) {
viewsInDisplay.get(1).setBackgroundColor(Color.RED);
viewsInDisplay.get(0).setBackgroundColor(Color.TRANSPARENT);
viewsInDisplay.get(0).setBackgroundColor(Color.BLACK);
}
@@ -544,7 +544,7 @@ public class EditorActivity extends AppCompatActivity
public void eyeRight(Mat mat) {
viewsInDisplay.get(0).setBackgroundColor(Color.RED);
viewsInDisplay.get(1).setBackgroundColor(Color.TRANSPARENT);
viewsInDisplay.get(1).setBackgroundColor(Color.BLACK);
}
@Override
......
@@ -111,6 +111,12 @@ public class LowLightEnhanceActivity extends AppCompatActivity
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_low_light_enhance);
inputImage = ImageList.getInstance().getCurrentBitmap();
//Views
......
@@ -93,7 +93,6 @@ public class ComputerVision
public void onSkeletonDrawn(Mat mat)
{
Bitmap bitmap = Methods.matToBit(mat);
gestureDetection.detectGestures(bitmap);
}
@@ -102,12 +101,6 @@ public class ComputerVision
{
Bitmap bitmap = Methods.matToBit(mat);
// try {
// Methods.saveImage(activity.getApplicationContext(),bitmap,"NewEyeClose");
// } catch (IOException e) {
// e.printStackTrace();
// }
if(eyeBlinkDetection != null)
eyeBlinkDetection.detectEyeBlinkState(bitmap);
@@ -118,7 +111,6 @@ public class ComputerVision
@Override
public void onPupilChanged(Mat binary,boolean isLeft,boolean isRight)
{
// eyeStateListener.eyeLeft(binary);
if(isLeft)eyeStateListener.eyeLeft(binary);
if(isRight)eyeStateListener.eyeRight(binary);
}
......
@@ -36,7 +36,6 @@ import java.nio.ByteOrder;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static com.app.smartphotoeditor.config.Constants.EYE_END_POSITION_X;
@@ -86,7 +85,7 @@ public class FacialLandmarkDetection
options.setNumThreads(4);
//load CNN model
interpreter = new Interpreter(loadModelFile(assetManager,modelPath),options);
interpreter = new Interpreter(Methods.loadModelFile(assetManager,modelPath),options);
//load haar cascade classifier
loadCascadeClassifier();
@@ -193,7 +192,7 @@ public class FacialLandmarkDetection
Bitmap scaledBitmap = Bitmap.createScaledBitmap(bitmap, inputImageSize, inputImageSize, false);
//create buffer from bitmap
ByteBuffer byteBuffer = convertBitmapToByteBuffer(scaledBitmap);
ByteBuffer byteBuffer = Methods.convertBitmapToByteBuffer(scaledBitmap,inputImageSize);
//landmark resulting array
@@ -416,24 +415,16 @@ public class FacialLandmarkDetection
} catch (Exception e) { }
}
public Mat setDefaultValues(Mat srcMat) {
// final Bitmap bitmap = Bitmap.createBitmap(srcMat.clone().width(), srcMat.clone().height(), Bitmap.Config.ARGB_8888);
public Mat setDefaultValues(Mat srcMat)
{
Imgproc.cvtColor(srcMat, srcMat, Imgproc.COLOR_BGR2GRAY, 0);
Mat srcMat1 = srcMat;
Imgproc.GaussianBlur(srcMat1, srcMat1, new Size(1, 1), 0);
//Mat srcMat1 = new Mat(srcMat.rows(), srcMat.cols(), CV_8UC1);
//int kernalsize = 3;
//Imgproc.bilateralFilter(srcMat, srcMat1, kernalsize, kernalsize * 2, kernalsize / 2);
srcMat1.convertTo(srcMat1, 0, 1.9, 1);
srcMat1.convertTo(srcMat1, CvType.CV_8U, 1.9, -255);
//Imgproc.cvtColor(srcMat1, srcMat1, Imgproc.COLOR_GRAY2RGBA, 4);
int whiteCount = 0;
int leftWhiteCount = 0;
@@ -454,7 +445,7 @@ public class FacialLandmarkDetection
{
whiteCount++;
if(i < 5)
if(i < 15)
{
rightWhiteCount++;
}
@@ -484,7 +475,6 @@ public class FacialLandmarkDetection
Log.d("cccccccccccccccc","left looked");
listener.onPupilChanged(srcMat1,true,false);
}
// Log.d("cccccccccccccccc","" + leftWhiteCount + "-" + rightWhiteCount);
}
@@ -604,59 +594,12 @@ public class FacialLandmarkDetection
//listener.onPupilChanged(binary,calcX,calcY,range,pupilX);
Log.d("fsffsefsess","Y - " + pupilY + " Range - " + (eyeEndY - eyeStartY));
}
private ByteBuffer convertBitmapToByteBuffer(Bitmap scaledBitmap)
{
ByteBuffer byteBuffer;
int inputSize = inputImageSize;// 96
int quant = 1; // 0 = quantized model (1 byte/channel), 1 = float model (4 bytes/channel)
if(quant == 0)
{
byteBuffer = ByteBuffer.allocateDirect(3 * 1 * inputSize * inputSize);
}
else
{
// float input: 4 bytes x 3 channels per pixel
byteBuffer = ByteBuffer.allocateDirect(4 * 1 * inputSize * inputSize * 3);
}
byteBuffer.order(ByteOrder.nativeOrder());
int pixel=0;
int [] intValues=new int [inputSize*inputSize];
scaledBitmap.getPixels(intValues,0,scaledBitmap.getWidth(),0,0,scaledBitmap.getWidth(),scaledBitmap.getHeight());
for (int i=0;i<inputSize;++i){
for(int j=0;j<inputSize;++j){
final int val= intValues[pixel++];
byteBuffer.putFloat((((val >> 16) & 0xFF))/255.0f);
byteBuffer.putFloat((((val >> 8) & 0xFF))/255.0f);
byteBuffer.putFloat(((val & 0xFF))/255.0f);
}
}
return byteBuffer;
}
// now call this function in CameraActivity
private MappedByteBuffer loadModelFile(AssetManager assetManager, String modelPath) throws IOException
{
// memory-map the model file directly from the APK assets
AssetFileDescriptor assetFileDescriptor = assetManager.openFd(modelPath);
FileInputStream inputStream = new FileInputStream(assetFileDescriptor.getFileDescriptor());
FileChannel fileChannel = inputStream.getChannel();
long startOffset = assetFileDescriptor.getStartOffset();
long declaredLength = assetFileDescriptor.getDeclaredLength();
return fileChannel.map(FileChannel.MapMode.READ_ONLY, startOffset, declaredLength);
}
private void loadCascadeClassifier()
{
try
......
package com.app.smartphotoeditor.vision;
import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.util.Log;
import com.app.smartphotoeditor.listeners.ml.OnGestureDetected;
import com.app.smartphotoeditor.sdk.Methods;
import org.tensorflow.lite.Interpreter;
import org.tensorflow.lite.gpu.GpuDelegate;
import java.io.FileInputStream;
import java.io.IOException;
import java.lang.reflect.Array;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
public class GestureDetection
{
private Interpreter interpreter;
private GpuDelegate gpuDelegate;
private final int INPUT_SIZE;
private OnGestureDetected listener;
private final Interpreter interpreter;
private final int inputSize;
private final OnGestureDetected listener;
public GestureDetection(AssetManager assetManager, Context context, String modelPath, int inputSize,OnGestureDetected listener) throws IOException
{
INPUT_SIZE = inputSize;
this.inputSize = inputSize;
this.listener = listener;
// Interpreter.Options options = new Interpreter.Options();
// gpuDelegate = new GpuDelegate();
// options.addDelegate(gpuDelegate);
// options.setNumThreads(4);
interpreter = new Interpreter(loadModelFile(assetManager,modelPath));
Log.d("sssssssssssssss","Model Loaded");
}
private MappedByteBuffer loadModelFile(AssetManager assetManager, String modelPath) throws IOException
{
// description of file
AssetFileDescriptor assetFileDescriptor=assetManager.openFd(modelPath);
FileInputStream inputStream=new FileInputStream(assetFileDescriptor.getFileDescriptor());
FileChannel fileChannel=inputStream.getChannel();
long startOffset=assetFileDescriptor.getStartOffset();
long declaredLength=assetFileDescriptor.getDeclaredLength();
return fileChannel.map(FileChannel.MapMode.READ_ONLY,startOffset,declaredLength);
}
private ByteBuffer convertBitmapToByteBuffer(Bitmap scaledBitmap)
{
ByteBuffer byteBuffer;
int inputSize=INPUT_SIZE;// 96
int quant = 1;
if(quant == 0)
{
byteBuffer = ByteBuffer.allocateDirect(3 * 1 * inputSize * inputSize);
}
else
{
byteBuffer = ByteBuffer.allocateDirect(4 * 1 * inputSize * inputSize * 3);
}
byteBuffer.order(ByteOrder.nativeOrder());
int pixel=0;
int [] intValues=new int [inputSize*inputSize];
scaledBitmap.getPixels(intValues,0,scaledBitmap.getWidth(),0,0,scaledBitmap.getWidth(),scaledBitmap.getHeight());
for (int i=0;i<inputSize;++i){
for(int j=0;j<inputSize;++j){
final int val= intValues[pixel++];
byteBuffer.putFloat((((val >> 16) & 0xFF))/255.0f);
byteBuffer.putFloat((((val >> 8) & 0xFF))/255.0f);
byteBuffer.putFloat(((val & 0xFF))/255.0f);
}
}
return byteBuffer;
interpreter = new Interpreter(Methods.loadModelFile(assetManager,modelPath),null);
}
public String detectGestures(Bitmap bitmap)
{
String output = "Idle";
Bitmap scaledBitmap = Bitmap.createScaledBitmap(bitmap,INPUT_SIZE,INPUT_SIZE,false);
ByteBuffer byteBuffer = convertBitmapToByteBuffer(scaledBitmap);
Bitmap scaledBitmap = Bitmap.createScaledBitmap(bitmap,inputSize,inputSize,false);
ByteBuffer byteBuffer = Methods.convertBitmapToByteBuffer(scaledBitmap,inputSize);
float[][] result = new float[1][5];
@@ -104,8 +45,6 @@ public class GestureDetection
float top = (float) Array.get(Array.get(result,0),i + 3);
float bottom = (float) Array.get(Array.get(result,0),i + 4);
Log.d("Leftccc", "" + idle);
float max = 0;
......
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@color/white"
tools:context=".activities.CameraView">
<org.opencv.android.JavaCameraView
android:layout_width="match_parent"
android:layout_height="match_parent"
android:id="@+id/frame_Surface"/>
<ImageView
android:id="@+id/frame"
android:layout_width="100dp"
android:layout_height="130dp"
android:layout_marginStart="32dp"
android:layout_marginTop="32dp"
android:scaleType="centerCrop"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent" />
<ImageView
android:id="@+id/left"
android:layout_width="100dp"
android:layout_height="0dp"
android:scaleType="centerCrop"
app:layout_constraintBottom_toBottomOf="@+id/frame"
app:layout_constraintHorizontal_chainStyle="packed"
app:layout_constraintStart_toEndOf="@+id/frame"
app:layout_constraintTop_toTopOf="@+id/frame" />
<ImageView
android:id="@+id/center"
android:layout_width="100dp"
android:layout_height="100dp"
android:layout_marginEnd="32dp"
android:scaleType="centerCrop"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toStartOf="@+id/right" />
<ImageView
android:id="@+id/right"
android:layout_width="100dp"
android:layout_height="100dp"
android:layout_marginEnd="32dp"
android:scaleType="centerCrop"
app:layout_constraintBottom_toBottomOf="@+id/center"
app:layout_constraintEnd_toEndOf="parent" />
<TextView
android:id="@+id/view_point"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:gravity="center"
android:text="View Point"
android:textColor="@android:color/black"
android:textSize="20sp"
app:layout_constraintBottom_toTopOf="@+id/center"
app:layout_constraintTop_toBottomOf="@+id/frame" />
</androidx.constraintlayout.widget.ConstraintLayout>
\ No newline at end of file