I’m currently building an app that tracks a calibration card with 7 circles and uses the colour values from the circles to do some calculations. I’ve tried to build on [this](http://blog.codeonion.com/2016/04/09/show-camera-on-android-app-using-opencv-for-android/)
tutorial and even though it works, the frame rate is really low, even at 640 × 360 on a Nexus 6P.
The screen orientation is also in landscape because it didn't fill the screen when it was transposed.
Is there a better way to do this?
/**
 * Camera preview activity that detects the 7 circles of a calibration card via a
 * Hough transform and samples their average colours.
 *
 * Performance notes (fixes for the reported low frame rate):
 * - The per-frame RGBA→gray cvtColor was removed: {@code inputFrame.gray()} already
 *   delivers the grayscale plane, so the conversion was pure redundant work.
 * - The HoughCircles output Mat is no longer pre-allocated full-frame with swapped
 *   rows/cols, and is now released every frame (it leaked native memory before).
 */
public class Detector extends Activity implements CameraBridgeViewBase.CvCameraViewListener2 {
    private static final String TAG = "OCVSample::Activity";
    private static final int MY_PERMISSIONS_REQUEST_CAMERA_GROUP = 0;
    public final static String EXTRA_CIRCLES = "com.company.app.CIRCLES";

    // Hough-circle tuning (hoisted out of onCameraFrame; identical values).
    private static final double HOUGH_DP = 1.2d;       // inverse accumulator resolution ratio
    private static final double HOUGH_MIN_DIST = 70;   // min distance between circle centres (px)
    private static final double HOUGH_PARAM1 = 90;     // upper Canny edge-detection threshold
    private static final double HOUGH_PARAM2 = 90;     // accumulator (vote) threshold
    private static final int EXPECTED_CIRCLES = 7;     // circle count on the calibration card
    private static final int SAMPLE_HALF_SIZE = 8;     // half-width of the colour-sampling window (px)

    private Mat mRgba;                       // current RGBA frame (returned for display)
    private Mat mGray;                       // current grayscale frame (Hough input)
    private Scalar mBlobColorRgba;           // last sampled circle colour, RGBA
    private Scalar mBlobColorHsv;            // last sampled circle colour, HSV
    private CameraBridgeViewBase mOpenCvCameraView;

    /** Starts the camera preview once the OpenCV manager reports a successful load. */
    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
                case LoaderCallbackInterface.SUCCESS:
                {
                    Log.i(TAG, "OpenCV loaded successfully");
                    // Load native library after(!) OpenCV initialization
                    //System.loadLibrary("mixed_sample");
                    mOpenCvCameraView.enableView();
                } break;
                default:
                {
                    super.onManagerConnected(status);
                } break;
            }
        }
    };

    public Detector() {
        Log.i(TAG, "Instantiated new " + this.getClass());
    }

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        Log.i(TAG, "called onCreate");
        super.onCreate(savedInstanceState);

        ActivityManager activityManager = (ActivityManager) getSystemService(Context.ACTIVITY_SERVICE);
        Log.i(TAG, String.format("Large memory class: %d MB", activityManager.getLargeMemoryClass()));

        // BUG FIX: the old code silently did nothing when a permission rationale
        // should be shown, so after one denial the CAMERA permission was never
        // requested again. Always request; TODO(review): show a rationale UI first
        // when shouldShowRequestPermissionRationale() is true.
        if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
                != PackageManager.PERMISSION_GRANTED) {
            ActivityCompat.requestPermissions(this,
                    new String[]{Manifest.permission.CAMERA},
                    MY_PERMISSIONS_REQUEST_CAMERA_GROUP);
        }

        // Must precede setContentView().
        requestWindowFeature(Window.FEATURE_NO_TITLE);

        // Fullscreen
        if (Build.VERSION.SDK_INT < 16) {
            getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                    WindowManager.LayoutParams.FLAG_FULLSCREEN);
        } else {
            getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
        }
        // Stay on
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

        setContentView(R.layout.custom_camera);
        mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.color_blob_detection_activity_surface_view);
        // Compatibility and performance
        // 1280 x 720 => ~3fps
        // 640 x 360 => ~12fps
        mOpenCvCameraView.setMaxFrameSize(640, 360);
        mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
        mOpenCvCameraView.setCvCameraViewListener(this);
        // NOTE: OpenCV initialization happens in onResume() (initDebug/initAsync);
        // the redundant, result-ignored initDebug() that used to live here was removed.
    }

    @Override
    public void onPause() {
        super.onPause();
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }

    @Override
    public void onResume() {
        super.onResume();
        // Prefer a statically linked OpenCV; fall back to the async manager service.
        if (!OpenCVLoader.initDebug()) {
            OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_1_0, this, mLoaderCallback);
        } else {
            mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
        }
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }

    @Override
    public void onCameraViewStarted(int width, int height) {
        // Empty Mats only: onCameraFrame reassigns these to the camera bridge's
        // buffers, so the full-size Mats previously allocated here just leaked.
        mRgba = new Mat();
        mGray = new Mat();
        mBlobColorRgba = new Scalar(255);
        mBlobColorHsv = new Scalar(255);
    }

    @Override
    public void onCameraViewStopped() {
        mRgba.release();
        mGray.release();
    }

    /**
     * Per-frame pipeline: blur → HoughCircles → draw outlines → (when the whole card
     * is located) sample each circle's average colour and report it.
     *
     * @param inputFrame camera frame wrapper providing RGBA and gray planes
     * @return the RGBA frame with detected circles drawn, for on-screen display
     */
    @Override
    public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
        mRgba = inputFrame.rgba();
        // PERFORMANCE FIX: the bridge already supplies the gray plane; the old code
        // overwrote it with a redundant cvtColor(mRgba → gray) on every frame.
        mGray = inputFrame.gray();

        // Reduce the noise so we avoid false circle detection.
        Imgproc.GaussianBlur(mGray, mGray, new Size(9, 9), 2, 2);

        // Use the frame size as the reference for radius limits.
        int screenWidth = mRgba.width(), screenHeight = mRgba.height();
        int minRadius = screenHeight / 20, maxRadius = screenHeight / 3;

        // HoughCircles sizes its own output. The old code pre-allocated this Mat with
        // rows/cols swapped and never released it — a native leak on every frame.
        Mat circles = new Mat();
        Imgproc.HoughCircles(mGray, circles, Imgproc.CV_HOUGH_GRADIENT, HOUGH_DP,
                HOUGH_MIN_DIST, HOUGH_PARAM1, HOUGH_PARAM2, minRadius, maxRadius);

        int numberOfCircles = (circles.rows() == 0) ? 0 : circles.cols();

        // Hough circles are sorted from large to small radius. The card counts as
        // located when all 7 circles are present and the largest sits in the expected
        // frame region. This test is loop-invariant, so evaluate it once instead of
        // once per circle as before.
        boolean cardLocated = false;
        if (numberOfCircles == EXPECTED_CIRCLES) {
            double[] largest = circles.get(0, 0);  // (x, y, r) of the biggest circle
            double xx = largest[0], yy = largest[1];
            cardLocated = 0.6 * screenWidth <= xx && xx <= 0.85 * screenWidth
                    && (2 / 6.5) * screenHeight <= yy && yy <= (4 / 6.5) * screenHeight;
        }

        for (int i = 0; i < numberOfCircles; i++) {
            // circleCoordinates[0, 1, 2] = (x, y, r); (x, y) is the circle's centre.
            double[] circleCoordinates = circles.get(0, i);
            int x = (int) circleCoordinates[0], y = (int) circleCoordinates[1];
            int radius = (int) circleCoordinates[2];

            // Circle outline, for visualization.
            Imgproc.circle(mRgba, new Point(x, y), radius, new Scalar(0, 255, 0), 10);

            if (cardLocated) {
                mBlobColorRgba = averageCircleColor(x, y);
                Log.i("RGB+A circle values", "Number " + i + ": (" + mBlobColorRgba.val[0] + ", " + mBlobColorRgba.val[1] +
                        ", " + mBlobColorRgba.val[2] + ", " + mBlobColorRgba.val[3] + ")");
                // NOTE(review): preserved from the original — this fires once per
                // circle, so up to 7 result activities launch per frame. It likely
                // should aggregate all 7 colours and fire once; confirm intent.
                calibrationCardDetected(mBlobColorRgba);
            }
        }
        circles.release();

        // Verification
        Log.i("Number of circles", "(" + numberOfCircles + ")");
        return mRgba;
    }

    /**
     * Averages the colour of a (2*SAMPLE_HALF_SIZE)^2 window centred on (x, y) of the
     * current RGBA frame, clamped to the frame bounds.
     *
     * @param x circle centre x in pixels
     * @param y circle centre y in pixels
     * @return the mean colour converted to RGBA (also stored in mBlobColorHsv as HSV)
     */
    private Scalar averageCircleColor(int x, int y) {
        int cols = mRgba.cols();
        int rows = mRgba.rows();

        Rect averageRect = new Rect();
        averageRect.x = Math.max(x - SAMPLE_HALF_SIZE, 0);
        averageRect.y = Math.max(y - SAMPLE_HALF_SIZE, 0);
        averageRect.width = Math.min(x + SAMPLE_HALF_SIZE, cols) - averageRect.x;
        averageRect.height = Math.min(y + SAMPLE_HALF_SIZE, rows) - averageRect.y;

        Mat averageRegionRgba = mRgba.submat(averageRect);
        Mat averageRegionHsv = new Mat();
        Imgproc.cvtColor(averageRegionRgba, averageRegionHsv, Imgproc.COLOR_RGB2HSV_FULL);

        // Mean colour of the sampled region: channel sums divided by the pixel count.
        mBlobColorHsv = Core.sumElems(averageRegionHsv);
        int pointCount = averageRect.width * averageRect.height;
        for (int j = 0; j < mBlobColorHsv.val.length; j++)
            mBlobColorHsv.val[j] /= pointCount;

        averageRegionRgba.release();
        averageRegionHsv.release();
        return convertScalarHsv2Rgba(mBlobColorHsv);
    }

    /**
     * Convert scalar from HSV to RGBA.
     *
     * @param hsvColor colour in HSV (full-range hue)
     * @return the same colour as an RGBA scalar
     */
    private Scalar convertScalarHsv2Rgba(Scalar hsvColor) {
        Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
        Mat pointMatRgba = new Mat();
        Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);
        Scalar rgba = new Scalar(pointMatRgba.get(0, 0));
        // LEAK FIX: the old code never released these 1x1 scratch Mats.
        pointMatHsv.release();
        pointMatRgba.release();
        return rgba;
    }

    /**
     * Open a new intent displaying and calculating the results.
     *
     * @param scalar the sampled circle colour to pass to the result activity
     */
    protected void calibrationCardDetected(Scalar scalar) {
        String scalar_string = scalar.toString();
        try {
            // Pop intent
            Intent intent = new Intent(this, DisplayResultActivity.class);
            intent.putExtra(EXTRA_CIRCLES, scalar_string);
            startActivity(intent);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
↧