From f36645d74290c4baf17703be4959c863288e8840 Mon Sep 17 00:00:00 2001
From: "dan.rustia" <dan.rustia@wur.nl>
Date: Tue, 21 Nov 2023 13:31:23 +0100
Subject: [PATCH] Added ARCore support checks

---
 TraitDetector/app/build.gradle                    |   2 +-
 .../invite/traitdetector/MainActivity.java        |   9 +-
 .../invite/traitdetector/config/Config.java       |  37 +---
 .../traitdetector/depth/ARActivity.java           | 200 +++++++++---------
 .../traitdetector/depth/DepthHandler.java         |   8 +-
 .../traitdetector/gallery/TomatoActivity.java     |   2 +-
 .../traitdetector/processing/ImageOps.java        |   9 +-
 .../traitdetector/processing/Processor.java       |   2 +-
 .../src/main/res/layout/tomato_activity.xml       |  34 +--
 9 files changed, 141 insertions(+), 162 deletions(-)

diff --git a/TraitDetector/app/build.gradle b/TraitDetector/app/build.gradle
index 4e5aaa0..76b302c 100644
--- a/TraitDetector/app/build.gradle
+++ b/TraitDetector/app/build.gradle
@@ -75,7 +75,7 @@ dependencies {
     implementation "androidx.camera:camera-core:$camerax_version"
    implementation "androidx.camera:camera-camera2:$camerax_version"
 
-    implementation 'com.google.ar:core:1.35.0'
+    implementation 'com.google.ar:core:1.32.0'
     implementation 'com.google.code.gson:gson:2.10'
     implementation 'com.google.mlkit:barcode-scanning:17.0.3'
     implementation 'com.google.android.play:asset-delivery:2.1.0'
diff --git a/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/MainActivity.java b/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/MainActivity.java
index 24c95ee..a7f8a1d 100644
--- a/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/MainActivity.java
+++ b/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/MainActivity.java
@@ -177,7 +177,7 @@ public class MainActivity extends AppCompatActivity {
             yolo_model_filename_2 = FileFinder.assetFilePath(getApplicationContext(), user_cfg.yolo_model_filename_2);
             Log.d("INVITE", yolo_model_filename_2);
             if (yolo_model_filename_2.contains("yolo")) {
-                YOLOv8_2.initModel(getApplicationContext(), user_cfg); // TODO: Don't make this a class! Make it an object
+                YOLOv8_2.initModel(getApplicationContext(), user_cfg);
             }
 
         } catch (IOException e) {
@@ -518,6 +518,13 @@ public class MainActivity extends AppCompatActivity {
 
         // Display output
         ImageOps.displayImage(img_output_final, imageView);
+        if (index != mTestImages.length-1){
+            index = index + 1;
+        }
+        else{
+            index = 0;
+        }
+
     }
 
     @Override
diff --git a/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/config/Config.java b/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/config/Config.java
index 835a440..c4d345d 100644
--- a/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/config/Config.java
+++ b/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/config/Config.java
@@ -22,28 +22,26 @@ public class Config {
     public String maskrcnn_model_filename = "maskrcnn-1scale.ptl";
 
     // YOLO
-//    public String yolo_model_filename = "yolov8m-seg-320.onnx"; // 834 ms
-//    public String yolo_model_filename = "yolov8s-seg-320.onnx"; // 515 ms
     public String yolo_model_filename = "yolov8n-seg-320.onnx"; // 335 ms
-//    public String yolo_model_filename = "yolov8m-seg-640.onnx"; // 2795 ms
-//    public String yolo_model_filename = "yolov8s-seg-640.onnx"; // 1491 ms
-//    public String yolo_model_filename = "yolov8n-seg-640.onnx"; // 820 ms
-
     public String[] yolo_classes_list = {"tomato"};
-    public float yolo_score_threshold = 0.5f;
+    public float yolo_score_threshold = 0.25f;
     public float yolo_iou_threshold = 0.5f;
     public int yolo_input_size = 320;
     public float yolo_box_padding = 1.0f;
+
+    // SRGAN
+    public String sr_model_filename = "realesrgan-x2.ort"; // 0.98s each
+    public int sr_threshold = 32; // Threshold if SR will be used
+    public int sr_input_size = 64; // Required input size
+    public int sr_output_size = 128; // x2
+    public int sr_force_size = 320; // Forced input size for 2nd YOLO
+
+
+
     // YOLO 2
     public String yolo_model_filename_2 = "yolov8n-seg-320-2.onnx"; // 292 ms
-//    public String yolo_model_filename_2 = "yolov8s-seg-320-2.onnx"; // 330 ms
-//    public String yolo_model_filename_2 = "yolov8m-seg-320-2.onnx"; // 650 ms
-
-//    public String yolo_model_filename_2 = "yolov8n-seg-640-2.onnx"; // 437 ms
-//    public String yolo_model_filename_2 = "yolov8s-seg-640-2.onnx"; // 1113 ms
-//    public String yolo_model_filename_2 = "yolov8m-seg-640-2.onnx"; // 1113 ms
     public String[] yolo_classes_list_2 = {"tomato", "peduncle"};
     public float yolo_score_threshold_2 = 0.5f;
     public float yolo_iou_threshold_2 = 0.5f;
 
 
@@ -60,19 +58,6 @@ public class Config {
 
 
 
-
-    // SRGAN
-    public String sr_model_filename = "realesrgan-x2.ort"; // 0.98s each
-//    public String sr_model_filename = "realesrgan-x2.onnx"; // 0.8s each
-//    public String sr_model_filename = "realesrgan-x2-fp16.onnx"; // doesn't work?
-    // public String sr_model_filename = "realesrgan-x4.onnx"; // 3.2s each
-    public int sr_threshold = 32; // Threshold if SR will be used
-    public int sr_input_size = 64; // Required input size
-    public int sr_output_size = 128; // x2
-//    public int sr_output_size = 256; // x4
-    public int sr_force_size = 320; // Forced input size for 2nd YOLO
-
-
 
 
 
diff --git a/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/depth/ARActivity.java b/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/depth/ARActivity.java
index 683b7db..1c5333e 100755
--- a/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/depth/ARActivity.java
+++ b/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/depth/ARActivity.java
@@ -72,7 +72,6 @@ import javax.microedition.khronos.opengles.GL10;
 public class ARActivity extends AppCompatActivity implements GLSurfaceView.Renderer {
     private static final String TAG = ARActivity.class.getSimpleName();
 
-    private boolean isDepthSupported;
     private boolean installRequested;
 
     // Rendering. The Renderers are created here, and initialized when the GL surface is created.
@@ -99,6 +98,8 @@ public class ARActivity extends AppCompatActivity implements GLSurfaceView.Rende
 //    public float[] cameraDistance = new float[] {0, 0};
     public float cameraDistance = 0;
 
+    private boolean isDepthSupported = false;
+
     @Override
     protected void onCreate(Bundle savedInstanceState) {
         super.onCreate(savedInstanceState);
@@ -163,9 +164,7 @@ public class ARActivity extends AppCompatActivity implements GLSurfaceView.Rende
 //                })
                     .collect(Collectors.toList());
             session.setCameraConfig(hdConfigs.get(0));
-
             config.setFocusMode(Config.FocusMode.AUTO);
-
             session.configure(config);
 
         } catch (UnavailableArcoreNotInstalledException
@@ -267,10 +266,8 @@ public class ARActivity extends AppCompatActivity implements GLSurfaceView.Rende
         if (distance == 0) {
             message = "Distance: Initialising";
         } else if (distance < 500) {
-//            message = "Distance: " + distance / 10 + " cm. Too close for accurate analysis (" + confidence + ")";
             message = "Distance: " + distance / 10 + " cm. Too close for accurate analysis";
         } else if (distance > 1200) {
-//            message = "Distance: " + distance / 10 + " cm. Too far for accurate analysis (" + confidence + ")" ;
             message = "Distance: " + distance / 10 + " cm. Too far for accurate analysis" ;
         } else {
             message = "Distance: " + distance / 10 + " cm";
@@ -291,151 +288,144 @@ public class ARActivity extends AppCompatActivity implements GLSurfaceView.Rende
         displayRotationHelper.updateSessionIfNeeded(session);
 
         try {
-      session.setCameraTextureName(backgroundRenderer.getTextureId());
-      // Obtain the current frame from ARSession. When the configuration is set to
-      // UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
-      // camera framerate.
-      Frame frame = session.update();
-      Camera camera = frame.getCamera();
-      // If frame is ready, render camera preview image to the GL surface.
-      backgroundRenderer.draw(frame);
+            session.setCameraTextureName(backgroundRenderer.getTextureId());
 
-      // Keep the screen unlocked while tracking, but allow it to lock when tracking stops.
-      trackingStateHelper.updateKeepScreenOnFlag(camera.getTrackingState());
-
-      // If not tracking, don't draw 3D objects, show tracking failure reason instead.
-      if (camera.getTrackingState() == TrackingState.PAUSED) {
-        messageSnackbarHelper.showMessage(
-            this, TrackingStateHelper.getTrackingFailureReasonString(camera));
-        return;
-      }
+            // Obtain the current frame from ARSession. When the configuration is set to
+            // UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
+            // camera framerate.
+            Frame frame = session.update();
+            Camera camera = frame.getCamera();
 
-      // Get projection matrix.
-      float[] projmtx = new float[16];
-      camera.getProjectionMatrix(projmtx, 0, 0.1f, 100.0f);
+            // If frame is ready, render camera preview image to the GL surface.
+            backgroundRenderer.draw(frame);
 
-      // Get camera matrix and draw.
-      float[] viewmtx = new float[16];
-      camera.getViewMatrix(viewmtx, 0);
+            // Keep the screen unlocked while tracking, but allow it to lock when tracking stops.
+            trackingStateHelper.updateKeepScreenOnFlag(camera.getTrackingState());
 
-      // Compute lighting from average intensity of the image.
-      // The first three components are color scaling factors.
-      // The last one is the average pixel intensity in gamma space.
-      final float[] colorCorrectionRgba = new float[4];
-      frame.getLightEstimate().getColorCorrection(colorCorrectionRgba, 0);
+            // If not tracking, don't draw 3D objects, show tracking failure reason instead.
+//            if (camera.getTrackingState() == TrackingState.PAUSED) {
+//                messageSnackbarHelper.showMessage(
+//                    this, TrackingStateHelper.getTrackingFailureReasonString(camera));
+//                return;
+//            }
 
-      // No tracking error at this point. Inform user of what to do based on if planes are found.
-      String message = "";
+            float[] projmtx = new float[16];
+            camera.getProjectionMatrix(projmtx, 0, 0.1f, 100.0f);
+            // Get camera matrix and draw.
+            float[] viewmtx = new float[16];
+            camera.getViewMatrix(viewmtx, 0);
+            final float[] colorCorrectionRgba = new float[4];
+            frame.getLightEstimate().getColorCorrection(colorCorrectionRgba, 0);
 
-      if (!isDepthSupported) {
-        messageToShow += "\n" + DEPTH_NOT_AVAILABLE_MESSAGE;
-        messageSnackbarHelper.showMessage(this, message);
-        return;
-      }
-      Bundle extras = getIntent().getExtras();
-      assert extras != null;
-      qrEnabled = extras.getBoolean("qr_enabled");
-//      Image depthImage = frame.acquireDepthImage();
-      Image depthImage = frame.acquireDepthImage16Bits();
-      Image depthConfidence = frame.acquireRawDepthConfidenceImage();
-      Image cameraImage = frame.acquireCameraImage();
-      Image.Plane depthPlane = depthImage.getPlanes()[0];
-      int pixelStride = depthPlane.getPixelStride();
-      int rowStride = depthPlane.getRowStride();
-      ByteBuffer depthBufferRaw = depthPlane.getBuffer().order(ByteOrder.nativeOrder());
-      // Get camera distance
-      int[] depthShape = {depthImage.getWidth(), depthImage.getHeight()};
-      int depthX = depthImage.getWidth() / 2;
-      int depthY = depthImage.getHeight() / 2;
-      int depthIndex = depthX * depthPlane.getPixelStride() + depthY * depthPlane.getRowStride();
-      cameraDistance = depthBufferRaw.getShort(depthIndex);
+            // No tracking error at this point. Inform user of what to do based on if planes are found.
+            String message = "";
+            Log.d("INVITE_X", String.valueOf(isDepthSupported) + " " + camera.getTrackingState());
+            if (!isDepthSupported) {
+                message = DEPTH_NOT_AVAILABLE_MESSAGE;
+                messageSnackbarHelper.showMessage(this, message);
+            }
+            else {
+                Bundle extras = getIntent().getExtras();
+                assert extras != null;
+                qrEnabled = extras.getBoolean("qr_enabled");
+                // Image depthImage = frame.acquireDepthImage();
+                Image depthImage = frame.acquireDepthImage16Bits();
+                Image depthConfidence = frame.acquireRawDepthConfidenceImage();
+                Image cameraImage = frame.acquireCameraImage();
+                Image.Plane depthPlane = depthImage.getPlanes()[0];
+                int pixelStride = depthPlane.getPixelStride();
+                int rowStride = depthPlane.getRowStride();
+                ByteBuffer depthBufferRaw = depthPlane.getBuffer().order(ByteOrder.nativeOrder());
+                // Get camera distance
+                int[] depthShape = {depthImage.getWidth(), depthImage.getHeight()};
+                int depthX = depthImage.getWidth() / 2;
+                int depthY = depthImage.getHeight() / 2;
+                int depthIndex = depthX * depthPlane.getPixelStride() + depthY * depthPlane.getRowStride();
+                cameraDistance = depthBufferRaw.getShort(depthIndex);
 //            cameraDistance = depthTexture.getDepthValue(depthImage, depthConfidence, depthX, depthY);
 //            cameraDistance = depthTexture.getDepthOnly(depthImage, depthX, depthY);
-      float distance = cameraDistance;
+                float distance = cameraDistance;
 //            float confidence = cameraDistance[1];
 
-      //
-      // Distance handler
-      //
-      message = checkDistance(distance);
+                //
+                // Distance handler
+                //
+                message = checkDistance(distance);
 
-      // FORCE QR
-      //
-      // Show status message in camera interface
-      //
-      if (qrEnabled) {
-        if (!qrCodeRetrieved) {
-          message += "\nNo QR code found. Please try moving the camera slightly.";
+                // FORCE QR
+                //
+                // Show status message in camera interface
+                //
+                if (qrEnabled) {
+                    if (!qrCodeRetrieved) {
+                        message += "\nNo QR code found. Please try moving the camera slightly.";
 
-          qrCodeValues = qrCodeHandler.analyze(cameraImage);
+                        qrCodeValues = qrCodeHandler.analyze(cameraImage);
 
-          if (qrCodeValues.length() != 0) {
-            qrCodeRetrieved = true;
-          }
-        } else {
+                        if (qrCodeValues.length() != 0) {
+                            qrCodeRetrieved = true;
+                        }
+                    } else {
 //                    if (!message.isEmpty()) {
 //                        message += "\nDistance: " + distance/10 + "cm Variety: " + qrCodeValues.getString("label");
 //                    } else {
 //                        message = "Distance: " + distance/10 + "cm Variety: " + qrCodeValues.getString("label");
 //                    }
-        }
-      }
-      else
-      {
-        message += "\nQR scanning is disabled. Now using default values.";
-        qrCodeValues = qrCodeHandler.defaultValues();
-      }
+                    }
+                } else {
+                    message += "\nQR scanning is disabled. Now using default values.";
+                    qrCodeValues = qrCodeHandler.defaultValues();
+                }
 
-      if (message != messageToShow) {
-        messageToShow = message;
-        messageSnackbarHelper.showMessage(this, messageToShow);
-      }
+                if (message != messageToShow) {
+                    messageToShow = message;
+                    messageSnackbarHelper.showMessage(this, messageToShow);
+                }
 
-      if (capturePicture && distance != 0) {
-        capturePicture = false;
+                if (capturePicture && distance != 0) {
+                    capturePicture = false;
 
-        //
-        // Save depth image
-        //
-        session.pause();
+                    // Save depth image
+                    session.pause();
+                    JSONObject traits = depthTexture.saveDepth(this, frame, qrCodeValues, cameraImage, pixelStride, rowStride, depthBufferRaw, depthShape);
+                    qrCodeValues.put("traits", traits);
 
-//        depthImage.close();
-        JSONObject traits = depthTexture.saveDepth(this, frame, qrCodeValues, cameraImage, pixelStride, rowStride, depthBufferRaw, depthShape);
-        qrCodeValues.put("traits", traits);
+                    Intent output = new Intent();
 
-        Intent output = new Intent();
+                    String label = (String) qrCodeValues.get("label");
+                    String metadata = qrCodeValues.toString();
+                    output.putExtra("label", label);
+                    output.putExtra("metadata", metadata);
+                    output.putExtra("distance", distance / 10);
 
-        String label = (String) qrCodeValues.get("label");
-        String metadata = qrCodeValues.toString();
-        output.putExtra("label", label);
-        output.putExtra("metadata", metadata);
-        output.putExtra("distance", distance / 10);
+                    setResult(this.RESULT_OK, output);
+                    finish();
+                }
+
+                depthImage.close();
+                cameraImage.close();
+            }
 
-        setResult(this.RESULT_OK, output);
-        finish();
-      }
-      depthImage.close();
-      cameraImage.close();
 
         } catch (Throwable t) {
             // Avoid crashing the application due to unhandled exceptions.
diff --git a/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/depth/DepthHandler.java b/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/depth/DepthHandler.java
index f4de53d..53884ec 100644
--- a/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/depth/DepthHandler.java
+++ b/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/depth/DepthHandler.java
@@ -180,10 +180,10 @@ public final class DepthHandler {
         bitmap.compress(Bitmap.CompressFormat.PNG, 100, fos);
         fos.flush();
         fos.close();
-        FileOutputStream fos2 = new FileOutputStream(outX);
-        bitmap.compress(Bitmap.CompressFormat.PNG, 100, fos2);
-        fos2.flush();
-        fos2.close();
+//        FileOutputStream fos2 = new FileOutputStream(outX);
+//        bitmap.compress(Bitmap.CompressFormat.PNG, 100, fos2);
+//        fos2.flush();
+//        fos2.close();
 
         cameraImage.close();
 
diff --git a/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/gallery/TomatoActivity.java b/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/gallery/TomatoActivity.java
index 896f515..d854dad 100644
--- a/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/gallery/TomatoActivity.java
+++ b/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/gallery/TomatoActivity.java
@@ -46,7 +46,7 @@ public class TomatoActivity extends AppCompatActivity {
         ImageView imageView = findViewById(R.id.image);
         TextView labelView = findViewById(R.id.label);
 //        TextView widthValue = findViewById(R.id.widthValue);
-        TextView sizeValue = findViewById(R.id.sizeValue);
+//        TextView sizeValue = findViewById(R.id.sizeValue);
 //        TextView heightValue = findViewById(R.id.heightValue);
         TextView volumeValue = findViewById(R.id.volumeValue);
         TextView ratioValue = findViewById(R.id.ratioValue);
diff --git a/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/processing/ImageOps.java b/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/processing/ImageOps.java
index 5b5749e..122774d 100644
--- a/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/processing/ImageOps.java
+++ b/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/processing/ImageOps.java
@@ -355,9 +355,8 @@ public class ImageOps {
                 draw_m_map = instance.m_map;
                 draw_m_map.convertTo(draw_m_map, CvType.CV_32SC1, 255);
                 Imgproc.findContours(draw_m_map, contours, new Mat(), Imgproc.RETR_FLOODFILL, Imgproc.CHAIN_APPROX_SIMPLE);
-                for (int i = 0; i < contours.size(); i++) {
-                    Imgproc.drawContours(dest_img, contours, i, colors.get(c), myConfig.thickness);
-                }
+                Imgproc.drawContours(dest_img, contours, ImageOps.get_max_contour(contours), colors.get(c), myConfig.thickness);
+
                 contours = new ArrayList<MatOfPoint>();
 
                 if (c2 == 1)
@@ -365,9 +364,7 @@ {
                     draw_m_map = instance.m_map2;
                     draw_m_map.convertTo(draw_m_map, CvType.CV_32SC1, 255);
                     Imgproc.findContours(draw_m_map, contours, new Mat(), Imgproc.RETR_FLOODFILL, Imgproc.CHAIN_APPROX_SIMPLE);
-                    for (int i = 0; i < contours.size(); i++) {
-                        Imgproc.drawContours(dest_img, contours, i, colors.get(c2), myConfig.thickness);
-                    }
+                    Imgproc.drawContours(dest_img, contours, ImageOps.get_max_contour(contours), colors.get(c2), myConfig.thickness);
                 }
             }
diff --git a/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/processing/Processor.java b/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/processing/Processor.java
index 495bee2..fdbc0d4 100644
--- a/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/processing/Processor.java
+++ b/TraitDetector/app/src/main/java/com/wur/invite/traitdetector/processing/Processor.java
@@ -245,7 +245,7 @@ public class Processor {
             }
 
             // If peduncle
-            if (c2 == 1) {
+            if (c2 == 1 && o.orientation == 0) {
                 Mat clone_mat = new Mat(img_shape[1], img_shape[0], CvType.CV_8U, Scalar.all(0));
                 Mat dest_mat = clone_mat.submat(y1, y2, x1, x2);
                 m_map2.convertTo(m_map2, CvType.CV_8U, 255);
diff --git a/TraitDetector/app/src/main/res/layout/tomato_activity.xml b/TraitDetector/app/src/main/res/layout/tomato_activity.xml
index 7c76a15..4b457ed 100644
--- a/TraitDetector/app/src/main/res/layout/tomato_activity.xml
+++ b/TraitDetector/app/src/main/res/layout/tomato_activity.xml
@@ -35,23 +35,23 @@
             android:textColor="#000000"
             android:textStyle="bold" />
 
-        <TextView
-            android:layout_marginTop="10dp"
-            android:id="@+id/sizeLabel"
-            android:layout_width="170dp"
-            android:layout_height="wrap_content"
-            android:textSize="10dp"
-            android:textColor="#000000"
-            android:textStyle="bold"
-            android:text="#26: SIZE (mm2)" />
-
-        <TextView
-            android:id="@+id/sizeValue"
-            android:layout_width="50dp"
-            android:text="0"
-            android:textColor="#000000"
-            android:layout_height="wrap_content"
-            android:textSize="16dp" />
+<!--        <TextView-->
+<!--            android:layout_marginTop="10dp"-->
+<!--            android:id="@+id/sizeLabel"-->
+<!--            android:layout_width="170dp"-->
+<!--            android:layout_height="wrap_content"-->
+<!--            android:textSize="10dp"-->
+<!--            android:textColor="#000000"-->
+<!--            android:textStyle="bold"-->
+<!--            android:text="#26: SIZE (mm2)" />-->
+
+<!--        <TextView-->
+<!--            android:id="@+id/sizeValue"-->
+<!--            android:layout_width="50dp"-->
+<!--            android:text="0"-->
+<!--            android:textColor="#000000"-->
+<!--            android:layout_height="wrap_content"-->
+<!--            android:textSize="16dp" />-->
 
-- 
GitLab
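
Background note (not part of the patch): the diff initialises isDepthSupported to false and gates the whole depth pipeline on it in onDrawFrame(), but this excerpt never shows where the flag becomes true. The sketch below is only an illustration of how such an ARCore depth-support check is commonly wired up once the Session exists; the helper name configureDepth and its placement (for example called from onResume() after the session is created) are assumptions, not taken from the patch.

    // Hypothetical helper for ARActivity (sketch, not from the patch).
    // Fully-qualified names avoid a clash with the app's own config.Config class.
    private void configureDepth(com.google.ar.core.Session session) {
        com.google.ar.core.Config config = session.getConfig();
        // Query device support for the Depth API before enabling it.
        isDepthSupported = session.isDepthModeSupported(com.google.ar.core.Config.DepthMode.AUTOMATIC);
        config.setDepthMode(isDepthSupported
                ? com.google.ar.core.Config.DepthMode.AUTOMATIC
                : com.google.ar.core.Config.DepthMode.DISABLED);
        config.setFocusMode(com.google.ar.core.Config.FocusMode.AUTO);
        session.configure(config);
    }

For reference, Frame.acquireDepthImage16Bits(), which onDrawFrame() relies on, was added in ARCore SDK 1.31, so the com.google.ar:core:1.32.0 dependency pinned in build.gradle covers it.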