diff --git a/lite/examples/object_detection/android/app/build.gradle b/lite/examples/object_detection/android/app/build.gradle index fd48aa2b803..4ce43a3c541 100644 --- a/lite/examples/object_detection/android/app/build.gradle +++ b/lite/examples/object_detection/android/app/build.gradle @@ -40,6 +40,10 @@ android { dimension "tfliteInference" } } + + buildFeatures { + dataBinding = true + } } // import DownloadModels task @@ -54,12 +58,31 @@ dependencies { implementation fileTree(dir: 'libs', include: ['*.jar','*.aar']) interpreterImplementation project(":lib_interpreter") taskApiImplementation project(":lib_task_api") - implementation 'androidx.appcompat:appcompat:1.0.0' - implementation 'androidx.coordinatorlayout:coordinatorlayout:1.0.0' - implementation 'com.google.android.material:material:1.0.0' + implementation 'androidx.appcompat:appcompat:1.3.1' + implementation 'androidx.coordinatorlayout:coordinatorlayout:1.1.0' + implementation 'com.google.android.material:material:1.4.0' + + androidTestImplementation 'androidx.test.ext:junit:1.1.3' + androidTestImplementation 'com.google.truth:truth:1.1.3' + androidTestImplementation 'androidx.test:runner:1.4.0' + androidTestImplementation 'androidx.test:rules:1.4.0' + + // Required -- JUnit 4 framework + androidTestImplementation("junit:junit:4.13.2") + // Optional -- Robolectric environment + androidTestImplementation('androidx.test:core:1.4.0') + // Optional -- Mockito framework + androidTestImplementation("org.mockito:mockito-core:3.11.2") + implementation 'net.bytebuddy:byte-buddy-android-test:1.11.12' + implementation 'org.tensorflow:tensorflow-lite-support:0.2.0' + implementation 'org.mockito:mockito-android:3.11.2' - androidTestImplementation 'androidx.test.ext:junit:1.1.1' - androidTestImplementation 'com.google.truth:truth:1.0.1' - androidTestImplementation 'androidx.test:runner:1.2.0' - androidTestImplementation 'androidx.test:rules:1.1.0' + // CameraX dependencies + def camerax_version = "1.0.1" + // CameraX core library using camera2 implementation + implementation "androidx.camera:camera-camera2:$camerax_version" + // CameraX Lifecycle Library + implementation "androidx.camera:camera-lifecycle:$camerax_version" + // CameraX View class + implementation "androidx.camera:camera-view:1.0.0-alpha27" } diff --git a/lite/examples/object_detection/android/app/src/androidTest/assets/table_results.txt b/lite/examples/object_detection/android/app/src/androidTest/assets/table_results.txt index 11709e22f07..c345b7dfa52 100644 --- a/lite/examples/object_detection/android/app/src/androidTest/assets/table_results.txt +++ b/lite/examples/object_detection/android/app/src/androidTest/assets/table_results.txt @@ -1,4 +1,4 @@ -dining_table 27.492085 97.94615 623.1435 444.8627 0.48828125 -knife 342.53433 243.71082 583.89185 416.34595 0.4765625 -cup 68.025925 197.5857 202.02031 374.2206 0.4375 -book 185.43098 139.64153 244.51149 203.37737 0.3125 +knife 345.29675 242.38895 585.65424 415.0241 0.54 +dining_table 24.836613 95.182755 620.488 447.6261 0.55 +wine_glass 63.532368 202.38976 204.03336 387.60184 0.51 +book 186.38379 138.98523 242.53781 205.76802 0.32 \ No newline at end of file diff --git a/lite/examples/object_detection/android/app/src/androidTest/java/org/tensorflow/lite/examples/detection/DetectorTest.java b/lite/examples/object_detection/android/app/src/androidTest/java/org/tensorflow/lite/examples/detection/DetectorTest.java index e5e07ba5039..cdfa6610fe8 100644 --- 
a/lite/examples/object_detection/android/app/src/androidTest/java/org/tensorflow/lite/examples/detection/DetectorTest.java +++ b/lite/examples/object_detection/android/app/src/androidTest/java/org/tensorflow/lite/examples/detection/DetectorTest.java @@ -17,34 +17,43 @@ package org.tensorflow.lite.examples.detection; import static com.google.common.truth.Truth.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.mockito.MockitoAnnotations.openMocks; import static java.lang.Math.abs; import static java.lang.Math.max; import static java.lang.Math.min; +import android.content.Context; import android.content.res.AssetManager; import android.graphics.Bitmap; -import android.graphics.Bitmap.Config; import android.graphics.BitmapFactory; -import android.graphics.Canvas; -import android.graphics.Matrix; +import android.graphics.ImageFormat; import android.graphics.RectF; -import android.util.Size; +import android.media.Image; + +import androidx.test.core.app.ApplicationProvider; import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.platform.app.InstrumentationRegistry; + import java.io.IOException; import java.io.InputStream; +import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import java.util.Scanner; + import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; -import org.tensorflow.lite.examples.detection.env.ImageUtils; import org.tensorflow.lite.examples.detection.tflite.Detector; import org.tensorflow.lite.examples.detection.tflite.Detector.Recognition; import org.tensorflow.lite.examples.detection.tflite.TFLiteObjectDetectionAPIModel; +import org.tensorflow.lite.support.image.ColorSpaceType; -/** Golden test for Object Detection Reference app. */ +/** + * Golden test for Object Detection Reference app. 
+ */ @RunWith(AndroidJUnit4.class) public class DetectorTest { @@ -52,52 +61,33 @@ public class DetectorTest { private static final boolean IS_MODEL_QUANTIZED = true; private static final String MODEL_FILE = "detect.tflite"; private static final String LABELS_FILE = "labelmap.txt"; - private static final Size IMAGE_SIZE = new Size(640, 480); - private Detector detector; - private Bitmap croppedBitmap; - private Matrix frameToCropTransform; - private Matrix cropToFrameTransform; + private final Context applicationContext = ApplicationProvider.getApplicationContext(); @Before public void setUp() throws IOException { + openMocks(this); detector = TFLiteObjectDetectionAPIModel.create( - InstrumentationRegistry.getInstrumentation().getContext(), + applicationContext, MODEL_FILE, LABELS_FILE, MODEL_INPUT_SIZE, IS_MODEL_QUANTIZED); - int cropSize = MODEL_INPUT_SIZE; - int previewWidth = IMAGE_SIZE.getWidth(); - int previewHeight = IMAGE_SIZE.getHeight(); - int sensorOrientation = 0; - croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Config.ARGB_8888); - - frameToCropTransform = - ImageUtils.getTransformationMatrix( - previewWidth, previewHeight, - cropSize, cropSize, - sensorOrientation, false); - cropToFrameTransform = new Matrix(); - frameToCropTransform.invert(cropToFrameTransform); } @Test public void detectionResultsShouldNotChange() throws Exception { - Canvas canvas = new Canvas(croppedBitmap); - canvas.drawBitmap(loadImage("table.jpg"), frameToCropTransform, null); - final List<Recognition> results = detector.recognizeImage(croppedBitmap); + Bitmap assetsBitmap = loadImage("table.jpg"); + final List<Recognition> results = detector.recognizeImage(mockMediaImageFromBitmap(assetsBitmap, ColorSpaceType.NV21), 0); final List<Recognition> expected = loadRecognitions("table_results.txt"); for (Recognition target : expected) { // Find a matching result in results boolean matched = false; for (Recognition item : results) { - RectF bbox = new RectF(); - cropToFrameTransform.mapRect(bbox, item.getLocation()); if (item.getTitle().equals(target.getTitle()) - && matchBoundingBoxes(bbox, target.getLocation()) + && matchBoundingBoxes(item.getLocation(), target.getLocation()) && matchConfidence(item.getConfidence(), target.getConfidence())) { matched = true; break; @@ -135,7 +125,7 @@ private static Bitmap loadImage(String fileName) throws Exception { // category bbox.left bbox.top bbox.right bbox.bottom confidence // ... // Example: - // Apple 99 25 30 75 80 0.99 + // Apple 99 25 30 75 0.99 // Banana 25 90 75 200 0.98 // ...
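The body of loadRecognitions() is unchanged by this patch and is therefore mostly elided from the hunk below, so the format comment above is the only description of the golden file. For reference, a standalone sketch of a parser for that exact format — the class and helper names here are illustrative, not taken from the patch:

    import android.graphics.RectF;
    import java.io.InputStream;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.Scanner;

    final class GoldenResultsParser {

      /** Simple holder for one parsed golden detection. */
      static class GoldenDetection {
        final String category;
        final RectF box;
        final float confidence;

        GoldenDetection(String category, RectF box, float confidence) {
          this.category = category;
          this.box = box;
          this.confidence = confidence;
        }
      }

      /** Parses "category left top right bottom confidence" lines, e.g. table_results.txt. */
      static List<GoldenDetection> parse(InputStream stream) {
        List<GoldenDetection> result = new ArrayList<>();
        try (Scanner scanner = new Scanner(stream)) {
          while (scanner.hasNext()) {
            String category = scanner.next();
            // Note: nextFloat() is locale-sensitive; production code may instead
            // use Float.parseFloat(scanner.next()).
            float left = scanner.nextFloat();
            float top = scanner.nextFloat();
            float right = scanner.nextFloat();
            float bottom = scanner.nextFloat();
            float confidence = scanner.nextFloat();
            result.add(new GoldenDetection(category, new RectF(left, top, right, bottom), confidence));
          }
        }
        return result;
      }
    }
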
private static List<Recognition> loadRecognitions(String fileName) throws Exception { @@ -161,4 +151,137 @@ private static List<Recognition> loadRecognitions(String fileName) throws Except } return result; } + + private static YuvPlaneInfo createYuvPlaneInfo( + ColorSpaceType colorSpaceType, int width, int height) { + int uIndex = 0; + int vIndex = 0; + int uvPixelStride = 0; + int yBufferSize = width * height; + int uvBufferSize = ((width + 1) / 2) * ((height + 1) / 2); + int uvRowStride = 0; + switch (colorSpaceType) { + case NV12: + uIndex = yBufferSize; + vIndex = yBufferSize + 1; + uvPixelStride = 2; + uvRowStride = (width + 1) / 2 * 2; + break; + case NV21: + vIndex = yBufferSize; + uIndex = yBufferSize + 1; + uvPixelStride = 2; + uvRowStride = (width + 1) / 2 * 2; + break; + case YV12: + vIndex = yBufferSize; + uIndex = yBufferSize + uvBufferSize; + uvPixelStride = 1; + uvRowStride = (width + 1) / 2; + break; + case YV21: + uIndex = yBufferSize; + vIndex = yBufferSize + uvBufferSize; + uvPixelStride = 1; + uvRowStride = (width + 1) / 2; + break; + default: + throw new IllegalArgumentException( + "ColorSpaceType: " + colorSpaceType.name() + ", is unsupported."); + } + + return YuvPlaneInfo.create( + uIndex, + vIndex, + /*yRowStride=*/ width, + uvRowStride, + uvPixelStride, + yBufferSize, + uvBufferSize); + } + + private static byte[] getYuvBytesFromBitmap(Bitmap bitmap, ColorSpaceType colorSpaceType) { + int width = bitmap.getWidth(); + int height = bitmap.getHeight(); + int[] rgb = new int[width * height]; + bitmap.getPixels(rgb, 0, width, 0, 0, width, height); + + YuvPlaneInfo yuvPlaneInfo = createYuvPlaneInfo(colorSpaceType, width, height); + + byte[] yuv = new byte[yuvPlaneInfo.getYBufferSize() + yuvPlaneInfo.getUvBufferSize() * 2]; + int rgbIndex = 0; + int yIndex = 0; + int vIndex = yuvPlaneInfo.getVIndex(); + int uIndex = yuvPlaneInfo.getUIndex(); + int uvPixelStride = yuvPlaneInfo.getUvPixelStride(); + + for (int j = 0; j < height; ++j) { + for (int i = 0; i < width; ++i) { + int r = (rgb[rgbIndex] >> 16) & 0xff; + int g = (rgb[rgbIndex] >> 8) & 0xff; + int b = rgb[rgbIndex] & 0xff; + + int y = (int) (0.299f * r + 0.587f * g + 0.114f * b); + int v = (int) ((r - y) * 0.713f + 128); + int u = (int) ((b - y) * 0.564f + 128); + + yuv[yIndex++] = (byte) max(0, min(255, y)); + byte uByte = (byte) max(0, min(255, u)); + byte vByte = (byte) max(0, min(255, v)); + + if ((i & 0x01) == 0 && (j & 0x01) == 0) { + yuv[vIndex] = vByte; + yuv[uIndex] = uByte; + vIndex += uvPixelStride; + uIndex += uvPixelStride; + } + + rgbIndex++; + } + } + return yuv; + } + + public static Image mockMediaImageFromBitmap(Bitmap bitmap, ColorSpaceType colorSpaceType) { + // Converts the RGB Bitmap to YUV bytes and wraps them in a mocked media.Image + byte[] yuv = getYuvBytesFromBitmap(bitmap, colorSpaceType); + + int width = bitmap.getWidth(); + int height = bitmap.getHeight(); + YuvPlaneInfo yuvPlaneInfo = createYuvPlaneInfo(colorSpaceType, width, height); + + ByteBuffer yuvBuffer = + ByteBuffer.allocateDirect( + yuvPlaneInfo.getYBufferSize() + yuvPlaneInfo.getUvBufferSize() * 2); + yuvBuffer.put(yuv); + yuvBuffer.rewind(); + ByteBuffer yPlane = yuvBuffer.slice(); + + yuvBuffer.rewind(); + yuvBuffer.position(yuvPlaneInfo.getUIndex()); + ByteBuffer uPlane = yuvBuffer.slice(); + + yuvBuffer.rewind(); + yuvBuffer.position(yuvPlaneInfo.getVIndex()); + ByteBuffer vPlane = yuvBuffer.slice(); + + Image.Plane mockPlaneY = mock(Image.Plane.class); + when(mockPlaneY.getBuffer()).thenReturn(yPlane); + when(mockPlaneY.getRowStride()).thenReturn(yuvPlaneInfo.getYRowStride());
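For reference, the getYuvBytesFromBitmap() loop above converts RGB to YUV with BT.601-style constants (Y from 0.299 R + 0.587 G + 0.114 B; Cr and Cb from scaled R−Y and B−Y differences offset by 128) and subsamples chroma 2×2 by writing U and V only at even rows and columns. The same per-pixel math in isolation, as a runnable sketch (class and method names are illustrative, not from the patch):

    final class YuvMath {
      /** Returns {y, u, v} for one ARGB pixel, clamped to [0, 255] (BT.601-style constants). */
      static int[] rgbToYuv(int argb) {
        int r = (argb >> 16) & 0xff;
        int g = (argb >> 8) & 0xff;
        int b = argb & 0xff;

        int y = (int) (0.299f * r + 0.587f * g + 0.114f * b);
        int v = (int) ((r - y) * 0.713f + 128); // Cr
        int u = (int) ((b - y) * 0.564f + 128); // Cb

        return new int[] {
          Math.max(0, Math.min(255, y)),
          Math.max(0, Math.min(255, u)),
          Math.max(0, Math.min(255, v))
        };
      }

      public static void main(String[] args) {
        // Pure red (255, 0, 0): expect Y = 76, Cb (u) = 85, Cr (v) = 255.
        int[] yuv = rgbToYuv(0xffff0000);
        System.out.println(yuv[0] + " " + yuv[1] + " " + yuv[2]);
      }
    }
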
+ Image.Plane mockPlaneU = mock(Image.Plane.class); + when(mockPlaneU.getBuffer()).thenReturn(uPlane); + when(mockPlaneU.getRowStride()).thenReturn(yuvPlaneInfo.getUvRowStride()); + when(mockPlaneU.getPixelStride()).thenReturn(yuvPlaneInfo.getUvPixelStride()); + Image.Plane mockPlaneV = mock(Image.Plane.class); + when(mockPlaneV.getBuffer()).thenReturn(vPlane); + when(mockPlaneV.getRowStride()).thenReturn(yuvPlaneInfo.getUvRowStride()); + when(mockPlaneV.getPixelStride()).thenReturn(yuvPlaneInfo.getUvPixelStride()); + + Image imageMock = mock(Image.class); + when(imageMock.getFormat()).thenReturn(ImageFormat.YUV_420_888); + when(imageMock.getPlanes()).thenReturn(new Image.Plane[]{mockPlaneY, mockPlaneU, mockPlaneV}); + when(imageMock.getWidth()).thenReturn(width); + when(imageMock.getHeight()).thenReturn(height); + return imageMock; + } } diff --git a/lite/examples/object_detection/android/app/src/androidTest/java/org/tensorflow/lite/examples/detection/YuvPlaneInfo.java b/lite/examples/object_detection/android/app/src/androidTest/java/org/tensorflow/lite/examples/detection/YuvPlaneInfo.java new file mode 100644 index 00000000000..f4e1b1e7cd8 --- /dev/null +++ b/lite/examples/object_detection/android/app/src/androidTest/java/org/tensorflow/lite/examples/detection/YuvPlaneInfo.java @@ -0,0 +1,88 @@ +/* + * Copyright 2020 The TensorFlow Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.tensorflow.lite.examples.detection; + +public class YuvPlaneInfo { + + private final int uIndex; + private final int vIndex; + private final int uvPixelStride; + private final int yBufferSize; + private final int uvBufferSize; + private final int uvRowStride; + private final int yRowStride; + + public YuvPlaneInfo(int uIndex, + int vIndex, + int yRowStride, + int uvRowStride, + int uvPixelStride, + int yBufferSize, + int uvBufferSize) { + this.uIndex = uIndex; + this.vIndex = vIndex; + this.yRowStride = yRowStride; + this.uvRowStride = uvRowStride; + this.uvPixelStride = uvPixelStride; + this.yBufferSize = yBufferSize; + this.uvBufferSize = uvBufferSize; + } + + public static YuvPlaneInfo create(int uIndex, + int vIndex, + int yRowStride, + int uvRowStride, + int uvPixelStride, + int yBufferSize, + int uvBufferSize) { + return new YuvPlaneInfo(uIndex, vIndex, yRowStride, uvRowStride, uvPixelStride, yBufferSize, uvBufferSize); + } + + public int getYBufferSize() { + return yBufferSize; + } + + public int getUvBufferSize() { + return uvBufferSize; + } + + public int getVIndex() { + return vIndex; + } + + public int getUIndex() { + return uIndex; + } + + public int getUvPixelStride() { + return uvPixelStride; + } + + public int getYRowStride() { + return yRowStride; + } + + public int getUvRowStride() { + return uvRowStride; + } + +} diff --git a/lite/examples/object_detection/android/app/src/main/AndroidManifest.xml b/lite/examples/object_detection/android/app/src/main/AndroidManifest.xml index daee37a1e81..e8c8caee587 100644 --- a/lite/examples/object_detection/android/app/src/main/AndroidManifest.xml +++ b/lite/examples/object_detection/android/app/src/main/AndroidManifest.xml @@ -17,7 +17,7 @@ android:theme="@style/AppTheme.ObjectDetection"> - <activity android:name=".DetectorActivity" + <activity android:name=".CameraActivity" diff --git a/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/CameraActivity.java b/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/CameraActivity.java index 8afe3d7d0af..b78ccb46024 100644 --- a/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/CameraActivity.java +++ b/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/CameraActivity.java @@ -17,47 +17,52 @@ package org.tensorflow.lite.examples.detection; import android.Manifest; -import android.app.Fragment; -import android.content.Context; +import android.annotation.SuppressLint; import android.content.pm.PackageManager; -import android.hardware.Camera; -import android.hardware.camera2.CameraAccessException; -import android.hardware.camera2.CameraCharacteristics; -import android.hardware.camera2.CameraManager; -import android.hardware.camera2.params.StreamConfigurationMap; -import android.media.Image; -import android.media.Image.Plane; -import android.media.ImageReader; -import android.media.ImageReader.OnImageAvailableListener; +import android.graphics.RectF; +import android.graphics.Typeface; import android.os.Build; import android.os.Bundle; import android.os.Handler; import android.os.HandlerThread; -import android.os.Trace; -import androidx.appcompat.app.AppCompatActivity; -import androidx.appcompat.widget.SwitchCompat; -import androidx.appcompat.widget.Toolbar; +import android.os.SystemClock; +import android.util.Log; import android.util.Size; -import android.view.Surface; +import android.util.TypedValue; import 
android.view.View; import android.view.ViewTreeObserver; import android.view.WindowManager; -import android.widget.CompoundButton; import android.widget.ImageView; import android.widget.LinearLayout; -import android.widget.TextView; import android.widget.Toast; + import androidx.annotation.NonNull; +import androidx.appcompat.app.AppCompatActivity; +import androidx.camera.core.AspectRatio; +import androidx.camera.core.Camera; +import androidx.camera.core.CameraSelector; +import androidx.camera.core.ImageAnalysis; +import androidx.camera.core.Preview; +import androidx.camera.lifecycle.ProcessCameraProvider; +import androidx.core.content.ContextCompat; +import androidx.databinding.DataBindingUtil; + import com.google.android.material.bottomsheet.BottomSheetBehavior; -import java.nio.ByteBuffer; -import org.tensorflow.lite.examples.detection.env.ImageUtils; +import com.google.common.util.concurrent.ListenableFuture; + +import org.tensorflow.lite.examples.detection.databinding.TfeOdActivityCameraBinding; +import org.tensorflow.lite.examples.detection.env.BorderedText; import org.tensorflow.lite.examples.detection.env.Logger; +import org.tensorflow.lite.examples.detection.tflite.Detector; +import org.tensorflow.lite.examples.detection.tflite.TFLiteObjectDetectionAPIModel; +import org.tensorflow.lite.examples.detection.tracking.MultiBoxTracker; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.ExecutionException; -public abstract class CameraActivity extends AppCompatActivity - implements OnImageAvailableListener, - Camera.PreviewCallback, - CompoundButton.OnCheckedChangeListener, - View.OnClickListener { +public class CameraActivity extends AppCompatActivity implements View.OnClickListener { private static final Logger LOGGER = new Logger(); private static final int PERMISSIONS_REQUEST = 1; @@ -65,26 +70,40 @@ public abstract class CameraActivity extends AppCompatActivity private static final String PERMISSION_CAMERA = Manifest.permission.CAMERA; protected int previewWidth = 0; protected int previewHeight = 0; + + // To draw the bounding boxes private boolean debug = false; + private Handler handler; private HandlerThread handlerThread; - private boolean useCamera2API; + private boolean firstTimeStartModel = true; private boolean isProcessingFrame = false; - private byte[][] yuvBytes = new byte[3][]; - private int[] rgbBytes = null; - private int yRowStride; - private Runnable postInferenceCallback; - private Runnable imageConverter; private LinearLayout bottomSheetLayout; - private LinearLayout gestureLayout; private BottomSheetBehavior sheetBehavior; - protected TextView frameValueTextView, cropValueTextView, inferenceTimeTextView; + private static final int INPUT_SIZE = 300; + private static final boolean IS_QUANTIZED = true; + private static final String MODEL_FILE = "detect.tflite"; + private static final String LABELS_FILE = "labelmap.txt"; + + // Minimum detection confidence to track a detection. 
+ private static final float MINIMUM_CONFIDENCE_TF_OD_API = 0.5f; + private static final Size DESIRED_ANALYSIS_SIZE = new Size(640, 480); + private static final float TEXT_SIZE_DIP = 10; + private long lastProcessingTimeMs; + protected ImageView bottomSheetArrowImageView; - private ImageView plusImageView, minusImageView; - private SwitchCompat apiSwitchCompat; - private TextView threadsTextView; + + private Integer sensorOrientation; + + private Detector detector; + + //Data Binding + private TfeOdActivityCameraBinding binding; + + private long timestamp = 0; + private MultiBoxTracker tracker; @Override protected void onCreate(final Bundle savedInstanceState) { @@ -92,61 +111,48 @@ protected void onCreate(final Bundle savedInstanceState) { super.onCreate(null); getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON); - setContentView(R.layout.tfe_od_activity_camera); - Toolbar toolbar = findViewById(R.id.toolbar); - setSupportActionBar(toolbar); - getSupportActionBar().setDisplayShowTitleEnabled(false); + binding = DataBindingUtil.setContentView(this, R.layout.tfe_od_activity_camera); if (hasPermission()) { - setFragment(); + // Start CameraX + startCamera(); } else { + // Requesting Permission for CameraX requestPermission(); } - threadsTextView = findViewById(R.id.threads); - plusImageView = findViewById(R.id.plus); - minusImageView = findViewById(R.id.minus); - apiSwitchCompat = findViewById(R.id.api_info_switch); bottomSheetLayout = findViewById(R.id.bottom_sheet_layout); - gestureLayout = findViewById(R.id.gesture_layout); sheetBehavior = BottomSheetBehavior.from(bottomSheetLayout); bottomSheetArrowImageView = findViewById(R.id.bottom_sheet_arrow); - ViewTreeObserver vto = gestureLayout.getViewTreeObserver(); + // Controlling bottom modal sheet + ViewTreeObserver vto = binding.bottomSheetLayout.gestureLayout.getViewTreeObserver(); vto.addOnGlobalLayoutListener( new ViewTreeObserver.OnGlobalLayoutListener() { @Override public void onGlobalLayout() { - if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) { - gestureLayout.getViewTreeObserver().removeGlobalOnLayoutListener(this); - } else { - gestureLayout.getViewTreeObserver().removeOnGlobalLayoutListener(this); - } - // int width = bottomSheetLayout.getMeasuredWidth(); - int height = gestureLayout.getMeasuredHeight(); - + binding.bottomSheetLayout.gestureLayout.getViewTreeObserver().removeOnGlobalLayoutListener(this); + int height = binding.bottomSheetLayout.gestureLayout.getMeasuredHeight(); sheetBehavior.setPeekHeight(height); } }); sheetBehavior.setHideable(false); - sheetBehavior.setBottomSheetCallback( + sheetBehavior.addBottomSheetCallback( new BottomSheetBehavior.BottomSheetCallback() { @Override public void onStateChanged(@NonNull View bottomSheet, int newState) { switch (newState) { case BottomSheetBehavior.STATE_HIDDEN: break; - case BottomSheetBehavior.STATE_EXPANDED: - { - bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_down); - } - break; - case BottomSheetBehavior.STATE_COLLAPSED: - { - bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_up); - } - break; + case BottomSheetBehavior.STATE_EXPANDED: { + bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_down); + } + break; + case BottomSheetBehavior.STATE_COLLAPSED: { + bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_up); + } + break; case BottomSheetBehavior.STATE_DRAGGING: break; case BottomSheetBehavior.STATE_SETTLING: @@ -156,139 +162,151 @@ public void onStateChanged(@NonNull View 
bottomSheet, int newState) { } @Override - public void onSlide(@NonNull View bottomSheet, float slideOffset) {} + public void onSlide(@NonNull View bottomSheet, float slideOffset) { + } }); - frameValueTextView = findViewById(R.id.frame_info); - cropValueTextView = findViewById(R.id.crop_info); - inferenceTimeTextView = findViewById(R.id.inference_info); - - apiSwitchCompat.setOnCheckedChangeListener(this); - - plusImageView.setOnClickListener(this); - minusImageView.setOnClickListener(this); - } - - protected int[] getRgbBytes() { - imageConverter.run(); - return rgbBytes; + binding.bottomSheetLayout.plus.setOnClickListener(this); + binding.bottomSheetLayout.minus.setOnClickListener(this); } - protected int getLuminanceStride() { - return yRowStride; - } + private void onStartCameraX(final int rotation) { + final float textSize = + TypedValue.applyDimension( + TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics()); + BorderedText borderedText = new BorderedText(textSize); + borderedText.setTypeface(Typeface.MONOSPACE); + Log.v("Camera Image Rotation", String.valueOf(rotation)); - protected byte[] getLuminance() { - return yuvBytes[0]; - } - - /** Callback for android.hardware.Camera API */ - @Override - public void onPreviewFrame(final byte[] bytes, final Camera camera) { - if (isProcessingFrame) { - LOGGER.w("Dropping frame!"); - return; - } + sensorOrientation = rotation; + previewWidth = DESIRED_ANALYSIS_SIZE.getWidth(); + previewHeight = DESIRED_ANALYSIS_SIZE.getHeight(); + tracker = new MultiBoxTracker(this); try { - // Initialize the storage bitmaps once when the resolution is known. - if (rgbBytes == null) { - Camera.Size previewSize = camera.getParameters().getPreviewSize(); - previewHeight = previewSize.height; - previewWidth = previewSize.width; - rgbBytes = new int[previewWidth * previewHeight]; - onPreviewSizeChosen(new Size(previewSize.width, previewSize.height), 90); - } - } catch (final Exception e) { - LOGGER.e(e, "Exception!"); - return; + detector = TFLiteObjectDetectionAPIModel.create( + this, + MODEL_FILE, + LABELS_FILE, + INPUT_SIZE, + IS_QUANTIZED); + setUseNNAPI(true); + } catch (final IOException e) { + e.printStackTrace(); + LOGGER.e(e, "Exception initializing Detector!"); + Toast toast = Toast.makeText( + getApplicationContext(), "Detector could not be initialized", Toast.LENGTH_SHORT); + toast.show(); + finish(); } - isProcessingFrame = true; - yuvBytes[0] = bytes; - yRowStride = previewWidth; + LOGGER.i("Camera orientation relative to screen canvas: %d", sensorOrientation); + LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight); - imageConverter = - new Runnable() { - @Override - public void run() { - ImageUtils.convertYUV420SPToARGB8888(bytes, previewWidth, previewHeight, rgbBytes); + binding.trackingOverlay.addCallback( + canvas -> { + tracker.draw(canvas); + if (isDebug()) { + tracker.drawDebug(canvas); } - }; + }); - postInferenceCallback = - new Runnable() { - @Override - public void run() { - camera.addCallbackBuffer(bytes); - isProcessingFrame = false; - } - }; - processImage(); + tracker.setFrameConfiguration(previewWidth, previewHeight, sensorOrientation); } - /** Callback for Camera2 API */ - @Override - public void onImageAvailable(final ImageReader reader) { - // We need wait until we have some size from onPreviewSizeChosen - if (previewWidth == 0 || previewHeight == 0) { - return; - } - if (rgbBytes == null) { - rgbBytes = new int[previewWidth * previewHeight]; - } - try { - final Image image = 
reader.acquireLatestImage(); + @SuppressLint("UnsafeOptInUsageError") + private void startCamera() { + ListenableFuture<ProcessCameraProvider> cameraProviderFuture = ProcessCameraProvider.getInstance(this); + + cameraProviderFuture.addListener(() -> { + try { + ProcessCameraProvider cameraProvider = cameraProviderFuture.get(); + Preview preview = new Preview.Builder() + .setTargetAspectRatio(AspectRatio.RATIO_4_3) + .build(); + + // Selecting the Camera here - Back Camera + CameraSelector cameraSelector = new CameraSelector.Builder() + .requireLensFacing(CameraSelector.LENS_FACING_BACK) + .build(); + + // Images are processed by passing an executor in which the image analysis is run + ImageAnalysis imageAnalysis = + new ImageAnalysis.Builder() + .setTargetAspectRatio(AspectRatio.RATIO_4_3) + .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST) + .build(); + + imageAnalysis.setAnalyzer(ContextCompat.getMainExecutor(this), image -> { + int rotationDegrees = image.getImageInfo().getRotationDegrees(); + Log.i("Rotation Degrees", String.valueOf(rotationDegrees)); + Log.i("Rotation preview", String.valueOf(binding.previewView.getDisplay().getRotation())); + + ++timestamp; + final long currTimestamp = timestamp; + + if (firstTimeStartModel) { + onStartCameraX(rotationDegrees); + firstTimeStartModel = false; + } - if (image == null) { - return; - } + if (!isProcessingFrame) { + runInBackground( + () -> { + if (detector != null) { + final long startTime = SystemClock.uptimeMillis(); + final List<Detector.Recognition> results = detector.recognizeImage(image.getImage(), sensorOrientation); + lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime; + LOGGER.e("Degrees: %s", results); + + final List<Detector.Recognition> mappedRecognitions = + new ArrayList<>(); + + for (final Detector.Recognition result : results) { + final RectF location = result.getLocation(); + if (location != null && result.getConfidence() >= MINIMUM_CONFIDENCE_TF_OD_API) { + result.setLocation(location); + mappedRecognitions.add(result); + } + } + + tracker.trackResults(mappedRecognitions, currTimestamp); + binding.trackingOverlay.postInvalidate(); + + runOnUiThread( + () -> { + showFrameInfo(DESIRED_ANALYSIS_SIZE.getWidth() + "x" + DESIRED_ANALYSIS_SIZE.getHeight()); + showCropInfo(INPUT_SIZE + "x" + INPUT_SIZE); + showInference(lastProcessingTimeMs + "ms"); + }); + } + image.close(); + isProcessingFrame = false; + }); + isProcessingFrame = true; + } + }); - if (isProcessingFrame) { - image.close(); - return; + // Connect the preview use case to the previewView + preview.setSurfaceProvider(binding.previewView.getSurfaceProvider()); + + // Attach use cases to the camera with the same lifecycle owner + if (cameraProvider != null) { + Camera camera = cameraProvider.bindToLifecycle( + this, + cameraSelector, + imageAnalysis, + preview); + } + + } catch (ExecutionException | InterruptedException e) { + LOGGER.d("cameraException", e.toString()); } - isProcessingFrame = true; - Trace.beginSection("imageAvailable"); - final Plane[] planes = image.getPlanes(); - fillBytes(planes, yuvBytes); - yRowStride = planes[0].getRowStride(); - final int uvRowStride = planes[1].getRowStride(); - final int uvPixelStride = planes[1].getPixelStride(); - - imageConverter = - new Runnable() { - @Override - public void run() { - ImageUtils.convertYUV420ToARGB8888( - yuvBytes[0], - yuvBytes[1], - yuvBytes[2], - previewWidth, - previewHeight, - yRowStride, - uvRowStride, - uvPixelStride, - rgbBytes); - } - }; - - postInferenceCallback = - new Runnable() { - @Override - public void run() { -
image.close(); - isProcessingFrame = false; - } - }; + }, ContextCompat.getMainExecutor(this)); + } - processImage(); - } catch (final Exception e) { - LOGGER.e(e, "Exception!"); - Trace.endSection(); - return; - } - Trace.endSection(); + public boolean isDebug() { + return debug; } @Override @@ -347,7 +365,8 @@ public void onRequestPermissionsResult( super.onRequestPermissionsResult(requestCode, permissions, grantResults); if (requestCode == PERMISSIONS_REQUEST) { if (allPermissionsGranted(grantResults)) { - setFragment(); + // Start CameraX + startCamera(); } else { requestPermission(); } @@ -375,176 +394,63 @@ private void requestPermission() { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { if (shouldShowRequestPermissionRationale(PERMISSION_CAMERA)) { Toast.makeText( - CameraActivity.this, - "Camera permission is required for this demo", - Toast.LENGTH_LONG) + CameraActivity.this, + "Camera permission is required for this demo", + Toast.LENGTH_LONG) .show(); - } - requestPermissions(new String[] {PERMISSION_CAMERA}, PERMISSIONS_REQUEST); - } - } - - // Returns true if the device supports the required hardware level, or better. - private boolean isHardwareLevelSupported( - CameraCharacteristics characteristics, int requiredLevel) { - int deviceLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); - if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) { - return requiredLevel == deviceLevel; - } - // deviceLevel is not LEGACY, can use numerical sort - return requiredLevel <= deviceLevel; - } - - private String chooseCamera() { - final CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE); - try { - for (final String cameraId : manager.getCameraIdList()) { - final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId); - - // We don't use a front facing camera in this sample. - final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING); - if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) { - continue; - } - - final StreamConfigurationMap map = - characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); - - if (map == null) { - continue; - } - // Fallback to camera1 API for internal cameras that don't have full support. - // This should help with legacy situations where using the camera2 API causes - // distorted or otherwise broken previews. 
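One note on the permission flow retained above: requestPermissions()/onRequestPermissionsResult() still work, but AndroidX now favors the ActivityResult API for new code. A hypothetical equivalent using registerForActivityResult — an illustration only, not something this patch adds (it assumes the androidx.activity dependency pulled in by appcompat 1.3.x):

    import android.Manifest;
    import android.content.pm.PackageManager;
    import androidx.activity.result.ActivityResultLauncher;
    import androidx.activity.result.contract.ActivityResultContracts;
    import androidx.appcompat.app.AppCompatActivity;
    import androidx.core.content.ContextCompat;

    public class PermissionExampleActivity extends AppCompatActivity {

      // Must be registered before the activity reaches STARTED, e.g. as a field initializer.
      private final ActivityResultLauncher<String> cameraPermissionLauncher =
          registerForActivityResult(
              new ActivityResultContracts.RequestPermission(),
              granted -> {
                if (granted) {
                  // startCamera();
                } else {
                  finish(); // or explain why the camera is required
                }
              });

      private void ensureCameraPermission() {
        if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
            == PackageManager.PERMISSION_GRANTED) {
          // startCamera();
        } else {
          cameraPermissionLauncher.launch(Manifest.permission.CAMERA);
        }
      }
    }
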
- useCamera2API = - (facing == CameraCharacteristics.LENS_FACING_EXTERNAL) - || isHardwareLevelSupported( - characteristics, CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL); - LOGGER.i("Camera API lv2?: %s", useCamera2API); - return cameraId; } - } catch (CameraAccessException e) { - LOGGER.e(e, "Not allowed to access camera"); + requestPermissions(new String[]{PERMISSION_CAMERA}, PERMISSIONS_REQUEST); } - - return null; } - protected void setFragment() { - String cameraId = chooseCamera(); - - Fragment fragment; - if (useCamera2API) { - CameraConnectionFragment camera2Fragment = - CameraConnectionFragment.newInstance( - new CameraConnectionFragment.ConnectionCallback() { - @Override - public void onPreviewSizeChosen(final Size size, final int rotation) { - previewHeight = size.getHeight(); - previewWidth = size.getWidth(); - CameraActivity.this.onPreviewSizeChosen(size, rotation); - } - }, - this, - getLayoutId(), - getDesiredPreviewFrameSize()); - - camera2Fragment.setCamera(cameraId); - fragment = camera2Fragment; - } else { - fragment = - new LegacyCameraConnectionFragment(this, getLayoutId(), getDesiredPreviewFrameSize()); - } - - getFragmentManager().beginTransaction().replace(R.id.container, fragment).commit(); - } - - protected void fillBytes(final Plane[] planes, final byte[][] yuvBytes) { - // Because of the variable row stride it's not possible to know in - // advance the actual necessary dimensions of the yuv planes. - for (int i = 0; i < planes.length; ++i) { - final ByteBuffer buffer = planes[i].getBuffer(); - if (yuvBytes[i] == null) { - LOGGER.d("Initializing buffer %d at size %d", i, buffer.capacity()); - yuvBytes[i] = new byte[buffer.capacity()]; - } - buffer.get(yuvBytes[i]); - } + protected void showFrameInfo(String frameInfo) { + binding.bottomSheetLayout.frameInfo.setText(frameInfo); } - public boolean isDebug() { - return debug; + protected void showCropInfo(String cropInfo) { + binding.bottomSheetLayout.cropInfo.setText(cropInfo); } - protected void readyForNextImage() { - if (postInferenceCallback != null) { - postInferenceCallback.run(); - } + protected void showInference(String inferenceTime) { + binding.bottomSheetLayout.inferenceInfo.setText(inferenceTime); } - protected int getScreenOrientation() { - switch (getWindowManager().getDefaultDisplay().getRotation()) { - case Surface.ROTATION_270: - return 270; - case Surface.ROTATION_180: - return 180; - case Surface.ROTATION_90: - return 90; - default: - return 0; - } + protected void setUseNNAPI(final boolean isChecked) { + runInBackground( + () -> { + try { + detector.setUseNNAPI(isChecked); + } catch (UnsupportedOperationException e) { + LOGGER.e(e, "Failed to set \"Use NNAPI\"."); + } + }); } - @Override - public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { - setUseNNAPI(isChecked); - if (isChecked) apiSwitchCompat.setText("NNAPI"); - else apiSwitchCompat.setText("TFLITE"); + private void setNumThreads(final int numThreads) { + runInBackground(() -> { + detector.setNumThreads(numThreads); + }); } @Override public void onClick(View v) { if (v.getId() == R.id.plus) { - String threads = threadsTextView.getText().toString().trim(); + String threads = binding.bottomSheetLayout.threads.getText().toString().trim(); int numThreads = Integer.parseInt(threads); if (numThreads >= 9) return; numThreads++; - threadsTextView.setText(String.valueOf(numThreads)); + binding.bottomSheetLayout.threads.setText(String.valueOf(numThreads)); setNumThreads(numThreads); } else if (v.getId() == 
R.id.minus) { - String threads = threadsTextView.getText().toString().trim(); + String threads = binding.bottomSheetLayout.threads.getText().toString().trim(); int numThreads = Integer.parseInt(threads); if (numThreads == 1) { return; } numThreads--; - threadsTextView.setText(String.valueOf(numThreads)); + binding.bottomSheetLayout.threads.setText(String.valueOf(numThreads)); setNumThreads(numThreads); } } - - protected void showFrameInfo(String frameInfo) { - frameValueTextView.setText(frameInfo); - } - - protected void showCropInfo(String cropInfo) { - cropValueTextView.setText(cropInfo); - } - - protected void showInference(String inferenceTime) { - inferenceTimeTextView.setText(inferenceTime); - } - - protected abstract void processImage(); - - protected abstract void onPreviewSizeChosen(final Size size, final int rotation); - - protected abstract int getLayoutId(); - - protected abstract Size getDesiredPreviewFrameSize(); - - protected abstract void setNumThreads(int numThreads); - - protected abstract void setUseNNAPI(boolean isChecked); -} +} \ No newline at end of file diff --git a/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/CameraConnectionFragment.java b/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/CameraConnectionFragment.java deleted file mode 100644 index 641f3084bda..00000000000 --- a/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/CameraConnectionFragment.java +++ /dev/null @@ -1,569 +0,0 @@ -/* - * Copyright 2019 The TensorFlow Authors. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.tensorflow.lite.examples.detection; - -import android.annotation.SuppressLint; -import android.app.Activity; -import android.app.AlertDialog; -import android.app.Dialog; -import android.app.DialogFragment; -import android.app.Fragment; -import android.content.Context; -import android.content.DialogInterface; -import android.content.res.Configuration; -import android.graphics.ImageFormat; -import android.graphics.Matrix; -import android.graphics.RectF; -import android.graphics.SurfaceTexture; -import android.hardware.camera2.CameraAccessException; -import android.hardware.camera2.CameraCaptureSession; -import android.hardware.camera2.CameraCharacteristics; -import android.hardware.camera2.CameraDevice; -import android.hardware.camera2.CameraManager; -import android.hardware.camera2.CaptureRequest; -import android.hardware.camera2.CaptureResult; -import android.hardware.camera2.TotalCaptureResult; -import android.hardware.camera2.params.StreamConfigurationMap; -import android.media.ImageReader; -import android.media.ImageReader.OnImageAvailableListener; -import android.os.Bundle; -import android.os.Handler; -import android.os.HandlerThread; -import android.text.TextUtils; -import android.util.Size; -import android.util.SparseIntArray; -import android.view.LayoutInflater; -import android.view.Surface; -import android.view.TextureView; -import android.view.View; -import android.view.ViewGroup; -import android.widget.Toast; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Comparator; -import java.util.List; -import java.util.concurrent.Semaphore; -import java.util.concurrent.TimeUnit; -import org.tensorflow.lite.examples.detection.customview.AutoFitTextureView; -import org.tensorflow.lite.examples.detection.env.Logger; - -@SuppressLint("ValidFragment") -public class CameraConnectionFragment extends Fragment { - private static final Logger LOGGER = new Logger(); - - /** - * The camera preview size will be chosen to be the smallest frame by pixel size capable of - * containing a DESIRED_SIZE x DESIRED_SIZE square. - */ - private static final int MINIMUM_PREVIEW_SIZE = 320; - - /** Conversion from screen rotation to JPEG orientation. */ - private static final SparseIntArray ORIENTATIONS = new SparseIntArray(); - - private static final String FRAGMENT_DIALOG = "dialog"; - - static { - ORIENTATIONS.append(Surface.ROTATION_0, 90); - ORIENTATIONS.append(Surface.ROTATION_90, 0); - ORIENTATIONS.append(Surface.ROTATION_180, 270); - ORIENTATIONS.append(Surface.ROTATION_270, 180); - } - - /** A {@link Semaphore} to prevent the app from exiting before closing the camera. */ - private final Semaphore cameraOpenCloseLock = new Semaphore(1); - /** A {@link OnImageAvailableListener} to receive frames as they are available. */ - private final OnImageAvailableListener imageListener; - /** The input size in pixels desired by TensorFlow (width and height of a square bitmap). */ - private final Size inputSize; - /** The layout identifier to inflate for this Fragment. 
*/ - private final int layout; - - private final ConnectionCallback cameraConnectionCallback; - private final CameraCaptureSession.CaptureCallback captureCallback = - new CameraCaptureSession.CaptureCallback() { - @Override - public void onCaptureProgressed( - final CameraCaptureSession session, - final CaptureRequest request, - final CaptureResult partialResult) {} - - @Override - public void onCaptureCompleted( - final CameraCaptureSession session, - final CaptureRequest request, - final TotalCaptureResult result) {} - }; - /** ID of the current {@link CameraDevice}. */ - private String cameraId; - /** An {@link AutoFitTextureView} for camera preview. */ - private AutoFitTextureView textureView; - /** A {@link CameraCaptureSession } for camera preview. */ - private CameraCaptureSession captureSession; - /** A reference to the opened {@link CameraDevice}. */ - private CameraDevice cameraDevice; - /** The rotation in degrees of the camera sensor from the display. */ - private Integer sensorOrientation; - /** The {@link Size} of camera preview. */ - private Size previewSize; - /** An additional thread for running tasks that shouldn't block the UI. */ - private HandlerThread backgroundThread; - /** A {@link Handler} for running tasks in the background. */ - private Handler backgroundHandler; - /** An {@link ImageReader} that handles preview frame capture. */ - private ImageReader previewReader; - /** {@link CaptureRequest.Builder} for the camera preview */ - private CaptureRequest.Builder previewRequestBuilder; - /** {@link CaptureRequest} generated by {@link #previewRequestBuilder} */ - private CaptureRequest previewRequest; - /** {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its state. */ - private final CameraDevice.StateCallback stateCallback = - new CameraDevice.StateCallback() { - @Override - public void onOpened(final CameraDevice cd) { - // This method is called when the camera is opened. We start camera preview here. - cameraOpenCloseLock.release(); - cameraDevice = cd; - createCameraPreviewSession(); - } - - @Override - public void onDisconnected(final CameraDevice cd) { - cameraOpenCloseLock.release(); - cd.close(); - cameraDevice = null; - } - - @Override - public void onError(final CameraDevice cd, final int error) { - cameraOpenCloseLock.release(); - cd.close(); - cameraDevice = null; - final Activity activity = getActivity(); - if (null != activity) { - activity.finish(); - } - } - }; - /** - * {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a {@link - * TextureView}. 
- */ - private final TextureView.SurfaceTextureListener surfaceTextureListener = - new TextureView.SurfaceTextureListener() { - @Override - public void onSurfaceTextureAvailable( - final SurfaceTexture texture, final int width, final int height) { - openCamera(width, height); - } - - @Override - public void onSurfaceTextureSizeChanged( - final SurfaceTexture texture, final int width, final int height) { - configureTransform(width, height); - } - - @Override - public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) { - return true; - } - - @Override - public void onSurfaceTextureUpdated(final SurfaceTexture texture) {} - }; - - private CameraConnectionFragment( - final ConnectionCallback connectionCallback, - final OnImageAvailableListener imageListener, - final int layout, - final Size inputSize) { - this.cameraConnectionCallback = connectionCallback; - this.imageListener = imageListener; - this.layout = layout; - this.inputSize = inputSize; - } - - /** - * Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose - * width and height are at least as large as the minimum of both, or an exact match if possible. - * - * @param choices The list of sizes that the camera supports for the intended output class - * @param width The minimum desired width - * @param height The minimum desired height - * @return The optimal {@code Size}, or an arbitrary one if none were big enough - */ - protected static Size chooseOptimalSize(final Size[] choices, final int width, final int height) { - final int minSize = Math.max(Math.min(width, height), MINIMUM_PREVIEW_SIZE); - final Size desiredSize = new Size(width, height); - - // Collect the supported resolutions that are at least as big as the preview Surface - boolean exactSizeFound = false; - final List<Size> bigEnough = new ArrayList<Size>(); - final List<Size> tooSmall = new ArrayList<Size>(); - for (final Size option : choices) { - if (option.equals(desiredSize)) { - // Set the size but don't return yet so that remaining sizes will still be logged. - exactSizeFound = true; - } - - if (option.getHeight() >= minSize && option.getWidth() >= minSize) { - bigEnough.add(option); - } else { - tooSmall.add(option); - } - } - - LOGGER.i("Desired size: " + desiredSize + ", min size: " + minSize + "x" + minSize); - LOGGER.i("Valid preview sizes: [" + TextUtils.join(", ", bigEnough) + "]"); - LOGGER.i("Rejected preview sizes: [" + TextUtils.join(", ", tooSmall) + "]"); - - if (exactSizeFound) { - LOGGER.i("Exact size match found."); - return desiredSize; - } - - // Pick the smallest of those, assuming we found any - if (bigEnough.size() > 0) { - final Size chosenSize = Collections.min(bigEnough, new CompareSizesByArea()); - LOGGER.i("Chosen size: " + chosenSize.getWidth() + "x" + chosenSize.getHeight()); - return chosenSize; - } else { - LOGGER.e("Couldn't find any suitable preview size"); - return choices[0]; - } - } - - public static CameraConnectionFragment newInstance( - final ConnectionCallback callback, - final OnImageAvailableListener imageListener, - final int layout, - final Size inputSize) { - return new CameraConnectionFragment(callback, imageListener, layout, inputSize); - } - - /** - * Shows a {@link Toast} on the UI thread.
- * - * @param text The message to show - */ - private void showToast(final String text) { - final Activity activity = getActivity(); - if (activity != null) { - activity.runOnUiThread( - new Runnable() { - @Override - public void run() { - Toast.makeText(activity, text, Toast.LENGTH_SHORT).show(); - } - }); - } - } - - @Override - public View onCreateView( - final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) { - return inflater.inflate(layout, container, false); - } - - @Override - public void onViewCreated(final View view, final Bundle savedInstanceState) { - textureView = (AutoFitTextureView) view.findViewById(R.id.texture); - } - - @Override - public void onActivityCreated(final Bundle savedInstanceState) { - super.onActivityCreated(savedInstanceState); - } - - @Override - public void onResume() { - super.onResume(); - startBackgroundThread(); - - // When the screen is turned off and turned back on, the SurfaceTexture is already - // available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open - // a camera and start preview from here (otherwise, we wait until the surface is ready in - // the SurfaceTextureListener). - if (textureView.isAvailable()) { - openCamera(textureView.getWidth(), textureView.getHeight()); - } else { - textureView.setSurfaceTextureListener(surfaceTextureListener); - } - } - - @Override - public void onPause() { - closeCamera(); - stopBackgroundThread(); - super.onPause(); - } - - public void setCamera(String cameraId) { - this.cameraId = cameraId; - } - - /** Sets up member variables related to camera. */ - private void setUpCameraOutputs() { - final Activity activity = getActivity(); - final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE); - try { - final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId); - - final StreamConfigurationMap map = - characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); - - sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION); - - // Danger, W.R.! Attempting to use too large a preview size could exceed the camera - // bus' bandwidth limitation, resulting in gorgeous previews but the storage of - // garbage capture data. - previewSize = - chooseOptimalSize( - map.getOutputSizes(SurfaceTexture.class), - inputSize.getWidth(), - inputSize.getHeight()); - - // We fit the aspect ratio of TextureView to the size of preview we picked. - final int orientation = getResources().getConfiguration().orientation; - if (orientation == Configuration.ORIENTATION_LANDSCAPE) { - textureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight()); - } else { - textureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth()); - } - } catch (final CameraAccessException e) { - LOGGER.e(e, "Exception!"); - } catch (final NullPointerException e) { - // Currently an NPE is thrown when the Camera2API is used but not supported on the - // device this code runs. - ErrorDialog.newInstance(getString(R.string.tfe_od_camera_error)) - .show(getChildFragmentManager(), FRAGMENT_DIALOG); - throw new IllegalStateException(getString(R.string.tfe_od_camera_error)); - } - - cameraConnectionCallback.onPreviewSizeChosen(previewSize, sensorOrientation); - } - - /** Opens the camera specified by {@link CameraConnectionFragment#cameraId}. 
*/ - private void openCamera(final int width, final int height) { - setUpCameraOutputs(); - configureTransform(width, height); - final Activity activity = getActivity(); - final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE); - try { - if (!cameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) { - throw new RuntimeException("Time out waiting to lock camera opening."); - } - manager.openCamera(cameraId, stateCallback, backgroundHandler); - } catch (final CameraAccessException e) { - LOGGER.e(e, "Exception!"); - } catch (final InterruptedException e) { - throw new RuntimeException("Interrupted while trying to lock camera opening.", e); - } - } - - /** Closes the current {@link CameraDevice}. */ - private void closeCamera() { - try { - cameraOpenCloseLock.acquire(); - if (null != captureSession) { - captureSession.close(); - captureSession = null; - } - if (null != cameraDevice) { - cameraDevice.close(); - cameraDevice = null; - } - if (null != previewReader) { - previewReader.close(); - previewReader = null; - } - } catch (final InterruptedException e) { - throw new RuntimeException("Interrupted while trying to lock camera closing.", e); - } finally { - cameraOpenCloseLock.release(); - } - } - - /** Starts a background thread and its {@link Handler}. */ - private void startBackgroundThread() { - backgroundThread = new HandlerThread("ImageListener"); - backgroundThread.start(); - backgroundHandler = new Handler(backgroundThread.getLooper()); - } - - /** Stops the background thread and its {@link Handler}. */ - private void stopBackgroundThread() { - backgroundThread.quitSafely(); - try { - backgroundThread.join(); - backgroundThread = null; - backgroundHandler = null; - } catch (final InterruptedException e) { - LOGGER.e(e, "Exception!"); - } - } - - /** Creates a new {@link CameraCaptureSession} for camera preview. */ - private void createCameraPreviewSession() { - try { - final SurfaceTexture texture = textureView.getSurfaceTexture(); - assert texture != null; - - // We configure the size of default buffer to be the size of camera preview we want. - texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight()); - - // This is the output Surface we need to start preview. - final Surface surface = new Surface(texture); - - // We set up a CaptureRequest.Builder with the output Surface. - previewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); - previewRequestBuilder.addTarget(surface); - - LOGGER.i("Opening camera preview: " + previewSize.getWidth() + "x" + previewSize.getHeight()); - - // Create the reader for the preview frames. - previewReader = - ImageReader.newInstance( - previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 2); - - previewReader.setOnImageAvailableListener(imageListener, backgroundHandler); - previewRequestBuilder.addTarget(previewReader.getSurface()); - - // Here, we create a CameraCaptureSession for camera preview. - cameraDevice.createCaptureSession( - Arrays.asList(surface, previewReader.getSurface()), - new CameraCaptureSession.StateCallback() { - - @Override - public void onConfigured(final CameraCaptureSession cameraCaptureSession) { - // The camera is already closed - if (null == cameraDevice) { - return; - } - - // When the session is ready, we start displaying the preview. - captureSession = cameraCaptureSession; - try { - // Auto focus should be continuous for camera preview. 
- previewRequestBuilder.set( - CaptureRequest.CONTROL_AF_MODE, - CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE); - // Flash is automatically enabled when necessary. - previewRequestBuilder.set( - CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH); - - // Finally, we start displaying the camera preview. - previewRequest = previewRequestBuilder.build(); - captureSession.setRepeatingRequest( - previewRequest, captureCallback, backgroundHandler); - } catch (final CameraAccessException e) { - LOGGER.e(e, "Exception!"); - } - } - - @Override - public void onConfigureFailed(final CameraCaptureSession cameraCaptureSession) { - showToast("Failed"); - } - }, - null); - } catch (final CameraAccessException e) { - LOGGER.e(e, "Exception!"); - } - } - - /** - * Configures the necessary {@link Matrix} transformation to `mTextureView`. This method should be - * called after the camera preview size is determined in setUpCameraOutputs and also the size of - * `mTextureView` is fixed. - * - * @param viewWidth The width of `mTextureView` - * @param viewHeight The height of `mTextureView` - */ - private void configureTransform(final int viewWidth, final int viewHeight) { - final Activity activity = getActivity(); - if (null == textureView || null == previewSize || null == activity) { - return; - } - final int rotation = activity.getWindowManager().getDefaultDisplay().getRotation(); - final Matrix matrix = new Matrix(); - final RectF viewRect = new RectF(0, 0, viewWidth, viewHeight); - final RectF bufferRect = new RectF(0, 0, previewSize.getHeight(), previewSize.getWidth()); - final float centerX = viewRect.centerX(); - final float centerY = viewRect.centerY(); - if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) { - bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY()); - matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL); - final float scale = - Math.max( - (float) viewHeight / previewSize.getHeight(), - (float) viewWidth / previewSize.getWidth()); - matrix.postScale(scale, scale, centerX, centerY); - matrix.postRotate(90 * (rotation - 2), centerX, centerY); - } else if (Surface.ROTATION_180 == rotation) { - matrix.postRotate(180, centerX, centerY); - } - textureView.setTransform(matrix); - } - - /** - * Callback for Activities to use to initialize their data once the selected preview size is - * known. - */ - public interface ConnectionCallback { - void onPreviewSizeChosen(Size size, int cameraRotation); - } - - /** Compares two {@code Size}s based on their areas. */ - static class CompareSizesByArea implements Comparator<Size> { - @Override - public int compare(final Size lhs, final Size rhs) { - // We cast here to ensure the multiplications won't overflow - return Long.signum( - (long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight()); - } - } - - /** Shows an error message dialog.
*/ - public static class ErrorDialog extends DialogFragment { - private static final String ARG_MESSAGE = "message"; - - public static ErrorDialog newInstance(final String message) { - final ErrorDialog dialog = new ErrorDialog(); - final Bundle args = new Bundle(); - args.putString(ARG_MESSAGE, message); - dialog.setArguments(args); - return dialog; - } - - @Override - public Dialog onCreateDialog(final Bundle savedInstanceState) { - final Activity activity = getActivity(); - return new AlertDialog.Builder(activity) - .setMessage(getArguments().getString(ARG_MESSAGE)) - .setPositiveButton( - android.R.string.ok, - new DialogInterface.OnClickListener() { - @Override - public void onClick(final DialogInterface dialogInterface, final int i) { - activity.finish(); - } - }) - .create(); - } - } -} diff --git a/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/DetectorActivity.java b/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/DetectorActivity.java deleted file mode 100644 index 9928cd366d7..00000000000 --- a/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/DetectorActivity.java +++ /dev/null @@ -1,267 +0,0 @@ -/* - * Copyright 2019 The TensorFlow Authors. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.tensorflow.lite.examples.detection; - -import android.graphics.Bitmap; -import android.graphics.Bitmap.Config; -import android.graphics.Canvas; -import android.graphics.Color; -import android.graphics.Matrix; -import android.graphics.Paint; -import android.graphics.Paint.Style; -import android.graphics.RectF; -import android.graphics.Typeface; -import android.media.ImageReader.OnImageAvailableListener; -import android.os.SystemClock; -import android.util.Size; -import android.util.TypedValue; -import android.widget.Toast; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import org.tensorflow.lite.examples.detection.customview.OverlayView; -import org.tensorflow.lite.examples.detection.customview.OverlayView.DrawCallback; -import org.tensorflow.lite.examples.detection.env.BorderedText; -import org.tensorflow.lite.examples.detection.env.ImageUtils; -import org.tensorflow.lite.examples.detection.env.Logger; -import org.tensorflow.lite.examples.detection.tflite.Detector; -import org.tensorflow.lite.examples.detection.tflite.TFLiteObjectDetectionAPIModel; -import org.tensorflow.lite.examples.detection.tracking.MultiBoxTracker; - -/** - * An activity that uses a TensorFlowMultiBoxDetector and ObjectTracker to detect and then track - * objects. - */ -public class DetectorActivity extends CameraActivity implements OnImageAvailableListener { - private static final Logger LOGGER = new Logger(); - - // Configuration values for the prepackaged SSD model. 
- private static final int TF_OD_API_INPUT_SIZE = 300; - private static final boolean TF_OD_API_IS_QUANTIZED = true; - private static final String TF_OD_API_MODEL_FILE = "detect.tflite"; - private static final String TF_OD_API_LABELS_FILE = "labelmap.txt"; - private static final DetectorMode MODE = DetectorMode.TF_OD_API; - // Minimum detection confidence to track a detection. - private static final float MINIMUM_CONFIDENCE_TF_OD_API = 0.5f; - private static final boolean MAINTAIN_ASPECT = false; - private static final Size DESIRED_PREVIEW_SIZE = new Size(640, 480); - private static final boolean SAVE_PREVIEW_BITMAP = false; - private static final float TEXT_SIZE_DIP = 10; - OverlayView trackingOverlay; - private Integer sensorOrientation; - - private Detector detector; - - private long lastProcessingTimeMs; - private Bitmap rgbFrameBitmap = null; - private Bitmap croppedBitmap = null; - private Bitmap cropCopyBitmap = null; - - private boolean computingDetection = false; - - private long timestamp = 0; - - private Matrix frameToCropTransform; - private Matrix cropToFrameTransform; - - private MultiBoxTracker tracker; - - private BorderedText borderedText; - - @Override - public void onPreviewSizeChosen(final Size size, final int rotation) { - final float textSizePx = - TypedValue.applyDimension( - TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics()); - borderedText = new BorderedText(textSizePx); - borderedText.setTypeface(Typeface.MONOSPACE); - - tracker = new MultiBoxTracker(this); - - int cropSize = TF_OD_API_INPUT_SIZE; - - try { - detector = - TFLiteObjectDetectionAPIModel.create( - this, - TF_OD_API_MODEL_FILE, - TF_OD_API_LABELS_FILE, - TF_OD_API_INPUT_SIZE, - TF_OD_API_IS_QUANTIZED); - cropSize = TF_OD_API_INPUT_SIZE; - } catch (final IOException e) { - e.printStackTrace(); - LOGGER.e(e, "Exception initializing Detector!"); - Toast toast = - Toast.makeText( - getApplicationContext(), "Detector could not be initialized", Toast.LENGTH_SHORT); - toast.show(); - finish(); - } - - previewWidth = size.getWidth(); - previewHeight = size.getHeight(); - - sensorOrientation = rotation - getScreenOrientation(); - LOGGER.i("Camera orientation relative to screen canvas: %d", sensorOrientation); - - LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight); - rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888); - croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Config.ARGB_8888); - - frameToCropTransform = - ImageUtils.getTransformationMatrix( - previewWidth, previewHeight, - cropSize, cropSize, - sensorOrientation, MAINTAIN_ASPECT); - - cropToFrameTransform = new Matrix(); - frameToCropTransform.invert(cropToFrameTransform); - - trackingOverlay = (OverlayView) findViewById(R.id.tracking_overlay); - trackingOverlay.addCallback( - new DrawCallback() { - @Override - public void drawCallback(final Canvas canvas) { - tracker.draw(canvas); - if (isDebug()) { - tracker.drawDebug(canvas); - } - } - }); - - tracker.setFrameConfiguration(previewWidth, previewHeight, sensorOrientation); - } - - @Override - protected void processImage() { - ++timestamp; - final long currTimestamp = timestamp; - trackingOverlay.postInvalidate(); - - // No mutex needed as this method is not reentrant. 
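That "no mutex needed" claim holds only because the camera delivers frames on a single callback thread, so the plain computingDetection boolean can act as a single-flight guard. If the same pattern ever has to survive multi-threaded frame delivery, the usual sketch swaps in an AtomicBoolean (the executor and method names here are assumptions, not code from this patch):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicBoolean;

final AtomicBoolean computing = new AtomicBoolean(false);
final ExecutorService inferenceExecutor = Executors.newSingleThreadExecutor();

void onFrameAvailable(Runnable runDetection) {
  // Drop the frame if a detection is still in flight.
  if (!computing.compareAndSet(false, true)) {
    return;
  }
  inferenceExecutor.execute(
      () -> {
        try {
          runDetection.run();
        } finally {
          computing.set(false); // accept the next frame
        }
      });
}

The deleted flag check that follows is the single-threaded version of the same idea.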
- if (computingDetection) { - readyForNextImage(); - return; - } - computingDetection = true; - LOGGER.i("Preparing image " + currTimestamp + " for detection in bg thread."); - - rgbFrameBitmap.setPixels(getRgbBytes(), 0, previewWidth, 0, 0, previewWidth, previewHeight); - - readyForNextImage(); - - final Canvas canvas = new Canvas(croppedBitmap); - canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null); - // For examining the actual TF input. - if (SAVE_PREVIEW_BITMAP) { - ImageUtils.saveBitmap(croppedBitmap); - } - - runInBackground( - new Runnable() { - @Override - public void run() { - LOGGER.i("Running detection on image " + currTimestamp); - final long startTime = SystemClock.uptimeMillis(); - final List results = detector.recognizeImage(croppedBitmap); - lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime; - - cropCopyBitmap = Bitmap.createBitmap(croppedBitmap); - final Canvas canvas = new Canvas(cropCopyBitmap); - final Paint paint = new Paint(); - paint.setColor(Color.RED); - paint.setStyle(Style.STROKE); - paint.setStrokeWidth(2.0f); - - float minimumConfidence = MINIMUM_CONFIDENCE_TF_OD_API; - switch (MODE) { - case TF_OD_API: - minimumConfidence = MINIMUM_CONFIDENCE_TF_OD_API; - break; - } - - final List mappedRecognitions = - new ArrayList(); - - for (final Detector.Recognition result : results) { - final RectF location = result.getLocation(); - if (location != null && result.getConfidence() >= minimumConfidence) { - canvas.drawRect(location, paint); - - cropToFrameTransform.mapRect(location); - - result.setLocation(location); - mappedRecognitions.add(result); - } - } - - tracker.trackResults(mappedRecognitions, currTimestamp); - trackingOverlay.postInvalidate(); - - computingDetection = false; - - runOnUiThread( - new Runnable() { - @Override - public void run() { - showFrameInfo(previewWidth + "x" + previewHeight); - showCropInfo(cropCopyBitmap.getWidth() + "x" + cropCopyBitmap.getHeight()); - showInference(lastProcessingTimeMs + "ms"); - } - }); - } - }); - } - - @Override - protected int getLayoutId() { - return R.layout.tfe_od_camera_connection_fragment_tracking; - } - - @Override - protected Size getDesiredPreviewFrameSize() { - return DESIRED_PREVIEW_SIZE; - } - - // Which detection model to use: by default uses Tensorflow Object Detection API frozen - // checkpoints. - private enum DetectorMode { - TF_OD_API; - } - - @Override - protected void setUseNNAPI(final boolean isChecked) { - runInBackground( - () -> { - try { - detector.setUseNNAPI(isChecked); - } catch (UnsupportedOperationException e) { - LOGGER.e(e, "Failed to set \"Use NNAPI\"."); - runOnUiThread( - () -> { - Toast.makeText(this, e.getMessage(), Toast.LENGTH_LONG).show(); - }); - } - }); - } - - @Override - protected void setNumThreads(final int numThreads) { - runInBackground(() -> detector.setNumThreads(numThreads)); - } -} diff --git a/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/LegacyCameraConnectionFragment.java b/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/LegacyCameraConnectionFragment.java deleted file mode 100644 index 2fa3417459b..00000000000 --- a/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/LegacyCameraConnectionFragment.java +++ /dev/null @@ -1,205 +0,0 @@ -package org.tensorflow.lite.examples.detection; - -/* - * Copyright 2019 The TensorFlow Authors. All Rights Reserved. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import android.app.Fragment; -import android.graphics.SurfaceTexture; -import android.hardware.Camera; -import android.hardware.Camera.CameraInfo; -import android.os.Bundle; -import android.os.Handler; -import android.os.HandlerThread; -import android.util.Size; -import android.util.SparseIntArray; -import android.view.LayoutInflater; -import android.view.Surface; -import android.view.TextureView; -import android.view.View; -import android.view.ViewGroup; -import java.io.IOException; -import java.util.List; -import org.tensorflow.lite.examples.detection.customview.AutoFitTextureView; -import org.tensorflow.lite.examples.detection.env.ImageUtils; -import org.tensorflow.lite.examples.detection.env.Logger; - -public class LegacyCameraConnectionFragment extends Fragment { - private static final Logger LOGGER = new Logger(); - /** Conversion from screen rotation to JPEG orientation. */ - private static final SparseIntArray ORIENTATIONS = new SparseIntArray(); - - static { - ORIENTATIONS.append(Surface.ROTATION_0, 90); - ORIENTATIONS.append(Surface.ROTATION_90, 0); - ORIENTATIONS.append(Surface.ROTATION_180, 270); - ORIENTATIONS.append(Surface.ROTATION_270, 180); - } - - private Camera camera; - private Camera.PreviewCallback imageListener; - private Size desiredSize; - /** The layout identifier to inflate for this Fragment. */ - private int layout; - /** An {@link AutoFitTextureView} for camera preview. */ - private AutoFitTextureView textureView; - private SurfaceTexture availableSurfaceTexture = null; - - /** - * {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a {@link - * TextureView}. - */ - private final TextureView.SurfaceTextureListener surfaceTextureListener = - new TextureView.SurfaceTextureListener() { - @Override - public void onSurfaceTextureAvailable( - final SurfaceTexture texture, final int width, final int height) { - availableSurfaceTexture = texture; - startCamera(); - } - - @Override - public void onSurfaceTextureSizeChanged( - final SurfaceTexture texture, final int width, final int height) {} - - @Override - public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) { - return true; - } - - @Override - public void onSurfaceTextureUpdated(final SurfaceTexture texture) {} - }; - /** An additional thread for running tasks that shouldn't block the UI. 
*/ - private HandlerThread backgroundThread; - - public LegacyCameraConnectionFragment( - final Camera.PreviewCallback imageListener, final int layout, final Size desiredSize) { - this.imageListener = imageListener; - this.layout = layout; - this.desiredSize = desiredSize; - } - - @Override - public View onCreateView( - final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) { - return inflater.inflate(layout, container, false); - } - - @Override - public void onViewCreated(final View view, final Bundle savedInstanceState) { - textureView = (AutoFitTextureView) view.findViewById(R.id.texture); - } - - @Override - public void onActivityCreated(final Bundle savedInstanceState) { - super.onActivityCreated(savedInstanceState); - } - - @Override - public void onResume() { - super.onResume(); - startBackgroundThread(); - // When the screen is turned off and turned back on, the SurfaceTexture is already - // available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open - // a camera and start preview from here (otherwise, we wait until the surface is ready in - // the SurfaceTextureListener). - - if (textureView.isAvailable()) { - startCamera(); - } else { - textureView.setSurfaceTextureListener(surfaceTextureListener); - } - } - - @Override - public void onPause() { - stopCamera(); - stopBackgroundThread(); - super.onPause(); - } - - /** Starts a background thread and its {@link Handler}. */ - private void startBackgroundThread() { - backgroundThread = new HandlerThread("CameraBackground"); - backgroundThread.start(); - } - - /** Stops the background thread and its {@link Handler}. */ - private void stopBackgroundThread() { - backgroundThread.quitSafely(); - try { - backgroundThread.join(); - backgroundThread = null; - } catch (final InterruptedException e) { - LOGGER.e(e, "Exception!"); - } - } - - private void startCamera() { - int index = getCameraId(); - camera = Camera.open(index); - - try { - Camera.Parameters parameters = camera.getParameters(); - List focusModes = parameters.getSupportedFocusModes(); - if (focusModes != null - && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) { - parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE); - } - List cameraSizes = parameters.getSupportedPreviewSizes(); - Size[] sizes = new Size[cameraSizes.size()]; - int i = 0; - for (Camera.Size size : cameraSizes) { - sizes[i++] = new Size(size.width, size.height); - } - Size previewSize = - CameraConnectionFragment.chooseOptimalSize( - sizes, desiredSize.getWidth(), desiredSize.getHeight()); - parameters.setPreviewSize(previewSize.getWidth(), previewSize.getHeight()); - camera.setDisplayOrientation(90); - camera.setParameters(parameters); - camera.setPreviewTexture(availableSurfaceTexture); - } catch (IOException exception) { - camera.release(); - } - - camera.setPreviewCallbackWithBuffer(imageListener); - Camera.Size s = camera.getParameters().getPreviewSize(); - camera.addCallbackBuffer(new byte[ImageUtils.getYUVByteSize(s.height, s.width)]); - - textureView.setAspectRatio(s.height, s.width); - - camera.startPreview(); - } - - protected void stopCamera() { - if (camera != null) { - camera.stopPreview(); - camera.setPreviewCallback(null); - camera.release(); - camera = null; - } - } - - private int getCameraId() { - CameraInfo ci = new CameraInfo(); - for (int i = 0; i < Camera.getNumberOfCameras(); i++) { - Camera.getCameraInfo(i, ci); - if (ci.facing == CameraInfo.CAMERA_FACING_BACK) return i; - } - 
return -1; // No camera found - } -} diff --git a/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/AutoFitTextureView.java b/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/AutoFitTextureView.java deleted file mode 100644 index 8f41eb71336..00000000000 --- a/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/AutoFitTextureView.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright 2019 The TensorFlow Authors. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.tensorflow.lite.examples.detection.customview; - -import android.content.Context; -import android.util.AttributeSet; -import android.view.TextureView; - -/** A {@link TextureView} that can be adjusted to a specified aspect ratio. */ -public class AutoFitTextureView extends TextureView { - private int ratioWidth = 0; - private int ratioHeight = 0; - - public AutoFitTextureView(final Context context) { - this(context, null); - } - - public AutoFitTextureView(final Context context, final AttributeSet attrs) { - this(context, attrs, 0); - } - - public AutoFitTextureView(final Context context, final AttributeSet attrs, final int defStyle) { - super(context, attrs, defStyle); - } - - /** - * Sets the aspect ratio for this view. The size of the view will be measured based on the ratio - * calculated from the parameters. Note that the actual sizes of parameters don't matter, that is, - * calling setAspectRatio(2, 3) and setAspectRatio(4, 6) make the same result. 
- * - * @param width Relative horizontal size - * @param height Relative vertical size - */ - public void setAspectRatio(final int width, final int height) { - if (width < 0 || height < 0) { - throw new IllegalArgumentException("Size cannot be negative."); - } - ratioWidth = width; - ratioHeight = height; - requestLayout(); - } - - @Override - protected void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) { - super.onMeasure(widthMeasureSpec, heightMeasureSpec); - final int width = MeasureSpec.getSize(widthMeasureSpec); - final int height = MeasureSpec.getSize(heightMeasureSpec); - if (0 == ratioWidth || 0 == ratioHeight) { - setMeasuredDimension(width, height); - } else { - if (width < height * ratioWidth / ratioHeight) { - setMeasuredDimension(width, width * ratioHeight / ratioWidth); - } else { - setMeasuredDimension(height * ratioWidth / ratioHeight, height); - } - } - } -} diff --git a/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/RecognitionScoreView.java b/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/RecognitionScoreView.java deleted file mode 100644 index ac78e758993..00000000000 --- a/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/RecognitionScoreView.java +++ /dev/null @@ -1,67 +0,0 @@ -/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-==============================================================================*/ - -package org.tensorflow.lite.examples.detection.customview; - -import android.content.Context; -import android.graphics.Canvas; -import android.graphics.Paint; -import android.util.AttributeSet; -import android.util.TypedValue; -import android.view.View; -import java.util.List; -import org.tensorflow.lite.examples.detection.tflite.Detector.Recognition; - -public class RecognitionScoreView extends View implements ResultsView { - private static final float TEXT_SIZE_DIP = 14; - private final float textSizePx; - private final Paint fgPaint; - private final Paint bgPaint; - private List results; - - public RecognitionScoreView(final Context context, final AttributeSet set) { - super(context, set); - - textSizePx = - TypedValue.applyDimension( - TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics()); - fgPaint = new Paint(); - fgPaint.setTextSize(textSizePx); - - bgPaint = new Paint(); - bgPaint.setColor(0xcc4285f4); - } - - @Override - public void setResults(final List results) { - this.results = results; - postInvalidate(); - } - - @Override - public void onDraw(final Canvas canvas) { - final int x = 10; - int y = (int) (fgPaint.getTextSize() * 1.5f); - - canvas.drawPaint(bgPaint); - - if (results != null) { - for (final Recognition recog : results) { - canvas.drawText(recog.getTitle() + ": " + recog.getConfidence(), x, y, fgPaint); - y += (int) (fgPaint.getTextSize() * 1.5f); - } - } - } -} diff --git a/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/ResultsView.java b/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/ResultsView.java deleted file mode 100644 index bd28b71bdf9..00000000000 --- a/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/ResultsView.java +++ /dev/null @@ -1,23 +0,0 @@ -/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-==============================================================================*/ - -package org.tensorflow.lite.examples.detection.customview; - -import java.util.List; -import org.tensorflow.lite.examples.detection.tflite.Detector.Recognition; - -public interface ResultsView { - public void setResults(final List results); -} diff --git a/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/BorderedText.java b/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/BorderedText.java index 006c4c7847e..73aab69cfc6 100644 --- a/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/BorderedText.java +++ b/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/BorderedText.java @@ -22,9 +22,12 @@ import android.graphics.Paint.Style; import android.graphics.Rect; import android.graphics.Typeface; + import java.util.Vector; -/** A class that encapsulates the tedious bits of rendering legible, bordered text onto a canvas. */ +/** + * A class that encapsulates the tedious bits of rendering legible, bordered text onto a canvas. + */ public class BorderedText { private final Paint interiorPaint; private final Paint exteriorPaint; @@ -47,7 +50,7 @@ public BorderedText(final float textSize) { * * @param interiorColor the interior text color * @param exteriorColor the exterior text color - * @param textSize text size in pixels + * @param textSize text size in pixels */ public BorderedText(final int interiorColor, final int exteriorColor, final float textSize) { interiorPaint = new Paint(); @@ -125,4 +128,4 @@ public void setTextAlign(final Align align) { interiorPaint.setTextAlign(align); exteriorPaint.setTextAlign(align); } -} +} \ No newline at end of file diff --git a/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/ImageUtils.java b/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/ImageUtils.java deleted file mode 100644 index df7b0999a80..00000000000 --- a/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/ImageUtils.java +++ /dev/null @@ -1,219 +0,0 @@ -/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -==============================================================================*/ - -package org.tensorflow.lite.examples.detection.env; - -import android.graphics.Bitmap; -import android.graphics.Matrix; -import android.os.Environment; -import java.io.File; -import java.io.FileOutputStream; - -/** Utility class for manipulating images. */ -public class ImageUtils { - // This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their ranges - // are normalized to eight bits. 
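The 2^18 - 1 clamp deserves a number or two: the integer conversion below scales luma by 1192 (roughly 1.164 x 1024), so intermediate channel values occupy about 18 bits before being shifted back down into 8-bit ARGB. A quick check with illustrative values:

// Full-range video luma: y = 235, so (y - 16) = 219.
int y1192 = 1192 * 219;                 // 261048, just under 2^18 - 1 = 262143
int r = y1192 + 1634 * 50;              // a modest red chroma pushes past the cap,
r = Math.min(Math.max(r, 0), 262143);   // hence the clamp to kMaxChannelValue
int red = (r << 6) & 0xff0000;          // top 8 of the 18 bits become the red byte

The constant defined on the next deleted line is exactly that cap.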
- static final int kMaxChannelValue = 262143; - - @SuppressWarnings("unused") - private static final Logger LOGGER = new Logger(); - - /** - * Utility method to compute the allocated size in bytes of a YUV420SP image of the given - * dimensions. - */ - public static int getYUVByteSize(final int width, final int height) { - // The luminance plane requires 1 byte per pixel. - final int ySize = width * height; - - // The UV plane works on 2x2 blocks, so dimensions with odd size must be rounded up. - // Each 2x2 block takes 2 bytes to encode, one each for U and V. - final int uvSize = ((width + 1) / 2) * ((height + 1) / 2) * 2; - - return ySize + uvSize; - } - - /** - * Saves a Bitmap object to disk for analysis. - * - * @param bitmap The bitmap to save. - */ - public static void saveBitmap(final Bitmap bitmap) { - saveBitmap(bitmap, "preview.png"); - } - - /** - * Saves a Bitmap object to disk for analysis. - * - * @param bitmap The bitmap to save. - * @param filename The location to save the bitmap to. - */ - public static void saveBitmap(final Bitmap bitmap, final String filename) { - final String root = - Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + "tensorflow"; - LOGGER.i("Saving %dx%d bitmap to %s.", bitmap.getWidth(), bitmap.getHeight(), root); - final File myDir = new File(root); - - if (!myDir.mkdirs()) { - LOGGER.i("Make dir failed"); - } - - final String fname = filename; - final File file = new File(myDir, fname); - if (file.exists()) { - file.delete(); - } - try { - final FileOutputStream out = new FileOutputStream(file); - bitmap.compress(Bitmap.CompressFormat.PNG, 99, out); - out.flush(); - out.close(); - } catch (final Exception e) { - LOGGER.e(e, "Exception!"); - } - } - - public static void convertYUV420SPToARGB8888(byte[] input, int width, int height, int[] output) { - final int frameSize = width * height; - for (int j = 0, yp = 0; j < height; j++) { - int uvp = frameSize + (j >> 1) * width; - int u = 0; - int v = 0; - - for (int i = 0; i < width; i++, yp++) { - int y = 0xff & input[yp]; - if ((i & 1) == 0) { - v = 0xff & input[uvp++]; - u = 0xff & input[uvp++]; - } - - output[yp] = YUV2RGB(y, u, v); - } - } - } - - private static int YUV2RGB(int y, int u, int v) { - // Adjust and check YUV values - y = (y - 16) < 0 ? 0 : (y - 16); - u -= 128; - v -= 128; - - // This is the floating point equivalent. We do the conversion in integer - // because some Android devices do not have floating point in hardware. - // nR = (int)(1.164 * nY + 2.018 * nU); - // nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU); - // nB = (int)(1.164 * nY + 1.596 * nV); - int y1192 = 1192 * y; - int r = (y1192 + 1634 * v); - int g = (y1192 - 833 * v - 400 * u); - int b = (y1192 + 2066 * u); - - // Clipping RGB values to be inside boundaries [ 0 , kMaxChannelValue ] - r = r > kMaxChannelValue ? kMaxChannelValue : (r < 0 ? 0 : r); - g = g > kMaxChannelValue ? kMaxChannelValue : (g < 0 ? 0 : g); - b = b > kMaxChannelValue ? kMaxChannelValue : (b < 0 ? 
0 : b); - - return 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff); - } - - public static void convertYUV420ToARGB8888( - byte[] yData, - byte[] uData, - byte[] vData, - int width, - int height, - int yRowStride, - int uvRowStride, - int uvPixelStride, - int[] out) { - int yp = 0; - for (int j = 0; j < height; j++) { - int pY = yRowStride * j; - int pUV = uvRowStride * (j >> 1); - - for (int i = 0; i < width; i++) { - int uv_offset = pUV + (i >> 1) * uvPixelStride; - - out[yp++] = YUV2RGB(0xff & yData[pY + i], 0xff & uData[uv_offset], 0xff & vData[uv_offset]); - } - } - } - - /** - * Returns a transformation matrix from one reference frame into another. Handles cropping (if - * maintaining aspect ratio is desired) and rotation. - * - * @param srcWidth Width of source frame. - * @param srcHeight Height of source frame. - * @param dstWidth Width of destination frame. - * @param dstHeight Height of destination frame. - * @param applyRotation Amount of rotation to apply from one frame to another. Must be a multiple - * of 90. - * @param maintainAspectRatio If true, will ensure that scaling in x and y remains constant, - * cropping the image if necessary. - * @return The transformation fulfilling the desired requirements. - */ - public static Matrix getTransformationMatrix( - final int srcWidth, - final int srcHeight, - final int dstWidth, - final int dstHeight, - final int applyRotation, - final boolean maintainAspectRatio) { - final Matrix matrix = new Matrix(); - - if (applyRotation != 0) { - if (applyRotation % 90 != 0) { - LOGGER.w("Rotation of %d % 90 != 0", applyRotation); - } - - // Translate so center of image is at origin. - matrix.postTranslate(-srcWidth / 2.0f, -srcHeight / 2.0f); - - // Rotate around origin. - matrix.postRotate(applyRotation); - } - - // Account for the already applied rotation, if any, and then determine how - // much scaling is needed for each axis. - final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0; - - final int inWidth = transpose ? srcHeight : srcWidth; - final int inHeight = transpose ? srcWidth : srcHeight; - - // Apply scaling if necessary. - if (inWidth != dstWidth || inHeight != dstHeight) { - final float scaleFactorX = dstWidth / (float) inWidth; - final float scaleFactorY = dstHeight / (float) inHeight; - - if (maintainAspectRatio) { - // Scale by minimum factor so that dst is filled completely while - // maintaining the aspect ratio. Some image may fall off the edge. - final float scaleFactor = Math.max(scaleFactorX, scaleFactorY); - matrix.postScale(scaleFactor, scaleFactor); - } else { - // Scale exactly to fill dst from src. - matrix.postScale(scaleFactorX, scaleFactorY); - } - } - - if (applyRotation != 0) { - // Translate back from origin centered reference to destination frame. 
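Concretely, the translate, rotate, scale, translate recipe of this deleted helper, for a 640x480 frame feeding a 300x300 model input with a 90 degree rotation and maintainAspectRatio = false, composes like this (a worked sketch of the same matrix, not app code):

import android.graphics.Matrix;

Matrix m = new Matrix();
m.postTranslate(-640 / 2.0f, -480 / 2.0f); // center the source at the origin
m.postRotate(90);                          // frame is now 480 wide, 640 tall
m.postScale(300 / 480.0f, 300 / 640.0f);   // stretch to fill 300x300 exactly
m.postTranslate(300 / 2.0f, 300 / 2.0f);   // re-anchor at the destination center

That final re-anchoring is exactly the postTranslate the next deleted line performs.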
- matrix.postTranslate(dstWidth / 2.0f, dstHeight / 2.0f); - } - - return matrix; - } -} diff --git a/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/Logger.java b/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/Logger.java index 9dc05f4d1bd..a61fb6575f4 100644 --- a/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/Logger.java +++ b/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/Logger.java @@ -16,10 +16,13 @@ package org.tensorflow.lite.examples.detection.env; import android.util.Log; + import java.util.HashSet; import java.util.Set; -/** Wrapper for the platform log function, allows convenient message prefixing and log disabling. */ +/** + * Wrapper for the platform log function, allows convenient message prefixing and log disabling. + */ public final class Logger { private static final String DEFAULT_TAG = "tensorflow"; private static final int DEFAULT_MIN_LOG_LEVEL = Log.DEBUG; @@ -60,12 +63,12 @@ public Logger(final String messagePrefix) { * Creates a Logger with a custom tag and a custom message prefix. If the message prefix is set to * *
<pre>null</pre> - * + *
* , the caller's class name is used as the prefix. * - * @param tag identifies the source of a log message. + * @param tag identifies the source of a log message. * @param messagePrefix prepended to every message if non-null. If null, the name of the caller is - * being used + * being used */ public Logger(final String tag, final String messagePrefix) { this.tag = tag; @@ -73,12 +76,16 @@ public Logger(final String tag, final String messagePrefix) { this.messagePrefix = (prefix.length() > 0) ? prefix + ": " : prefix; } - /** Creates a Logger using the caller's class name as the message prefix. */ + /** + * Creates a Logger using the caller's class name as the message prefix. + */ public Logger() { this(DEFAULT_TAG, null); } - /** Creates a Logger using the caller's class name as the message prefix. */ + /** + * Creates a Logger using the caller's class name as the message prefix. + */ public Logger(final int minLogLevel) { this(DEFAULT_TAG, null); this.minLogLevel = minLogLevel; diff --git a/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/Size.java b/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/Size.java deleted file mode 100644 index e3f71e0e03c..00000000000 --- a/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/Size.java +++ /dev/null @@ -1,142 +0,0 @@ -/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -==============================================================================*/ - -package org.tensorflow.lite.examples.detection.env; - -import android.graphics.Bitmap; -import android.text.TextUtils; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.List; - -/** Size class independent of a Camera object. */ -public class Size implements Comparable, Serializable { - - // 1.4 went out with this UID so we'll need to maintain it to preserve pending queries when - // upgrading. - public static final long serialVersionUID = 7689808733290872361L; - - public final int width; - public final int height; - - public Size(final int width, final int height) { - this.width = width; - this.height = height; - } - - public Size(final Bitmap bmp) { - this.width = bmp.getWidth(); - this.height = bmp.getHeight(); - } - - /** - * Rotate a size by the given number of degrees. - * - * @param size Size to rotate. - * @param rotation Degrees {0, 90, 180, 270} to rotate the size. - * @return Rotated size. - */ - public static Size getRotatedSize(final Size size, final int rotation) { - if (rotation % 180 != 0) { - // The phone is portrait, therefore the camera is sideways and frame should be rotated. - return new Size(size.height, size.width); - } - return size; - } - - public static Size parseFromString(String sizeString) { - if (TextUtils.isEmpty(sizeString)) { - return null; - } - - sizeString = sizeString.trim(); - - // The expected format is "x". 
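The expected format here is a string such as "640x480": width, then a literal x, then height. A usage sketch against the deleted env.Size API (assuming that class is still on the classpath):

// Uses the deleted org.tensorflow.lite.examples.detection.env.Size class.
Size preview = Size.parseFromString("640x480");    // -> width 640, height 480
Size bad = Size.parseFromString("640 by 480");     // -> null (wrong delimiter)
List<Size> all = Size.sizeStringToList("640x480,1280x720"); // two entries

The split("x") on the next deleted line is what enforces that delimiter.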
- final String[] components = sizeString.split("x"); - if (components.length == 2) { - try { - final int width = Integer.parseInt(components[0]); - final int height = Integer.parseInt(components[1]); - return new Size(width, height); - } catch (final NumberFormatException e) { - return null; - } - } else { - return null; - } - } - - public static List sizeStringToList(final String sizes) { - final List sizeList = new ArrayList(); - if (sizes != null) { - final String[] pairs = sizes.split(","); - for (final String pair : pairs) { - final Size size = Size.parseFromString(pair); - if (size != null) { - sizeList.add(size); - } - } - } - return sizeList; - } - - public static String sizeListToString(final List sizes) { - String sizesString = ""; - if (sizes != null && sizes.size() > 0) { - sizesString = sizes.get(0).toString(); - for (int i = 1; i < sizes.size(); i++) { - sizesString += "," + sizes.get(i).toString(); - } - } - return sizesString; - } - - public static final String dimensionsAsString(final int width, final int height) { - return width + "x" + height; - } - - public final float aspectRatio() { - return (float) width / (float) height; - } - - @Override - public int compareTo(final Size other) { - return width * height - other.width * other.height; - } - - @Override - public boolean equals(final Object other) { - if (other == null) { - return false; - } - - if (!(other instanceof Size)) { - return false; - } - - final Size otherSize = (Size) other; - return (width == otherSize.width && height == otherSize.height); - } - - @Override - public int hashCode() { - return width * 32713 + height; - } - - @Override - public String toString() { - return dimensionsAsString(width, height); - } -} diff --git a/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/tracking/MultiBoxTracker.java b/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/tracking/MultiBoxTracker.java index 4af5f54af4c..c5977a5ba07 100644 --- a/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/tracking/MultiBoxTracker.java +++ b/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/tracking/MultiBoxTracker.java @@ -27,41 +27,46 @@ import android.text.TextUtils; import android.util.Pair; import android.util.TypedValue; + import java.util.LinkedList; import java.util.List; import java.util.Queue; + import org.tensorflow.lite.examples.detection.env.BorderedText; -import org.tensorflow.lite.examples.detection.env.ImageUtils; import org.tensorflow.lite.examples.detection.env.Logger; import org.tensorflow.lite.examples.detection.tflite.Detector.Recognition; -/** A tracker that handles non-max suppression and matches existing objects to new detections. */ +import static org.tensorflow.lite.examples.detection.tflite.TFLiteObjectDetectionAPIModel.getTransformationMatrix; + +/** + * A tracker that handles non-max suppression and matches existing objects to new detections. 
+ */ public class MultiBoxTracker { private static final float TEXT_SIZE_DIP = 18; private static final float MIN_SIZE = 16.0f; private static final int[] COLORS = { - Color.BLUE, - Color.RED, - Color.GREEN, - Color.YELLOW, - Color.CYAN, - Color.MAGENTA, - Color.WHITE, - Color.parseColor("#55FF55"), - Color.parseColor("#FFA500"), - Color.parseColor("#FF8888"), - Color.parseColor("#AAAAFF"), - Color.parseColor("#FFFFAA"), - Color.parseColor("#55AAAA"), - Color.parseColor("#AA33AA"), - Color.parseColor("#0D0068") + Color.BLUE, + Color.RED, + Color.GREEN, + Color.YELLOW, + Color.CYAN, + Color.MAGENTA, + Color.WHITE, + Color.parseColor("#55FF55"), + Color.parseColor("#FFA500"), + Color.parseColor("#FF8888"), + Color.parseColor("#AAAAFF"), + Color.parseColor("#FFFFAA"), + Color.parseColor("#55AAAA"), + Color.parseColor("#AA33AA"), + Color.parseColor("#0D0068") }; final List> screenRects = new LinkedList>(); private final Logger logger = new Logger(); private final Queue availableColors = new LinkedList(); private final List trackedObjects = new LinkedList(); private final Paint boxPaint = new Paint(); - private final float textSizePx; + private final float textSize; private final BorderedText borderedText; private Matrix frameToCanvasMatrix; private int frameWidth; @@ -80,16 +85,17 @@ public MultiBoxTracker(final Context context) { boxPaint.setStrokeJoin(Join.ROUND); boxPaint.setStrokeMiter(100); - textSizePx = + textSize = TypedValue.applyDimension( TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, context.getResources().getDisplayMetrics()); - borderedText = new BorderedText(textSizePx); + borderedText = new BorderedText(textSize); } public synchronized void setFrameConfiguration( final int width, final int height, final int sensorOrientation) { frameWidth = width; frameHeight = height; + logger.i("Frame Height %d Frame Width %d Sensor Orientation %d", frameHeight, frameWidth, sensorOrientation); this.sensorOrientation = sensorOrientation; } @@ -123,17 +129,16 @@ private Matrix getFrameToCanvasMatrix() { public synchronized void draw(final Canvas canvas) { final boolean rotated = sensorOrientation % 180 == 90; final float multiplier = - Math.min( - canvas.getHeight() / (float) (rotated ? frameWidth : frameHeight), + Math.max( + canvas.getHeight() / (float) (rotated ? frameWidth : frameHeight), //min -> max canvas.getWidth() / (float) (rotated ? frameHeight : frameWidth)); - frameToCanvasMatrix = - ImageUtils.getTransformationMatrix( - frameWidth, - frameHeight, - (int) (multiplier * (rotated ? frameHeight : frameWidth)), - (int) (multiplier * (rotated ? frameWidth : frameHeight)), - sensorOrientation, - false); + frameToCanvasMatrix = getTransformationMatrix( + frameWidth, + frameHeight, + (int) (multiplier * (rotated ? frameHeight : frameWidth)), + (int) (multiplier * (rotated ? frameWidth : frameHeight)), + sensorOrientation, + true); for (final TrackedRecognition recognition : trackedObjects) { final RectF trackedPos = new RectF(recognition.location); @@ -147,8 +152,6 @@ public synchronized void draw(final Canvas canvas) { !TextUtils.isEmpty(recognition.title) ? 
String.format("%s %.2f", recognition.title, (100 * recognition.detectionConfidence)) : String.format("%.2f", (100 * recognition.detectionConfidence)); - // borderedText.drawText(canvas, trackedPos.left + cornerSize, trackedPos.top, - // labelString); borderedText.drawText( canvas, trackedPos.left + cornerSize, trackedPos.top, labelString + "%", boxPaint); } diff --git a/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/OverlayView.java b/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/tracking/OverlayView.java similarity index 87% rename from lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/OverlayView.java rename to lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/tracking/OverlayView.java index 8f7e66102a7..4fac85ce034 100644 --- a/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/OverlayView.java +++ b/lite/examples/object_detection/android/app/src/main/java/org/tensorflow/lite/examples/detection/tracking/OverlayView.java @@ -13,16 +13,19 @@ limitations under the License. ==============================================================================*/ -package org.tensorflow.lite.examples.detection.customview; +package org.tensorflow.lite.examples.detection.tracking; import android.content.Context; import android.graphics.Canvas; import android.util.AttributeSet; import android.view.View; + import java.util.LinkedList; import java.util.List; -/** A simple View providing a render callback to other classes. */ +/** + * A simple View providing a render callback to other classes. + */ public class OverlayView extends View { private final List callbacks = new LinkedList(); @@ -41,7 +44,9 @@ public synchronized void draw(final Canvas canvas) { } } - /** Interface defining the callback for client classes. */ + /** + * Interface defining the callback for client classes. + */ public interface DrawCallback { public void drawCallback(final Canvas canvas); } diff --git a/lite/examples/object_detection/android/app/src/main/res/layout/tfe_od_activity_camera.xml b/lite/examples/object_detection/android/app/src/main/res/layout/tfe_od_activity_camera.xml index 6d7fcc8c622..b7f8f2549fb 100644 --- a/lite/examples/object_detection/android/app/src/main/res/layout/tfe_od_activity_camera.xml +++ b/lite/examples/object_detection/android/app/src/main/res/layout/tfe_od_activity_camera.xml @@ -13,44 +13,57 @@ See the License for the specific language governing permissions and limitations under the License. 
-->
[tfe_od_activity_camera.xml: element markup lost in extraction. Only stray attributes survive here (android:background="#00000000", android:orientation="vertical"); the hunk rebuilds the screen around the new data-binding layout.]
\ No newline at end of file
diff --git a/lite/examples/object_detection/android/app/src/main/res/layout/tfe_od_camera_connection_fragment_tracking.xml b/lite/examples/object_detection/android/app/src/main/res/layout/tfe_od_camera_connection_fragment_tracking.xml
deleted file mode 100644
index 754f29512b7..00000000000
--- a/lite/examples/object_detection/android/app/src/main/res/layout/tfe_od_camera_connection_fragment_tracking.xml
+++ /dev/null
@@ -1,30 +0,0 @@
[deleted tracking-fragment layout; the 30 removed XML lines were lost in extraction]
diff --git a/lite/examples/object_detection/android/app/src/main/res/layout/tfe_od_layout_bottom_sheet.xml b/lite/examples/object_detection/android/app/src/main/res/layout/tfe_od_layout_bottom_sheet.xml
index 8f589375833..98f5b8f7c6e 100644
--- a/lite/examples/object_detection/android/app/src/main/res/layout/tfe_od_layout_bottom_sheet.xml
+++ b/lite/examples/object_detection/android/app/src/main/res/layout/tfe_od_layout_bottom_sheet.xml
@@ -1,186 +1,192 @@
[bottom-sheet layout reworked; element markup lost in extraction. Surviving attributes show a vertical container (android:paddingTop="10dp", android:paddingBottom="20dp"), several horizontal rows, darker_gray divider views, a @string/threads label, and a final row hidden with android:visibility="gone".]
diff --git a/lite/examples/object_detection/android/app/src/main/res/values/strings.xml b/lite/examples/object_detection/android/app/src/main/res/values/strings.xml
index 90842d07169..5d44051e690 100644
--- a/lite/examples/object_detection/android/app/src/main/res/values/strings.xml
+++ b/lite/examples/object_detection/android/app/src/main/res/values/strings.xml
@@ -1,4 +1,12 @@
[string-element tags lost in extraction; the values are:]
 TFL Detect
 This device doesn\'t support Camera2 API.
+ Frame
+ Crop
+ Inference Time
+ Threads
+ API
+ TFLITE
+ 10ms
+ 640*480
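With the camera2 and legacy-camera fragments deleted above, preview and per-frame analysis presumably move to CameraX (the new recognizeImage(Image, int) signature and the fillBytesCameraX helper further down point the same way). A hedged sketch of that wiring, not taken from this patch; previewView, executor, lifecycleOwner, and detector are all assumptions:

import android.media.Image;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.Preview;
import androidx.camera.lifecycle.ProcessCameraProvider;

// Blocking get() for brevity; production code adds a listener to the future.
ProcessCameraProvider provider = ProcessCameraProvider.getInstance(context).get();

Preview preview = new Preview.Builder().build();
preview.setSurfaceProvider(previewView.getSurfaceProvider());

ImageAnalysis analysis =
    new ImageAnalysis.Builder()
        .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
        .build();
analysis.setAnalyzer(
    executor,
    imageProxy -> {
      Image image = imageProxy.getImage(); // YUV_420_888 frame
      if (image != null) {
        detector.recognizeImage(image, imageProxy.getImageInfo().getRotationDegrees());
      }
      imageProxy.close(); // hand the buffer back to CameraX
    });

provider.bindToLifecycle(lifecycleOwner, CameraSelector.DEFAULT_BACK_CAMERA, preview, analysis);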
diff --git a/lite/examples/object_detection/android/lib_interpreter/build.gradle b/lite/examples/object_detection/android/lib_interpreter/build.gradle
index 2d60d2595ee..7a322dbc2fd 100644
--- a/lite/examples/object_detection/android/lib_interpreter/build.gradle
+++ b/lite/examples/object_detection/android/lib_interpreter/build.gradle
@@ -38,9 +38,10 @@ android {
 dependencies {
     implementation fileTree(dir: 'libs', include: ['*.jar'])
-    implementation 'androidx.appcompat:appcompat:1.1.0'
+    implementation 'androidx.appcompat:appcompat:1.3.1'
 
-    // Build off of nightly TensorFlow Lite
-    implementation 'org.tensorflow:tensorflow-lite:0.0.0-nightly-SNAPSHOT'
-    implementation 'org.tensorflow:tensorflow-lite-metadata:0.0.0-nightly-SNAPSHOT'
+    // Build off TensorFlow Lite
+    implementation 'org.tensorflow:tensorflow-lite:2.5.0'
+    implementation 'org.tensorflow:tensorflow-lite-metadata:0.2.0'
+    implementation 'org.tensorflow:tensorflow-lite-support:0.2.0'
 }
diff --git a/lite/examples/object_detection/android/lib_interpreter/src/main/java/org/tensorflow/lite/examples/detection/tflite/Detector.java b/lite/examples/object_detection/android/lib_interpreter/src/main/java/org/tensorflow/lite/examples/detection/tflite/Detector.java
index 49de32517c3..c1d001464b7 100644
--- a/lite/examples/object_detection/android/lib_interpreter/src/main/java/org/tensorflow/lite/examples/detection/tflite/Detector.java
+++ b/lite/examples/object_detection/android/lib_interpreter/src/main/java/org/tensorflow/lite/examples/detection/tflite/Detector.java
@@ -15,13 +15,16 @@
 package org.tensorflow.lite.examples.detection.tflite;
 
-import android.graphics.Bitmap;
 import android.graphics.RectF;
+import android.media.Image;
+
 import java.util.List;
 
-/** Generic interface for interacting with different recognition engines. */
+/**
+ * Generic interface for interacting with different recognition engines.
+ */
 public interface Detector {
-  List<Recognition> recognizeImage(Bitmap bitmap);
+  List<Recognition> recognizeImage(Image image, int sensorOrientation);
 
   void enableStatLogging(final boolean debug);
 
@@ -33,7 +36,9 @@ public interface Detector {
 
   void setUseNNAPI(boolean isChecked);
 
-  /** An immutable result returned by a Detector describing what was recognized. */
+  /**
+   * An immutable result returned by a Detector describing what was recognized.
+   */
   public class Recognition {
     /**
     * A unique identifier for what has been recognized. Specific to the class, not the instance of
@@ -41,7 +46,9 @@ public class Recognition {
     */
    private final String id;
 
-    /** Display name for the recognition. */
+    /**
+     * Display name for the recognition.
+     */
    private final String title;
 
    /**
@@ -49,7 +56,9 @@ public class Recognition {
     */
    private final Float confidence;
 
-    /** Optional location within the source image for the location of the recognized object. */
+    /**
+     * Optional location within the source image for the location of the recognized object.
+ */ private RectF location; public Recognition( diff --git a/lite/examples/object_detection/android/lib_interpreter/src/main/java/org/tensorflow/lite/examples/detection/tflite/TFLiteObjectDetectionAPIModel.java b/lite/examples/object_detection/android/lib_interpreter/src/main/java/org/tensorflow/lite/examples/detection/tflite/TFLiteObjectDetectionAPIModel.java index f98c524a18c..38cd0912079 100644 --- a/lite/examples/object_detection/android/lib_interpreter/src/main/java/org/tensorflow/lite/examples/detection/tflite/TFLiteObjectDetectionAPIModel.java +++ b/lite/examples/object_detection/android/lib_interpreter/src/main/java/org/tensorflow/lite/examples/detection/tflite/TFLiteObjectDetectionAPIModel.java @@ -17,13 +17,18 @@ import static java.lang.Math.min; +import android.annotation.SuppressLint; import android.content.Context; import android.content.res.AssetFileDescriptor; import android.content.res.AssetManager; import android.graphics.Bitmap; +import android.graphics.Matrix; import android.graphics.RectF; +import android.media.Image; +import android.os.SystemClock; import android.os.Trace; import android.util.Log; + import java.io.BufferedReader; import java.io.FileInputStream; import java.io.IOException; @@ -37,7 +42,14 @@ import java.util.HashMap; import java.util.List; import java.util.Map; + +import org.tensorflow.lite.DataType; import org.tensorflow.lite.Interpreter; +import org.tensorflow.lite.support.common.TensorOperator; +import org.tensorflow.lite.support.image.ImageProcessor; +import org.tensorflow.lite.support.image.TensorImage; +import org.tensorflow.lite.support.image.ops.ResizeOp; +import org.tensorflow.lite.support.image.ops.Rot90Op; import org.tensorflow.lite.support.metadata.MetadataExtractor; /** @@ -53,8 +65,12 @@ * - * https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/running_on_mobile_tensorflowlite.md#running-our-model-on-android */ -public class TFLiteObjectDetectionAPIModel implements Detector { +public abstract class TFLiteObjectDetectionAPIModel implements Detector { private static final String TAG = "TFLiteObjectDetectionAPIModelWithInterpreter"; + private byte[][] yuvBytes = new byte[3][]; + private int[] rgbBytes = null; + private int yRowStride; + static final int kMaxChannelValue = 262143; // Only return this many results. private static final int NUM_DETECTIONS = 10; @@ -68,7 +84,9 @@ public class TFLiteObjectDetectionAPIModel implements Detector { private int inputSize; // Pre-allocated buffers. private final List labels = new ArrayList<>(); - private int[] intValues; + /** + * Input image TensorBuffer. + */ // outputLocations: array of shape [Batchsize, NUM_DETECTIONS,4] // contains the location of detected boxes private float[][][] outputLocations; @@ -81,16 +99,19 @@ public class TFLiteObjectDetectionAPIModel implements Detector { // numDetections: array of shape [Batchsize] // contains the number of detected boxes private float[] numDetections; - - private ByteBuffer imgData; - private MappedByteBuffer tfLiteModel; private Interpreter.Options tfLiteOptions; private Interpreter tfLite; - private TFLiteObjectDetectionAPIModel() {} + private ByteBuffer imgData; + private int[] intValues; - /** Memory-map the model file in Assets. */ + private TFLiteObjectDetectionAPIModel() { + } + + /** + * Memory-map the model file in Assets. 
+ */ private static MappedByteBuffer loadModelFile(AssetManager assets, String modelFilename) throws IOException { AssetFileDescriptor fileDescriptor = assets.openFd(modelFilename); @@ -106,9 +127,10 @@ private static MappedByteBuffer loadModelFile(AssetManager assets, String modelF * * @param modelFilename The model file path relative to the assets folder * @param labelFilename The label file path relative to the assets folder - * @param inputSize The size of image input - * @param isQuantized Boolean representing model is quantized or not + * @param inputSize The size of image input + * @param isQuantized Boolean representing model is quantized or not */ + @SuppressLint("LongLogTag") public static Detector create( final Context context, final String modelFilename, @@ -116,14 +138,20 @@ public static Detector create( final int inputSize, final boolean isQuantized) throws IOException { - final TFLiteObjectDetectionAPIModel d = new TFLiteObjectDetectionAPIModel(); + final TFLiteObjectDetectionAPIModel d = new TFLiteObjectDetectionAPIModel() { + + @Override + protected TensorOperator getPreprocessNormalizeOp() { + return null; + } + }; MappedByteBuffer modelFile = loadModelFile(context.getAssets(), modelFilename); MetadataExtractor metadata = new MetadataExtractor(modelFile); try (BufferedReader br = - new BufferedReader( - new InputStreamReader( - metadata.getAssociatedFile(labelFilename), Charset.defaultCharset()))) { + new BufferedReader( + new InputStreamReader( + metadata.getAssociatedFile(labelFilename), Charset.defaultCharset()))) { String line; while ((line = br.readLine()) != null) { Log.w(TAG, line); @@ -145,6 +173,8 @@ public static Detector create( } d.isModelQuantized = isQuantized; + Log.i("QUANTIZED", String.valueOf(isQuantized)); + // Pre-allocate buffers. int numBytesPerChannel; if (isQuantized) { @@ -163,49 +193,53 @@ public static Detector create( return d; } + @SuppressLint("LongLogTag") @Override - public List recognizeImage(final Bitmap bitmap) { + public List recognizeImage(final Image image, int sensorOrientation) { // Log this method so that it can be analyzed with systrace. Trace.beginSection("recognizeImage"); - Trace.beginSection("preprocessBitmap"); - // Preprocess the image data from 0-255 int to normalized float based - // on the provided parameters. - bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight()); - - imgData.rewind(); - for (int i = 0; i < inputSize; ++i) { - for (int j = 0; j < inputSize; ++j) { - int pixelValue = intValues[i * inputSize + j]; - if (isModelQuantized) { - // Quantized model - imgData.put((byte) ((pixelValue >> 16) & 0xFF)); - imgData.put((byte) ((pixelValue >> 8) & 0xFF)); - imgData.put((byte) (pixelValue & 0xFF)); - } else { // Float model - imgData.putFloat((((pixelValue >> 16) & 0xFF) - IMAGE_MEAN) / IMAGE_STD); - imgData.putFloat((((pixelValue >> 8) & 0xFF) - IMAGE_MEAN) / IMAGE_STD); - imgData.putFloat(((pixelValue & 0xFF) - IMAGE_MEAN) / IMAGE_STD); - } - } - } + Trace.beginSection("loadImage"); + long startTimeForLoadImage = SystemClock.uptimeMillis(); + + //Convert image to Bitmap + Bitmap bitmap = imageToRGB(image, image.getWidth(), image.getHeight()); + Log.v("TFLITE_w", String.valueOf(image.getWidth())); + Log.v("TFLITE_h", String.valueOf(image.getHeight())); + + //Loads bitmap into a TensorImage. 
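A quick aside before the TensorImage load the comment above announces: the anonymous subclass built in create() overrides the new abstract getPreprocessNormalizeOp() to return null, which fits a fully quantized SSD that consumes raw 0-255 bytes. A float-model variant would return a real op; a hedged sketch using the support library's NormalizeOp, where the 127.5 constants are illustrative stand-ins for the IMAGE_MEAN/IMAGE_STD convention of the deleted preprocessing loop:

import org.tensorflow.lite.support.common.TensorOperator;
import org.tensorflow.lite.support.common.ops.NormalizeOp;

@Override
protected TensorOperator getPreprocessNormalizeOp() {
  // Maps [0, 255] pixel values to [-1, 1]: (x - 127.5) / 127.5.
  // The real constants come from the model card, not from this patch.
  return new NormalizeOp(127.5f, 127.5f);
}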
+ int imageTensorIndex = 0; + int[] imageShape = tfLite.getInputTensor(imageTensorIndex).shape(); + DataType imageDataType = tfLite.getInputTensor(imageTensorIndex).dataType(); + //Log.v("TFLITE", String.valueOf(imageShape[0])); + + TensorImage tensorImage = new TensorImage(imageDataType); + tensorImage.load(bitmap); + + // Creates processor for the TensorImage. + //int cropSize = min(bitmap.getWidth(), bitmap.getHeight()); + int numRotation = sensorOrientation / 90; + + ImageProcessor imageProcessor = new ImageProcessor.Builder() + .add(new ResizeOp(inputSize, inputSize, ResizeOp.ResizeMethod.BILINEAR)) + .add(new Rot90Op(numRotation)) + .build(); + + TensorImage tensorImageInput = imageProcessor.process(tensorImage); + + long endTimeForLoadImage = SystemClock.uptimeMillis(); + Trace.endSection(); + Log.v(TAG, "Time-Cost to load the image: " + (endTimeForLoadImage - startTimeForLoadImage)); Trace.endSection(); // preprocessBitmap - // Copy the input data into TensorFlow. - Trace.beginSection("feed"); - outputLocations = new float[1][NUM_DETECTIONS][4]; - outputClasses = new float[1][NUM_DETECTIONS]; - outputScores = new float[1][NUM_DETECTIONS]; - numDetections = new float[1]; + Object[] inputArray = {tensorImageInput.getBuffer()}; - Object[] inputArray = {imgData}; Map outputMap = new HashMap<>(); outputMap.put(0, outputLocations); outputMap.put(1, outputClasses); outputMap.put(2, outputScores); outputMap.put(3, numDetections); Trace.endSection(); - // Run the inference call. Trace.beginSection("run"); tfLite.runForMultipleInputsOutputs(inputArray, outputMap); @@ -227,21 +261,130 @@ public List recognizeImage(final Bitmap bitmap) { for (int i = 0; i < numDetectionsOutput; ++i) { final RectF detection = new RectF( - outputLocations[0][i][1] * inputSize, - outputLocations[0][i][0] * inputSize, - outputLocations[0][i][3] * inputSize, - outputLocations[0][i][2] * inputSize); + outputLocations[0][i][1] * image.getWidth(), + outputLocations[0][i][0] * image.getHeight(), + outputLocations[0][i][3] * image.getWidth(), + outputLocations[0][i][2] * image.getHeight()); recognitions.add( new Recognition( "" + i, labels.get((int) outputClasses[0][i]), outputScores[0][i], detection)); } - Trace.endSection(); // "recognizeImage" + Trace.endSection(); return recognitions; } + private Bitmap imageToRGB(final Image image, final int width, final int height) { + if (rgbBytes == null) { + rgbBytes = new int[width * height]; + } + + Bitmap rgbFrameBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); + + try { + + if (image == null) { + return null; + } + + Log.v(TAG, "rgbBytes length: " + rgbBytes.length); + final Image.Plane[] planes = image.getPlanes(); + fillBytesCameraX(planes, yuvBytes); + yRowStride = planes[0].getRowStride(); + final int uvRowStride = planes[1].getRowStride(); + final int uvPixelStride = planes[1].getPixelStride(); + + convertYUV420ToARGB8888( + yuvBytes[0], + yuvBytes[1], + yuvBytes[2], + width, + height, + yRowStride, + uvRowStride, + uvPixelStride, + rgbBytes); + + rgbFrameBitmap.setPixels(rgbBytes, 0, width, 0, 0, width, height); + + + } catch (final Exception e) { + Log.e(TAG, "Failed to convert Image to Bitmap", e); + } + + return rgbFrameBitmap; + } + + private void fillBytesCameraX(final Image.Plane[] planes, final byte[][] yuvBytes) { + // Because of the variable row stride it's not possible to know in + // advance the actual necessary dimensions of the yuv planes.
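Aside: the four arrays wired into outputMap above follow the standard TFLite SSD-postprocess output layout. A self-contained sketch of the shapes (batch size 1 and NUM_DETECTIONS = 10, as in the patch; the class name is illustrative):

    import java.util.HashMap;
    import java.util.Map;

    final class SsdOutputs {
      static final int NUM_DETECTIONS = 10;

      // Allocate the buffers in the index order runForMultipleInputsOutputs expects.
      static Map<Integer, Object> allocate() {
        float[][][] locations = new float[1][NUM_DETECTIONS][4]; // normalized [top, left, bottom, right]
        float[][] classes = new float[1][NUM_DETECTIONS];        // label indices into the labelmap
        float[][] scores = new float[1][NUM_DETECTIONS];         // confidence per box
        float[] numDetections = new float[1];                    // count of valid boxes
        Map<Integer, Object> outputMap = new HashMap<>();
        outputMap.put(0, locations);
        outputMap.put(1, classes);
        outputMap.put(2, scores);
        outputMap.put(3, numDetections);
        return outputMap;
      }
    }

The [top, left, bottom, right] ordering is why recognizeImage reads indices 1 and 3 as x coordinates (scaled by image width) and indices 0 and 2 as y coordinates (scaled by image height).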
+ for (int i = 0; i < planes.length; ++i) { + final ByteBuffer buffer = planes[i].getBuffer(); + if (yuvBytes[i] == null) { + yuvBytes[i] = new byte[buffer.capacity()]; + } + buffer.get(yuvBytes[i]); + } + } + + private static int YUV2RGB(int y, int u, int v) { + // Adjust and check YUV values + y = Math.max((y - 16), 0); + u -= 128; + v -= 128; + + // This is the floating point equivalent. We do the conversion in integer + // because some Android devices do not have floating point in hardware. + // nR = (int)(1.164 * nY + 1.596 * nV); + // nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU); + // nB = (int)(1.164 * nY + 2.018 * nU); + int y1192 = 1192 * y; + int r = (y1192 + 1634 * v); + int g = (y1192 - 833 * v - 400 * u); + int b = (y1192 + 2066 * u); + + // Clipping RGB values to be inside boundaries [ 0 , kMaxChannelValue ] + r = r > kMaxChannelValue ? kMaxChannelValue : (Math.max(r, 0)); + g = g > kMaxChannelValue ? kMaxChannelValue : (Math.max(g, 0)); + b = b > kMaxChannelValue ? kMaxChannelValue : (Math.max(b, 0)); + + return 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff); + } + + public static void convertYUV420ToARGB8888( + byte[] yData, + byte[] uData, + byte[] vData, + int width, + int height, + int yRowStride, + int uvRowStride, + int uvPixelStride, + int[] out) { + int yp = 0; + for (int j = 0; j < height; j++) { + int pY = yRowStride * j; + int pUV = uvRowStride * (j >> 1); + + for (int i = 0; i < width; i++) { + int uv_offset = pUV + (i >> 1) * uvPixelStride; + + out[yp++] = YUV2RGB(0xff & yData[pY + i], 0xff & uData[uv_offset], 0xff & vData[uv_offset]); + } + } + } + + private Bitmap rotateBitmap(Bitmap bitmap, int rotationDegrees) { + Matrix rotationMatrix = new Matrix(); + rotationMatrix.postRotate((float) rotationDegrees); + Bitmap rotatedBitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), rotationMatrix, true); + bitmap.recycle(); + return rotatedBitmap; + } + @Override - public void enableStatLogging(final boolean logStats) {} + public void enableStatLogging(final boolean logStats) { + } @Override public String getStatString() { @@ -257,17 +400,17 @@ public void close() { } @Override - public void setNumThreads(int numThreads) { + public void setUseNNAPI(boolean isChecked) { if (tfLite != null) { - tfLiteOptions.setNumThreads(numThreads); + tfLiteOptions.setUseNNAPI(isChecked); recreateInterpreter(); } } @Override - public void setUseNNAPI(boolean isChecked) { + public void setNumThreads(int numThreads) { if (tfLite != null) { - tfLiteOptions.setUseNNAPI(isChecked); + tfLiteOptions.setNumThreads(numThreads); recreateInterpreter(); } } @@ -276,4 +419,67 @@ private void recreateInterpreter() { tfLite.close(); tfLite = new Interpreter(tfLiteModel, tfLiteOptions); } + + /** + * Gets the TensorOperator to normalize the input image in preprocessing. + */ + protected abstract TensorOperator getPreprocessNormalizeOp(); + + public static Matrix getTransformationMatrix( + final int srcWidth, + final int srcHeight, + final int dstWidth, + final int dstHeight, + final int applyRotation, + final boolean maintainAspectRatio) { + final Matrix matrix = new Matrix(); + + // Translate so center of image is at origin. + matrix.postTranslate(-srcWidth / 2f, -srcHeight / 2f); + + if (applyRotation == 90) { + // Rotate around origin. + matrix.postRotate(180); + } + + // Account for the already applied rotation, if any, and then determine how + // much scaling is needed for each axis.
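For readability, here is the same BT.601 conversion as YUV2RGB above in plain floating point, clamped to 8 bits per channel. The fixed-point version scales these coefficients by 1024, which is why it clamps against kMaxChannelValue = 262143 (2^18 - 1) and shifts the channels back down when packing ARGB:

    final class YuvReference {
      // Floating-point twin of the integer YUV2RGB (ITU-R BT.601, studio-swing luma).
      static int yuv2RgbFloat(int y, int u, int v) {
        float yf = Math.max(y - 16, 0);
        float uf = u - 128;
        float vf = v - 128;
        int r = clamp(Math.round(1.164f * yf + 1.596f * vf));
        int g = clamp(Math.round(1.164f * yf - 0.813f * vf - 0.391f * uf));
        int b = clamp(Math.round(1.164f * yf + 2.018f * uf));
        return 0xff000000 | (r << 16) | (g << 8) | b;
      }

      private static int clamp(int channel) {
        return Math.min(255, Math.max(0, channel));
      }
    }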
+ final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0; + + final int inWidth = transpose ? srcHeight : srcWidth; + final int inHeight = transpose ? srcWidth : srcHeight; + + final float margin = 1f; + + // Apply scaling if necessary. + if (inWidth != dstWidth || inHeight != dstHeight) { + final float scaleFactorX = dstWidth / (float) inWidth; + final float scaleFactorY = dstHeight / (float) inHeight; + + if (maintainAspectRatio) { + // Scale by minimum factor so that dst is filled completely while + // maintaining the aspect ratio. Some image may fall off the edge. + final float scaleFactor = Math.min(scaleFactorX, scaleFactorY); + if (applyRotation == 90) { + // Rotate around origin. + matrix.postScale(scaleFactor - margin, scaleFactor + margin); + } else { + matrix.postScale(scaleFactor, scaleFactor); + } + + } else { + // Scale exactly to fill dst from src. + matrix.postScale(scaleFactorX, scaleFactorY); + } + } + + // Translate back from origin centered reference to destination frame. + if (applyRotation == 90) { + matrix.postTranslate(dstWidth / 3f, dstHeight / 2f); + } else if (applyRotation == 0 || applyRotation == 180) { + matrix.postTranslate(dstWidth / 2f, dstHeight / 3f); + } + + return matrix; + } } diff --git a/lite/examples/object_detection/android/lib_task_api/build.gradle b/lite/examples/object_detection/android/lib_task_api/build.gradle index 721130838d9..8d7b8241753 100644 --- a/lite/examples/object_detection/android/lib_task_api/build.gradle +++ b/lite/examples/object_detection/android/lib_task_api/build.gradle @@ -38,6 +38,6 @@ android { dependencies { implementation fileTree(dir: 'libs', include: ['*.jar']) - implementation 'androidx.appcompat:appcompat:1.1.0' - implementation 'org.tensorflow:tensorflow-lite-task-vision:0.1.0' + implementation 'androidx.appcompat:appcompat:1.3.1' + implementation 'org.tensorflow:tensorflow-lite-task-vision:0.2.0' } diff --git a/lite/examples/object_detection/android/lib_task_api/src/main/java/org/tensorflow/lite/examples/detection/tflite/Detector.java b/lite/examples/object_detection/android/lib_task_api/src/main/java/org/tensorflow/lite/examples/detection/tflite/Detector.java index 49de32517c3..c1d001464b7 100644 --- a/lite/examples/object_detection/android/lib_task_api/src/main/java/org/tensorflow/lite/examples/detection/tflite/Detector.java +++ b/lite/examples/object_detection/android/lib_task_api/src/main/java/org/tensorflow/lite/examples/detection/tflite/Detector.java @@ -15,13 +15,16 @@ package org.tensorflow.lite.examples.detection.tflite; -import android.graphics.Bitmap; import android.graphics.RectF; +import android.media.Image; + import java.util.List; -/** Generic interface for interacting with different recognition engines. */ +/** + * Generic interface for interacting with different recognition engines. + */ public interface Detector { - List recognizeImage(Bitmap bitmap); + List recognizeImage(Image image, int sensorOrientation); void enableStatLogging(final boolean debug); @@ -33,7 +36,9 @@ public interface Detector { void setUseNNAPI(boolean isChecked); - /** An immutable result returned by a Detector describing what was recognized. */ + /** + * An immutable result returned by a Detector describing what was recognized. + */ public class Recognition { /** * A unique identifier for what has been recognized. Specific to the class, not the instance of @@ -41,7 +46,9 @@ public class Recognition { */ private final String id; - /** Display name for the recognition. */ + /** + * Display name for the recognition. 
+ */ private final String title; /** @@ -49,7 +56,9 @@ public class Recognition { */ private final Float confidence; - /** Optional location within the source image for the location of the recognized object. */ + /** + * Optional location within the source image for the location of the recognized object. + */ private RectF location; public Recognition( diff --git a/lite/examples/object_detection/android/lib_task_api/src/main/java/org/tensorflow/lite/examples/detection/tflite/TFLiteObjectDetectionAPIModel.java b/lite/examples/object_detection/android/lib_task_api/src/main/java/org/tensorflow/lite/examples/detection/tflite/TFLiteObjectDetectionAPIModel.java index f66b3556f16..bda11f478fc 100644 --- a/lite/examples/object_detection/android/lib_task_api/src/main/java/org/tensorflow/lite/examples/detection/tflite/TFLiteObjectDetectionAPIModel.java +++ b/lite/examples/object_detection/android/lib_task_api/src/main/java/org/tensorflow/lite/examples/detection/tflite/TFLiteObjectDetectionAPIModel.java @@ -16,14 +16,22 @@ package org.tensorflow.lite.examples.detection.tflite; import android.content.Context; -import android.graphics.Bitmap; +import android.graphics.Matrix; +import android.graphics.Rect; +import android.media.Image; + +import static java.lang.Math.min; + import android.os.Trace; + import java.io.IOException; import java.nio.MappedByteBuffer; import java.util.ArrayList; import java.util.List; + import org.tensorflow.lite.support.common.FileUtil; import org.tensorflow.lite.support.image.TensorImage; +import org.tensorflow.lite.task.core.vision.ImageProcessingOptions; import org.tensorflow.lite.task.vision.detector.Detection; import org.tensorflow.lite.task.vision.detector.ObjectDetector; import org.tensorflow.lite.task.vision.detector.ObjectDetector.ObjectDetectorOptions; @@ -45,19 +53,26 @@ * href="https://www.tensorflow.org/lite/convert/metadata#read_the_metadata_from_models">Read the * metadata from models */ + public class TFLiteObjectDetectionAPIModel implements Detector { private static final String TAG = "TFLiteObjectDetectionAPIModelWithTaskApi"; - /** Only return this many results. */ + /** + * Only return this many results. + */ private static final int NUM_DETECTIONS = 10; - private final MappedByteBuffer modelBuffer; + private MappedByteBuffer modelBuffer; - /** An instance of the driver class to run model inference with Tensorflow Lite. */ + /** + * An instance of the driver class to run model inference with Tensorflow Lite. + */ private ObjectDetector objectDetector; - /** Builder of the options used to config the ObjectDetector. */ - private final ObjectDetectorOptions.Builder optionsBuilder; + /** + * Builder of the options used to config the ObjectDetector. + */ + private ObjectDetectorOptions.Builder optionsBuilder; /** * Initializes a native TensorFlow session for classifying images. 
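The Task API implementation keeps modelBuffer and optionsBuilder as fields precisely so the detector can be torn down and rebuilt when the threading or NNAPI options change. A minimal sketch of that construction path (the file name and the setMaxResults value are assumptions for illustration, although the patch's own NUM_DETECTIONS is also 10):

    import android.content.Context;
    import java.io.IOException;
    import java.nio.MappedByteBuffer;
    import org.tensorflow.lite.support.common.FileUtil;
    import org.tensorflow.lite.task.vision.detector.ObjectDetector;
    import org.tensorflow.lite.task.vision.detector.ObjectDetector.ObjectDetectorOptions;

    final class TaskApiFactory {
      static ObjectDetector build(Context context) throws IOException {
        // Memory-map the model once; keep the buffer if the detector may be recreated later.
        MappedByteBuffer modelBuffer = FileUtil.loadMappedFile(context, "detect.tflite"); // name illustrative
        ObjectDetectorOptions.Builder optionsBuilder =
            ObjectDetectorOptions.builder().setMaxResults(10);
        return ObjectDetector.createFromBufferAndOptions(modelBuffer, optionsBuilder.build());
      }
    }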
@@ -68,16 +83,15 @@ public class TFLiteObjectDetectionAPIModel implements Detector { * * @param modelFilename The model file path relative to the assets folder * @param labelFilename The label file path relative to the assets folder - * @param inputSize The size of image input - * @param isQuantized Boolean representing model is quantized or not + * @param inputSize The size of image input + * @param isQuantized Boolean representing model is quantized or not */ public static Detector create( final Context context, final String modelFilename, final String labelFilename, final int inputSize, - final boolean isQuantized) - throws IOException { + final boolean isQuantized) throws IOException { return new TFLiteObjectDetectionAPIModel(context, modelFilename); } @@ -88,10 +102,29 @@ private TFLiteObjectDetectionAPIModel(Context context, String modelFilename) thr } @Override - public List recognizeImage(final Bitmap bitmap) { + public List recognizeImage(final Image image, int sensorOrientation) { // Log this method so that it can be analyzed with systrace. Trace.beginSection("recognizeImage"); - List results = objectDetector.detect(TensorImage.fromBitmap(bitmap)); + TensorImage inputImage = new TensorImage(); + inputImage.load(image); + int width = image.getWidth(); + int height = image.getHeight(); + int cropSize = min(width, height); + + ImageProcessingOptions imageOptions = + ImageProcessingOptions.builder() + .setOrientation(getOrientation(sensorOrientation)) + // Set the ROI to the center of the image. + .setRoi( + new Rect( + /*left=*/ (width - cropSize) / 2, + /*top=*/ (height - cropSize) / 2, + /*right=*/ (width + cropSize) / 2, + /*bottom=*/ (height + cropSize) / 2)) + .build(); + + List results = objectDetector.detect(inputImage, imageOptions); + // Converts a list of {@link Detection} objects into a list of {@link Recognition} objects // to match the interface of other inference method, such as using the recognizeImage(final Bitmap bitmap) { return recognitions; } + /** + * See http://jpegclub.org/exif_orientation.html for info + * + * @param cameraOrientation which is the degrees of the Image CameraX is providing + * @return orientation to be used with ImageProcessingOptions + */ + private static ImageProcessingOptions.Orientation getOrientation(int cameraOrientation) { + ImageProcessingOptions.Orientation orientation; + switch (cameraOrientation / 90) { + case 1: + orientation = ImageProcessingOptions.Orientation.LEFT_TOP; + break; + case 2: + orientation = ImageProcessingOptions.Orientation.BOTTOM_LEFT; + break; + case 3: + orientation = ImageProcessingOptions.Orientation.RIGHT_BOTTOM; + break; + default: + orientation = ImageProcessingOptions.Orientation.TOP_RIGHT; + } + + return orientation; + } + @Override - public void enableStatLogging(final boolean logStats) {} + public void enableStatLogging(final boolean logStats) { + } @Override public String getStatString() { @@ -144,5 +203,56 @@ public void setUseNNAPI(boolean isChecked) { private void recreateDetector() { objectDetector.close(); objectDetector = ObjectDetector.createFromBufferAndOptions(modelBuffer, optionsBuilder.build()); + + } + + public static Matrix getTransformationMatrix( + final int srcWidth, + final int srcHeight, + final int dstWidth, + final int dstHeight, + final int applyRotation, + final boolean maintainAspectRatio) { + final Matrix matrix = new Matrix(); + + // Translate so center of image is at origin. 
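Returning to the ROI set on ImageProcessingOptions above: it centers a square crop of side min(width, height) so the detector sees an undistorted region of the frame. A tiny self-contained check of that arithmetic (class name illustrative):

    import android.graphics.Rect;

    final class RoiUtil {
      // Center square of side min(width, height), matching the Rect built in recognizeImage.
      static Rect centerSquare(int width, int height) {
        int cropSize = Math.min(width, height);
        return new Rect(
            (width - cropSize) / 2,   // left
            (height - cropSize) / 2,  // top
            (width + cropSize) / 2,   // right
            (height + cropSize) / 2); // bottom
      }
    }

For a 640x480 frame this yields Rect(80, 0, 560, 480), i.e. a 480x480 window centered horizontally.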
+ matrix.postTranslate(-srcWidth / 2f, -srcHeight / 2f); + + if (applyRotation != 0) { + // Rotate around origin. + matrix.postRotate(applyRotation); + } + + // Account for the already applied rotation, if any, and then determine how + // much scaling is needed for each axis. + final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0; + + final int inWidth = transpose ? srcHeight : srcWidth; + final int inHeight = transpose ? srcWidth : srcHeight; + + // Apply scaling if necessary. + if (inWidth != dstWidth || inHeight != dstHeight) { + final float scaleFactorX = dstWidth / (float) inWidth; + final float scaleFactorY = dstHeight / (float) inHeight; + + if (maintainAspectRatio) { + // Scale by minimum factor so that dst is filled completely while + // maintaining the aspect ratio. Some image may fall off the edge. + final float scaleFactor = Math.min(scaleFactorX, scaleFactorY); + matrix.postScale(scaleFactor, scaleFactor); + } else { + // Scale exactly to fill dst from src. + matrix.postScale(scaleFactorX, scaleFactorY); + } + } + + // Translate back from origin centered reference to destination frame. + if (applyRotation == 90) { + matrix.postTranslate(dstWidth / 3f, dstHeight / 2f); + } else if (applyRotation == 0 || applyRotation == 180) { + matrix.postTranslate(dstWidth / 2f, dstHeight / 3f); + } + + return matrix; } -} +} \ No newline at end of file
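Finally, a usage note for getTransformationMatrix: callers typically invert the frame-to-crop transform to map detections back onto the camera frame. A sketch with illustrative sizes (480x640 frame, 300x300 model input, 90-degree rotation), not taken from the patch:

    import android.graphics.Matrix;
    import android.graphics.RectF;

    final class BoxMapper {
      // Map a box from model-input coordinates back to frame coordinates.
      static RectF toFrameCoordinates(RectF modelBox) {
        Matrix frameToCrop = TFLiteObjectDetectionAPIModel.getTransformationMatrix(
            480, 640,  // source frame width, height
            300, 300,  // model input width, height
            90,        // rotation already applied to the frame
            true);     // maintain aspect ratio
        Matrix cropToFrame = new Matrix();
        frameToCrop.invert(cropToFrame);
        RectF frameBox = new RectF(modelBox);
        cropToFrame.mapRect(frameBox);
        return frameBox;
      }
    }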