diff --git a/android/vision/.gitignore b/android/vision/.gitignore
new file mode 100644
index 00000000..b52c533c
--- /dev/null
+++ b/android/vision/.gitignore
@@ -0,0 +1,4 @@
+# Project exclude paths
+/.gradle/
+/app/build/
+/app/build/intermediates/javac/debug/classes/
\ No newline at end of file
diff --git a/android/vision/README.md b/android/vision/README.md
new file mode 100644
index 00000000..883d17de
--- /dev/null
+++ b/android/vision/README.md
@@ -0,0 +1,45 @@
+
+# LabGraph Vision Object Detection Android Demo
+
+### Overview
+
+This is an object detection app that continuously detects the objects (bounding boxes, classes, and confidence) in the frames of a video imported from the device gallery, with the option to use a quantized [MobileNetV2](https://storage.cloud.google.com/tf_model_garden/vision/qat/mobilenetv2_ssd_coco/mobilenetv2_ssd_256_uint8.tflite), [EfficientDet Lite 0](https://storage.googleapis.com/mediapipe-tasks/object_detector/efficientdet_lite0_uint8.tflite), or [EfficientDet Lite2](https://storage.googleapis.com/mediapipe-tasks/object_detector/efficientdet_lite2_uint8.tflite) model.
+
+The model files are downloaded by a Gradle script when you build and run the app. You don't need to do any steps to download TFLite models into the project explicitly unless you wish to use your own models. If you do use your own models, place them into the app's *assets* directory.
+
+This application should be run on a physical Android device to take advantage of the gallery, though it will enable you to use an emulator for opening locally stored files.
+
+## Build the demo using Android Studio
+
+### Prerequisites
+
+* The **[Android Studio](https://developer.android.com/studio/index.html)**
+    IDE. This sample has been tested on Android Studio Flamingo.
+
+* A physical Android device with a minimum OS version of SDK 24 (Android 7.0 -
+    Nougat) with developer mode enabled. The process of enabling developer mode
+    may vary by device.
You may also use an Android emulator with more limited + functionality. + +### Building + +* Open Android Studio. From the Welcome screen, select Open an existing + Android Studio project. + +* From the Open File or Project window that appears, navigate to and select + the labgraph/android/vision directory. Click OK. You may + be asked if you trust the project. Select Trust. + +* If it asks you to do a Gradle Sync, click OK. + +* With your Android device connected to your computer and developer mode + enabled, click on the green Run arrow in Android Studio. + +### Models used + +Downloading, extraction, and placing the models into the *assets* folder is +managed automatically by the **download.gradle** file. + +### Results + +The results of the detection are logged into the LogCat console under the "Result" field. diff --git a/android/vision/app/build.gradle b/android/vision/app/build.gradle new file mode 100644 index 00000000..eab08a0b --- /dev/null +++ b/android/vision/app/build.gradle @@ -0,0 +1,106 @@ +/* + * Copyright 2022 The TensorFlow Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+apply plugin: 'com.android.application'
+apply plugin: 'kotlin-android'
+apply plugin: 'kotlin-kapt'
+apply plugin: "androidx.navigation.safeargs"
+apply plugin: 'de.undercouch.download'
+
+android {
+    compileSdkVersion 32
+    defaultConfig {
+        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
+        applicationId "labgraph_vision.objectdetection"
+        minSdkVersion 24
+        targetSdkVersion 32
+        versionCode 1
+        versionName "1.0.0"
+    }
+
+    dataBinding {
+        enabled = true
+    }
+
+    // Java/Kotlin language level comes from the root project so all modules agree.
+    compileOptions {
+        sourceCompatibility rootProject.ext.java_version
+        targetCompatibility rootProject.ext.java_version
+    }
+
+    kotlinOptions {
+        jvmTarget = rootProject.ext.java_version
+    }
+
+    buildTypes {
+        release {
+            minifyEnabled false
+            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
+        }
+    }
+
+    buildFeatures {
+        viewBinding true
+    }
+    // Keep .tflite model files uncompressed in the APK so they can be memory-mapped at load time.
+    androidResources {
+        noCompress 'tflite'
+    }
+
+}
+
+// import DownloadModels task
+project.ext.ASSET_DIR = projectDir.toString() + '/src/main/assets'
+
+// Download default models; if you wish to use your own models then
+// place them in the "assets" directory and comment out this line.
+apply from:'download_models.gradle'
+
+dependencies {
+    // Kotlin lang
+    implementation 'androidx.core:core-ktx:1.6.0'
+    implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlin_version"
+    implementation 'org.jetbrains.kotlinx:kotlinx-coroutines-android:1.5.0'
+
+    // App compat and UI things
+    implementation 'androidx.appcompat:appcompat:1.3.1'
+    implementation 'androidx.lifecycle:lifecycle-runtime-ktx:2.3.1'
+    implementation 'androidx.constraintlayout:constraintlayout:2.0.4'
+    implementation 'com.google.android.material:material:1.0.0'
+    implementation 'androidx.localbroadcastmanager:localbroadcastmanager:1.0.0'
+    implementation 'androidx.fragment:fragment-ktx:1.5.4'
+
+    // Navigation library
+    def nav_version = "2.3.5"
+    implementation "androidx.navigation:navigation-fragment-ktx:$nav_version"
+    implementation "androidx.navigation:navigation-ui-ktx:$nav_version"
+
+    // CameraX core library
+    def camerax_version = '1.1.0'
+    implementation "androidx.camera:camera-core:$camerax_version"
+
+    // CameraX Camera2 extensions
+    implementation "androidx.camera:camera-camera2:$camerax_version"
+
+    // CameraX Lifecycle library
+    implementation "androidx.camera:camera-lifecycle:$camerax_version"
+
+    // CameraX View class
+    implementation "androidx.camera:camera-view:$camerax_version"
+
+    //WindowManager
+    implementation 'androidx.window:window:1.0.0-alpha09'
+
+    // MediaPipe Tasks vision API (provides ObjectDetector)
+    implementation 'com.google.mediapipe:tasks-vision:0.10.0'
+}
diff --git a/android/vision/app/download_models.gradle b/android/vision/app/download_models.gradle
new file mode 100644
index 00000000..49d712db
--- /dev/null
+++ b/android/vision/app/download_models.gradle
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +task downloadModelFile0(type: Download) { + src 'https://storage.googleapis.com/mediapipe-models/object_detector/efficientdet_lite0/float32/1/efficientdet_lite0.tflite' + dest project.ext.ASSET_DIR + '/efficientdet-lite0.tflite' + overwrite false +} + +task downloadModelFile1(type: Download) { + src 'https://storage.googleapis.com/mediapipe-models/object_detector/efficientdet_lite2/float32/1/efficientdet_lite2.tflite' + dest project.ext.ASSET_DIR + '/efficientdet-lite2.tflite' + overwrite false +} + +preBuild.dependsOn downloadModelFile0, downloadModelFile1 \ No newline at end of file diff --git a/android/vision/app/src/main/AndroidManifest.xml b/android/vision/app/src/main/AndroidManifest.xml new file mode 100644 index 00000000..e6e266e2 --- /dev/null +++ b/android/vision/app/src/main/AndroidManifest.xml @@ -0,0 +1,58 @@ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/android/vision/app/src/main/assets/efficientdet-lite0.tflite b/android/vision/app/src/main/assets/efficientdet-lite0.tflite new file mode 100644 index 00000000..74fa351d Binary files /dev/null and b/android/vision/app/src/main/assets/efficientdet-lite0.tflite differ diff --git a/android/vision/app/src/main/assets/efficientdet-lite2.tflite b/android/vision/app/src/main/assets/efficientdet-lite2.tflite new file mode 100644 index 00000000..fffedaa4 Binary files /dev/null and b/android/vision/app/src/main/assets/efficientdet-lite2.tflite differ diff --git a/android/vision/app/src/main/java/labgraph_vision/objectdetection/MainActivity.kt 
b/android/vision/app/src/main/java/labgraph_vision/objectdetection/MainActivity.kt
new file mode 100644
index 00000000..1409a094
--- /dev/null
+++ b/android/vision/app/src/main/java/labgraph_vision/objectdetection/MainActivity.kt
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2022 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package labgraph_vision.objectdetection
+
+import android.os.Bundle
+import androidx.appcompat.app.AppCompatActivity
+import androidx.navigation.fragment.NavHostFragment
+import androidx.navigation.ui.setupWithNavController
+import labgraph_vision.objectdetection.databinding.ActivityMainBinding
+
+/**
+ * Main entry point into our app. This app follows the single-activity pattern, and all
+ * functionality is implemented in the form of fragments.
+ */
+class MainActivity : AppCompatActivity() {
+
+    private lateinit var activityMainBinding: ActivityMainBinding
+
+    override fun onCreate(savedInstanceState: Bundle?) {
+        super.onCreate(savedInstanceState)
+        // Inflate the view-binding layout and install it as the activity's content view.
+        activityMainBinding = ActivityMainBinding.inflate(layoutInflater)
+        setContentView(activityMainBinding.root)
+
+        // Wire the bottom navigation bar to the Navigation component's controller.
+        val navHostFragment =
+            supportFragmentManager.findFragmentById(R.id.fragment_container) as NavHostFragment
+        val navController = navHostFragment.navController
+        activityMainBinding.navigation.setupWithNavController(navController)
+        activityMainBinding.navigation.setOnNavigationItemReselectedListener {
+            // ignore the reselection
+        }
+    }
+
+    override fun onBackPressed() {
+        // Back always exits the app instead of popping the fragment back stack.
+        finish()
+    }
+}
diff --git a/android/vision/app/src/main/java/labgraph_vision/objectdetection/MainViewModel.kt b/android/vision/app/src/main/java/labgraph_vision/objectdetection/MainViewModel.kt
new file mode 100644
index 00000000..54a291bc
--- /dev/null
+++ b/android/vision/app/src/main/java/labgraph_vision/objectdetection/MainViewModel.kt
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2022 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package labgraph_vision.objectdetection
+
+import androidx.lifecycle.ViewModel
+
+/**
+ * Stores the object detector helper settings (delegate, threshold, max results, model)
+ * so they survive configuration changes and are shared across fragments.
+ */
+class MainViewModel : ViewModel() {
+    // Backing state; mutated only through the set* functions below.
+    private var delegateSetting = ObjectDetectorHelper.DELEGATE_CPU
+    private var thresholdSetting = ObjectDetectorHelper.THRESHOLD_DEFAULT
+    private var maxResultsSetting = ObjectDetectorHelper.MAX_RESULTS_DEFAULT
+    private var modelSetting = ObjectDetectorHelper.MODEL_EFFICIENTDETV0
+
+    // Read-only views consumed by the UI layer.
+    val currentDelegate: Int get() = delegateSetting
+    val currentThreshold: Float get() = thresholdSetting
+    val currentMaxResults: Int get() = maxResultsSetting
+    val currentModel: Int get() = modelSetting
+
+    fun setDelegate(delegate: Int) {
+        delegateSetting = delegate
+    }
+
+    fun setThreshold(threshold: Float) {
+        thresholdSetting = threshold
+    }
+
+    fun setMaxResults(maxResults: Int) {
+        maxResultsSetting = maxResults
+    }
+
+    fun setModel(model: Int) {
+        modelSetting = model
+    }
+}
diff --git a/android/vision/app/src/main/java/labgraph_vision/objectdetection/ObjectDetectorHelper.kt b/android/vision/app/src/main/java/labgraph_vision/objectdetection/ObjectDetectorHelper.kt
new file mode 100644
index 00000000..a962dd62
--- /dev/null
+++ b/android/vision/app/src/main/java/labgraph_vision/objectdetection/ObjectDetectorHelper.kt
@@ -0,0 +1,238 @@
+/*
+ * Copyright 2022 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package labgraph_vision.objectdetection
+
+import android.content.Context
+import android.graphics.Bitmap
+import android.media.MediaMetadataRetriever
+import android.net.Uri
+import android.os.SystemClock
+import android.util.Log
+import com.google.mediapipe.framework.image.BitmapImageBuilder
+import com.google.mediapipe.tasks.core.BaseOptions
+import com.google.mediapipe.tasks.core.Delegate
+import com.google.mediapipe.tasks.vision.core.RunningMode
+import com.google.mediapipe.tasks.vision.objectdetector.ObjectDetectionResult
+import com.google.mediapipe.tasks.vision.objectdetector.ObjectDetector
+
+/**
+ * Owns a MediaPipe ObjectDetector instance and runs detection over video files.
+ *
+ * @param threshold minimum detection confidence; detections scoring below it are dropped
+ * @param maxResults maximum number of detections returned per frame
+ * @param currentDelegate DELEGATE_CPU or DELEGATE_GPU
+ * @param currentModel MODEL_EFFICIENTDETV0 or MODEL_EFFICIENTDETV2
+ * @param runningMode MediaPipe running mode; detectVideoFile requires RunningMode.VIDEO
+ * @param objectDetectorListener receives setup/detection errors
+ */
+class ObjectDetectorHelper(
+    var threshold: Float = THRESHOLD_DEFAULT,
+    var maxResults: Int = MAX_RESULTS_DEFAULT,
+    var currentDelegate: Int = DELEGATE_CPU,
+    var currentModel: Int = MODEL_EFFICIENTDETV0,
+    var runningMode: RunningMode = RunningMode.IMAGE,
+    val context: Context,
+    // The listener is only used when running in RunningMode.LIVE_STREAM
+    var objectDetectorListener: DetectorListener? = null
+) {
+
+    // For this example this needs to be a var so it can be reset on changes. If the ObjectDetector
+    // will not change, a lazy val would be preferable.
+    private var objectDetector: ObjectDetector? = null
+
+    init {
+        setupObjectDetector()
+    }
+
+    /** Releases the underlying detector. Safe to call repeatedly. */
+    fun clearObjectDetector() {
+        objectDetector?.close()
+        objectDetector = null
+    }
+
+    // Initialize the object detector using current settings on the
+    // thread that is using it. CPU can be used with detectors
+    // that are created on the main thread and used on a background thread, but
+    // the GPU delegate needs to be used on the thread that initialized the detector
+    fun setupObjectDetector() {
+        // Set general detection options, including the hardware delegate
+        val baseOptionsBuilder = BaseOptions.builder()
+
+        // Use the specified hardware for running the model. Default to CPU
+        when (currentDelegate) {
+            DELEGATE_CPU -> {
+                baseOptionsBuilder.setDelegate(Delegate.CPU)
+            }
+            DELEGATE_GPU -> {
+                // NOTE(review): there is no runtime check that the device supports a GPU
+                // delegate; failures surface as the RuntimeException handled below.
+                baseOptionsBuilder.setDelegate(Delegate.GPU)
+            }
+        }
+
+        val modelName =
+            when (currentModel) {
+                MODEL_EFFICIENTDETV0 -> "efficientdet-lite0.tflite"
+                MODEL_EFFICIENTDETV2 -> "efficientdet-lite2.tflite"
+                else -> "efficientdet-lite0.tflite"
+            }
+
+        baseOptionsBuilder.setModelAssetPath(modelName)
+
+        try {
+            // The running mode is set once here; the original code additionally re-set it
+            // in a redundant `when (runningMode)` block, which has been removed.
+            val optionsBuilder =
+                ObjectDetector.ObjectDetectorOptions.builder()
+                    .setBaseOptions(baseOptionsBuilder.build())
+                    .setScoreThreshold(threshold)
+                    .setRunningMode(runningMode)
+                    .setMaxResults(maxResults)
+
+            val options = optionsBuilder.build()
+            objectDetector = ObjectDetector.createFromOptions(context, options)
+        } catch (e: IllegalStateException) {
+            objectDetectorListener?.onError(
+                "Object detector failed to initialize. See error logs for details"
+            )
+            Log.e(TAG, "TFLite failed to load model with error: " + e.message)
+        } catch (e: RuntimeException) {
+            // Reported with GPU_ERROR so callers can fall back to the CPU delegate.
+            objectDetectorListener?.onError(
+                "Object detector failed to initialize. See error logs for " +
+                        "details", GPU_ERROR
+            )
+            Log.e(
+                TAG,
+                "Object detector failed to load model with error: " + e.message
+            )
+        }
+    }
+
+    // Accepts the URI for a video file loaded from the user's gallery and attempts to run
+    // object detection inference on the video. This process will evaluate every frame in
+    // the video and attach the results to a bundle that will be returned.
+    fun detectVideoFile(
+        videoUri: Uri,
+        inferenceIntervalMs: Long
+    ): ResultBundle? {
+
+        if (runningMode != RunningMode.VIDEO) {
+            throw IllegalArgumentException(
+                "Attempting to call detectVideoFile" +
+                        " while not using RunningMode.VIDEO"
+            )
+        }
+
+        if (objectDetector == null) return null
+
+        // Inference time is the difference between the system time at the start and finish of the
+        // process
+        val startTime = SystemClock.uptimeMillis()
+
+        var didErrorOccurred = false
+
+        // Load frames from the video and run the object detection model.
+        val retriever = MediaMetadataRetriever()
+        retriever.setDataSource(context, videoUri)
+        val videoLengthMs =
+            retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)
+                ?.toLong()
+
+        // Note: We need to read width/height from frame instead of getting the width/height
+        // of the video directly because MediaRetriever returns frames that are smaller than the
+        // actual dimension of the video file.
+        val firstFrame = retriever.getFrameAtTime(0)
+        val width = firstFrame?.width
+        val height = firstFrame?.height
+
+        // If the video is invalid, returns a null detection result
+        if ((videoLengthMs == null) || (width == null) || (height == null)) return null
+
+        // Next, we'll get one frame every frameInterval ms, then run detection on these frames.
+        val resultList = mutableListOf<ObjectDetectionResult>()
+        // The loop below is inclusive, so numberOfFrameToRead + 1 frames are processed
+        // (timestamp 0 is always sampled).
+        val numberOfFrameToRead = videoLengthMs.div(inferenceIntervalMs)
+
+        for (i in 0..numberOfFrameToRead) {
+            val timestampMs = i * inferenceIntervalMs // ms
+
+            retriever
+                .getFrameAtTime(
+                    timestampMs * 1000, // convert from ms to micro-s
+                    MediaMetadataRetriever.OPTION_CLOSEST
+                )
+                ?.let { frame ->
+                    // Convert the video frame to ARGB_8888 which is required by the MediaPipe
+                    val argb8888Frame =
+                        if (frame.config == Bitmap.Config.ARGB_8888) frame
+                        else frame.copy(Bitmap.Config.ARGB_8888, false)
+
+                    // Convert the input Bitmap object to an MPImage object to run inference
+                    val mpImage = BitmapImageBuilder(argb8888Frame).build()
+
+                    // Run object detection using MediaPipe Object Detector API
+                    objectDetector?.detectForVideo(mpImage, timestampMs)
+                        ?.let { detectionResult ->
+                            resultList.add(detectionResult)
+                        }
+                        // BUGFIX: the original used `?: { ... }`, which only constructs a
+                        // lambda object and never invokes it, so the error branch was
+                        // silently skipped. `?: run { ... }` actually executes it.
+                        ?: run {
+                            didErrorOccurred = true
+                            objectDetectorListener?.onError(
+                                "ResultBundle could not be returned" +
+                                        " in detectVideoFile"
+                            )
+                        }
+                }
+                ?: run {
+                    didErrorOccurred = true
+                    objectDetectorListener?.onError(
+                        "Frame at specified time could not be" +
+                                " retrieved when detecting in video."
+                    )
+                }
+        }
+        Log.d("Result", resultList.toString())
+        retriever.release()
+
+        // BUGFIX: divide by the number of frames actually processed (inclusive loop =>
+        // numberOfFrameToRead + 1). The original divided by numberOfFrameToRead, which is
+        // 0 (ArithmeticException) for videos shorter than inferenceIntervalMs.
+        val inferenceTimePerFrameMs =
+            (SystemClock.uptimeMillis() - startTime).div(numberOfFrameToRead + 1)
+
+        return if (didErrorOccurred) {
+            null
+        } else {
+            ResultBundle(resultList, inferenceTimePerFrameMs, height, width)
+        }
+    }
+
+    // Wraps results from inference, the time it takes for inference to be performed, and
+    // the input image and height for properly scaling UI to return back to callers
+    data class ResultBundle(
+        // FIX: the generic parameter was lost in the mangled source; restored from the
+        // `resultList` element type used in detectVideoFile.
+        val results: List<ObjectDetectionResult>,
+        val inferenceTime: Long,
+        val inputImageHeight: Int,
+        val inputImageWidth: Int,
+    )
+
+    companion object {
+        const val DELEGATE_CPU = 0
+        const val DELEGATE_GPU = 1
+        const val MODEL_EFFICIENTDETV0 = 0
+        const val MODEL_EFFICIENTDETV2 = 1
+        const val MAX_RESULTS_DEFAULT = 3
+        const val THRESHOLD_DEFAULT = 0.5F
+        const val OTHER_ERROR = 0
+        const val GPU_ERROR = 1
+
+        const val TAG = "ObjectDetectorHelper"
+    }
+
+    // Used to pass results or errors back to the calling class
+    interface DetectorListener {
+        fun onError(error: String, errorCode: Int = OTHER_ERROR)
+        fun onResults(resultBundle: ResultBundle)
+    }
+}
diff --git a/android/vision/app/src/main/java/labgraph_vision/objectdetection/OverlayView.kt b/android/vision/app/src/main/java/labgraph_vision/objectdetection/OverlayView.kt
new file mode 100644
index 00000000..9815ac3c
--- /dev/null
+++ b/android/vision/app/src/main/java/labgraph_vision/objectdetection/OverlayView.kt
@@ -0,0 +1,137 @@
+/*
+ * Copyright 2022 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package labgraph_vision.objectdetection
+
+import android.content.Context
+import android.graphics.*
+import android.util.AttributeSet
+import android.view.View
+import androidx.core.content.ContextCompat
+import com.google.mediapipe.tasks.vision.objectdetector.ObjectDetectionResult
+import kotlin.math.min
+
+// Custom View drawn on top of the displayed video/image; renders the latest detection
+// results as bounding boxes with category labels and confidence scores.
+class OverlayView(context: Context?, attrs: AttributeSet?) :
+    View(context, attrs) {
+
+    // Latest result to render; null means nothing is drawn.
+    private var results: ObjectDetectionResult? = null
+    private var boxPaint = Paint()
+    private var textBackgroundPaint = Paint()
+    private var textPaint = Paint()
+
+    // Scale from detection (image) coordinates to view coordinates; see setResults().
+    private var scaleFactor: Float = 1f
+
+    // Scratch rect reused for measuring label text.
+    private var bounds = Rect()
+
+    init {
+        initPaints()
+    }
+
+    // Drop the displayed results and restore paints to their initial configuration.
+    fun clear() {
+        results = null
+        textPaint.reset()
+        textBackgroundPaint.reset()
+        boxPaint.reset()
+        invalidate()
+        initPaints()
+    }
+
+    private fun initPaints() {
+        textBackgroundPaint.color = Color.BLACK
+        textBackgroundPaint.style = Paint.Style.FILL
+        textBackgroundPaint.textSize = 50f
+
+        textPaint.color = Color.WHITE
+        textPaint.style = Paint.Style.FILL
+        textPaint.textSize = 50f
+
+        boxPaint.color = ContextCompat.getColor(context!!, R.color.mp_primary)
+        boxPaint.strokeWidth = 8F
+        boxPaint.style = Paint.Style.STROKE
+    }
+
+    override fun draw(canvas: Canvas) {
+        super.draw(canvas)
+
+        results?.let {
+            for (detection in it.detections()) {
+                val boundingBox = detection.boundingBox()
+
+                // Map the detection box from image space into view space.
+                val top = boundingBox.top * scaleFactor
+                val bottom = boundingBox.bottom * scaleFactor
+                val left = boundingBox.left * scaleFactor
+                val right = boundingBox.right * scaleFactor
+
+                // Draw bounding box around detected objects
+                val drawableRect = RectF(left, top, right, bottom)
+                canvas.drawRect(drawableRect, boxPaint)
+
+                // Create text to display alongside detected objects
+                val drawableText =
+                    detection.categories()[0].categoryName() +
+                            " " +
+                            String.format(
+                                "%.2f",
+                                detection.categories()[0].score()
+                            )
+
+                // Draw rect behind display text
+                textBackgroundPaint.getTextBounds(
+                    drawableText,
+                    0,
+                    drawableText.length,
+                    bounds
+                )
+                val textWidth = bounds.width()
+                val textHeight = bounds.height()
+                canvas.drawRect(
+                    left,
+                    top,
+                    left + textWidth + Companion.BOUNDING_RECT_TEXT_PADDING,
+                    top + textHeight + Companion.BOUNDING_RECT_TEXT_PADDING,
+                    textBackgroundPaint
+                )
+
+                // Draw text for detected object
+                canvas.drawText(
+                    drawableText,
+                    left,
+                    top + bounds.height(),
+                    textPaint
+                )
+            }
+        }
+    }
+
+    fun setResults(
+        detectionResults: ObjectDetectionResult,
+        imageHeight: Int,
+        imageWidth: Int,
+    ) {
+        results = detectionResults
+
+        // Images, videos and camera live streams are displayed in FIT_START mode. So we need to scale
+        // up the bounding box to match with the size that the images/videos/live streams being
+        // displayed.
+        scaleFactor = min(width * 1f / imageWidth, height * 1f / imageHeight)
+
+        invalidate()
+    }
+
+    companion object {
+        private const val BOUNDING_RECT_TEXT_PADDING = 8
+    }
+}
diff --git a/android/vision/app/src/main/java/labgraph_vision/objectdetection/fragments/GalleryFragment.kt b/android/vision/app/src/main/java/labgraph_vision/objectdetection/fragments/GalleryFragment.kt
new file mode 100644
index 00000000..f9e6bd7b
--- /dev/null
+++ b/android/vision/app/src/main/java/labgraph_vision/objectdetection/fragments/GalleryFragment.kt
@@ -0,0 +1,353 @@
+/*
+ * Copyright 2022 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package labgraph_vision.objectdetection.fragments + +import android.net.Uri +import android.os.Bundle +import android.os.SystemClock +import android.util.Log +import android.view.LayoutInflater +import android.view.View +import android.view.ViewGroup +import android.widget.AdapterView +import android.widget.Toast +import androidx.activity.result.contract.ActivityResultContracts +import androidx.fragment.app.Fragment +import androidx.fragment.app.activityViewModels +import labgraph_vision.objectdetection.MainViewModel +import labgraph_vision.objectdetection.ObjectDetectorHelper +import labgraph_vision.objectdetection.databinding.FragmentGalleryBinding +import com.google.mediapipe.tasks.vision.core.RunningMode +import java.util.concurrent.Executors +import java.util.concurrent.ScheduledExecutorService +import java.util.concurrent.TimeUnit + +class GalleryFragment : Fragment(), ObjectDetectorHelper.DetectorListener { + + enum class MediaType { + IMAGE, + VIDEO, + UNKNOWN + } + + private var _fragmentGalleryBinding: FragmentGalleryBinding? = null + private val fragmentGalleryBinding + get() = _fragmentGalleryBinding!! + private lateinit var objectDetectorHelper: ObjectDetectorHelper + + /** Blocking ML operations are performed using this executor */ + private lateinit var backgroundExecutor: ScheduledExecutorService + private val viewModel: MainViewModel by activityViewModels() + + private val getContent = + registerForActivityResult(ActivityResultContracts.OpenDocument()) { uri: Uri? 
-> + // Handle the returned Uri + uri?.let { mediaUri -> + when (val mediaType = loadMediaType(mediaUri)) { + MediaType.VIDEO -> runDetectionOnVideo(mediaUri) + MediaType.UNKNOWN -> { + updateDisplayView(mediaType) + Toast.makeText( + requireContext(), + "Unsupported data type.", + Toast.LENGTH_SHORT + ).show() + } + } + } + } + + override fun onCreateView( + inflater: LayoutInflater, + container: ViewGroup?, + savedInstanceState: Bundle? + ): View { + _fragmentGalleryBinding = + FragmentGalleryBinding.inflate(inflater, container, false) + + return fragmentGalleryBinding.root + } + + override fun onViewCreated(view: View, savedInstanceState: Bundle?) { + super.onViewCreated(view, savedInstanceState) + fragmentGalleryBinding.fabGetContent.setOnClickListener { + getContent.launch(arrayOf("image/*", "video/*")) + } + + initBottomSheetControls() + } + + override fun onPause() { + fragmentGalleryBinding.overlay.clear() + if (fragmentGalleryBinding.videoView.isPlaying) { + fragmentGalleryBinding.videoView.stopPlayback() + } + fragmentGalleryBinding.videoView.visibility = View.GONE + super.onPause() + } + + private fun initBottomSheetControls() { + updateControlsUi() + // When clicked, lower detection score threshold floor + fragmentGalleryBinding.bottomSheetLayout.thresholdMinus.setOnClickListener { + if (viewModel.currentThreshold >= 0.1) { + viewModel.setThreshold(viewModel.currentThreshold - 0.1f) + updateControlsUi() + } + } + + // When clicked, raise detection score threshold floor + fragmentGalleryBinding.bottomSheetLayout.thresholdPlus.setOnClickListener { + if (viewModel.currentThreshold <= 0.8) { + viewModel.setThreshold(viewModel.currentThreshold + 0.1f) + updateControlsUi() + } + } + + // When clicked, reduce the number of objects that can be detected at a time + fragmentGalleryBinding.bottomSheetLayout.maxResultsMinus.setOnClickListener { + if (viewModel.currentMaxResults > 1) { + viewModel.setMaxResults(viewModel.currentMaxResults - 1) + updateControlsUi() + 
} + } + + // When clicked, increase the number of objects that can be detected at a time + fragmentGalleryBinding.bottomSheetLayout.maxResultsPlus.setOnClickListener { + if (viewModel.currentMaxResults < 5) { + viewModel.setMaxResults(viewModel.currentMaxResults + 1) + updateControlsUi() + } + } + + // When clicked, change the underlying hardware used for inference. Current options are CPU + // GPU, and NNAPI + fragmentGalleryBinding.bottomSheetLayout.spinnerDelegate.setSelection( + viewModel.currentDelegate, + false + ) + fragmentGalleryBinding.bottomSheetLayout.spinnerDelegate.onItemSelectedListener = + object : AdapterView.OnItemSelectedListener { + override fun onItemSelected( + p0: AdapterView<*>?, + p1: View?, + p2: Int, + p3: Long + ) { + + viewModel.setDelegate(p2) + updateControlsUi() + } + + override fun onNothingSelected(p0: AdapterView<*>?) { + /* no op */ + } + } + + // When clicked, change the underlying model used for object detection + fragmentGalleryBinding.bottomSheetLayout.spinnerModel.setSelection( + viewModel.currentModel, + false + ) + fragmentGalleryBinding.bottomSheetLayout.spinnerModel.onItemSelectedListener = + object : AdapterView.OnItemSelectedListener { + override fun onItemSelected( + p0: AdapterView<*>?, + p1: View?, + p2: Int, + p3: Long + ) { + viewModel.setModel(p2) + updateControlsUi() + } + + override fun onNothingSelected(p0: AdapterView<*>?) { + /* no op */ + } + } + } + + // Update the values displayed in the bottom sheet. Reset detector. 
+ private fun updateControlsUi() { + if (fragmentGalleryBinding.videoView.isPlaying) { + fragmentGalleryBinding.videoView.stopPlayback() + } + fragmentGalleryBinding.videoView.visibility = View.GONE + fragmentGalleryBinding.imageResult.visibility = View.GONE + fragmentGalleryBinding.overlay.clear() + fragmentGalleryBinding.bottomSheetLayout.maxResultsValue.text = + viewModel.currentMaxResults.toString() + fragmentGalleryBinding.bottomSheetLayout.thresholdValue.text = + String.format("%.2f", viewModel.currentThreshold) + + fragmentGalleryBinding.overlay.clear() + fragmentGalleryBinding.tvPlaceholder.visibility = View.VISIBLE + } + + private fun runDetectionOnVideo(uri: Uri) { + setUiEnabled(false) + updateDisplayView(MediaType.VIDEO) + + with(fragmentGalleryBinding.videoView) { + setVideoURI(uri) + // mute the audio + setOnPreparedListener { it.setVolume(0f, 0f) } + requestFocus() + } + + backgroundExecutor = Executors.newSingleThreadScheduledExecutor() + backgroundExecutor.execute { + + objectDetectorHelper = + ObjectDetectorHelper( + context = requireContext(), + threshold = viewModel.currentThreshold, + currentDelegate = viewModel.currentDelegate, + currentModel = viewModel.currentModel, + maxResults = viewModel.currentMaxResults, + runningMode = RunningMode.VIDEO, + objectDetectorListener = this + ) + + activity?.runOnUiThread { + fragmentGalleryBinding.videoView.visibility = View.GONE + fragmentGalleryBinding.progress.visibility = View.VISIBLE + } + + objectDetectorHelper.detectVideoFile(uri, VIDEO_INTERVAL_MS) + ?.let { resultBundle -> + activity?.runOnUiThread { displayVideoResult(resultBundle) } + } + ?: run { + activity?.runOnUiThread { + fragmentGalleryBinding.progress.visibility = View.GONE + } + Log.e(TAG, "Error running object detection.") + } + + objectDetectorHelper.clearObjectDetector() + } + } + + // Setup and display the video. 
+ private fun displayVideoResult(result: ObjectDetectorHelper.ResultBundle) { + + fragmentGalleryBinding.videoView.visibility = View.VISIBLE + fragmentGalleryBinding.progress.visibility = View.GONE + + fragmentGalleryBinding.videoView.start() + val videoStartTimeMs = SystemClock.uptimeMillis() + + backgroundExecutor.scheduleAtFixedRate( + { + activity?.runOnUiThread { + val videoElapsedTimeMs = + SystemClock.uptimeMillis() - videoStartTimeMs + val resultIndex = + videoElapsedTimeMs.div(VIDEO_INTERVAL_MS).toInt() + + if (resultIndex >= result.results.size || fragmentGalleryBinding.videoView.visibility == View.GONE) { + // The video playback has finished so we stop drawing bounding boxes + backgroundExecutor.shutdown() + } else { + fragmentGalleryBinding.overlay.setResults( + result.results[resultIndex], + result.inputImageHeight, + result.inputImageWidth + ) + + setUiEnabled(true) + + fragmentGalleryBinding.bottomSheetLayout.inferenceTimeVal.text = + String.format("%d ms", result.inferenceTime) + } + } + }, + 0, + VIDEO_INTERVAL_MS, + TimeUnit.MILLISECONDS + ) + } + + private fun updateDisplayView(mediaType: MediaType) { + fragmentGalleryBinding.overlay.clear() + fragmentGalleryBinding.imageResult.visibility = + if (mediaType == MediaType.IMAGE) View.VISIBLE else View.GONE + fragmentGalleryBinding.videoView.visibility = + if (mediaType == MediaType.VIDEO) View.VISIBLE else View.GONE + fragmentGalleryBinding.tvPlaceholder.visibility = + if (mediaType == MediaType.UNKNOWN) View.VISIBLE else View.GONE + } + + // Check the type of media that user selected. 
+ private fun loadMediaType(uri: Uri): MediaType { + val mimeType = context?.contentResolver?.getType(uri) + mimeType?.let { + if (mimeType.startsWith("image")) return MediaType.IMAGE + if (mimeType.startsWith("video")) return MediaType.VIDEO + } + + return MediaType.UNKNOWN + } + + private fun setUiEnabled(enabled: Boolean) { + fragmentGalleryBinding.fabGetContent.isEnabled = enabled + fragmentGalleryBinding.bottomSheetLayout.spinnerModel.isEnabled = + enabled + fragmentGalleryBinding.bottomSheetLayout.thresholdMinus.isEnabled = + enabled + fragmentGalleryBinding.bottomSheetLayout.thresholdPlus.isEnabled = + enabled + fragmentGalleryBinding.bottomSheetLayout.maxResultsMinus.isEnabled = + enabled + fragmentGalleryBinding.bottomSheetLayout.maxResultsPlus.isEnabled = + enabled + fragmentGalleryBinding.bottomSheetLayout.spinnerDelegate.isEnabled = + enabled + } + + private fun detectError() { + activity?.runOnUiThread { + fragmentGalleryBinding.progress.visibility = View.GONE + setUiEnabled(true) + updateDisplayView(MediaType.UNKNOWN) + } + } + + override fun onError(error: String, errorCode: Int) { + detectError() + activity?.runOnUiThread { + Toast.makeText(requireContext(), error, Toast.LENGTH_SHORT).show() + if (errorCode == ObjectDetectorHelper.GPU_ERROR) { + fragmentGalleryBinding.bottomSheetLayout.spinnerDelegate.setSelection( + ObjectDetectorHelper.DELEGATE_CPU, false + ) + } + } + } + + override fun onResults(resultBundle: ObjectDetectorHelper.ResultBundle) { + // no-op + } + + companion object { + private const val TAG = "GalleryFragment" + + // Value used to get frames at specific intervals for inference (e.g. 
every 300ms) + private const val VIDEO_INTERVAL_MS = 300L + } +} diff --git a/android/vision/app/src/main/res/color/bg_nav_item.xml b/android/vision/app/src/main/res/color/bg_nav_item.xml new file mode 100644 index 00000000..274accf0 --- /dev/null +++ b/android/vision/app/src/main/res/color/bg_nav_item.xml @@ -0,0 +1,20 @@ + + + + + + diff --git a/android/vision/app/src/main/res/color/selector_ic.xml b/android/vision/app/src/main/res/color/selector_ic.xml new file mode 100644 index 00000000..6292281a --- /dev/null +++ b/android/vision/app/src/main/res/color/selector_ic.xml @@ -0,0 +1,21 @@ + + + + + + + diff --git a/android/vision/app/src/main/res/drawable/ic_baseline_add_24.xml b/android/vision/app/src/main/res/drawable/ic_baseline_add_24.xml new file mode 100644 index 00000000..70046c48 --- /dev/null +++ b/android/vision/app/src/main/res/drawable/ic_baseline_add_24.xml @@ -0,0 +1,5 @@ + + + diff --git a/android/vision/app/src/main/res/drawable/ic_baseline_photo_camera_24.xml b/android/vision/app/src/main/res/drawable/ic_baseline_photo_camera_24.xml new file mode 100644 index 00000000..43a4fd5f --- /dev/null +++ b/android/vision/app/src/main/res/drawable/ic_baseline_photo_camera_24.xml @@ -0,0 +1,6 @@ + + + + diff --git a/android/vision/app/src/main/res/drawable/ic_baseline_photo_library_24.xml b/android/vision/app/src/main/res/drawable/ic_baseline_photo_library_24.xml new file mode 100644 index 00000000..caacbefe --- /dev/null +++ b/android/vision/app/src/main/res/drawable/ic_baseline_photo_library_24.xml @@ -0,0 +1,5 @@ + + + diff --git a/android/vision/app/src/main/res/drawable/ic_launcher_foreground.xml b/android/vision/app/src/main/res/drawable/ic_launcher_foreground.xml new file mode 100644 index 00000000..16fbe61f --- /dev/null +++ b/android/vision/app/src/main/res/drawable/ic_launcher_foreground.xml @@ -0,0 +1,41 @@ + + + + + + + + + + + + diff --git a/android/vision/app/src/main/res/drawable/ic_minus.xml 
b/android/vision/app/src/main/res/drawable/ic_minus.xml new file mode 100644 index 00000000..a64b853e --- /dev/null +++ b/android/vision/app/src/main/res/drawable/ic_minus.xml @@ -0,0 +1,9 @@ + + + diff --git a/android/vision/app/src/main/res/drawable/ic_plus.xml b/android/vision/app/src/main/res/drawable/ic_plus.xml new file mode 100644 index 00000000..5e95f206 --- /dev/null +++ b/android/vision/app/src/main/res/drawable/ic_plus.xml @@ -0,0 +1,24 @@ + + + + diff --git a/android/vision/app/src/main/res/drawable/icn_chevron_up.png b/android/vision/app/src/main/res/drawable/icn_chevron_up.png new file mode 100644 index 00000000..67e1975a Binary files /dev/null and b/android/vision/app/src/main/res/drawable/icn_chevron_up.png differ diff --git a/android/vision/app/src/main/res/drawable/labgraph_vision.xml b/android/vision/app/src/main/res/drawable/labgraph_vision.xml new file mode 100644 index 00000000..d4752b13 --- /dev/null +++ b/android/vision/app/src/main/res/drawable/labgraph_vision.xml @@ -0,0 +1,96 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/android/vision/app/src/main/res/layout/activity_main.xml b/android/vision/app/src/main/res/layout/activity_main.xml new file mode 100644 index 00000000..d79e6ca5 --- /dev/null +++ b/android/vision/app/src/main/res/layout/activity_main.xml @@ -0,0 +1,69 @@ + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/android/vision/app/src/main/res/layout/fragment_gallery.xml b/android/vision/app/src/main/res/layout/fragment_gallery.xml new file mode 100644 index 00000000..c88d0bc0 --- /dev/null +++ b/android/vision/app/src/main/res/layout/fragment_gallery.xml @@ -0,0 +1,89 @@ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/android/vision/app/src/main/res/layout/info_bottom_sheet.xml b/android/vision/app/src/main/res/layout/info_bottom_sheet.xml new file mode 100644 index 00000000..2c924cb6 --- /dev/null +++ 
b/android/vision/app/src/main/res/layout/info_bottom_sheet.xml @@ -0,0 +1,220 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/android/vision/app/src/main/res/mipmap-hdpi/ic_launcher.png b/android/vision/app/src/main/res/mipmap-hdpi/ic_launcher.png new file mode 100644 index 00000000..2d2fd07d Binary files /dev/null and b/android/vision/app/src/main/res/mipmap-hdpi/ic_launcher.png differ diff --git a/android/vision/app/src/main/res/mipmap-hdpi/ic_launcher_round.png b/android/vision/app/src/main/res/mipmap-hdpi/ic_launcher_round.png new file mode 100644 index 00000000..50f1d73b Binary files /dev/null and b/android/vision/app/src/main/res/mipmap-hdpi/ic_launcher_round.png differ diff --git a/android/vision/app/src/main/res/mipmap-mdpi/ic_launcher.png b/android/vision/app/src/main/res/mipmap-mdpi/ic_launcher.png new file mode 100644 index 00000000..afbe3ddf Binary files /dev/null and b/android/vision/app/src/main/res/mipmap-mdpi/ic_launcher.png differ diff --git a/android/vision/app/src/main/res/mipmap-mdpi/ic_launcher_round.png b/android/vision/app/src/main/res/mipmap-mdpi/ic_launcher_round.png new file mode 100644 index 00000000..7cdb3cd7 Binary files /dev/null and b/android/vision/app/src/main/res/mipmap-mdpi/ic_launcher_round.png differ diff --git a/android/vision/app/src/main/res/mipmap-xhdpi/ic_launcher.png b/android/vision/app/src/main/res/mipmap-xhdpi/ic_launcher.png new file mode 100644 index 00000000..c8c00e92 Binary files /dev/null and b/android/vision/app/src/main/res/mipmap-xhdpi/ic_launcher.png differ diff --git a/android/vision/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png b/android/vision/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png new file mode 100644 index 00000000..f5bcd8c9 Binary files /dev/null and b/android/vision/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png differ diff --git 
a/android/vision/app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/android/vision/app/src/main/res/mipmap-xxhdpi/ic_launcher.png new file mode 100644 index 00000000..fab12be5 Binary files /dev/null and b/android/vision/app/src/main/res/mipmap-xxhdpi/ic_launcher.png differ diff --git a/android/vision/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png b/android/vision/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png new file mode 100644 index 00000000..49905959 Binary files /dev/null and b/android/vision/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png differ diff --git a/android/vision/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png b/android/vision/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png new file mode 100644 index 00000000..c834177f Binary files /dev/null and b/android/vision/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png differ diff --git a/android/vision/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png b/android/vision/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png new file mode 100644 index 00000000..6bffe6ad Binary files /dev/null and b/android/vision/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png differ diff --git a/android/vision/app/src/main/res/navigation/nav_graph.xml b/android/vision/app/src/main/res/navigation/nav_graph.xml new file mode 100644 index 00000000..02326e52 --- /dev/null +++ b/android/vision/app/src/main/res/navigation/nav_graph.xml @@ -0,0 +1,52 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/android/vision/app/src/main/res/values/colors.xml b/android/vision/app/src/main/res/values/colors.xml new file mode 100644 index 00000000..fec38786 --- /dev/null +++ b/android/vision/app/src/main/res/values/colors.xml @@ -0,0 +1,27 @@ + + + + #007F8B + #12B5CB + #EEEEEE + #FFFFFF + #EEEEEE + @android:color/black + #FFFFFFFF + #DDFFFFFF + #AAFFFFFF + diff --git a/android/vision/app/src/main/res/values/dimens.xml b/android/vision/app/src/main/res/values/dimens.xml new file mode 100644 
index 00000000..9eb81dd3 --- /dev/null +++ b/android/vision/app/src/main/res/values/dimens.xml @@ -0,0 +1,34 @@ + + + + + 4dp + 64dp + + + 20sp + 16dp + 50dp + 16dp + 48dp + 10dp + 160dp + 240dp + 3 + + 16dp + \ No newline at end of file diff --git a/android/vision/app/src/main/res/values/strings.xml b/android/vision/app/src/main/res/values/strings.xml new file mode 100644 index 00000000..7ccffe00 --- /dev/null +++ b/android/vision/app/src/main/res/values/strings.xml @@ -0,0 +1,51 @@ + + + + LabGraph Vision + + Bottom sheet expandable indicator + + Decreasing maximum detected results + button + Increasing maximum detected results + button + Decreasing threshold of detected object + results button + Increasing threshold of detected object + results button + + Inference Time + Frames per Second + Threshold + Max Results + Delegate + ML Model + Camera + Gallery + Click + to add a video + to begin object detection. + + + CPU + GPU + + + + EfficientDet Lite0 + EfficientDet Lite2 + + diff --git a/android/vision/app/src/main/res/values/styles.xml b/android/vision/app/src/main/res/values/styles.xml new file mode 100644 index 00000000..0e41c833 --- /dev/null +++ b/android/vision/app/src/main/res/values/styles.xml @@ -0,0 +1,31 @@ + + + + + + + + + + + diff --git a/android/vision/build.gradle b/android/vision/build.gradle new file mode 100644 index 00000000..1d72fb3e --- /dev/null +++ b/android/vision/build.gradle @@ -0,0 +1,48 @@ +/* + * Copyright 2022 The TensorFlow Authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// Top-level build file where you can add configuration options common to all sub-projects/modules. +buildscript { + // Top-level variables used for versioning + ext.kotlin_version = '1.5.21' + ext.java_version = JavaVersion.VERSION_1_8 + + repositories { + google() + mavenCentral() + } + dependencies { + classpath 'com.android.tools.build:gradle:7.1.2' + classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version" + classpath 'androidx.navigation:navigation-safe-args-gradle-plugin:2.4.1' + classpath 'de.undercouch:gradle-download-task:4.1.2' + + + // NOTE: Do not place your application dependencies here; they belong + // in the individual module build.gradle files + } +} + +allprojects { + repositories { + google() + mavenCentral() + } +} + +task clean(type: Delete) { + delete rootProject.buildDir +} diff --git a/android/vision/gradle.properties b/android/vision/gradle.properties new file mode 100644 index 00000000..777b6aad --- /dev/null +++ b/android/vision/gradle.properties @@ -0,0 +1,3 @@ +org.gradle.jvmargs=-Xmx1536m +android.enableJetifier=true +android.useAndroidX=true \ No newline at end of file diff --git a/android/vision/gradle/wrapper/gradle-wrapper.jar b/android/vision/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 00000000..41d9927a Binary files /dev/null and b/android/vision/gradle/wrapper/gradle-wrapper.jar differ diff --git a/android/vision/gradle/wrapper/gradle-wrapper.properties b/android/vision/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 00000000..41dfb879 --- /dev/null +++ b/android/vision/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,5 @@ +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-7.4-bin.zip +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists diff --git a/android/vision/gradlew 
b/android/vision/gradlew new file mode 100644 index 00000000..1b6c7873 --- /dev/null +++ b/android/vision/gradlew @@ -0,0 +1,234 @@ +#!/bin/sh + +# +# Copyright © 2015-2021 the original authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +############################################################################## +# +# Gradle start up script for POSIX generated by Gradle. +# +# Important for running: +# +# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is +# noncompliant, but you have some other compliant shell such as ksh or +# bash, then to run this script, type that shell name before the whole +# command line, like: +# +# ksh Gradle +# +# Busybox and similar reduced shells will NOT work, because this script +# requires all of these POSIX shell features: +# * functions; +# * expansions «$var», «${var}», «${var:-default}», «${var+SET}», +# «${var#prefix}», «${var%suffix}», and «$( cmd )»; +# * compound commands having a testable exit status, especially «case»; +# * various built-in commands including «command», «set», and «ulimit». +# +# Important for patching: +# +# (2) This script targets any POSIX shell, so it avoids extensions provided +# by Bash, Ksh, etc; in particular arrays are avoided. 
+# +# The "traditional" practice of packing multiple parameters into a +# space-separated string is a well documented source of bugs and security +# problems, so this is (mostly) avoided, by progressively accumulating +# options in "$@", and eventually passing that to Java. +# +# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, +# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; +# see the in-line comments for details. +# +# There are tweaks for specific operating systems such as AIX, CygWin, +# Darwin, MinGW, and NonStop. +# +# (3) This script is generated from the Groovy template +# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# within the Gradle project. +# +# You can find Gradle at https://github.com/gradle/gradle/. +# +############################################################################## + +# Attempt to set APP_HOME + +# Resolve links: $0 may be a link +app_path=$0 + +# Need this for daisy-chained symlinks. +while + APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path + [ -h "$app_path" ] +do + ls=$( ls -ld "$app_path" ) + link=${ls#*' -> '} + case $link in #( + /*) app_path=$link ;; #( + *) app_path=$APP_HOME$link ;; + esac +done + +APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit + +APP_NAME="Gradle" +APP_BASE_NAME=${0##*/} + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD=maximum + +warn () { + echo "$*" +} >&2 + +die () { + echo + echo "$*" + echo + exit 1 +} >&2 + +# OS specific support (must be 'true' or 'false'). 
+cygwin=false +msys=false +darwin=false +nonstop=false +case "$( uname )" in #( + CYGWIN* ) cygwin=true ;; #( + Darwin* ) darwin=true ;; #( + MSYS* | MINGW* ) msys=true ;; #( + NONSTOP* ) nonstop=true ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD=$JAVA_HOME/jre/sh/java + else + JAVACMD=$JAVA_HOME/bin/java + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD=java + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then + case $MAX_FD in #( + max*) + MAX_FD=$( ulimit -H -n ) || + warn "Could not query maximum file descriptor limit" + esac + case $MAX_FD in #( + '' | soft) :;; #( + *) + ulimit -n "$MAX_FD" || + warn "Could not set maximum file descriptor limit to $MAX_FD" + esac +fi + +# Collect all arguments for the java command, stacking in reverse order: +# * args from the command line +# * the main class name +# * -classpath +# * -D...appname settings +# * --module-path (only if needed) +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. 
+ +# For Cygwin or MSYS, switch paths to Windows format before running java +if "$cygwin" || "$msys" ; then + APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) + CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) + + JAVACMD=$( cygpath --unix "$JAVACMD" ) + + # Now convert the arguments - kludge to limit ourselves to /bin/sh + for arg do + if + case $arg in #( + -*) false ;; # don't mess with options #( + /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath + [ -e "$t" ] ;; #( + *) false ;; + esac + then + arg=$( cygpath --path --ignore --mixed "$arg" ) + fi + # Roll the args list around exactly as many times as the number of + # args, so each arg winds up back in the position where it started, but + # possibly modified. + # + # NB: a `for` loop captures its iteration list before it begins, so + # changing the positional parameters here affects neither the number of + # iterations, nor the values presented in `arg`. + shift # remove old arg + set -- "$@" "$arg" # push replacement arg + done +fi + +# Collect all arguments for the java command; +# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of +# shell script including quotes and variable substitutions, so put them in +# double quotes to make sure that they get re-expanded; and +# * put everything else in single quotes, so that it's not re-expanded. + +set -- \ + "-Dorg.gradle.appname=$APP_BASE_NAME" \ + -classpath "$CLASSPATH" \ + org.gradle.wrapper.GradleWrapperMain \ + "$@" + +# Use "xargs" to parse quoted args. +# +# With -n1 it outputs one arg per line, with the quotes and backslashes removed. 
+# +# In Bash we could simply go: +# +# readarray ARGS < <( xargs -n1 <<<"$var" ) && +# set -- "${ARGS[@]}" "$@" +# +# but POSIX shell has neither arrays nor command substitution, so instead we +# post-process each arg (as a line of input to sed) to backslash-escape any +# character that might be a shell metacharacter, then use eval to reverse +# that process (while maintaining the separation between arguments), and wrap +# the whole thing up as a single "set" statement. +# +# This will of course break if any of these variables contains a newline or +# an unmatched quote. +# + +eval "set -- $( + printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | + xargs -n1 | + sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | + tr '\n' ' ' + )" '"$@"' + +exec "$JAVACMD" "$@" diff --git a/android/vision/gradlew.bat b/android/vision/gradlew.bat new file mode 100644 index 00000000..107acd32 --- /dev/null +++ b/android/vision/gradlew.bat @@ -0,0 +1,89 @@ +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. 
+@rem + +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto execute + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto execute + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! 
+if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/android/vision/local.properties b/android/vision/local.properties new file mode 100644 index 00000000..1ea42506 --- /dev/null +++ b/android/vision/local.properties @@ -0,0 +1,8 @@ +## This file must *NOT* be checked into Version Control Systems, +# as it contains information specific to your local configuration. +# +# Location of the SDK. This is only used by Gradle. +# For customization when using a Version Control System, please read the +# header note. +#Tue Aug 01 00:25:21 CDT 2023 +sdk.dir=C\:\\Users\\kritg\\AppData\\Local\\Android\\Sdk diff --git a/android/vision/settings.gradle b/android/vision/settings.gradle new file mode 100644 index 00000000..9d495b34 --- /dev/null +++ b/android/vision/settings.gradle @@ -0,0 +1 @@ +include ':app' \ No newline at end of file