YOLOv5 on Android – Coding

Android

This article explains how to use YOLOv5 to detect objects in an Android camera stream. The topic is split across two articles: the previous article covered the project settings, and this article explains the code.

MainActivity.kt

Modify MainActivity.kt as follows:

package com.example.android_ncnn

import android.Manifest
import android.content.pm.PackageManager
import android.graphics.*
import android.os.Build
import android.os.Bundle
import android.util.Log
import android.widget.Toast
import androidx.appcompat.app.AppCompatActivity
import androidx.camera.core.CameraSelector
import androidx.camera.core.ImageAnalysis
import androidx.camera.core.ImageProxy
import androidx.camera.lifecycle.ProcessCameraProvider
import androidx.core.app.ActivityCompat
import androidx.core.content.ContextCompat
import com.example.android_ncnn.databinding.ActivityMainBinding
import com.tencent.yolov5ncnn.YoloV5Ncnn
import java.nio.ByteBuffer
import java.util.concurrent.ExecutorService
import java.util.concurrent.Executors

/** Callback invoked with the YOLOv5 detection results and the analyzed frame. */
typealias Yolov5NcnnListener = (objects: Array<YoloV5Ncnn.Obj>?, bitmap: Bitmap?) -> Unit

class MainActivity : AppCompatActivity() {
    private lateinit var viewBinding: ActivityMainBinding
    private lateinit var cameraExecutor: ExecutorService
    private var yolov5ncnn = YoloV5Ncnn()

    /**
     * [ImageAnalysis.Analyzer] that runs YOLOv5 (ncnn) on every camera frame.
     *
     * Expects frames delivered as OUTPUT_IMAGE_FORMAT_RGBA_8888 (configured in
     * [startCamera]) so that plane 0 can be copied straight into an
     * ARGB_8888 [Bitmap].
     */
    private class Yolov5NcnnAnalyzer(
        private val yolov5ncnn: YoloV5Ncnn,
        private val listener: Yolov5NcnnListener
    ) : ImageAnalysis.Analyzer {
        override fun analyze(image: ImageProxy) {
            val planes = image.planes
            val buffer: ByteBuffer = planes[0].buffer
            // Rotate the frame upright according to the sensor orientation.
            val mat = Matrix().apply {
                postRotate(image.imageInfo.rotationDegrees.toFloat())
            }
            val bitmap = Bitmap.createBitmap(
                image.width, image.height, Bitmap.Config.ARGB_8888
            ).apply {
                copyPixelsFromBuffer(buffer)
            }.let {
                Bitmap.createBitmap(it, 0, 0, it.width, it.height, mat, false)
            }
            // Try GPU inference first; Detect returns null on failure, in which
            // case retry on the CPU (the second argument is the use-GPU flag).
            val objects: Array<YoloV5Ncnn.Obj>? =
                yolov5ncnn.Detect(bitmap, true) ?: yolov5ncnn.Detect(bitmap, false)
            listener(objects, bitmap)
            // Release the frame so the camera can deliver the next one.
            image.close()
        }
    }

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        viewBinding = ActivityMainBinding.inflate(layoutInflater)
        setContentView(viewBinding.root)
        // Initialise YOLOv5: loads assets/yolov5s.param and assets/yolov5s.bin.
        yolov5ncnn.Init(assets)
        // Request camera permissions before starting the camera.
        if (allPermissionsGranted()) {
            startCamera()
        } else {
            ActivityCompat.requestPermissions(
                this, REQUIRED_PERMISSIONS, REQUEST_CODE_PERMISSIONS
            )
        }
        // Single-thread executor on which the analyzer callback runs.
        cameraExecutor = Executors.newSingleThreadExecutor()
    }

    override fun onRequestPermissionsResult(
        requestCode: Int,
        permissions: Array<String>,
        grantResults: IntArray
    ) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults)
        if (requestCode == REQUEST_CODE_PERMISSIONS) {
            if (allPermissionsGranted()) {
                startCamera()
            } else {
                Toast.makeText(
                    this,
                    "Permissions not granted by the user.",
                    Toast.LENGTH_SHORT
                ).show()
                finish()
            }
        }
    }

    /**
     * Starts CameraX with an ImageAnalysis use case that feeds every frame to
     * [Yolov5NcnnAnalyzer]. RGBA_8888 output is requested so the analyzer can
     * build a Bitmap directly from the image buffer.
     */
    private fun startCamera() {
        val cameraProviderFuture = ProcessCameraProvider.getInstance(this)
        cameraProviderFuture.addListener({
            // Used to bind the lifecycle of cameras to the lifecycle owner.
            val cameraProvider: ProcessCameraProvider = cameraProviderFuture.get()
            val imageAnalyzer = ImageAnalysis.Builder()
                .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
                .setOutputImageFormat(ImageAnalysis.OUTPUT_IMAGE_FORMAT_RGBA_8888)
                .build()
                .apply {
                    setAnalyzer(
                        cameraExecutor,
                        Yolov5NcnnAnalyzer(yolov5ncnn) { objects: Array<YoloV5Ncnn.Obj>?, bitmap: Bitmap? ->
                            showObjects(objects, bitmap)
                        },
                    )
                }

            // Select back camera as a default.
            val cameraSelector = CameraSelector.DEFAULT_BACK_CAMERA
            try {
                // Unbind use cases before rebinding.
                cameraProvider.unbindAll()
                // Bind use cases to camera.
                cameraProvider.bindToLifecycle(this, cameraSelector, imageAnalyzer)
            } catch (exc: Exception) {
                Log.e(TAG, "Use case binding failed", exc)
            }
        }, ContextCompat.getMainExecutor(this))
    }

    /**
     * Draws the detected bounding boxes and labels onto a copy of [bitmap] and
     * shows it in the ImageView. Called from the analyzer thread, so the view
     * update is posted with [runOnUiThread].
     */
    private fun showObjects(objects: Array<YoloV5Ncnn.Obj>?, bitmap: Bitmap?) {
        if (objects == null || bitmap == null) {
            //viewBinding.imageView.setImageBitmap(bitmap)
            return
        }

        // Draw objects on a mutable copy of the frame.
        val rgba: Bitmap = bitmap.copy(Bitmap.Config.ARGB_8888, true)
        // One color per class index, cycled modulo 19.
        val colors = intArrayOf(
            Color.rgb(54, 67, 244), Color.rgb(99, 30, 233), Color.rgb(176, 39, 156),
            Color.rgb(183, 58, 103), Color.rgb(181, 81, 63), Color.rgb(243, 150, 33),
            Color.rgb(244, 169, 3), Color.rgb(212, 188, 0), Color.rgb(136, 150, 0),
            Color.rgb(80, 175, 76), Color.rgb(74, 195, 139), Color.rgb(57, 220, 205),
            Color.rgb(59, 235, 255), Color.rgb(7, 193, 255), Color.rgb(0, 152, 255),
            Color.rgb(34, 87, 255), Color.rgb(72, 85, 121), Color.rgb(158, 158, 158),
            Color.rgb(139, 125, 96)
        )
        val canvas = Canvas(rgba)
        val paint = Paint().apply {
            style = Paint.Style.STROKE
            strokeWidth = 4f
        }
        val textbgpaint = Paint().apply {
            color = Color.WHITE
            style = Paint.Style.FILL
        }
        val textpaint = Paint().apply {
            color = Color.BLACK
            textSize = 26f
            textAlign = Paint.Align.LEFT
        }
        objects.indices.forEach { i ->
            paint.color = colors[i % 19]
            canvas.drawRect(
                objects[i].x, objects[i].y,
                objects[i].x + objects[i].w, objects[i].y + objects[i].h,
                paint
            )
            // Draw the label on a filled background, clamped inside the image.
            run {
                val text = objects[i].label + " = " +
                    String.format("%.1f", objects[i].prob * 100) + "%"
                val textWidth = textpaint.measureText(text)
                val textHeight = -textpaint.ascent() + textpaint.descent()
                var x = objects[i].x
                var y = objects[i].y - textHeight
                if (y < 0) y = 0f
                if (x + textWidth > rgba.width) x = rgba.width - textWidth
                canvas.drawRect(x, y, x + textWidth, y + textHeight, textbgpaint)
                canvas.drawText(text, x, y - textpaint.ascent(), textpaint)
            }
        }
        runOnUiThread { viewBinding.imageView.setImageBitmap(rgba) }
    }

    private fun allPermissionsGranted() = REQUIRED_PERMISSIONS.all {
        ContextCompat.checkSelfPermission(baseContext, it) ==
            PackageManager.PERMISSION_GRANTED
    }

    override fun onDestroy() {
        super.onDestroy()
        cameraExecutor.shutdown()
    }

    companion object {
        private const val TAG = "android_ncnn"
        // NOTE(review): unused leftover from the CameraX codelab — presumably
        // safe to remove; kept for parity with the original sample.
        private const val FILENAME_FORMAT = "yyyy-MM-dd-HH-mm-ss-SSS"
        private const val REQUEST_CODE_PERMISSIONS = 10
        private val REQUIRED_PERMISSIONS = mutableListOf(
            Manifest.permission.CAMERA,
            Manifest.permission.RECORD_AUDIO
        ).apply {
            // WRITE_EXTERNAL_STORAGE is only needed up to Android 9 (API 28).
            if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.P) {
                add(Manifest.permission.WRITE_EXTERNAL_STORAGE)
            }
        }.toTypedArray()
    }
}
Code language: Kotlin (kotlin)

The main components of the MainActivity class are:

  • onCreate function
  • startCamera function
  • analyze function in Yolov5NcnnAnalyzer class
  • showObjects function

These are explained below.

onCreate

onCreate function is the first callback function to be called in the Activity class.

Call yolov5ncnn.Init(assets) to initialise yolov5. The assets given as arguments correspond to the assets directory copied in the previous step and reads assets/yolov5s.param and assets/yolov5s.bin.

Call startCamera() once the necessary permissions to start the camera have been granted.

override fun onCreate(savedInstanceState: Bundle?) {
    super.onCreate(savedInstanceState)
    viewBinding = ActivityMainBinding.inflate(layoutInflater)
    setContentView(viewBinding.root)
    // Initialise YOLOv5: loads assets/yolov5s.param and assets/yolov5s.bin.
    yolov5ncnn.Init(assets)
    // Request camera permissions
    if (allPermissionsGranted()) {
        startCamera()
    } else {
        ActivityCompat.requestPermissions(
            this, REQUIRED_PERMISSIONS, REQUEST_CODE_PERMISSIONS
        )
    }
    // Single-thread executor on which the image analyzer callback runs.
    cameraExecutor = Executors.newSingleThreadExecutor()
}
Code language: Kotlin (kotlin)

startCamera

startCamera uses the image analysis functionality of the CameraX library. This function allows images to be analysed without having to save them to a file.

By specifying setOutputImageFormat(ImageAnalysis.OUTPUT_IMAGE_FORMAT_RGBA_8888) as a setting in ImageAnalysis.Builder(), the handling of images used in the analyse() function can be simplified.

A Yolov5NcnnAnalyzer instance is passed as an argument to the setAnalyzer function. Kotlin style recommends that when the final argument of a call is a function object, it be written outside the parentheses as a trailing lambda; the call to Yolov5NcnnAnalyzer is therefore equivalent to the following statement.

Yolov5NcnnAnalyzer(yolov5ncnn, { objects: Array<YoloV5Ncnn.Obj>?, bitmap: Bitmap? -> showObjects(objects, bitmap) }),
Code language: CSS (css)

analyze

The inner class Yolov5NcnnAnalyzer inherits from ImageAnalysis.Analyzer and registers the created instance with the setAnalyzer function in the startCamera function. By doing this, the analyze function is called each time an image from the camera is ready. The analyze function takes a variable of type ImageProxy as an argument, but sets ImageAnalysis.OUTPUT_IMAGE_FORMAT_RGBA_8888 as the image format so that Bitmaps can be created easily.

You can get the number of degrees the image needs to be rotated with image.imageInfo.rotationDegrees. This value is used to rotate the image when creating the Bitmap.

In the yolov5ncnn.Detect function, the second argument is a flag indicating whether the GPU is used. If the GPU call returns null, the elvis operator (?:) calls the function again with the flag set to false so that detection runs on the CPU instead.

val objects: Array<YoloV5Ncnn.Obj>? = yolov5ncnn.Detect(bitmap, true) ?: yolov5ncnn.Detect(bitmap, false)
Code language: JavaScript (javascript)

showObjects

The showObjects function is copied from https://github.com/nihui/ncnn-android-yolov5. However, because this function is called from the analyze callback on a background thread, it uses runOnUiThread to update the screen.

All of the codes are in https://github.com/otamajakusi/ncnn-mobile/tree/main/android

Thank you for reading! If you found this article valuable and would like to support it, consider becoming a sponsor through GitHub Sponsors. Your support will help me continue to produce high-quality articles like this one. Every little bit truly helps and is greatly appreciated. Thank you in advance for considering to sponsor my work.

References

GitHub - nihui/ncnn-android-yolov5: The YOLOv5 object detection android example
The YOLOv5 object detection android example . Contribute to nihui/ncnn-android-yolov5 development by creating an account on GitHub.
CameraX の概要  |  Android デベロッパー  |  Android Developers
Getting Started with CameraX  |  Android Developers
This codelab introduces how to create a camera app that uses CameraX to show a viewfinder, take photos and analyze an image stream from the camera.
ncnn-mobile/android at main · otamajakusi/ncnn-mobile
ncnn for Android and iOS sample. Contribute to otamajakusi/ncnn-mobile development by creating an account on GitHub.