I'm building an app with multiple fragments and a single activity. I have a detection fragment:
private val PERMISSIONS_REQUIRED = arrayOf(Manifest.permission.CAMERA)
class DetectionFragment : Fragment(), ObjectDetectorHelper.DetectorListener {
private var _binding: FragmentDetectionBinding? = null
private val binding get() = _binding!!
private val TAG = "ObjectDetection"
private lateinit var objectDetectorHelper: ObjectDetectorHelper
private lateinit var bitmapBuffer: Bitmap
private var preview: Preview? = null
private var imageAnalyzer: ImageAnalysis? = null
private var camera: Camera? = null
private var cameraProvider: ProcessCameraProvider? = null
/** Blocking camera operations are performed using this executor */
private lateinit var cameraExecutor: ExecutorService
private val requestPermissionLauncher =
registerForActivityResult(
ActivityResultContracts.RequestPermission()
) { ...
}
override fun onCreateView(
inflater: LayoutInflater, container: ViewGroup?,
savedInstanceState: Bundle?
): View? {
// Inflate the layout for this fragment
_binding = FragmentDetectionBinding.inflate(layoutInflater, container, false)
return binding.root
}
override fun onDestroyView() {
_binding = null
super.onDestroyView()
// Shut down our background executor
cameraExecutor.shutdown()
}
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
onBack()
when {
//checkpermission thing
}
objectDetectorHelper = ObjectDetectorHelper(
context = requireContext(),
objectDetectorListener = this)
// Initialize our background executor
cameraExecutor = Executors.newSingleThreadExecutor()
// Wait for the views to be properly laid out
binding.viewFinder.post {
// Set up the camera and its use cases
setUpCamera()
}
// Attach listeners to UI control widgets
initBottomSheetControls()
}
private fun initBottomSheetControls(){
// When clicked, lower detection score threshold floor
binding.bottomSheetLayout.thresholdMinus.setOnClickListener {
if (objectDetectorHelper.threshold >= 0.1) {
objectDetectorHelper.threshold -= 0.1f
updateControlsUi()
}
}
// When clicked, raise detection score threshold floor
binding.bottomSheetLayout.thresholdPlus.setOnClickListener {
if (objectDetectorHelper.threshold <= 0.8) {
objectDetectorHelper.threshold += 0.1f
updateControlsUi()
}
}
// When clicked, reduce the number of objects that can be detected at a time
binding.bottomSheetLayout.maxResultsMinus.setOnClickListener {
if (objectDetectorHelper.maxResults > 1) {
objectDetectorHelper.maxResults--
updateControlsUi()
}
}
// When clicked, increase the number of objects that can be detected at a time
binding.bottomSheetLayout.maxResultsPlus.setOnClickListener {
if (objectDetectorHelper.maxResults < 5) {
objectDetectorHelper.maxResults++
updateControlsUi()
}
}
// When clicked, decrease the number of threads used for detection
binding.bottomSheetLayout.threadsMinus.setOnClickListener {
if (objectDetectorHelper.numThreads > 1) {
objectDetectorHelper.numThreads--
updateControlsUi()
}
}
// When clicked, increase the number of threads used for detection
binding.bottomSheetLayout.threadsPlus.setOnClickListener {
if (objectDetectorHelper.numThreads < 4) {
objectDetectorHelper.numThreads++
updateControlsUi()
}
}
// When clicked, change the underlying hardware used for inference. Current options are CPU,
// GPU, and NNAPI
binding.bottomSheetLayout.spinnerDelegate.setSelection(0, false)
binding.bottomSheetLayout.spinnerDelegate.onItemSelectedListener =
object : AdapterView.OnItemSelectedListener {
override fun onItemSelected(p0: AdapterView<*>?, p1: View?, p2: Int, p3: Long) {
objectDetectorHelper.currentDelegate = p2
updateControlsUi()
}
override fun onNothingSelected(p0: AdapterView<*>?) {
/* no op */
}
}
// When clicked, change the underlying model used for object detection
binding.bottomSheetLayout.spinnerModel.setSelection(0, false)
binding.bottomSheetLayout.spinnerModel.onItemSelectedListener =
object : AdapterView.OnItemSelectedListener {
override fun onItemSelected(p0: AdapterView<*>?, p1: View?, p2: Int, p3: Long) {
objectDetectorHelper.currentModel = p2
updateControlsUi()
}
override fun onNothingSelected(p0: AdapterView<*>?) {
/* no op */
}
}
}
private fun updateControlsUi() {
binding.bottomSheetLayout.maxResultsValue.text =
objectDetectorHelper.maxResults.toString()
binding.bottomSheetLayout.thresholdValue.text =
String.format("%.2f", objectDetectorHelper.threshold)
binding.bottomSheetLayout.threadsValue.text =
objectDetectorHelper.numThreads.toString()
// Needs to be cleared instead of reinitialized because the GPU
// delegate needs to be initialized on the thread using it when applicable
objectDetectorHelper.clearObjectDetector()
binding.overlay.clear()
}
// Initialize CameraX, and prepare to bind the camera use cases
private fun setUpCamera() {
val cameraProviderFuture = ProcessCameraProvider.getInstance(requireContext())
cameraProviderFuture.addListener(
{
// CameraProvider
cameraProvider = cameraProviderFuture.get()
// Build and bind the camera use cases
bindCameraUseCases()
},
ContextCompat.getMainExecutor(requireContext())
)
}
@Suppress("DEPRECATION")
private fun bindCameraUseCases() {
// CameraProvider
val cameraProvider =
cameraProvider ?: throw IllegalStateException("Camera initialization failed.")
// CameraSelector - makes assumption that we're only using the back camera
val cameraSelector =
CameraSelector.Builder().requireLensFacing(CameraSelector.LENS_FACING_BACK).build()
// Preview. Only using the 4:3 ratio because this is the closest to our models
preview =
Preview.Builder()
.setTargetAspectRatio(AspectRatio.RATIO_4_3)
.setTargetRotation(binding.viewFinder.display.rotation)
.build()
// ImageAnalysis. Using RGBA 8888 to match how our models work
imageAnalyzer =
ImageAnalysis.Builder()
.setTargetAspectRatio(AspectRatio.RATIO_4_3)
.setTargetRotation(binding.viewFinder.display.rotation)
.setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
.setOutputImageFormat(ImageAnalysis.OUTPUT_IMAGE_FORMAT_RGBA_8888)
.build()
// The analyzer can then be assigned to the instance
.also {
it.setAnalyzer(cameraExecutor) { image ->
if (!::bitmapBuffer.isInitialized) {
// The image rotation and RGB image buffer are initialized only once
// the analyzer has started running
bitmapBuffer = Bitmap.createBitmap(
image.width,
image.height,
Bitmap.Config.ARGB_8888
)
}
detectObjects(image)
}
}
// Must unbind the use-cases before rebinding them
cameraProvider.unbindAll()
try {
// A variable number of use-cases can be passed here -
// camera provides access to CameraControl & CameraInfo
camera = cameraProvider.bindToLifecycle(this, cameraSelector, preview, imageAnalyzer)
// Attach the viewfinder's surface provider to preview use case
preview?.setSurfaceProvider(binding.viewFinder.surfaceProvider)
} catch (exc: Exception) {
Log.e(TAG, "Use case binding failed", exc)
}
}
private fun detectObjects(image: ImageProxy) {
// Copy out RGB bits to the shared bitmap buffer
image.use { bitmapBuffer.copyPixelsFromBuffer(image.planes[0].buffer) }
val imageRotation = image.imageInfo.rotationDegrees
// Pass Bitmap and rotation to the object detector helper for processing and detection
objectDetectorHelper.detect(bitmapBuffer, imageRotation)
}
override fun onConfigurationChanged(newConfig: Configuration) {
super.onConfigurationChanged(newConfig)
imageAnalyzer?.targetRotation = binding.viewFinder.display.rotation
}
override fun onDestroy() {
super.onDestroy()
_binding = null
}
override fun onError(error: String) {
activity?.runOnUiThread {
Toast.makeText(requireContext(), error, Toast.LENGTH_SHORT).show()
}
}
// Update UI after objects have been detected. Extracts original image height/width
// to scale and place bounding boxes properly through OverlayView
override fun onResults(
results: MutableList<Detection>?,
inferenceTime: Long,
imageHeight: Int,
imageWidth: Int
) {
activity?.runOnUiThread {
binding.bottomSheetLayout.inferenceTimeVal.text =
String.format("%d ms", inferenceTime)
// Pass necessary information to OverlayView for drawing on the canvas
binding.overlay.setResults(
results ?: LinkedList<Detection>(),
imageHeight,
imageWidth
)
// Force a redraw
binding.overlay.invalidate()
}
}
}
I get this error when I press the back button on this screen and then immediately close the app:
FATAL EXCEPTION: main
Process: id.naufalfajar.go, PID: 5383
at id.naufalfajar.go.view.detection.DetectionFragment.getBinding(DetectionFragment.kt:34)
at id.naufalfajar.go.view.detection.DetectionFragment.onResults$lambda$14(DetectionFragment.kt:316)
at id.naufalfajar.go.view.detection.DetectionFragment.$r8$lambda$TqTYva0DaM8jzJqCVNkqO0J7EE4(Unknown Source:0)
at id.naufalfajar.go.view.detection.DetectionFragment$$ExternalSyntheticLambda10.run(Unknown Source:10)
at android.os.Handler.handleCallback(Handler.java:883)
at android.os.Handler.dispatchMessage(Handler.java:100)
at android.os.Looper.loop(Looper.java:224)
at android.app.ActivityThread.main(ActivityThread.java:7590)
at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:539)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:950)
The error points to these lines of code:
private val binding get() = _binding!! //DetectionFragment.kt:34
activity?.runOnUiThread {
binding.bottomSheetLayout.inferenceTimeVal.text =
String.format("%d ms", inferenceTime) //DetectionFragment.kt:316
...
}
My guess is that a thread is still running in the background and, by the time its callback reaches the main thread after I press back, _binding has already been set to null.
Does anyone know the right way to solve this?
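For reference, the kind of workaround I'm considering is to read the nullable _binding directly inside the posted callback and bail out if the view has already been destroyed. This is only a rough sketch using the properties from my code above (safeBinding is just a local name I made up), and I'm not sure it's the correct approach:

override fun onResults(
    results: MutableList<Detection>?,
    inferenceTime: Long,
    imageHeight: Int,
    imageWidth: Int
) {
    activity?.runOnUiThread {
        // Skip the UI update if onDestroyView has already cleared the binding
        val safeBinding = _binding ?: return@runOnUiThread
        safeBinding.bottomSheetLayout.inferenceTimeVal.text =
            String.format("%d ms", inferenceTime)
        safeBinding.overlay.setResults(
            results ?: LinkedList<Detection>(),
            imageHeight,
            imageWidth
        )
        safeBinding.overlay.invalidate()
    }
}

Would a guard like this be enough, or should I instead stop the analyzer / detach the listener before the view is destroyed?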