android, camera, android-camerax

Android CameraX - preview black/not visible after fragment resume


I am using CameraX for preview, image capture and video capture. I have created a fragment class that binds the use cases to the fragment's lifecycle, to avoid all manual camera/preview state management (as recommended by Google). The problem is that sometimes, after putting the app into the background and bringing it back to the foreground, the camera preview is black and nothing is shown. There is no way to restore it other than recreating the fragment. Any hints on what I might be doing wrong?

import android.Manifest;
import android.content.Context;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.util.Size;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.camera.core.AspectRatio;
import androidx.camera.core.Camera;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.ImageCapture;
import androidx.camera.core.ImageCaptureException;
import androidx.camera.core.Preview;
import androidx.camera.core.UseCaseGroup;
import androidx.camera.core.VideoCapture;
import androidx.camera.lifecycle.ProcessCameraProvider;
import androidx.camera.view.PreviewView;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;

import com.google.common.util.concurrent.ListenableFuture;
import com.google.mlkit.vision.face.Face;
// project-specific imports omitted (BaseFragment, FragmentCameraViewBinding, FaceDetector, Logger, ...)

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;

public class CameraViewFragment extends BaseFragment {

    private static final int REQUEST_CAMERA_PERMISSIONS = 100;
    private CameraViewFragmentListener listener;

    private final Executor executor = Executors.newSingleThreadExecutor();
    private Camera camera;
    private Handler handler = new Handler();
    private boolean isRecording;
    private Preview preview;
    private ImageCapture imageCapture;
    private VideoCapture videoCapture;
    private ImageAnalysis imageAnalysis;
    private FaceDetector faceDetector;

    public interface CameraViewFragmentListener {
        void onCameraPictureTaken(byte[] buffer);

        void onCameraPermissionsRejected();

        void onCameraVideoRecorded(Uri file);

        void onCameraVideoRecordError(Throwable ex);

        void onCameraFacesDetect(List<Face> faces);

        void onCameraFacesDetectError(Exception e);

        enum CameraFeature {
            PREVIEW,
            IMAGE_CAPTURE,
            VIDEO_CAPTURE,
            FACE_DETECTION
        }
        Set<CameraFeature> cameraGetRequestedFeatures();
    }

    private FragmentCameraViewBinding binding;

    @Nullable
    @Override
    public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
        binding = FragmentCameraViewBinding.inflate(inflater, container, false);
        return binding.getRoot();
    }

    @Override
    public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        int rc = ActivityCompat.checkSelfPermission(requireContext(), Manifest.permission.CAMERA);
        if (rc == PackageManager.PERMISSION_GRANTED) {
            startCamera();
        } else {
            requestCameraPermission();
        }
    }

    private void requestCameraPermission() {
        requestPermissions(new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSIONS);
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        if (requestCode == REQUEST_CAMERA_PERMISSIONS) {
            if (permissions.length == 1 && permissions[0].equals(Manifest.permission.CAMERA) && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                Logger.log(Logger.info, "[VRM] Camera permission granted.");
                startCamera();//createCameraSource();
            } else {
                Logger.log(Logger.error, "[VRM] Camera permission NOT granted.");
                if (listener != null) {
                    listener.onCameraPermissionsRejected();
                }
                requestCameraPermission();
            }
        }
    }

    /**
     * Restarts the camera.
     */
    @Override
    public void onResume() {
        super.onResume();

        //startCamera();
    }

    /**
     * Stops the camera.
     */
    @Override
    public void onPause() {
        super.onPause();

    }

    /**
     * Releases the resources associated with the camera source, the associated detector, and the
     * rest of the processing pipeline.
     */
    @Override
    public void onDestroy() {
        super.onDestroy();

    }

    @Override
    public void onAttach(Context context) {
        super.onAttach(context);
        if (context instanceof CameraViewFragmentListener) {
            listener = (CameraViewFragmentListener) context;
        } else if (getParentFragment() instanceof CameraViewFragmentListener) {
            listener = (CameraViewFragmentListener) getParentFragment();
        }
    }

    @Override
    public void onDetach() {
        super.onDetach();
        listener = null;
    }


    private void startCamera() {
        final ListenableFuture<ProcessCameraProvider> cameraProviderFuture = ProcessCameraProvider.getInstance(requireContext());
        cameraProviderFuture.addListener(() -> {
            try {
                ProcessCameraProvider cameraProvider = cameraProviderFuture.get();
                Set<CameraViewFragmentListener.CameraFeature> features = listener.cameraGetRequestedFeatures();
                bindUseCases(cameraProvider, features);
            } catch (ExecutionException | InterruptedException e) {
                // No errors need to be handled for this Future.
                // This should never be reached.
                Logger.log(Logger.error, "Exception while initializing CameraX: {}", e);
            }
        }, ContextCompat.getMainExecutor(requireContext()));
    }

    void bindUseCases(@NonNull ProcessCameraProvider cameraProvider, Set<CameraViewFragmentListener.CameraFeature> features) {
        cameraProvider.unbindAll();
        final CameraSelector cameraSelector = new CameraSelector.Builder()
                .requireLensFacing(CameraSelector.LENS_FACING_FRONT)
                .build();
        final WindowManager wm = (WindowManager) requireContext().getSystemService(Context.WINDOW_SERVICE);
        final UseCaseGroup.Builder useCaseGroupBuilder = new UseCaseGroup.Builder();
        if (features.contains(CameraViewFragmentListener.CameraFeature.PREVIEW)) {
            preview = new Preview.Builder()
                    .setTargetAspectRatio(AspectRatio.RATIO_4_3)
                    //.setTargetResolution(new Size(480, 640))
                    .build();
            binding.preview.setScaleType(PreviewView.ScaleType.FILL_CENTER);
            preview.setSurfaceProvider(binding.preview.getSurfaceProvider());
            useCaseGroupBuilder.addUseCase(preview);
        } else {
            preview = null;
        }

        if (features.contains(CameraViewFragmentListener.CameraFeature.IMAGE_CAPTURE)) {
            imageCapture = new ImageCapture.Builder()
                    .setCameraSelector(cameraSelector)
                    .setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
                    //.setTargetAspectRatio(AspectRatio.RATIO_4_3)
                    .setTargetResolution(new Size(480, 640))
                    .setTargetRotation(wm.getDefaultDisplay().getRotation())
                    .build();
            useCaseGroupBuilder.addUseCase(imageCapture);
        } else {
            imageCapture = null;
        }

        if (features.contains(CameraViewFragmentListener.CameraFeature.VIDEO_CAPTURE)) {
            videoCapture = new VideoCapture.Builder()
                    .setCameraSelector(cameraSelector)
                    //.setTargetResolution(new Size(480, 640))
                    .setBitRate(1000)
                    .setTargetAspectRatio(AspectRatio.RATIO_4_3)
                    .setTargetRotation(wm.getDefaultDisplay().getRotation())
                    .build();
            useCaseGroupBuilder.addUseCase(videoCapture);
        } else {
            videoCapture = null;
        }

        if (features.contains(CameraViewFragmentListener.CameraFeature.FACE_DETECTION)) {
            imageAnalysis = new ImageAnalysis.Builder()
                    .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
                    .build();

            faceDetector = new FaceDetector(new FaceDetector.FaceDetectorListener() {
                @Override
                public void onFaceDetectSuccess(List<Face> faces) {
                    if (listener!=null)
                        listener.onCameraFacesDetect(faces);
                }

                @Override
                public void onFaceDetectError(Exception e) {
                    if (listener!=null)
                        listener.onCameraFacesDetectError(e);
                }
            });
            imageAnalysis.setAnalyzer(executor, faceDetector.createAnalyzer());
            useCaseGroupBuilder.addUseCase(imageAnalysis);
        } else {
            imageAnalysis = null;
        }
        camera = cameraProvider.bindToLifecycle(this, cameraSelector, useCaseGroupBuilder.build());
    }


    public void requestTakePicture() {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        ImageCapture.OutputFileOptions outputFileOptions = new ImageCapture.OutputFileOptions.Builder(bos).build();
        imageCapture.takePicture(outputFileOptions, executor, new ImageCapture.OnImageSavedCallback() {
            @Override
            public void onImageSaved(@NonNull ImageCapture.OutputFileResults outputFileResults) {
                handler.post(() -> {
                    //Toast.makeText(MainActivity.this, "Image Saved successfully", Toast.LENGTH_SHORT).show();
                    if (listener != null) {
                        listener.onCameraPictureTaken(bos.toByteArray());
                    }
                });
            }

            @Override
            public void onError(@NonNull ImageCaptureException error) {
                error.printStackTrace();
            }
        });
    }

    public void startVideoRecording(File file) {
        VideoCapture.OutputFileOptions outputFileOptions = new VideoCapture.OutputFileOptions
                .Builder(file)
                .build();
        videoCapture.startRecording(outputFileOptions, executor, new VideoCapture.OnVideoSavedCallback() {
            @Override
            public void onVideoSaved(@NonNull VideoCapture.OutputFileResults outputFileResults) {
                handler.post(() -> {
                    isRecording = false;
                    //Toast.makeText(MainActivity.this, "Image Saved successfully", Toast.LENGTH_SHORT).show();
                    if (listener != null) {
                        listener.onCameraVideoRecorded(outputFileResults.getSavedUri());
                    }
                });
            }

            @Override
            public void onError(int videoCaptureError, @NonNull String message, @Nullable Throwable cause) {
                isRecording = false;
                if (listener != null) {
                    listener.onCameraVideoRecordError(cause);
                }
            }
        });
        isRecording = true;
    }

    public void stopVideoRecording() {
        videoCapture.stopRecording();
        isRecording = false;
    }

    public boolean isRecording() {
        return isRecording;
    }
}

Solution

  • There was an issue raised for this here: https://issuetracker.google.com/issues/147354615

    Following the workaround suggested there, you should unbind the use cases in onPause() and re-create and re-bind the Preview in onResume(), for example:

    // previewView, cameraProvider, imageCapture, cam and the stopped flag are fields of the fragment
    private void setPreview() {
        if (getLifecycle().getCurrentState() != Lifecycle.State.DESTROYED) {
            Preview preview = new Preview.Builder().setTargetAspectRatio(AspectRatio.RATIO_4_3).build();
            CameraSelector cameraSelector = new CameraSelector.Builder()
                    .requireLensFacing(CameraSelector.LENS_FACING_BACK).build();
            preview.setSurfaceProvider(previewView.getSurfaceProvider());
            cam = cameraProvider.bindToLifecycle(StockOCRFragment.this, cameraSelector, imageCapture, preview);
        }
    }

    @Override
    public void onPause() {
        super.onPause();
        stopped = true;
        // release the use cases while the fragment is not visible
        if (cameraProvider != null) {
            cameraProvider.unbindAll();
        }
    }

    @Override
    public void onResume() {
        super.onResume();
        // re-create and re-bind the preview after returning to the foreground
        if (stopped && cameraProvider != null) {
            setPreview();
        }
    }
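
    Adapted to the fragment from the question, a minimal sketch could look like the following (replacing the empty onPause()/onResume() overrides). It keeps the ProcessCameraProvider in a field and re-runs the existing bindUseCases() after coming back to the foreground; the field names cameraProvider and stoppedWhilePaused are illustrative and not part of the original code:

    // Illustrative fields added to CameraViewFragment (assumed, not in the original post)
    private ProcessCameraProvider cameraProvider;
    private boolean stoppedWhilePaused;

    private void startCamera() {
        final ListenableFuture<ProcessCameraProvider> cameraProviderFuture =
                ProcessCameraProvider.getInstance(requireContext());
        cameraProviderFuture.addListener(() -> {
            try {
                // keep the provider so the use cases can be re-bound after onPause()
                cameraProvider = cameraProviderFuture.get();
                bindUseCases(cameraProvider, listener.cameraGetRequestedFeatures());
            } catch (ExecutionException | InterruptedException e) {
                Logger.log(Logger.error, "Exception while initializing CameraX: {}", e);
            }
        }, ContextCompat.getMainExecutor(requireContext()));
    }

    @Override
    public void onPause() {
        super.onPause();
        stoppedWhilePaused = true;
        // release the camera while the app is in the background
        if (cameraProvider != null) {
            cameraProvider.unbindAll();
        }
    }

    @Override
    public void onResume() {
        super.onResume();
        // re-create the Preview (and the other use cases) and re-bind them
        // once the fragment is back in the foreground
        if (stoppedWhilePaused && cameraProvider != null && listener != null) {
            bindUseCases(cameraProvider, listener.cameraGetRequestedFeatures());
            stoppedWhilePaused = false;
        }
    }

    Since bindUseCases() already calls unbindAll() and builds a fresh Preview with a new surface provider on every invocation, this follows the same idea as the snippet above: the Preview is re-created each time the fragment becomes visible again instead of relying on a single lifecycle binding.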