Tags: ios, avcapturesession, avcapturedevice, avcapture

Stop camera capture session in iOS


I capture images from the iPhone camera using AVCaptureSession, AVCaptureDeviceInput, and AVCaptureVideoDataOutput.

Image capture is set up as follows:

        dispatch_queue_t sessionQueue = dispatch_queue_create("session queue", DISPATCH_QUEUE_SERIAL);
        [self setSessionQueue:sessionQueue];
        //we will use a separate dispatch queue so that frame processing does not block the main queue
        dispatch_queue_t  im_processingQueue = dispatch_queue_create("im_processing queue", DISPATCH_QUEUE_SERIAL);
        [self setIm_processingQueue:im_processingQueue];

        dispatch_async(sessionQueue, ^{
            [self setBackgroundRecordingID:UIBackgroundTaskInvalid];

            NSError *error = nil;

            AVCaptureDevice *videoDevice = [RecordViewController deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionBack];

            AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];

            if (error)
            {
                NSLog(@"%@", error);
            }
            [(AVCaptureVideoPreviewLayer *)[[self previewView] layer] setVideoGravity:AVLayerVideoGravityResizeAspectFill];
            if ([session canAddInput:videoDeviceInput])
            {
                [session addInput:videoDeviceInput];
                [self setVideoDeviceInput:videoDeviceInput];

                dispatch_async(dispatch_get_main_queue(), ^{
                    // Why are we dispatching this to the main queue?
                    // Because AVCaptureVideoPreviewLayer is the backing layer for AVCamPreviewView and UIView can only be manipulated on main thread.
                    // Note: As an exception to the above rule, it is not necessary to serialize video orientation changes on the AVCaptureVideoPreviewLayer’s connection with other session manipulation.
                    //[self previewView] layer

                    [[(AVCaptureVideoPreviewLayer *)[[self previewView] layer] connection] setVideoOrientation:(AVCaptureVideoOrientation)[[UIApplication sharedApplication] statusBarOrientation]];
                });
            }

            AVCaptureDevice *audioDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
            AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];

            if (error)
            {
                NSLog(@"%@", error);
            }

            if ([session canAddInput:audioDeviceInput])
            {
                [session addInput:audioDeviceInput];
            }

            AVCaptureVideoDataOutput *vid_Output = [[AVCaptureVideoDataOutput alloc] init];
            [vid_Output setSampleBufferDelegate:self queue:im_processingQueue];
            vid_Output.alwaysDiscardsLateVideoFrames = YES;
            // Set the video output to store frame in BGRA (It is supposed to be faster)
            NSDictionary* videoSettings = @{(__bridge NSString*)kCVPixelBufferPixelFormatTypeKey: [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]};
            [vid_Output setVideoSettings:videoSettings];
            if ([session canAddOutput:vid_Output])
            {
                [session addOutput:vid_Output];
                AVCaptureConnection *connection = [vid_Output connectionWithMediaType:AVMediaTypeVideo];
                if ([connection isVideoStabilizationSupported])
                {
                    //[connection setEnablesVideoStabilizationWhenAvailable:YES];
                    connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
                }
                [self setVid_Output:vid_Output];

            }

        });
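
The snippet above references session and several properties (sessionQueue, im_processingQueue, videoDeviceInput, vid_Output, previewView, backgroundRecordingID, runtimeErrorHandlingObserver) whose declarations are not shown. For context, a minimal sketch of the declarations this code appears to assume (property names are taken from the setters used above; the exact types of previewView and a few others are guesses) could look like:

    @interface RecordViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>

    // Capture session and the two serial queues created in the setup code above.
    @property (nonatomic) AVCaptureSession *session;
    @property (nonatomic) dispatch_queue_t sessionQueue;
    @property (nonatomic) dispatch_queue_t im_processingQueue;

    // Inputs/outputs stored by the setup code above.
    @property (nonatomic) AVCaptureDeviceInput *videoDeviceInput;
    @property (nonatomic) AVCaptureVideoDataOutput *vid_Output;

    // View whose backing layer is an AVCaptureVideoPreviewLayer.
    @property (nonatomic) UIView *previewView;

    @property (nonatomic) UIBackgroundTaskIdentifier backgroundRecordingID;
    @property (nonatomic) id runtimeErrorHandlingObserver;

    @end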

Inside viewWillAppear, the capture session is started as follows:

- (void)viewWillAppear:(BOOL)animated
{
    [super viewWillAppear:animated];
    dispatch_async([self sessionQueue], ^{
        [self addObserver:self forKeyPath:@"sessionRunningAndDeviceAuthorized" options:(NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew) context:SessionRunningAndDeviceAuthorizedContext];

        [self addObserver:self forKeyPath:@"vid_Output.recording" options:(NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew) context:RecordingContext];
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:[[self videoDeviceInput] device]];

        __weak RecordViewController *weakSelf = self;
        [self setRuntimeErrorHandlingObserver:[[NSNotificationCenter defaultCenter] addObserverForName:AVCaptureSessionRuntimeErrorNotification object:[self session] queue:nil usingBlock:^(NSNotification *note) {
            RecordViewController *strongSelf = weakSelf;
            dispatch_async([strongSelf sessionQueue], ^{
                // Manually restarting the session since it must have been stopped due to an error.
                [[strongSelf session] startRunning];

            });
        }]];
        [[self session] startRunning];
    });
}
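
The two KVO contexts referenced above (SessionRunningAndDeviceAuthorizedContext and RecordingContext) are assumed to be defined elsewhere in the file. A typical declaration, in the style of Apple's AVCam sample, is:

    static void *SessionRunningAndDeviceAuthorizedContext = &SessionRunningAndDeviceAuthorizedContext;
    static void *RecordingContext = &RecordingContext;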

It is then stopped as follows:

- (void) stopCapturingCameraImages
{
    dispatch_async([self sessionQueue], ^{
        [[self session] stopRunning];

        [[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:[[self videoDeviceInput] device]];
        [[NSNotificationCenter defaultCenter] removeObserver:[self runtimeErrorHandlingObserver]];

        [self removeObserver:self forKeyPath:@"sessionRunningAndDeviceAuthorized" context:SessionRunningAndDeviceAuthorizedContext];

        [self removeObserver:self forKeyPath:@"vid_Output.recording" context:RecordingContext];
    });

}

The problem occurs when removing the observers:

    [self removeObserver:self forKeyPath:@"sessionRunningAndDeviceAuthorized" context:SessionRunningAndDeviceAuthorizedContext];

    [self removeObserver:self forKeyPath:@"vid_Output.recording" context:RecordingContext];

The program crashes after these two removeObserver: calls run. What could be wrong?

EDIT:

stopCapturingCameraImages is called from the sample buffer delegate callback:

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    @autoreleasepool {
        [_processing Searchobject_using_CPU_CascadeClassifier_for:img with_return_Rect:_rects];

        dispatch_async(dispatch_get_main_queue(), ^{
            // for (int lc = 0; lc <
            if (_rects.count > 0) {
                [self stopCapturingCameraImages];
            }
        });
    }
}
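
Note that captureOutput:didOutputSampleBuffer:fromConnection: fires for every frame, so stopCapturingCameraImages can be dispatched several times before the session actually stops, which would attempt to remove the same observers more than once. A minimal sketch of a one-shot guard (using a hypothetical stopRequested BOOL property that is not in the original code) could look like:

    dispatch_async(dispatch_get_main_queue(), ^{
        // stopRequested is a hypothetical BOOL property that makes the stop request one-shot,
        // so later frames do not queue additional stop/removeObserver work.
        if (_rects.count > 0 && !self.stopRequested) {
            self.stopRequested = YES;
            [self stopCapturingCameraImages];
        }
    });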

EDIT 1:

Following @SwiftArchitect's suggestion, I added an if ([[self session] isRunning]) check, and it now works. I implemented it as:

- (void)viewWillDisappear:(BOOL)animated
{
    [super viewWillDisappear:animated];
    [self stopCapturingCameraImages];
}

- (void) stopCapturingCameraImages
{
    dispatch_async([self sessionQueue], ^{
        if ([[self session] isRunning]) {
            [[self session] stopRunning];

            [[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:[[self videoDeviceInput] device]];
            [[NSNotificationCenter defaultCenter] removeObserver:[self runtimeErrorHandlingObserver]];

            [self removeObserver:self forKeyPath:@"sessionRunningAndDeviceAuthorized" context:SessionRunningAndDeviceAuthorizedContext];

            [self removeObserver:self forKeyPath:@"vid_Output.recording" context:RecordingContext];
        }
    });

}

Solution

  • By the time:

    dispatch_async([self sessionQueue], ^{
        // ...
    
        [self removeObserver:self forKeyPath:@"sessionRunningAndDeviceAuthorized" context:SessionRunningAndDeviceAuthorizedContext];
    
        [self removeObserver:self forKeyPath:@"vid_Output.recording" context:RecordingContext];
    });
    

    is executed, self (the UIViewController) may already have executed its viewWillDisappear and removed the observers.

    The order in which blocks dispatched to dispatch_get_main_queue and sessionQueue are executed is not necessarily what you expect, or even predictable.


    The fix may be as simple as adding a check like if ([[self session] isRunning]) before executing the removeObserver calls, short of adding a semaphore.
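
An alternative (or complementary) approach is to track observer registration explicitly, so the addObserver:/removeObserver: calls stay balanced regardless of which queue runs first. A minimal sketch, using a hypothetical observersRegistered BOOL property that is not part of the original code:

    // Sketch only: call both helpers on sessionQueue so access to the flag stays serialized.
    - (void)addSessionObservers
    {
        if (self.observersRegistered) return;
        self.observersRegistered = YES;

        [self addObserver:self forKeyPath:@"sessionRunningAndDeviceAuthorized" options:(NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew) context:SessionRunningAndDeviceAuthorizedContext];
        [self addObserver:self forKeyPath:@"vid_Output.recording" options:(NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew) context:RecordingContext];
    }

    - (void)removeSessionObservers
    {
        if (!self.observersRegistered) return;
        self.observersRegistered = NO;

        [self removeObserver:self forKeyPath:@"sessionRunningAndDeviceAuthorized" context:SessionRunningAndDeviceAuthorizedContext];
        [self removeObserver:self forKeyPath:@"vid_Output.recording" context:RecordingContext];
    }

Calling addSessionObservers from viewWillAppear and removeSessionObservers from stopCapturingCameraImages (both on sessionQueue) makes the removal a no-op if the observers were never added or were already removed.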