xcodemacosavfoundationcmsamplebufferref

AVFoundation image captured is dark


On osx i use AVFoundation to capture image from a USB camera, all work fine, but the image I get is darker compared to live video.

Device capture configuration

-(BOOL)prepareCapture{
captureSession = [[AVCaptureSession alloc] init];
NSError *error;

imageOutput=[[AVCaptureStillImageOutput alloc] init];
NSNumber * pixelFormat = [NSNumber numberWithInt:k32BGRAPixelFormat];
[imageOutput setOutputSettings:[NSDictionary dictionaryWithObject:pixelFormat forKey:(id)kCVPixelBufferPixelFormatTypeKey]];

videoOutput=[[AVCaptureMovieFileOutput alloc] init];

AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:MyVideoDevice error:&error];
if (videoInput) {
    [captureSession beginConfiguration];
    [captureSession addInput:videoInput];
    [captureSession setSessionPreset:AVCaptureSessionPresetHigh];
    //[captureSession setSessionPreset:AVCaptureSessionPresetPhoto];
    [captureSession addOutput:imageOutput];
    [captureSession addOutput:videoOutput];
    [captureSession commitConfiguration];
}
else {
    // Handle the failure.
    return NO;
}
return YES;
}

Add view for live preview

-(void)settingPreview:(NSView*)View{
// Attach preview to session
previewView = View;
CALayer *previewViewLayer = [previewView layer];
[previewViewLayer setBackgroundColor:CGColorGetConstantColor(kCGColorBlack)];
AVCaptureVideoPreviewLayer *newPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
[newPreviewLayer setFrame:[previewViewLayer bounds]];
[newPreviewLayer setAutoresizingMask:kCALayerWidthSizable | kCALayerHeightSizable];
[previewViewLayer addSublayer:newPreviewLayer];
//[self setPreviewLayer:newPreviewLayer];
[captureSession startRunning];
}

Code to capture the image

-(void)captureImage{
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in imageOutput.connections) {
    for (AVCaptureInputPort *port in [connection inputPorts]) {
        if ([[port mediaType] isEqual:AVMediaTypeVideo] ) {
            videoConnection = connection;
            break;
        }
    }
    if (videoConnection) { break; }
}
[imageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:
 ^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
     CFDictionaryRef exifAttachments =
     CMGetAttachment(imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
     if (exifAttachments) {
         // Do something with the attachments.
     }
     // Continue as appropriate.
     //IMG is a global NSImage
     IMG = [self imageFromSampleBuffer:imageSampleBuffer];
     [[self delegate] imageReady:IMG];
}];
}

Create a NSImage from sample buffer data, i think the problem is here

- (NSImage *) imageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer
{
// Get a CMSampleBuffer's Core Video image buffer for the media data
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// Lock the base address of the pixel buffer
CVPixelBufferLockBaseAddress(imageBuffer, 0);

// Get the number of bytes per row for the pixel buffer
void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);

// Get the number of bytes per row for the pixel buffer
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
// Get the pixel buffer width and height
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);

// Create a device-dependent RGB color space
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

// Create a bitmap graphics context with the sample buffer data
CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                             bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
// Create a Quartz image from the pixel data in the bitmap graphics context
CGImageRef quartzImage = CGBitmapContextCreateImage(context);
// Unlock the pixel buffer
CVPixelBufferUnlockBaseAddress(imageBuffer,0);

// Free up the context and color space
CGContextRelease(context);
CGColorSpaceRelease(colorSpace);

// Create an image object from the Quartz image
//UIImage *image = [UIImage imageWithCGImage:quartzImage];
NSImage * image = [[NSImage alloc] initWithCGImage:quartzImage size:NSZeroSize];
// Release the Quartz image
CGImageRelease(quartzImage);

return (image);
}

Solution

  • Solution found

    The problem was in imageFromSampleBuffer I used this code and the picture is perfect

    // Continue as appropriate.
         //IMG = [self imageFromSampleBuffer:imageSampleBuffer];
    
         CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(imageSampleBuffer);
    
         if (imageBuffer) {
             CVBufferRetain(imageBuffer);
    
             NSCIImageRep* imageRep = [NSCIImageRep imageRepWithCIImage: [CIImage imageWithCVImageBuffer: imageBuffer]];
    
             IMG = [[NSImage alloc] initWithSize: [imageRep size]];
             [IMG addRepresentation: imageRep];
    
             CVBufferRelease(imageBuffer);
         }
    

    Code found in this answer