Hello there. I am rotating a live video stream and applying image filters to it with GPUImage. The processing is taking more time than expected and the iPhone overheats. Can anybody help me optimise this code? Here is what I am using:
- (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    // Return if the sample buffer is invalid
    if (!CMSampleBufferIsValid(sampleBuffer)) {
        return;
    }
    // Get a CGImage from the sample buffer
    CGImageRef cgImageFromBuffer = [self cgImageFromSampleBuffer:sampleBuffer];
    if (cgImageFromBuffer == NULL) {
        return;
    }
    // Rotate the frame into the correct orientation
    UIImage *rotatedPlainImage = [UIUtils rotateImage:[UIImage imageWithCGImage:cgImageFromBuffer] byDegree:90];
    if (rotatedPlainImage == nil) {
        CFRelease(cgImageFromBuffer);
        return;
    }
    // Apply the image filter to the CGImage using GPUImage
    CGImageRef filteredCGImage = [self.selectedPublishFilter newCGImageByFilteringCGImage:rotatedPlainImage.CGImage];
    // Convert back into a CMSampleBuffer
    CMSampleBufferRef outputBuffer = [self getSampleBufferUsingCIByCGInput:filteredCGImage andProvidedSampleBuffer:sampleBuffer];
    // Pass to the custom Red5Pro encoder for the live stream
    [self.encoder encodeFrame:outputBuffer ofType:r5_media_type_video_custom];
    // Release everything we created
    CFRelease(outputBuffer);
    CFRelease(filteredCGImage);
    CFRelease(cgImageFromBuffer);
}
// Create a CGImageRef from sample buffer data
- (CGImageRef)cgImageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0); // Lock the image buffer
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0); // Get image information
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace,
                                                    kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    /* CVBufferRelease(imageBuffer); */ // Do not call this! The image buffer is owned by the sample buffer.
    return newImage;
}
- (CMSampleBufferRef)getSampleBufferUsingCIByCGInput:(CGImageRef)imageRef andProvidedSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CIImage *ciImage = [CIImage imageWithCGImage:imageRef];
    CVPixelBufferRef pixelBuffer;
    CVPixelBufferCreate(kCFAllocatorSystemDefault, (size_t)ciImage.extent.size.width, (size_t)ciImage.extent.size.height,
                        kCVPixelFormatType_32BGRA, NULL, &pixelBuffer);
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    CIContext *ciContext = [CIContext contextWithOptions:nil]; // Note: created on every frame
    [ciContext render:ciImage toCVPixelBuffer:pixelBuffer];
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    // Copy the timing info from the source sample buffer
    CMSampleTimingInfo sampleTime = {
        .duration = CMSampleBufferGetDuration(sampleBuffer),
        .presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
        .decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer)
    };
    CMVideoFormatDescriptionRef videoInfo = NULL;
    CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo);
    CMSampleBufferRef oBuf;
    CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, videoInfo, &sampleTime, &oBuf);
    CVPixelBufferRelease(pixelBuffer);
    CFRelease(videoInfo);
    return oBuf;
}
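One direction I have also been considering (a minimal sketch, untested in this pipeline) is to skip the CGImage/UIImage round trip for the rotation step entirely and stay in Core Image. For this to pay off, the filter step would also have to move off GPUImage's newCGImageByFilteringCGImage: (e.g. onto CIFilter), since that API needs a CGImage:

#import <CoreImage/CoreImage.h>
#import <ImageIO/ImageIO.h> // for the kCGImagePropertyOrientation constants

// Sketch: rotate a camera frame without creating any CGImage/UIImage.
// Whether Left or Right matches the 90° rotation used above depends on
// the camera orientation and would need to be verified.
- (CIImage *)rotatedCIImageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CIImage *frame = [CIImage imageWithCVPixelBuffer:imageBuffer];
    // EXIF orientation 8 ("left") rotates the frame by 90 degrees
    return [frame imageByApplyingOrientation:kCGImagePropertyOrientationLeft];
}

The resulting CIImage could then be rendered straight into the output CVPixelBuffer with a single shared CIContext, so no bitmap context is created per frame.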
Update: I switched to the Accelerate framework and OpenGL ES 2.0. Accelerate (vImage) now rotates the CMSampleBuffer, and an OpenGL-backed CIContext makes the CIImage render faster into the CVPixelBuffer. Without a filter the per-frame time is now 3-8 milliseconds; with filters it is 7-21 milliseconds. Here is the refactored code:
@implementation ColorsVideoSource {
    CIContext *coreImageContext;
}

- (instancetype)init {
    if ((self = [super init]) != nil) {
        EAGLContext *glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
        GLKView *glView = [[GLKView alloc] initWithFrame:CGRectMake(0.0, 0.0, 360.0, 480.0) context:glContext];
        // Create the GL-backed CIContext once and reuse it for every frame
        coreImageContext = [CIContext contextWithEAGLContext:glView.context];
    }
    return self;
}
- (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    if (!CMSampleBufferIsValid(sampleBuffer)) {
        return;
    }
    CVPixelBufferRef rotateBuffer = [self correctBufferOrientation:sampleBuffer];
    CGImageRef cgImageFromBuffer = [self cgImageFromImageBuffer:rotateBuffer];
    if (cgImageFromBuffer == NULL) {
        CFRelease(rotateBuffer); // Don't leak the rotated buffer on early return
        return;
    }
    UIImage *rotatedPlainImage = [UIImage imageWithCGImage:cgImageFromBuffer];
    if (rotatedPlainImage == nil) {
        CFRelease(rotateBuffer);
        CFRelease(cgImageFromBuffer);
        return;
    }
    if (_currentFilterType == SWPublisherFilterNone) {
        if (_needPreviewImage) {
            _previewImage = rotatedPlainImage;
        }
        CMSampleTimingInfo sampleTime = {
            .duration = CMSampleBufferGetDuration(sampleBuffer),
            .presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
            .decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer)
        };
        CMVideoFormatDescriptionRef videoInfo = NULL;
        CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, rotateBuffer, &videoInfo);
        CMSampleBufferRef oBuf;
        CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, rotateBuffer, true, NULL, NULL, videoInfo, &sampleTime, &oBuf);
        CFRelease(videoInfo);
        if (!self.pauseEncoding) {
            @try {
                [self.encoder encodeFrame:oBuf ofType:r5_media_type_video_custom];
            } @catch (NSException *exception) {
                NSLog(@"Encoder error: %@", exception);
            }
        }
        CFRelease(oBuf);
    } else {
        CGImageRef filteredCGImage = [self.selectedPublishFilter newCGImageByFilteringCGImage:rotatedPlainImage.CGImage];
        if (_needPreviewImage) {
            _previewImage = [UIImage imageWithCGImage:filteredCGImage];
        }
        CMSampleBufferRef outputBuffer = [self getSampleBufferUsingCIByCGInput:filteredCGImage andProvidedSampleBuffer:sampleBuffer];
        if (!self.pauseEncoding) {
            @try {
                [self.encoder encodeFrame:outputBuffer ofType:r5_media_type_video_custom];
            } @catch (NSException *exception) {
                NSLog(@"Encoder error: %@", exception);
            }
        }
        CFRelease(outputBuffer);
        CFRelease(filteredCGImage);
    }
    CFRelease(rotateBuffer);
    CFRelease(cgImageFromBuffer);
}
#pragma mark - Methods Refactored GPUImage - Devanshu
- (CVPixelBufferRef)correctBufferOrientation:(CMSampleBufferRef)sampleBuffer
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    size_t currSize = bytesPerRow * height * sizeof(unsigned char);
    size_t bytesPerRowOut = 4 * height * sizeof(unsigned char); // The rotated image is `height` pixels wide
    void *srcBuff = CVPixelBufferGetBaseAddress(imageBuffer);
    /* rotationConstant:
     * 0 -- rotate 0 degrees (simply copy the data from src to dest)
     * 1 -- rotate 90 degrees counterclockwise
     * 2 -- rotate 180 degrees
     * 3 -- rotate 270 degrees counterclockwise
     */
    uint8_t rotationConstant = 3;
    unsigned char *dstBuff = (unsigned char *)malloc(currSize);
    vImage_Buffer inbuff = {srcBuff, height, width, bytesPerRow};
    vImage_Buffer outbuff = {dstBuff, width, height, bytesPerRowOut};
    uint8_t bgColor[4] = {0, 0, 0, 0};
    vImage_Error err = vImageRotate90_ARGB8888(&inbuff, &outbuff, rotationConstant, bgColor, 0);
    if (err != kvImageNoError) NSLog(@"vImage rotation error: %ld", err);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    CVPixelBufferRef rotatedBuffer = NULL;
    CVPixelBufferCreateWithBytes(NULL,
                                 height, // Width and height swap after a 90-degree rotation
                                 width,
                                 kCVPixelFormatType_32BGRA,
                                 outbuff.data,
                                 bytesPerRowOut,
                                 freePixelBufferDataAfterRelease,
                                 NULL,
                                 NULL,
                                 &rotatedBuffer);
    return rotatedBuffer;
}
void freePixelBufferDataAfterRelease(void *releaseRefCon, const void *baseAddress)
{
    // Free the memory we malloc'd for the vImage rotation
    free((void *)baseAddress);
}
// Create a CGImageRef from sample buffer data
- (CGImageRef)cgImageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    return [self cgImageFromImageBuffer:imageBuffer];
}

// Create a CGImageRef from a CVImageBuffer
- (CGImageRef)cgImageFromImageBuffer:(CVImageBufferRef)imageBuffer
{
    CVPixelBufferLockBaseAddress(imageBuffer, 0); // Lock the image buffer
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0); // Get image information
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace,
                                                    kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    return newImage;
}
- (CMSampleBufferRef)getSampleBufferUsingCIByCGInput:(CGImageRef)imageRef andProvidedSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CIImage *theCoreImage = [CIImage imageWithCGImage:imageRef];
    CVPixelBufferRef pixelBuffer;
    CVPixelBufferCreate(kCFAllocatorSystemDefault, (size_t)theCoreImage.extent.size.width, (size_t)theCoreImage.extent.size.height,
                        kCVPixelFormatType_32BGRA, NULL, &pixelBuffer);
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    [coreImageContext render:theCoreImage toCVPixelBuffer:pixelBuffer]; // Reuses the GL-backed context created in init
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    CMSampleTimingInfo sampleTime = {
        .duration = CMSampleBufferGetDuration(sampleBuffer),
        .presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
        .decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer)
    };
    CMVideoFormatDescriptionRef videoInfo = NULL;
    CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo);
    CMSampleBufferRef oBuf;
    CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, videoInfo, &sampleTime, &oBuf);
    CVPixelBufferRelease(pixelBuffer);
    CFRelease(videoInfo);
    return oBuf;
}
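One remaining per-frame allocation I would like to remove is the CVPixelBufferCreate and the malloc in the rotation path. A minimal sketch of reusing buffers through a CVPixelBufferPool instead, assuming the frame dimensions stay constant (the pool would be created once, e.g. in init; pixelBufferPool is a hypothetical ivar, not part of the code above):

// Sketch: allocate pixel buffers from a pool created once, instead of
// calling CVPixelBufferCreate on every frame.
static CVPixelBufferPoolRef CreateBGRAPixelBufferPool(size_t width, size_t height) {
    NSDictionary *pixelBufferAttributes = @{
        (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
        (id)kCVPixelBufferWidthKey           : @(width),
        (id)kCVPixelBufferHeightKey          : @(height)
    };
    CVPixelBufferPoolRef pool = NULL;
    CVPixelBufferPoolCreate(kCFAllocatorDefault, NULL,
                            (__bridge CFDictionaryRef)pixelBufferAttributes, &pool);
    return pool;
}

// Per frame, replacing CVPixelBufferCreate:
CVPixelBufferRef pixelBuffer = NULL;
CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferPool, &pixelBuffer);
// ... render into pixelBuffer and wrap it in a CMSampleBuffer as above;
// CVPixelBufferRelease(pixelBuffer) then returns the buffer to the pool for reuse.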