iosobjective-cmp4avassetwriteravassetreader

AVAssetReader / AVAssetWriter Join mp4 file with different resolutions


I'm coding an iPad app in which I need to join mp4 file with different resolution. To do so I'm using a combination of AVAssetReader to read the mp4 source files and AVAssetWriter to write those source files in a single mp4 output file.

I've tried to use AVAssetExportSession but the problem I had was there was black frames between the different joined files.

The problem I'm facing now is that everything seems OK but the completion handler of the AVAssetWriter is never called.

Here is my selector taking as input a list of mp4 file URL, a single output file URL and a completion handler.

- (void)resizeAndJoinVideosAtURLs:(NSArray *)videoURLs toOutputURL:(NSURL *)outputURL withHandler:(void(^)(NSURL *fileURL))handler
{
    /*
     First step: create the writer and writer input
     */
    NSError *error = nil;
    self.videoAssetWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeMPEG4 error:&error];

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,[NSNumber numberWithInt:640], AVVideoWidthKey,[NSNumber numberWithInt:480], AVVideoHeightKey,nil];

    AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    videoWriterInput.expectsMediaDataInRealTime = NO;

    if([self.videoAssetWriter canAddInput:videoWriterInput])
    {
        [self.videoAssetWriter addInput:videoWriterInput];
        [self.videoAssetWriter startWriting];
        [self.videoAssetWriter startSessionAtSourceTime:kCMTimeZero];

        /*
         Second step: for each video URL given create a reader and an reader input
         */

        for(NSURL *videoURL in videoURLs)
        {
            NSLog(@"Processing file: %@",videoURL);
            AVAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
            AVAssetReader *videoAssetReader = [[AVAssetReader alloc] initWithAsset:videoAsset error:&error];
            AVAssetTrack *videoAssetTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
            NSDictionary *videoOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];

            AVAssetReaderTrackOutput *videoAssetTrackOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoAssetTrack outputSettings:videoOptions];
            videoAssetTrackOutput.alwaysCopiesSampleData = NO;

            if([videoAssetReader canAddOutput:videoAssetTrackOutput])
            {
                [videoAssetReader addOutput:videoAssetTrackOutput];
                [videoAssetReader startReading];

                /*
                 Step three: copy the buffers from the reader to the writer
                 */
                while ([videoAssetReader status] == AVAssetReaderStatusReading)
                {
                    if(![videoWriterInput isReadyForMoreMediaData]) continue;

                    CMSampleBufferRef buffer = [videoAssetTrackOutput copyNextSampleBuffer];
                    if(buffer)
                    {
                        [videoWriterInput appendSampleBuffer:buffer];
                        CFRelease(buffer);
                    }
                }


            } else NSLog(@"ERROR: %@",error);
        }

       [videoWriterInput markAsFinished];

    } else NSLog(@"ERROR: %@",error);

    __weak ClipBuilder *weakself = self;
    [self.videoAssetWriter finishWritingWithCompletionHandler:^{
        handler(outputURL);
        weakself.videoAssetWriter = nil;
    }];
}

My output file exist and the AVAssetWriter exist since it is a property but still the completion handler is not called. What can explain this?

Thanks for your help.

What can explain that?


Solution

  • Here is the solution I finally implemented to join mp4 file with different resolution with a combination of AVAssetReader / AVAssetWriter.

    - (void)reencodeComposition:(AVComposition *)composition toMP4File:(NSURL *)mp4FileURL withCompletionHandler:(void (^)(void))handler
    {
        self.status = EncoderStatusEncoding;
    
        /*
         Create the asset writer to write the file on disk
         */
    
        NSError *error = nil;
        if([[NSFileManager defaultManager] fileExistsAtPath:mp4FileURL.path isDirectory:nil])
        {
            if(![[NSFileManager defaultManager] removeItemAtPath:mp4FileURL.path error:&error])
            {
                [self failWithError:error withCompletionHandler:handler];
                return;
            }
        }
    
        self.assetWriter = [[AVAssetWriter alloc] initWithURL:mp4FileURL fileType:AVFileTypeMPEG4 error:&error];
    
        if(self.assetWriter)
        {
            /*
             Get the audio and video track of the composition
             */
            AVAssetTrack *videoAssetTrack = [composition tracksWithMediaType:AVMediaTypeVideo].firstObject;
            AVAssetTrack *audioAssetTrack = [composition tracksWithMediaType:AVMediaTypeAudio].firstObject;
    
            NSDictionary *videoSettings = @{AVVideoCodecKey:AVVideoCodecH264, AVVideoWidthKey:@(self.imageWidth), AVVideoHeightKey:@(self.imageHeight)};
    
            /*
             Add an input to be able to write the video in the file
             */
            AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
            videoWriterInput.expectsMediaDataInRealTime = YES;
    
            if([self.assetWriter canAddInput:videoWriterInput])
            {
                [self.assetWriter addInput:videoWriterInput];
    
                /*
                 Add an input to be able to write the audio in the file
                 */
    // Use this only if you know the format
    //            CMFormatDescriptionRef audio_fmt_desc_ = nil;
    //
    //            AudioStreamBasicDescription audioFormat;
    //            bzero(&audioFormat, sizeof(audioFormat));
    //            audioFormat.mSampleRate = 44100;
    //            audioFormat.mFormatID   = kAudioFormatMPEG4AAC;
    //            audioFormat.mFramesPerPacket = 1024;
    //            audioFormat.mChannelsPerFrame = 2;
    //            int bytes_per_sample = sizeof(float);
    //            audioFormat.mFormatFlags = kAudioFormatFlagIsFloat | kAudioFormatFlagIsPacked;
    //            
    //            audioFormat.mBitsPerChannel = bytes_per_sample * 8;
    //            audioFormat.mBytesPerPacket = bytes_per_sample * 2;
    //            audioFormat.mBytesPerFrame = bytes_per_sample * 2;
    //            
    //            CMAudioFormatDescriptionCreate(kCFAllocatorDefault,&audioFormat,0,NULL,0,NULL,NULL,&audio_fmt_desc_);
    //            
    //             AVAssetWriterInput* audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:nil sourceFormatHint:audio_fmt_desc_];
    //            
    //            AVAssetWriterInput* audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:nil sourceFormatHint:((__bridge CMAudioFormatDescriptionRef)audioAssetTrack.formatDescriptions.firstObject)];
    
                audioWriterInput.expectsMediaDataInRealTime = YES;
    
                if([self.assetWriter canAddInput:audioWriterInput])
                {
                    [self.assetWriter addInput:audioWriterInput];
                    [self.assetWriter startWriting];
                    [self.assetWriter startSessionAtSourceTime:kCMTimeZero];
    
                    /*
                     Create the asset reader to read the mp4 files on the disk
                     */
                    AVAssetReader *assetReader = [[AVAssetReader alloc] initWithAsset:composition error:&error];
                    NSDictionary *videoOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    
                    /*
                     Add an output to be able to retrieve the video in the files
                     */
                    AVAssetReaderTrackOutput *videoAssetTrackOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoAssetTrack outputSettings:videoOptions];
                    videoAssetTrackOutput.alwaysCopiesSampleData = NO;
    
                    if([assetReader canAddOutput:videoAssetTrackOutput])
                    {
                        [assetReader addOutput:videoAssetTrackOutput];
                        /*
                         Add an output to be able to retrieve the video in the files
                         */
                        AVAssetReaderTrackOutput *audioAssetTrackOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:audioAssetTrack outputSettings:nil];
                        videoAssetTrackOutput.alwaysCopiesSampleData = NO;
    
                        if([assetReader canAddOutput:audioAssetTrackOutput])
                        {
                            [assetReader addOutput:audioAssetTrackOutput];
    
                            [assetReader startReading];
    
                            /*
                             Read the mp4 files until the end and copy them in the output file
                             */
                            dispatch_group_t encodingGroup = dispatch_group_create();
    
                            dispatch_group_enter(encodingGroup);
                            [audioWriterInput requestMediaDataWhenReadyOnQueue:self.encodingQueue usingBlock:^{
                                while ([audioWriterInput isReadyForMoreMediaData])
                                {
                                    CMSampleBufferRef nextSampleBuffer = [audioAssetTrackOutput copyNextSampleBuffer];
    
                                    if (nextSampleBuffer)
                                    {
                                        [audioWriterInput appendSampleBuffer:nextSampleBuffer];
                                        CFRelease(nextSampleBuffer);
                                    }
                                    else
                                    {
                                        [audioWriterInput markAsFinished];
                                        dispatch_group_leave(encodingGroup);
                                        break;
                                    }
                                }
                            }];
    
                            dispatch_group_enter(encodingGroup);
                            [videoWriterInput requestMediaDataWhenReadyOnQueue:self.encodingQueue usingBlock:^{
                                while ([videoWriterInput isReadyForMoreMediaData])
                                {
                                    CMSampleBufferRef nextSampleBuffer = [videoAssetTrackOutput copyNextSampleBuffer];
    
                                    if (nextSampleBuffer)
                                    {
                                        [videoWriterInput appendSampleBuffer:nextSampleBuffer];
                                        CFRelease(nextSampleBuffer);
                                    }
                                    else
                                    {
                                        [videoWriterInput markAsFinished];
                                        dispatch_group_leave(encodingGroup);
                                        break;
                                    }
                                }
                            }];
    
                            dispatch_group_wait(encodingGroup, DISPATCH_TIME_FOREVER);
    
                        } else [self failWithError:error withCompletionHandler:handler];
                    } else [self failWithError:error withCompletionHandler:handler];
                } else [self failWithError:error withCompletionHandler:handler];
            } else [self failWithError:error withCompletionHandler:handler];
    
            __weak Encoder *weakself = self;
            [self.assetWriter finishWritingWithCompletionHandler:^{
                self.status = EncoderStatusCompleted;
                handler();
                weakself.assetWriter = nil;
                self.encodingQueue = nil;
            }];
        }
        else [self failWithError:error withCompletionHandler:handler];
    }
    

    With

    - (dispatch_queue_t)encodingQueue
    {
        if(!_encodingQueue)
        {
            _encodingQueue = dispatch_queue_create("com.myProject.encoding", NULL);
        }
        return _encodingQueue;
    }
    

    This implementation was for my project TS2MP4 but I won't need it finally.