When scheduling files or buffers with `AVAudioPlayerNode`, you can submit a completion callback, and you can specify the event for which the callback should be called. The options are `dataConsumed`, `dataRendered`, and `dataPlayedBack`. In most cases this works as expected. However, when scheduling files one after the other, the callbacks for files other than the last one are not called if the type is `dataRendered` or `dataPlayedBack`.
Here's a self-contained example demonstrating the issue. (This code can be pasted over the contents of the 'ViewController.swift' file in the Xcode iOS 'App' template. The project will need to include the audio file referenced in the code in order to work.)
import AVFoundation
import UIKit
// Minimal reproduction case: schedules the same AVAudioFile three times and
// the same AVAudioPCMBuffer three times, requesting a .dataPlayedBack
// completion callback for each schedule call. Expected: six callbacks.
// Observed: only the last file callback fires, but all three buffer
// callbacks fire.
class ViewController: UIViewController {
    private let engine = AVAudioEngine()
    private let filePlayer = AVAudioPlayerNode()
    private let bufferPlayer = AVAudioPlayerNode()
    // Implicitly-unwrapped on purpose: throwaway demo code — a missing
    // resource should crash immediately rather than fail silently.
    private var file: AVAudioFile! = nil
    private var buffer: AVAudioPCMBuffer! = nil
    override func viewDidLoad() {
        super.viewDidLoad()
        // The project must bundle "audio.m4a" for this to work.
        let url = Bundle.main.resourceURL!.appendingPathComponent("audio.m4a")
        file = try! AVAudioFile(forReading: url)
        // Buffer sized to hold the entire file so a single read captures it all.
        buffer = AVAudioPCMBuffer(
            pcmFormat: file.processingFormat,
            frameCapacity: AVAudioFrameCount(file.length)
        )
        try! file.read(into: buffer)
        let format = file.processingFormat
        engine.attach(filePlayer)
        engine.attach(bufferPlayer)
        engine.connect(filePlayer, to: engine.mainMixerNode, format: format)
        engine.connect(bufferPlayer, to: engine.mainMixerNode, format: format)
        try! engine.start()
        filePlayer.play()
        bufferPlayer.play()
        // Schedule the same file three times back to back. With
        // .dataPlayedBack (or .dataRendered) only "File 2" is printed.
        for i in 0 ..< 3 {
            filePlayer.scheduleFile(
                file, at: nil, completionCallbackType: .dataPlayedBack
            ) {
                type in print("File \(i)")
            }
        }
        // Schedule the same buffer three times back to back; all of
        // "Buff 0/1/2" are printed, as expected.
        // NOTE(review): buffers are scheduled on filePlayer, not bufferPlayer;
        // bufferPlayer is attached and played but never scheduled — confirm
        // this is intentional.
        for i in 0 ..< 3 {
            filePlayer.scheduleBuffer(
                buffer, at: nil, completionCallbackType: .dataPlayedBack
            ) {
                type in print("Buff \(i)")
            }
        }
    }
}
Here's the output I get:
File 2
Buff 0
Buff 1
Buff 2
As you can see, the callbacks are called for all three instances of the buffer, but are only called for the last instance of the file.
Again, it's only for `dataRendered` or `dataPlayedBack` that the callbacks aren't called; for `dataConsumed`, it works correctly.
Has anyone encountered this? Can anyone confirm this behavior? It seems like a bug, but it's also possible I could be doing something wrong.
Edit:
Here's another version of the code in response to an idea presented in the comments. In this version, instead of scheduling the same file instance three times, three instances of the same file are scheduled in succession:
import AVFoundation
import UIKit
// Variant of the reproduction case, in response to an idea from the comments:
// instead of scheduling one AVAudioFile instance three times, three separate
// AVAudioFile instances reading the same URL are scheduled in succession.
// The observed callback behavior is unchanged.
class ViewController: UIViewController {
    private let engine = AVAudioEngine()
    private let filePlayer = AVAudioPlayerNode()
    private let bufferPlayer = AVAudioPlayerNode()
    private var files = [AVAudioFile]()
    // Implicitly-unwrapped on purpose: throwaway demo code — a missing
    // resource should crash immediately rather than fail silently.
    private var buffer: AVAudioPCMBuffer! = nil
    override func viewDidLoad() {
        super.viewDidLoad()
        let url = Bundle.main.resourceURL!.appendingPathComponent("audio.m4a")
        // Three independent AVAudioFile instances for the same URL.
        for _ in 0 ..< 3 {
            files.append(try! AVAudioFile(forReading: url))
        }
        let file = files[0]
        // Buffer sized to hold the entire file so a single read captures it all.
        buffer = AVAudioPCMBuffer(
            pcmFormat: file.processingFormat,
            frameCapacity: AVAudioFrameCount(file.length)
        )
        try! file.read(into: buffer)
        let format = file.processingFormat
        engine.attach(filePlayer)
        engine.attach(bufferPlayer)
        engine.connect(filePlayer, to: engine.mainMixerNode, format: format)
        engine.connect(bufferPlayer, to: engine.mainMixerNode, format: format)
        try! engine.start()
        filePlayer.play()
        bufferPlayer.play()
        // Distinct file instances scheduled back to back; still only
        // "File 2" is printed with .dataPlayedBack.
        for i in 0 ..< 3 {
            filePlayer.scheduleFile(
                files[i], at: nil, completionCallbackType: .dataPlayedBack
            ) {
                type in print("File \(i)")
            }
        }
        // NOTE(review): buffers are scheduled on filePlayer, not bufferPlayer;
        // bufferPlayer is attached and played but never scheduled — confirm
        // this is intentional.
        for i in 0 ..< 3 {
            filePlayer.scheduleBuffer(
                buffer, at: nil, completionCallbackType: .dataPlayedBack
            ) {
                type in print("Buff \(i)")
            }
        }
    }
}
The results are the same. The practicality of reading the same file multiple times aside, this is an interesting diagnostic, as it provides some more information about the behavior. If it's a bug, it seems to be independent of whether the scheduled files are the same AVAudioFile instances or not.
Since `AVAudioPlayerNode` is essentially a wrapper for `kAudioUnitSubType_ScheduledSoundPlayer` (presumably with some of the file reading and buffering code from `kAudioUnitSubType_AudioFilePlayer` thrown in, but using `ExtAudioFile` instead), I did an experiment to see if the lower-level counterpart exhibited the same behavior.
This is not exactly an apples-to-apples comparison, but it seems that `kAudioUnitSubType_ScheduledSoundPlayer` works as expected, so this may be a bug in `AVAudioPlayerNode`.
The code I used for testing is below. `kAudioUnitSubType_ScheduledSoundPlayer` is used to schedule three slices (buffers). They are from the same file, but that's irrelevant because `kAudioUnitSubType_ScheduledSoundPlayer` only knows about buffers, not files.
The callbacks are invoked as expected for all three slices. So it seems the problem is likely in how `AVAudioPlayerNode` handles these callbacks internally and routes them to a non-real-time dispatch queue (since the callbacks for `kAudioUnitSubType_ScheduledSoundPlayer` are handled on the HAL's real-time IO thread, and clients can't be trusted not to block the IO thread).
// ViewController.m
#import "ViewController.h"
@import AudioToolbox;
@import AVFoundation;
@import os.log;
@interface ViewController ()
{
    // Core Audio graph: Player -> MultiChannelMixer -> Output.
    AUGraph _graph;
    AUNode _player;   // kAudioUnitSubType_ScheduledSoundPlayer
    AUNode _mixer;    // kAudioUnitSubType_MultiChannelMixer
    AUNode _output;
    // Three slices scheduled back to back; they must stay alive for the
    // duration of playback, hence ivars rather than locals.
    ScheduledAudioSlice _slice [3];
    // Retains the PCM data that all three slices point into.
    AVAudioPCMBuffer *_buf;
}
// Invoked (indirectly, from the render thread) when a slice completes.
- (void)scheduledAudioSliceCompleted:(ScheduledAudioSlice *)slice;
@end
// C completion proc registered on each ScheduledAudioSlice; trampolines to
// the ViewController instance passed as userData.
void myScheduledAudioSliceCompletionProc(void * __nullable userData, ScheduledAudioSlice *slice)
{
    // ⚠️ WARNING ⚠️
    // THIS FUNCTION IS CALLED FROM THE REAL TIME RENDERING THREAD.
    // OBJ-C USE HERE IS FOR TESTING CALLBACK FUNCTIONALITY ONLY
    // OBJ-C IS NOT REAL TIME SAFE
    // DO NOT DO THIS IN PRODUCTION CODE!!!
    [(__bridge ViewController *)userData scheduledAudioSliceCompleted:slice];
}
@implementation ViewController

// Best-effort cleanup used by -openGraph failure paths: disposes the
// partially-built graph, logs any dispose error, and returns `result` — the
// ORIGINAL failure code — unchanged.
// Fix: the previous code did `result = DisposeAUGraph(_graph); return result;`
// on every failure path, so whenever cleanup succeeded the method returned
// noErr and masked the real error from the step that failed.
static OSStatus disposeGraphAfterError(AUGraph *graph, OSStatus result)
{
    OSStatus disposeResult = DisposeAUGraph(*graph);
    if(noErr != disposeResult)
        os_log_error(OS_LOG_DEFAULT, "DisposeAUGraph failed: %d", disposeResult);
    *graph = NULL;
    return result;
}

- (void)dealloc {
    [self closeGraph];
}

- (void)viewDidLoad {
    [super viewDidLoad];
    // Build the graph, schedule all three slices, set the player's start
    // timestamp, then start rendering.
    [self openGraph];
    [self schedule];
    [self startPlayer];
    [self startGraph];
}

// Builds, connects, opens and initializes the AUGraph.
// Returns noErr on success; on failure, disposes the graph and returns the
// error from the step that failed.
-(OSStatus)openGraph {
    OSStatus result = NewAUGraph(&_graph);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "NewAUGraph failed: %d", result);
        return result;
    }
    // The graph will look like:
    // Player -> MultiChannelMixer -> Output
    AudioComponentDescription desc;
    // Player
    desc.componentType = kAudioUnitType_Generator;
    desc.componentSubType = kAudioUnitSubType_ScheduledSoundPlayer;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    desc.componentFlags = kAudioComponentFlag_SandboxSafe;
    desc.componentFlagsMask = 0;
    result = AUGraphAddNode(_graph, &desc, &_player);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphAddNode failed: %d", result);
        return disposeGraphAfterError(&_graph, result);
    }
    // Mixer
    desc.componentType = kAudioUnitType_Mixer;
    desc.componentSubType = kAudioUnitSubType_MultiChannelMixer;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    desc.componentFlags = kAudioComponentFlag_SandboxSafe;
    desc.componentFlagsMask = 0;
    result = AUGraphAddNode(_graph, &desc, &_mixer);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphAddNode failed: %d", result);
        return disposeGraphAfterError(&_graph, result);
    }
    // Output
    // NOTE(review): kAudioUnitSubType_HALOutput is macOS-only; an iOS build
    // would need kAudioUnitSubType_RemoteIO — confirm the intended platform.
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_HALOutput;
    desc.componentFlags = kAudioComponentFlag_SandboxSafe;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    desc.componentFlagsMask = 0;
    result = AUGraphAddNode(_graph, &desc, &_output);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphAddNode failed: %d", result);
        return disposeGraphAfterError(&_graph, result);
    }
    // Connections
    result = AUGraphConnectNodeInput(_graph, _player, 0, _mixer, 0);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphConnectNodeInput failed: %d", result);
        return disposeGraphAfterError(&_graph, result);
    }
    result = AUGraphConnectNodeInput(_graph, _mixer, 0, _output, 0);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphConnectNodeInput failed: %d", result);
        return disposeGraphAfterError(&_graph, result);
    }
    // Open the graph
    result = AUGraphOpen(_graph);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphOpen failed: %d", result);
        return disposeGraphAfterError(&_graph, result);
    }
    // Set the mixer's volume on the input and output
    AudioUnit au = NULL;
    result = AUGraphNodeInfo(_graph, _mixer, NULL, &au);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphNodeInfo failed: %d", result);
        return disposeGraphAfterError(&_graph, result);
    }
    // Volume-setting failures are logged but deliberately not fatal.
    result = AudioUnitSetParameter(au, kMultiChannelMixerParam_Volume, kAudioUnitScope_Input, 0, 1.f, 0);
    if(noErr != result)
        os_log_error(OS_LOG_DEFAULT, "AudioUnitSetParameter (kMultiChannelMixerParam_Volume, kAudioUnitScope_Input) failed: %d", result);
    result = AudioUnitSetParameter(au, kMultiChannelMixerParam_Volume, kAudioUnitScope_Output, 0, 1.f, 0);
    if(noErr != result)
        os_log_error(OS_LOG_DEFAULT, "AudioUnitSetParameter (kMultiChannelMixerParam_Volume, kAudioUnitScope_Output) failed: %d", result);
    // Initialize the graph
    result = AUGraphInitialize(_graph);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphInitialize failed: %d", result);
        return disposeGraphAfterError(&_graph, result);
    }
    return noErr;
}

// Stops, uninitializes, closes and disposes the graph. Safe to call from
// -dealloc; running/initialized states are checked before each teardown step.
- (OSStatus)closeGraph {
    Boolean graphIsRunning = NO;
    OSStatus result = AUGraphIsRunning(_graph, &graphIsRunning);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphIsRunning failed: %d", result);
        return result;
    }
    if(graphIsRunning) {
        result = AUGraphStop(_graph);
        if(noErr != result) {
            os_log_error(OS_LOG_DEFAULT, "AUGraphStop failed: %d", result);
            return result;
        }
    }
    Boolean graphIsInitialized = false;
    result = AUGraphIsInitialized(_graph, &graphIsInitialized);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphIsInitialized failed: %d", result);
        return result;
    }
    if(graphIsInitialized) {
        result = AUGraphUninitialize(_graph);
        if(noErr != result) {
            os_log_error(OS_LOG_DEFAULT, "AUGraphUninitialize failed: %d", result);
            return result;
        }
    }
    result = AUGraphClose(_graph);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphClose failed: %d", result);
        return result;
    }
    result = DisposeAUGraph(_graph);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "DisposeAUGraph failed: %d", result);
        return result;
    }
    _graph = NULL;
    _player = -1;
    _mixer = -1;
    _output = -1;
    return noErr;
}

// Starts rendering.
- (OSStatus)startGraph {
    OSStatus result = AUGraphStart(_graph);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphStart failed: %d", result);
        return result;
    }
    return noErr;
}

// Stops rendering.
- (OSStatus)stopGraph {
    OSStatus result = AUGraphStop(_graph);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphStop failed: %d", result);
        return result;
    }
    return noErr;
}

// Sets the scheduled sound player's start timestamp to sample time 0, so it
// begins rendering its scheduled slices as soon as the graph runs.
- (OSStatus)startPlayer {
    AudioUnit au;
    OSStatus result = AUGraphNodeInfo(_graph, _player, NULL, &au);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphNodeInfo failed: %d", result);
        return result;
    }
    AudioTimeStamp ts = {0};
    ts.mFlags = kAudioTimeStampSampleTimeValid;
    ts.mSampleTime = 0;
    result = AudioUnitSetProperty(au, kAudioUnitProperty_ScheduleStartTimeStamp, kAudioUnitScope_Global, 0, &ts, sizeof(ts));
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AudioUnitSetProperty failed: %d", result);
        return result;
    }
    return noErr;
}

// Reads up to two seconds of /tmp/test.wav into _buf and schedules the same
// audio three times as consecutive slices. All three slices share _buf's
// buffer list; they differ only in start time and identity (&_slice[i]).
- (OSStatus)schedule {
    AudioUnit au;
    OSStatus result = AUGraphNodeInfo(_graph, _player, NULL, &au);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphNodeInfo failed: %d", result);
        return result;
    }
    // Hard-coded test asset; adjust the path as needed.
    AVAudioFile *file = [[AVAudioFile alloc] initForReading:[NSURL fileURLWithPath:@"/tmp/test.wav" isDirectory:NO] commonFormat:AVAudioPCMFormatFloat32 interleaved:NO error:nil];
    if(!file)
        return paramErr;
    // Capacity for two seconds of audio at the file's processing sample rate.
    _buf = [[AVAudioPCMBuffer alloc] initWithPCMFormat:file.processingFormat frameCapacity:(file.processingFormat.sampleRate * 2)];
    if(![file readIntoBuffer:_buf error:nil])
        return paramErr;
    // Slice 0 starts at sample time 0.
    AudioTimeStamp ts = {0};
    ts.mFlags = kAudioTimeStampSampleTimeValid;
    ts.mSampleTime = 0;
    _slice[0].mTimeStamp = ts;
    _slice[0].mCompletionProc = myScheduledAudioSliceCompletionProc;
    _slice[0].mCompletionProcUserData = (__bridge void *)self;
    _slice[0].mNumberFrames = _buf.frameLength;
    _slice[0].mBufferList = _buf.mutableAudioBufferList;
    result = AudioUnitSetProperty(au, kAudioUnitProperty_ScheduleAudioSlice, kAudioUnitScope_Global, 0, &_slice[0], sizeof(_slice[0]));
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AudioUnitSetProperty failed: %d", result);
        return result;
    }
    // Slice 1 starts where slice 0 ends.
    ts.mSampleTime += _slice[0].mNumberFrames;
    _slice[1] = _slice[0];
    _slice[1].mTimeStamp = ts;
    result = AudioUnitSetProperty(au, kAudioUnitProperty_ScheduleAudioSlice, kAudioUnitScope_Global, 0, &_slice[1], sizeof(_slice[1]));
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AudioUnitSetProperty failed: %d", result);
        return result;
    }
    // Slice 2 starts where slice 1 ends.
    ts.mSampleTime += _slice[1].mNumberFrames;
    _slice[2] = _slice[1];
    _slice[2].mTimeStamp = ts;
    result = AudioUnitSetProperty(au, kAudioUnitProperty_ScheduleAudioSlice, kAudioUnitScope_Global, 0, &_slice[2], sizeof(_slice[2]));
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AudioUnitSetProperty failed: %d", result);
        return result;
    }
    return noErr;
}

// Called via myScheduledAudioSliceCompletionProc on the real-time render
// thread; NSLog here is diagnostic only and is not real-time safe.
- (void)scheduledAudioSliceCompleted:(ScheduledAudioSlice *)slice {
    if(slice == &_slice[0])
        NSLog(@"_slice[0] scheduledAudioSliceCompleted:%p, mFlags = 0x%.2x", slice, slice->mFlags);
    else if(slice == &_slice[1])
        NSLog(@"_slice[1] scheduledAudioSliceCompleted:%p, mFlags = 0x%.2x", slice, slice->mFlags);
    else if(slice == &_slice[2])
        NSLog(@"_slice[2] scheduledAudioSliceCompleted:%p, mFlags = 0x%.2x", slice, slice->mFlags);
    else
        NSLog(@"scheduledAudioSliceCompleted:%p, mFlags = 0x%.2x for unknown slice", slice, slice->mFlags);
}
@end
Output:
XXX _slice[0] scheduledAudioSliceCompleted:0x7f82ee41add0, mFlags = 0x03
XXX _slice[1] scheduledAudioSliceCompleted:0x7f82ee41ae40, mFlags = 0x03
XXX _slice[2] scheduledAudioSliceCompleted:0x7f82ee41aeb0, mFlags = 0x03
An `mFlags` value of `0x03` equates to `kScheduledAudioSliceFlag_Complete | kScheduledAudioSliceFlag_BeganToRender`.