I've been poking around with AVAudioEngine and I'm having trouble integrating the AVAudioUnitEffect subclasses. For example, with AVAudioUnitDelay:
@implementation ViewController {
    AVAudioEngine *engine;
    AVAudioPlayerNode *player;
}
...
- (IBAction)playButtonHit:(id)sender {
    if (!player) {
        NSURL *bandsURL = [[NSBundle mainBundle] URLForResource:@"Bands With Managers" withExtension:@"mp3"];
        AVAudioFile *file = [[AVAudioFile alloc] initForReading:bandsURL error:nil];

        engine = [[AVAudioEngine alloc] init];
        player = [[AVAudioPlayerNode alloc] init];
        [engine attachNode:player];

        // Route the player through a delay effect, then on to the output
        AVAudioUnitDelay *delay = [[AVAudioUnitDelay alloc] init];
        delay.wetDryMix = 50;
        [engine connect:player to:delay format:file.processingFormat];
        [engine connect:delay to:[engine outputNode] format:file.processingFormat];

        [player scheduleFile:file atTime:nil completionHandler:nil];
        [engine prepare];
        [engine startAndReturnError:nil];
    }
    [player play];
}
When the method is called, the app crashes with this error: "*** Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: 'required condition is false: [_nodes containsObject: node1] && [_nodes containsObject: node2]'"
I'm modeling this after some of the examples from the "AVAudioEngine in Practice" session from WWDC. I know there's probably something obvious I'm missing, but I can't figure it out.
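Rereading the exception, the "[_nodes containsObject: node1] && [_nodes containsObject: node2]" part makes me think both endpoints of a connection have to already be attached to the engine, and I only ever attach the player. Is that the missing piece? For reference, here's the setup order I think the session demonstrated; the extra attachNode: call for the delay is my guess, not something I've verified:

engine = [[AVAudioEngine alloc] init];
player = [[AVAudioPlayerNode alloc] init];
AVAudioUnitDelay *delay = [[AVAudioUnitDelay alloc] init];
delay.wetDryMix = 50;

// Attach every node to the engine first (my assumption: connect
// requires both endpoints to already be in the engine's node set)
[engine attachNode:player];
[engine attachNode:delay];

// ...then wire up the chain: player -> delay -> output
[engine connect:player to:delay format:file.processingFormat];
[engine connect:delay to:[engine outputNode] format:file.processingFormat];

If that's right, my code above would be crashing because delay is connected without ever being attached. Can anyone confirm?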