Grave e reproduza áudio com o AVAssetWriter

Reduzi bastante esta questão e espero alguma ajuda.

Basicamente, esta classe tem dois métodos, um para iniciar a gravação de áudio (-recordMode) e outro para reproduzir áudio (playMode). Eu atualmente tenho essa classe em um projeto com um controlador de visualização única com dois botões que chamam os métodos correspondentes (rec, play). Não há outras variáveis, a classe é autônoma.

No entanto, não vai jogar / rec nada e não consigo descobrir o porquê. Quando eu tento reproduzir o arquivo eu recebo um tamanho de arquivo de 0 e um erro porque você não pode iniciar oAVAudioPlayer com umnil referência de curso. Mas eu não entendo porque o arquivo está vazio ou porqueself.outputPath énil.

arquivo .h

#import <AVFoundation/AVFoundation.h>

@interface MicCommunicator : NSObject<AVCaptureAudioDataOutputSampleBufferDelegate>

@property(nonatomic,retain) NSURL *outputPath;
@property(nonatomic,retain) AVCaptureSession * captureSession;
@property(nonatomic,retain) AVCaptureAudioDataOutput * output;

-(void)beginStreaming;
-(void)playMode;
-(void)recordMode;

@end

arquivo .m:

@implementation MicCommunicator {
    AVAssetWriter *assetWriter;
    AVAssetWriterInput *assetWriterInput;
}

@synthesize captureSession = _captureSession;
@synthesize output = _output;
@synthesize outputPath = _outputPath;

-(id)init {
    if ((self = [super init])) {
        NSArray *searchPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        self.outputPath = [NSURL fileURLWithPath:[[searchPaths objectAtIndex:0] stringByAppendingPathComponent:@"micOutput.output"]];

        AudioChannelLayout acl;
        bzero(&acl, sizeof(acl));
        acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono; //kAudioChannelLayoutTag_Stereo;
        NSDictionary *audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                             [NSNumber numberWithInt: kAudioFormatULaw],AVFormatIDKey,        
                                             [NSNumber numberWithFloat:8000.0],AVSampleRateKey,//was 44100.0
                                             [NSData dataWithBytes: &acl length: sizeof( AudioChannelLayout ) ], AVChannelLayoutKey,
                                             [NSNumber numberWithInt:1],AVNumberOfChannelsKey,
                                             [NSNumber numberWithInt:8000.0],AVEncoderBitRateKey,
                                             nil];

        assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
        [assetWriterInput setExpectsMediaDataInRealTime:YES];

        assetWriter = [[AVAssetWriter assetWriterWithURL:_outputPath fileType:AVFileTypeWAVE error:nil] retain];
        [assetWriter addInput:assetWriterInput];
    }
    return self;
}

-(void)dealloc {
    [assetWriter release];
    [super dealloc];
}

//conveniance methods

-(void)playMode
{
    [self stopRecording];

    NSError *error;
    AVAudioPlayer * audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:self.outputPath error:&error];
    audioPlayer.numberOfLoops = -1;

    if (audioPlayer == nil){
        NSLog(@"error: %@",[error description]);        
    }else{ 
        NSLog(@"playing");  
        [audioPlayer play];
    }
}

-(void)recordMode
{
        [self beginStreaming];    
}

-(void)stopRecording
{
    [self.captureSession stopRunning];
    [assetWriterInput markAsFinished];
    [assetWriter  finishWriting];

    NSDictionary *outputFileAttributes = [[NSFileManager defaultManager] attributesOfItemAtPath:[NSString stringWithFormat:@"%@",self.outputPath] error:nil];
    NSLog (@"done. file size is %llu", [outputFileAttributes fileSize]);
}

//starts audio recording
-(void)beginStreaming {
    self.captureSession = [[AVCaptureSession alloc] init];
    AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    NSError *error = nil;
    AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
    if (audioInput)
        [self.captureSession addInput:audioInput];
    else {
        NSLog(@"No audio input found.");
        return;
    }

    AVCaptureAudioDataOutput *output = [[AVCaptureAudioDataOutput alloc] init];

    dispatch_queue_t outputQueue = dispatch_queue_create("micOutputDispatchQueue", NULL);
    [output setSampleBufferDelegate:self queue:outputQueue];
    dispatch_release(outputQueue);

    [self.captureSession addOutput:output];
    [assetWriter startWriting];
    [self.captureSession startRunning];
}

//callback
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    AudioBufferList audioBufferList;
    NSMutableData *data= [[NSMutableData alloc] init];
    CMBlockBufferRef blockBuffer;
    CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer, NULL, &audioBufferList, sizeof(audioBufferList), NULL, NULL, 0, &blockBuffer);

    //for (int y = 0; y < audioBufferList.mNumberBuffers; y++) {
    //  AudioBuffer audioBuffer = audioBufferList.mBuffers[y];
    //  Float32 *frame = (Float32*)audioBuffer.mData;
    //          
    //  [data appendBytes:frame length:audioBuffer.mDataByteSize];
    //}

    // append [data bytes] to your NSOutputStream 


    // These two lines write to disk, you may not need this, just providing an example
    [assetWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
    [assetWriterInput appendSampleBuffer:sampleBuffer];

    CFRelease(blockBuffer);
    blockBuffer=NULL;
    [data release];
}

@end