Unterbrechen und Fortsetzen der Videoaufnahme mit AVCaptureMovieFileOutput und AVCaptureVideoDataOutput unter iOS

Ich muss Funktionen implementieren, um die Videoaufnahme in einer einzigen Sitzung wiederholt anzuhalten und fortzusetzen, wobei jedes neue Segment (die nach jeder Pause erfassten Segmente) mit AVFoundation derselben Videodatei hinzugefügt wird. Jedes Mal, wenn ich auf „Stopp“ drücke und dann erneut auf „Aufnehmen“, wird eine neue Videodatei im Dokumentenverzeichnis meines iPhones gespeichert und die Aufnahme in eine neue Datei gestartet. Ich muss in der Lage sein, die „Aufnahme/Stopp“-Taste zu drücken und Video und Audio nur dann aufzunehmen, wenn die Aufnahme aktiv ist. Wenn die „Fertig“-Taste gedrückt wird, muss eine einzige AV-Datei mit allen Segmenten zusammen vorliegen. Und all dies muss in derselben Aufnahme-/Vorschau-Sitzung geschehen.

Ich benutze AVAssetWriterInput nicht.

Ich kann mir nur vorstellen, dies zu lösen, indem ich beim Klick auf die Schaltfläche „Fertig“ alle einzelnen Ausgabedateien zu einer einzigen Datei zusammenführe.

Dieser Code funktioniert für iOS 5, aber nicht für iOS 6. Unter iOS 6 wird, wenn ich die Aufnahme zum ersten Mal pausiere (Aufnahme beenden), die AVCaptureFileOutputRecordingDelegate-Methode (captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:) korrekt aufgerufen. Wenn ich die Aufnahme danach wieder starte, wird die Delegate-Methode erneut aufgerufen, aber beim anschließenden Beenden der Aufnahme wird sie dann nicht mehr aufgerufen.

Ich brauche eine Lösung für dieses Problem. Bitte hilf mir.

//View LifeCycle
// Builds the capture pipeline: session, camera input, preview layer, movie
// file output, and video data output, then starts the session running.
- (void)viewDidLoad
{
    [super viewDidLoad];

    self.finalRecordedVideoName = [self stringWithNewUUID];

    // Per-segment bookkeeping: file names (for later cleanup) and output
    // URLs (for merging into one movie when the user is done).
    arrVideoName = [[NSMutableArray alloc] initWithCapacity:0];
    arrOutputUrl = [[NSMutableArray alloc] initWithCapacity:0];

    CaptureSession = [[AVCaptureSession alloc] init];

    //----- ADD VIDEO INPUT -----
    captureDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    if ([captureDevices count] > 0)
    {
        NSError *error = nil;
        VideoInputDevice = [[AVCaptureDeviceInput alloc] initWithDevice:[self backFacingCamera] error:&error];
        // FIX: check the returned object, not the error pointer — the error
        // out-parameter is only meaningful when the init returns nil.
        if (VideoInputDevice)
        {
            if ([CaptureSession canAddInput:VideoInputDevice])
                [CaptureSession addInput:VideoInputDevice];
            else
                NSLog(@"Couldn't add video input");
        }
        else
        {
            NSLog(@"Couldn't create video input: %@", error);
        }
    }
    else
    {
        NSLog(@"Couldn't create video capture device");
    }

    //----- ADD VIDEO PREVIEW LAYER -----
    NSLog(@"Adding video preview layer");
    AVCaptureVideoPreviewLayer *layer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:CaptureSession];
    [self setPreviewLayer:layer];

    // Map the current device orientation onto the preview layer.
    // NOTE(review): UIDeviceOrientation and AVCaptureVideoOrientation are
    // distinct enums; the same-name mapping below mirrors the original code —
    // confirm the landscape cases render correctly on device.
    UIDeviceOrientation currentOrientation = [UIDevice currentDevice].orientation;
    NSLog(@"%d", (int)currentOrientation);

    if (currentOrientation == UIDeviceOrientationPortrait)
    {
        PreviewLayer.orientation = AVCaptureVideoOrientationPortrait;
    }
    else if (currentOrientation == UIDeviceOrientationPortraitUpsideDown)
    {
        PreviewLayer.orientation = AVCaptureVideoOrientationPortraitUpsideDown;
    }
    else if (currentOrientation == UIDeviceOrientationLandscapeRight)
    {
        PreviewLayer.orientation = AVCaptureVideoOrientationLandscapeRight;
    }
    else if (currentOrientation == UIDeviceOrientationLandscapeLeft)
    {
        PreviewLayer.orientation = AVCaptureVideoOrientationLandscapeLeft;
    }

    [[self PreviewLayer] setVideoGravity:AVLayerVideoGravityResizeAspectFill];

    //----- ADD MOVIE FILE OUTPUT AND VIDEO DATA OUTPUT -----
    NSLog(@"Adding movie file output");
    MovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
    VideoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    [VideoDataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];

    // FIX: the pixel format value belongs under kCVPixelBufferPixelFormatTypeKey.
    // The original stored it under kCVPixelBufferBytesPerRowAlignmentKey, which
    // silently misconfigures the data output.
    NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
    NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [VideoDataOutput setVideoSettings:videoSettings];

    Float64 TotalSeconds = 60;          // max recording length in seconds
    int32_t preferredTimeScale = 30;    // frames per second
    CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale); //<<SET MAX DURATION
    MovieFileOutput.maxRecordedDuration = maxDuration;
    MovieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024; //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME

    //SET THE CONNECTION PROPERTIES (output properties)
    [self CameraSetOutputProperties];   // (also has to be re-done after changing camera)

    // FIX: removed the loop that assigned [[UIDevice currentDevice] orientation]
    // (a UIDeviceOrientation) to the connection's videoOrientation — the enums
    // are not interchangeable, and CameraSetOutputProperties already sets the
    // connection orientation explicitly.

    NSLog(@"Setting image quality");
    [CaptureSession setSessionPreset:AVCaptureSessionPresetLow];

    //----- DISPLAY THE PREVIEW LAYER -----
    CGRect layerRect = CGRectMake(5, 5, 299, ([[UIScreen mainScreen] bounds].size.height == 568) ? 438 : 348);
    [self.PreviewLayer setBounds:layerRect];
    [self.PreviewLayer setPosition:CGPointMake(CGRectGetMidX(layerRect), CGRectGetMidY(layerRect))];

    // FIX: the original `if` had no braces, so VideoDataOutput was added
    // unconditionally (and unguarded). Both outputs are now guarded.
    if ([CaptureSession canAddOutput:MovieFileOutput])
        [CaptureSession addOutput:MovieFileOutput];
    if ([CaptureSession canAddOutput:VideoDataOutput])
        [CaptureSession addOutput:VideoDataOutput];

    //We use this instead so it goes on a layer behind our UI controls (avoids us having to manually bring each control to the front):
    CameraView = [[UIView alloc] init];
    [videoPreviewLayer addSubview:CameraView];
    [videoPreviewLayer sendSubviewToBack:CameraView];
    [[CameraView layer] addSublayer:PreviewLayer];

    //----- START THE CAPTURE SESSION RUNNING -----
    [CaptureSession startRunning];
}

#pragma mark - IBACtion Methods
// Toggles recording on/off. Starting opens a new per-segment movie file;
// stopping finishes the current segment and (via the recording delegate)
// triggers the merge of all segments into one movie.
-(IBAction)btnStartAndStopPressed:(id)sender
{
    UIButton *StartAndStopButton = (UIButton *)sender;
    if ([StartAndStopButton isSelected] == NO)
    {
        // --- Start a new recording segment ---
        [StartAndStopButton setSelected:YES];
        [btnPauseAndResume setEnabled:YES];
        [btnBack setEnabled:NO];
        [btnSwitchCameraInput setHidden:YES];

        // FIX: mirror the resume branch of btnPauseAndResumePressed: — without
        // this flag the sample-buffer delegate never appends frames for the
        // very first segment.
        WeAreRecording = YES;

        NSDate *date = [NSDate date];
        NSLog(@" date %@",date);

        // Build a unique per-segment file path in Documents.
        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        NSString *recordedFileName = [NSString stringWithFormat:@"output%@.mov", date];
        NSString *documentsDirectory = [paths objectAtIndex:0];
        self.outputPath = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@", recordedFileName]];
        NSLog(@"%@",self.outputPath);

        [arrVideoName addObject:recordedFileName];

        // Remove any stale file at the target path before recording into it.
        NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:self.outputPath];
        if ([[NSFileManager defaultManager] fileExistsAtPath:self.outputPath])
        {
            NSError *error = nil;
            if ([[NSFileManager defaultManager] removeItemAtPath:self.outputPath error:&error] == NO)
            {
                NSLog(@"Couldn't remove stale segment file: %@", error);
            }
        }
        //Start recording and kick off the 1 s progress timer.
        [MovieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
        recordingTimer = [NSTimer scheduledTimerWithTimeInterval:1.0 target:self selector:@selector(VideoRecording) userInfo:nil repeats:YES];
    }
    else
    {
        // --- Stop: finish the current segment and merge everything ---
        [StartAndStopButton setSelected:NO];
        [btnPauseAndResume setEnabled:NO];
        [btnBack setEnabled:YES];
        [btnSwitchCameraInput setHidden:NO];

        NSLog(@"STOP RECORDING");
        WeAreRecording = NO;
        // FIX: set the "done" flag BEFORE asking the output to stop — the
        // didFinishRecording... delegate callback reads stopRecording and can
        // fire before the end of this method.
        stopRecording = YES;

        [MovieFileOutput stopRecording];
        [((ActOutAppDelegate *)ActOut_AppDelegate) showLoadingViewOnView:self.view withLabel:@"Please wait...."];

        if ([recordingTimer isValid])
        {
            [recordingTimer invalidate];
            recordingTimer = nil;
            recordingTime = 30;
        }
    }
}

// Toggles pause/resume. Pausing finishes the current movie segment; resuming
// starts a brand-new segment file that is merged with the others on "Done".
- (IBAction)btnPauseAndResumePressed:(id)sender
{
    UIButton *toggleButton = (UIButton *)sender;

    if (!toggleButton.selected)
    {
        // Pause: close out the current segment and freeze the progress timer.
        toggleButton.selected = YES;
        NSLog(@"recording paused");
        WeAreRecording = NO;

        [MovieFileOutput stopRecording];
        [self pauseTimer:recordingTimer];

        [btnStartAndStop setEnabled:NO];
        [btnBack setEnabled:YES];
        [btnSwitchCameraInput setHidden:NO];
        return;
    }

    // Resume: open a fresh segment file and restart the timer.
    toggleButton.selected = NO;
    NSLog(@"recording resumed");

    [btnStartAndStop setEnabled:YES];
    [btnBack setEnabled:NO];
    [btnSwitchCameraInput setHidden:YES];

    WeAreRecording = YES;

    NSDate *date = [NSDate date];
    NSLog(@" date %@",date);

    // Build a unique per-segment path in the Documents directory.
    NSString *recordedFileName = [NSString stringWithFormat:@"output%@.mov", date];
    NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    self.outputPath = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@", recordedFileName]];
    NSLog(@"%@",self.outputPath);

    [arrVideoName addObject:recordedFileName];

    NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:self.outputPath];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:self.outputPath])
    {
        NSError *error;
        if ([fileManager removeItemAtPath:self.outputPath error:&error] == NO)
        {
            //Error - handle if requried
        }
    }
    [self resumeTimer:recordingTimer];
    //Start recording the new segment
    [MovieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
}

// Configures the movie-file-output video connection: portrait orientation and
// a fixed frame duration of 1/CAPTURE_FRAMES_PER_SECOND where the connection
// supports it. Must be re-invoked after switching camera inputs.
- (void) CameraSetOutputProperties
{
    AVCaptureConnection *videoConnection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo];

    [videoConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];

    // Log the frame-duration limits before adjusting them.
    CMTimeShow(videoConnection.videoMinFrameDuration);
    CMTimeShow(videoConnection.videoMaxFrameDuration);

    CMTime frameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
    if (videoConnection.supportsVideoMinFrameDuration)
        videoConnection.videoMinFrameDuration = frameDuration;
    if (videoConnection.supportsVideoMaxFrameDuration)
        videoConnection.videoMaxFrameDuration = frameDuration;

    // ...and after.
    CMTimeShow(videoConnection.videoMinFrameDuration);
    CMTimeShow(videoConnection.videoMaxFrameDuration);
}

// Returns the first video capture device at the requested position
// (front/back), or nil when no such camera exists.
- (AVCaptureDevice *) CameraWithPosition:(AVCaptureDevicePosition) Position
{
    for (AVCaptureDevice *candidate in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo])
    {
        if ([candidate position] == Position)
        {
            NSLog(@"%d",Position);
            return candidate;
        }
    }
    return nil;
}

#pragma mark - AVCaptureFileOutputRecordingDelegate Method

// AVCaptureVideoDataOutputSampleBufferDelegate (note: NOT a file-output
// delegate method despite the surrounding pragma). Appends live frames to the
// writer input while a segment is actively recording.
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // Only append while a segment is active and the writer can accept data.
    if (videoWriterInput.readyForMoreMediaData && WeAreRecording)
        [videoWriterInput appendSampleBuffer:sampleBuffer];

    // Force each video connection to landscape-left.
    // NOTE(review): doing this on every frame is wasteful — consider setting
    // it once during session configuration instead.
    for (AVCaptureConnection *captureConnection in [captureOutput connections])
    {
        if ([captureConnection isVideoOrientationSupported])
        {
            [captureConnection setVideoOrientation:AVCaptureVideoOrientationLandscapeLeft];
        }
    }

    // FIX: the original computed a CGAffineTransform rotation from the device
    // orientation here and never used it — that dead code has been removed.
}

// AVCaptureFileOutputRecordingDelegate: called when a movie segment finishes.
// Records the segment URL for later merging; when the user pressed "Done"
// (stopRecording), stitches all segments into a single movie.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
    NSLog(@"didFinishRecordingToOutputFileAtURL - enter");
    NSLog(@"output file url : %@", [outputFileURL absoluteString]);

    // A non-nil error does not always mean failure (e.g. hitting
    // maxRecordedDuration); AVFoundation reports the real outcome under
    // AVErrorRecordingSuccessfullyFinishedKey.
    BOOL recordedSuccessfully = YES;
    if ([error code] != noErr)
    {
        id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
        if (value)
        {
            recordedSuccessfully = [value boolValue];
        }
    }

    // FIX: the original computed this flag and then ignored it; it also
    // re-read the finished movie into NSData and wrote it back onto its own
    // path (self.outputPath == outputFileURL's path) — a memory-hungry no-op.
    // Both removed; only segments that actually finished are kept. The
    // always-false videoConnection orientation loop was dead code and is gone.
    if (recordedSuccessfully)
    {
        [arrOutputUrl addObject:outputFileURL];
    }
    else
    {
        NSLog(@"Segment did not finish successfully: %@", error);
    }

    // "Done" was pressed: merge every recorded segment into one movie.
    if (stopRecording)
    {
        [self mergeMultipleVideo];
    }
}

//Method to merge all recorded video segments into a single movie file
-(void)mergeMultipleVideo
{
    mixComposition = [AVMutableComposition composition];

    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    // FIX: the original merged only video, silently dropping all recorded
    // audio; an audio composition track is now filled in parallel.
    AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    CMTime nextClipStartTime = kCMTimeZero;
    NSLog(@"Array of output file url : %@", arrOutputUrl);
    for (NSURL *segmentURL in arrOutputUrl)
    {
        AVURLAsset *segmentAsset = [[AVURLAsset alloc] initWithURL:segmentURL options:nil];
        CMTimeRange timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [segmentAsset duration]);

        // FIX: guard against assets with no track of the wanted type instead
        // of crashing on objectAtIndex:0, and surface insert errors instead of
        // passing error:nil.
        NSError *insertError = nil;
        NSArray *videoTracks = [segmentAsset tracksWithMediaType:AVMediaTypeVideo];
        if ([videoTracks count] > 0 &&
            ![compositionVideoTrack insertTimeRange:timeRangeInAsset ofTrack:[videoTracks objectAtIndex:0] atTime:nextClipStartTime error:&insertError])
        {
            NSLog(@"Couldn't insert video segment: %@", insertError);
        }

        NSArray *audioTracks = [segmentAsset tracksWithMediaType:AVMediaTypeAudio];
        if ([audioTracks count] > 0 &&
            ![compositionAudioTrack insertTimeRange:timeRangeInAsset ofTrack:[audioTracks objectAtIndex:0] atTime:nextClipStartTime error:&insertError])
        {
            NSLog(@"Couldn't insert audio segment: %@", insertError);
        }

        nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);
    }

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mov",self.finalRecordedVideoName]];
    NSURL *url = [NSURL fileURLWithPath:myPathDocs];

    // FIX: AVAssetExportSession fails if a file already exists at outputURL,
    // so clear any previous result first.
    if ([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs])
    {
        [[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
    }

    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    exportSession.outputURL = url;
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    exportSession.shouldOptimizeForNetworkUse = YES;
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        // Hop back to the main queue before touching any UI state.
        dispatch_async(dispatch_get_main_queue(), ^{
            [self exportDidFinish:exportSession path:myPathDocs];
        });
    }];
}

// Completion handler for the merge export. On success, deletes the now-merged
// per-segment files and shows the post-recording popup; on failure, dismisses
// the loading view so the UI is not stuck.
-(void)exportDidFinish:(AVAssetExportSession*)session path:(NSString*)outputVideoPath
{
    NSLog(@"session.status : %d", (int)session.status);
    if (session.status == AVAssetExportSessionStatusCompleted)
    {
        // FIX: the export already wrote the movie to outputVideoPath
        // (session.outputURL); the original re-read the whole file into
        // NSData and wrote it back onto the same path — removed.

        // Delete each per-segment file now that the merge succeeded.
        NSFileManager *fileManager = [NSFileManager defaultManager];
        NSArray *documentPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        NSString *documentsDirectory = [documentPaths objectAtIndex:0];

        for (NSString *segmentName in arrVideoName)
        {
            NSString *fullFilePath = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@", segmentName]];
            NSLog(@"Full path of file to be deleted: %@",fullFilePath);

            if ([fileManager fileExistsAtPath:fullFilePath])
            {
                NSError *error = nil;
                [fileManager removeItemAtPath:fullFilePath error:&error];
            }
        }
        [arrVideoName removeAllObjects];
        [arrOutputUrl removeAllObjects];

        [((ActOutAppDelegate *)ActOut_AppDelegate) removeLoadingViewfromView:self.view];
        [self.view addSubview:afterRecordingPopupView];
    }
    else
    {
        // FIX: a failed/cancelled export previously left the loading view on
        // screen forever; dismiss it and log the failure.
        NSLog(@"Export failed: %@", session.error);
        [((ActOutAppDelegate *)ActOut_AppDelegate) removeLoadingViewfromView:self.view];
    }
}

Antworten auf die Frage (1)

Ihre Antwort auf die Frage