Problems with video editing on iOS

I am currently working on an iOS app that merges any number of videos the user selects. As soon as the user taps the merge button, the videos are stitched together and then played back with AVPlayer, like this:

CMTime nextClipStartTime = kCMTimeZero;
NSInteger i;
CMTime transitionDuration = CMTimeMake(1, 1); // Default transition duration is one second.

// Add two video tracks and two audio tracks.
AVMutableCompositionTrack *compositionVideoTracks[2];
AVMutableCompositionTrack *compositionAudioTracks[2];
compositionVideoTracks[0] = [self.mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
compositionVideoTracks[1] = [self.mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
compositionAudioTracks[0] = [self.mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
compositionAudioTracks[1] = [self.mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

CMTimeRange *passThroughTimeRanges = alloca(sizeof(CMTimeRange) * [self.selectedAssets count]);
CMTimeRange *transitionTimeRanges = alloca(sizeof(CMTimeRange) * [self.selectedAssets count]);

// Place clips into alternating video & audio tracks in composition, overlapped by transitionDuration.
for (i = 0; i < [self.selectedAssets count]; i++ )
{
    NSInteger alternatingIndex = i % 2; // alternating targets: 0, 1, 0, 1, ...
    AVURLAsset *asset = [self.selectedAssets objectAtIndex:i];

    NSLog(@"number of tracks %d",asset.tracks.count);

    // Use the asset's full duration as this clip's time range.
    CMTimeRange timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, asset.duration);

    AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    [compositionVideoTracks[alternatingIndex] insertTimeRange:timeRangeInAsset ofTrack:clipVideoTrack atTime:nextClipStartTime error:nil];

    AVAssetTrack *clipAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    [compositionAudioTracks[alternatingIndex] insertTimeRange:timeRangeInAsset ofTrack:clipAudioTrack atTime:nextClipStartTime error:nil];

    // Remember the time range in which this clip should pass through.
    // Every clip after the first begins with a transition.
    // Every clip before the last ends with a transition.
    // Exclude those transitions from the pass through time ranges.
    passThroughTimeRanges[i] = CMTimeRangeMake(nextClipStartTime, timeRangeInAsset.duration);
    if (i > 0) {
        passThroughTimeRanges[i].start = CMTimeAdd(passThroughTimeRanges[i].start, transitionDuration);
        passThroughTimeRanges[i].duration = CMTimeSubtract(passThroughTimeRanges[i].duration, transitionDuration);
    }
    if (i+1 < [self.selectedAssets count]) {
        passThroughTimeRanges[i].duration = CMTimeSubtract(passThroughTimeRanges[i].duration, transitionDuration);
    }

    // The end of this clip will overlap the start of the next by transitionDuration.
    // (Note: this arithmetic falls apart if timeRangeInAsset.duration < 2 * transitionDuration.)
    nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);
    nextClipStartTime = CMTimeSubtract(nextClipStartTime, transitionDuration);

    // Remember the time range for the transition to the next item.
    transitionTimeRanges[i] = CMTimeRangeMake(nextClipStartTime, transitionDuration);
}
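
// Worked example of the bookkeeping above (three 10 s clips, 1 s transition):
// clip 0 passes through 0-9 s, transition 0->1 occupies 9-10 s, clip 1
// passes through 10-18 s, transition 1->2 occupies 18-19 s, and clip 2
// passes through 19-28 s, for a 28 s composition.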

// Set up the video composition if we are to perform crossfade or push transitions between clips.
NSMutableArray *instructions = [NSMutableArray array];

// Cycle between "pass through A", "transition from A to B", "pass through B", "transition from B to A".
for (i = 0; i < [self.selectedAssets count]; i++ )
{
    NSInteger alternatingIndex = i % 2; // alternating targets

    // Pass through clip i.
    AVMutableVideoCompositionInstruction *passThroughInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    passThroughInstruction.timeRange = passThroughTimeRanges[i];
    AVMutableVideoCompositionLayerInstruction *passThroughLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[alternatingIndex]];

    passThroughInstruction.layerInstructions = [NSArray arrayWithObject:passThroughLayer];
    [instructions addObject:passThroughInstruction];

    // Initialized to nil so that messaging them on the last iteration,
    // when no transition is built, is a harmless no-op.
    AVMutableVideoCompositionLayerInstruction *fromLayer = nil;
    AVMutableVideoCompositionLayerInstruction *toLayer = nil;

    if (i+1 < [self.selectedAssets count])
    {
        // Add transition from clip i to clip i+1.

        AVMutableVideoCompositionInstruction *transitionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        transitionInstruction.timeRange = transitionTimeRanges[i];
        fromLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[alternatingIndex]];
        toLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[1-alternatingIndex]];

        // Fade out the fromLayer by setting a ramp from 1.0 to 0.0.
        [fromLayer setOpacityRampFromStartOpacity:1.0 toEndOpacity:0.0 timeRange:transitionTimeRanges[i]];

        transitionInstruction.layerInstructions = [NSArray arrayWithObjects:fromLayer, toLayer, nil];
        [instructions addObject:transitionInstruction];
    }

    AVURLAsset *sourceAsset = [AVURLAsset URLAssetWithURL:[self.selectedItemsURL objectAtIndex:i] options:[NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey]];

    AVAssetTrack *sourceVideoTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    // Size of the clip once its preferred transform (rotation) is applied.
    CGSize temp = CGSizeApplyAffineTransform(sourceVideoTrack.naturalSize, sourceVideoTrack.preferredTransform);
    CGSize size = CGSizeMake(fabs(temp.width), fabs(temp.height));
    CGAffineTransform transform = sourceVideoTrack.preferredTransform;

    // NOTE: renderSize is reassigned on every pass of the loop.
    self.videoComposition.renderSize = sourceVideoTrack.naturalSize;
    if (size.width > size.height) {
        // Landscape clip: apply the track's preferred transform as-is.
        [fromLayer setTransform:transform atTime:sourceAsset.duration];
    } else {
        // Portrait clip: scale it down and center it horizontally so it
        // fits within a landscape frame.
        CGFloat s = size.width / size.height;
        CGAffineTransform scaled = CGAffineTransformConcat(transform, CGAffineTransformMakeScale(s, s));
        CGFloat x = (size.height - size.width * s) / 2;
        CGAffineTransform centered = CGAffineTransformConcat(scaled, CGAffineTransformMakeTranslation(x, 0));
        [fromLayer setTransform:centered atTime:sourceAsset.duration];
    }
}

self.videoComposition.instructions = instructions;

self.videoComposition.frameDuration = CMTimeMake(1, 30);

NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo-%u.mov", arc4random_uniform(1000)]];

NSURL *url = [NSURL fileURLWithPath:myPathDocs];

self.exporter = [[AVAssetExportSession alloc] initWithAsset:self.mixComposition presetName:AVAssetExportPresetMediumQuality];
self.exporter.outputURL=url;
self.exporter.outputFileType = AVFileTypeQuickTimeMovie;
self.exporter.videoComposition = self.videoComposition;
self.exporter.shouldOptimizeForNetworkUse = YES;
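
// Note: the export session is only configured here; to actually write the
// merged file it would need to be started, e.g. with
// [self.exporter exportAsynchronouslyWithCompletionHandler:^{ ... }],
// checking self.exporter.status in the handler. (Playback below uses the
// composition directly, so the export may be kicked off elsewhere.)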

self.playerItem = [AVPlayerItem playerItemWithAsset:self.mixComposition];
self.playerItem.videoComposition = self.videoComposition;
AVPlayer *player = [AVPlayer playerWithPlayerItem:self.playerItem];
AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:player];
[playerLayer setFrame:CGRectMake(0, 0, self.imageView.frame.size.width, self.imageView.frame.size.height)];
[[[self imageView] layer] addSublayer:playerLayer];
playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[player play];

[[NSNotificationCenter defaultCenter]
 addObserver:self selector:@selector(checkPlayEnded) name:AVPlayerItemDidPlayToEndTimeNotification object:self.playerItem];

I am currently facing the following problems:

When one video is in portrait and the other in landscape, how can I rotate the portrait video into landscape? My view is laid out in landscape, but the portrait video keeps its original orientation. (I am loading videos saved in the camera roll, not recording them inside my app.)
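
One direction I have been considering (a rough sketch only; the helper name and the fixed landscape render size are my assumptions, not working code from my app):

static CGAffineTransform transformToFitTrack(AVAssetTrack *track, CGSize renderSize)
{
    // Orient the clip with its preferred transform, then measure the
    // resulting (rotated) size.
    CGAffineTransform preferred = track.preferredTransform;
    CGSize rotated = CGSizeApplyAffineTransform(track.naturalSize, preferred);
    rotated = CGSizeMake(fabs(rotated.width), fabs(rotated.height));

    // Scale uniformly so the clip fits the render height, then center it
    // horizontally in the landscape frame.
    CGFloat scale = renderSize.height / rotated.height;
    CGFloat tx = (renderSize.width - rotated.width * scale) / 2.0;

    CGAffineTransform t = CGAffineTransformConcat(preferred, CGAffineTransformMakeScale(scale, scale));
    return CGAffineTransformConcat(t, CGAffineTransformMakeTranslation(tx, 0));
}

The idea would be to call [passThroughLayer setTransform:transformToFitTrack(clipVideoTrack, renderSize) atTime:kCMTimeZero] for every layer instruction, with renderSize fixed once for the whole composition. Is that the right approach?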

Setting the problem above aside, merging any number of videos works fine. But as soon as I save the merged video to my library, load it back into my app, and try to join it with another new video, the resolution gets distorted, even though both videos play perfectly well on their own inside the app. How can I solve this?
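
My guess at the cause: the code above reassigns self.videoComposition.renderSize inside the loop to each source track's naturalSize, so the composition is rendered at whatever size the last clip happens to have, and a previously exported movie comes back at a different size than a fresh camera-roll clip. A minimal sketch of the fix I have in mind (the 1280x720 target is my assumption):

CGSize renderSize = CGSizeMake(1280.0, 720.0); // assumed fixed output size
self.videoComposition.renderSize = renderSize;
// ...then scale every clip into renderSize with a per-clip transform (as in
// the sketch above) instead of letting each clip dictate the render size.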

(I tried to follow the WWDC 2010 video editing tutorial, so this code is extracted from there.)
