Swift AVasset-Videos-Array zusammenführen

Ich möchte das AVAsset- @ zusammenführarrayVideos in ein einzelnes Video und speichern Sie es auf der Kamerarolle. Raywenderlich.com hat ein tollesLernprogram wo zwei Videos zu einem zusammengeführt werden. Ich habe den folgenden Code erstellt, aber das Video, das ich nach dem Exportieren in die Kamerarolle erhalte, enthält nur das erste und das letzte Video aus dem Array (mit Ausnahme der übrigen Videos in der Mitte vonarrayVideos). Vermisse ich hier etwas?

var arrayVideos = [AVAsset]() //Videos Array    
var atTimeM: CMTime = CMTimeMake(0, 0)
var lastAsset: AVAsset!
var layerInstructionsArray = [AVVideoCompositionLayerInstruction]()
var completeTrackDuration: CMTime = CMTimeMake(0, 1)
var videoSize: CGSize = CGSize(width: 0.0, height: 0.0)

func mergeVideoArray(){

    let mixComposition = AVMutableComposition()
    for videoAsset in arrayVideos{
        let videoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
        do {
            if videoAsset == arrayVideos.first{
                atTimeM = kCMTimeZero
            } else{
                atTimeM = lastAsset!.duration
            }
            try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0], at: atTimeM)  
            videoSize = videoTrack.naturalSize
        } catch let error as NSError {
            print("error: \(error)")
        }
        completeTrackDuration = CMTimeAdd(completeTrackDuration, videoAsset.duration)
        let videoInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
        if videoAsset != arrayVideos.last{
            videoInstruction.setOpacity(0.0, at: videoAsset.duration)
        }
        layerInstructionsArray.append(videoInstruction)
        lastAsset = videoAsset            
    }

    let mainInstruction = AVMutableVideoCompositionInstruction()
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, completeTrackDuration)
    mainInstruction.layerInstructions = layerInstructionsArray        

    let mainComposition = AVMutableVideoComposition()
    mainComposition.instructions = [mainInstruction]
    mainComposition.frameDuration = CMTimeMake(1, 30)
    mainComposition.renderSize = CGSize(width: videoSize.width, height: videoSize.height)

    let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
    let dateFormatter = DateFormatter()
    dateFormatter.dateStyle = .long
    dateFormatter.timeStyle = .short
    let date = dateFormatter.string(from: NSDate() as Date)
    let savePath = (documentDirectory as NSString).appendingPathComponent("mergeVideo-\(date).mov")
    let url = NSURL(fileURLWithPath: savePath)

    let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
    exporter!.outputURL = url as URL
    exporter!.outputFileType = AVFileTypeQuickTimeMovie
    exporter!.shouldOptimizeForNetworkUse = true
    exporter!.videoComposition = mainComposition
    exporter!.exportAsynchronously {

        PHPhotoLibrary.shared().performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: exporter!.outputURL!)
        }) { saved, error in
            if saved {
                let alertController = UIAlertController(title: "Your video was successfully saved", message: nil, preferredStyle: .alert)
                let defaultAction = UIAlertAction(title: "OK", style: .default, handler: nil)
                alertController.addAction(defaultAction)
                self.present(alertController, animated: true, completion: nil)
            } else{
                print("video erro: \(error)")

            }
        }
    }
} 

Antworten auf die Frage(6)

Ihre Antwort auf die Frage