Superponga dos videos con AVFoundation
Estoy tratando de superponer dos videos, con el video en primer plano siendo algo alfa transparente. He estado siguiendo elDocumentos de Apple tanto comoEste tutorial.
Cada vez que trato de pasar dos videos del mismo videomi código no se estrella; sin embargo, cuando intento alimentarlo con dos videos diferentes, recibo este error:
VideoMaskingUtils.exportVideo Error: Optional(Error Domain=AVFoundationErrorDomain Code=-11841 "Operation Stopped" UserInfo={NSLocalizedDescription=Operation Stopped, NSLocalizedFailureReason=The video could not be composed.})
VideoMaskingUtils.exportVideo Description: <AVAssetExportSession: 0x1556be30, asset = <AVMutableComposition: 0x15567f10 tracks = (
"<AVMutableCompositionTrack: 0x15658030 trackID = 1, mediaType = vide, editCount = 1>",
"<AVMutableCompositionTrack: 0x1556e250 trackID = 2, mediaType = vide, editCount = 1>"
)>, presetName = AVAssetExportPresetHighestQuality, outputFileType = public.mpeg-4
Error Domain=AVFoundationErrorDomain Code=-11841 "Operation Stopped" UserInfo={NSLocalizedDescription=Operation Stopped, NSLocalizedFailureReason=The video could not be composed.}
Entiendo queno puedes guardar un video con un canal alfa en iOS: quiero aplanar los dos videos en un video opaco.
Al intentar superponer los dos videos y aplicar un estilo de imágenes incrustadas con CATransforms, se bloquea; simplemente superponiéndolos (sin alfa o cualquier otro efecto aplicado) Se agradece cualquier ayuda.
Aquí está mi código (con ambos enfoques):
class func overlay(video firstAsset: AVURLAsset, withSecondVideo secondAsset: AVURLAsset, andAlpha alpha: Float) {
let mixComposition = AVMutableComposition()
let firstTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
let secondTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
guard let firstMediaTrack = firstAsset.tracksWithMediaType(AVMediaTypeVideo).first else { return }
guard let secondMediaTrack = secondAsset.tracksWithMediaType(AVMediaTypeVideo).first else { return }
do {
try firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, firstAsset.duration), ofTrack: firstMediaTrack, atTime: kCMTimeZero)
try secondTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, secondAsset.duration), ofTrack: secondMediaTrack, atTime: kCMTimeZero)
} catch (let error) {
print(error)
}
let width = max(firstMediaTrack.naturalSize.width, secondMediaTrack.naturalSize.width)
let height = max(firstMediaTrack.naturalSize.height, secondMediaTrack.naturalSize.height)
let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = CGSizeMake(width, height)
videoComposition.frameDuration = firstMediaTrack.minFrameDuration
let firstApproach = false
if firstApproach {
let mainInstruction = AVMutableVideoCompositionInstruction()
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration)
mainInstruction.backgroundColor = UIColor.redColor().CGColor
let firstlayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: firstTrack)
firstlayerInstruction.setTransform(firstAsset.preferredTransform, atTime: kCMTimeZero)
let secondInstruction = AVMutableVideoCompositionInstruction()
secondInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, secondAsset.duration)
let backgroundColor = UIColor(colorLiteralRed: 1.0, green: 1.0, blue: 1.0, alpha: alpha)
secondInstruction.backgroundColor = backgroundColor.CGColor
let secondlayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: secondTrack)
secondlayerInstruction.setTransform(secondAsset.preferredTransform, atTime: kCMTimeZero)
secondInstruction.layerInstructions = [secondlayerInstruction]
mainInstruction.layerInstructions = [firstlayerInstruction]//, secondlayerInstruction]
videoComposition.instructions = [mainInstruction, secondInstruction]
} else {
let firstLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: firstMediaTrack)
firstLayerInstruction.setTransform(firstMediaTrack.preferredTransform, atTime: kCMTimeZero)
firstLayerInstruction.setOpacity(1.0, atTime: kCMTimeZero)
let secondlayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: secondMediaTrack)
secondlayerInstruction.setTransform(secondMediaTrack.preferredTransform, atTime: kCMTimeZero)
secondlayerInstruction.setOpacity(alpha, atTime: kCMTimeZero)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, min(firstAsset.duration, secondAsset.duration))
instruction.layerInstructions = [firstLayerInstruction, secondlayerInstruction]
videoComposition.instructions = [instruction]
}
let outputUrl = VideoMaskingUtils.getPathForTempFileNamed("output.mov")
VideoMaskingUtils.exportCompositedVideo(mixComposition, toURL: outputUrl, withVideoComposition: videoComposition)
VideoMaskingUtils.removeTempFileAtPath(outputUrl.absoluteString)
}
Aquí está miexportCompositedVideo
función.
private class func exportCompositedVideo(compiledVideo: AVMutableComposition, toURL outputUrl: NSURL, withVideoComposition videoComposition: AVMutableVideoComposition) {
guard let exporter = AVAssetExportSession(asset: compiledVideo, presetName: AVAssetExportPresetHighestQuality) else { return }
exporter.outputURL = outputUrl
exporter.videoComposition = videoComposition
exporter.outputFileType = AVFileTypeQuickTimeMovie
exporter.shouldOptimizeForNetworkUse = true
exporter.exportAsynchronouslyWithCompletionHandler({
switch exporter.status {
case .Completed:
// we can be confident that there is a URL because
// we got this far. Otherwise it would've failed.
UISaveVideoAtPathToSavedPhotosAlbum(exporter.outputURL!.path!, nil, nil, nil)
print("VideoMaskingUtils.exportVideo SUCCESS!")
if exporter.error != nil {
print("VideoMaskingUtils.exportVideo Error: \(exporter.error)")
print("VideoMaskingUtils.exportVideo Description: \(exporter.description)")
}
NSNotificationCenter.defaultCenter().postNotificationName("videoExportDone", object: exporter.error)
break
case .Exporting:
let progress = exporter.progress
print("VideoMaskingUtils.exportVideo \(progress)")
NSNotificationCenter.defaultCenter().postNotificationName("videoExportProgress", object: progress)
break
case .Failed:
print("VideoMaskingUtils.exportVideo Error: \(exporter.error)")
print("VideoMaskingUtils.exportVideo Description: \(exporter.description)")
NSNotificationCenter.defaultCenter().postNotificationName("videoExportDone", object: exporter.error)
break
default: break
}
})
}