I am creating a video from a UIImage array with the code below. There is no animation when one image transitions to the next; I want to add some photo transition effects between the images.
These animations can be done on-screen with UIView.transition() and UIView.animate().
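For example, on a live UIImageView I would do something like this (a minimal sketch; imageView and nextImage are just placeholders):

```swift
// Minimal sketch of the kind of transition I mean, applied on screen.
// imageView and nextImage are placeholders for your own view and image.
UIView.transition(with: imageView,
                  duration: 0.5,
                  options: .transitionCrossDissolve,
                  animations: { imageView.image = nextImage },
                  completion: nil)
```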
But how can I apply transition animations like these while making the video from the UIImage array? I have searched a lot but haven't found anything.
I have also tried HJImagesToVideo, but it only offers a Crossfade transition.
I was stuck on the same problem for a while. Since this is a long answer, it may get boring, but I hope it helps. Here is my answer in Swift 3.
Theory:
First, you have to make a video from your array of images. There are plenty of solutions for this; just search a bit and you should find one. If not, see the code below.

Second, you may know that a CALayer can be added on top of a video. If not, search for how to add a CALayer over a video.

Third, you have to know how to animate a CALayer. Core Animation will do that job for you, and plenty of sample code is available; search for it. At this point you have a CALayer and you know how to animate it.

Finally, you have to add this animated CALayer to the video.
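One timing detail worth knowing before the code: when animations are rendered into a video with AVVideoCompositionCoreAnimationTool, a beginTime of exactly 0 is treated by Core Animation as "current time", so to start an animation at the very beginning of the video you use AVCoreAnimationBeginTimeAtZero or a tiny non-zero value (which is why the time array in my code below starts with 0.00001). A minimal sketch, assuming a placeholder overlayLayer:

```swift
// Minimal sketch: fade a layer in on the video's own timeline.
// beginTime == 0 would mean "now", so use AVCoreAnimationBeginTimeAtZero
// (or a tiny epsilon) to mean "the start of the video".
let fadeIn = CABasicAnimation(keyPath: "opacity")
fadeIn.fromValue = 0
fadeIn.toValue = 1
fadeIn.duration = 1
fadeIn.beginTime = AVCoreAnimationBeginTimeAtZero
fadeIn.fillMode = kCAFillModeForwards
fadeIn.isRemovedOnCompletion = false
overlayLayer.add(fadeIn, forKey: "fadeIn")
```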
Code:
1. Create a video from the image array
```swift
let outputSize = CGSize(width: 1920, height: 1280)
let imagesPerSecond: TimeInterval = 3 // each image will stay for 3 secs
var selectedPhotosArray = [UIImage]()
var imageArrayToVideoURL = NSURL()
let audioIsEnabled: Bool = false // if your video has no sound
var asset: AVAsset!

func buildVideoFromImageArray() {
    for image in 0..<5 {
        selectedPhotosArray.append(UIImage(named: "\(image + 1).JPG")!) // name of the images: 1.JPG, 2.JPG, 3.JPG, 4.JPG, 5.JPG
    }

    imageArrayToVideoURL = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/video1.MP4")
    removeFileAtURLIfExists(url: imageArrayToVideoURL)

    guard let videoWriter = try? AVAssetWriter(outputURL: imageArrayToVideoURL as URL, fileType: AVFileTypeMPEG4) else {
        fatalError("AVAssetWriter error")
    }

    let outputSettings = [AVVideoCodecKey : AVVideoCodecH264,
                          AVVideoWidthKey : NSNumber(value: Float(outputSize.width)),
                          AVVideoHeightKey : NSNumber(value: Float(outputSize.height))] as [String : Any]
    guard videoWriter.canApply(outputSettings: outputSettings, forMediaType: AVMediaTypeVideo) else {
        fatalError("Negative : Can't apply the Output settings...")
    }

    let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings)
    let sourcePixelBufferAttributesDictionary = [
        kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: kCVPixelFormatType_32ARGB),
        kCVPixelBufferWidthKey as String : NSNumber(value: Float(outputSize.width)),
        kCVPixelBufferHeightKey as String : NSNumber(value: Float(outputSize.height))
    ]
    let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput, sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)

    if videoWriter.canAdd(videoWriterInput) {
        videoWriter.add(videoWriterInput)
    }

    if videoWriter.startWriting() {
        let zeroTime = CMTimeMake(Int64(imagesPerSecond), Int32(1))
        videoWriter.startSession(atSourceTime: zeroTime)

        assert(pixelBufferAdaptor.pixelBufferPool != nil)
        let media_queue = DispatchQueue(label: "mediaInputQueue")
        videoWriterInput.requestMediaDataWhenReady(on: media_queue, using: { () -> Void in
            let fps: Int32 = 1
            let framePerSecond: Int64 = Int64(self.imagesPerSecond)
            let frameDuration = CMTimeMake(Int64(self.imagesPerSecond), fps)

            var frameCount: Int64 = 0
            var appendSucceeded = true

            while !self.selectedPhotosArray.isEmpty {
                if videoWriterInput.isReadyForMoreMediaData {
                    let nextPhoto = self.selectedPhotosArray.remove(at: 0)
                    let lastFrameTime = CMTimeMake(frameCount * framePerSecond, fps)
                    let presentationTime = frameCount == 0 ? lastFrameTime : CMTimeAdd(lastFrameTime, frameDuration)

                    var pixelBuffer: CVPixelBuffer? = nil
                    let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferAdaptor.pixelBufferPool!, &pixelBuffer)

                    if let pixelBuffer = pixelBuffer, status == 0 {
                        let managedPixelBuffer = pixelBuffer
                        CVPixelBufferLockBaseAddress(managedPixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))

                        // Draw the image aspect-fitted and centered into the pixel buffer.
                        let data = CVPixelBufferGetBaseAddress(managedPixelBuffer)
                        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
                        let context = CGContext(data: data, width: Int(self.outputSize.width), height: Int(self.outputSize.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(managedPixelBuffer), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue)
                        context!.clear(CGRect(x: 0, y: 0, width: CGFloat(self.outputSize.width), height: CGFloat(self.outputSize.height)))

                        let horizontalRatio = CGFloat(self.outputSize.width) / nextPhoto.size.width
                        let verticalRatio = CGFloat(self.outputSize.height) / nextPhoto.size.height
                        //let aspectRatio = max(horizontalRatio, verticalRatio) // ScaleAspectFill
                        let aspectRatio = min(horizontalRatio, verticalRatio) // ScaleAspectFit

                        let newSize: CGSize = CGSize(width: nextPhoto.size.width * aspectRatio, height: nextPhoto.size.height * aspectRatio)
                        let x = newSize.width < self.outputSize.width ? (self.outputSize.width - newSize.width) / 2 : 0
                        let y = newSize.height < self.outputSize.height ? (self.outputSize.height - newSize.height) / 2 : 0

                        context?.draw(nextPhoto.cgImage!, in: CGRect(x: x, y: y, width: newSize.width, height: newSize.height))

                        CVPixelBufferUnlockBaseAddress(managedPixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))

                        appendSucceeded = pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime)
                    } else {
                        print("Failed to allocate pixel buffer")
                        appendSucceeded = false
                    }
                }
                if !appendSucceeded {
                    break
                }
                frameCount += 1
            }

            videoWriterInput.markAsFinished()
            videoWriter.finishWriting { () -> Void in
                print("-----video1 url = \(self.imageArrayToVideoURL)")
                self.asset = AVAsset(url: self.imageArrayToVideoURL as URL)
                self.exportVideoWithAnimation()
            }
        })
    }
}

func removeFileAtURLIfExists(url: NSURL) {
    if let filePath = url.path {
        let fileManager = FileManager.default
        if fileManager.fileExists(atPath: filePath) {
            do {
                try fileManager.removeItem(atPath: filePath)
            } catch let error as NSError {
                print("Couldn't remove existing destination file: \(error)")
            }
        }
    }
}
```
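In case it helps, this is roughly how the whole pipeline gets triggered (a hypothetical call site; the action name is mine, not part of the answer's code):

```swift
// Hypothetical call site, e.g. a button action in the same view controller.
// buildVideoFromImageArray() writes Documents/video1.MP4, and its
// finishWriting completion then loads that file as an AVAsset and calls
// exportVideoWithAnimation() to produce the animated Documents/video2.mp4.
@IBAction func makeVideoTapped(_ sender: UIButton) {
    buildVideoFromImageArray()
}
```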
2. Add the animations to the created video (please read all the commented-out parts carefully; I think reading them will clear up a few questions).
```swift
func exportVideoWithAnimation() {
    let composition = AVMutableComposition()

    let track = asset?.tracks(withMediaType: AVMediaTypeVideo)
    let videoTrack: AVAssetTrack = track![0] as AVAssetTrack
    let timerange = CMTimeRangeMake(kCMTimeZero, (asset?.duration)!)

    let compositionVideoTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())

    do {
        try compositionVideoTrack.insertTimeRange(timerange, of: videoTrack, at: kCMTimeZero)
        compositionVideoTrack.preferredTransform = videoTrack.preferredTransform
    } catch {
        print(error)
    }

    // if your video has sound, you don't need to check this
    if audioIsEnabled {
        let compositionAudioTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())

        for audioTrack in (asset?.tracks(withMediaType: AVMediaTypeAudio))! {
            do {
                try compositionAudioTrack.insertTimeRange(audioTrack.timeRange, of: audioTrack, at: kCMTimeZero)
            } catch {
                print(error)
            }
        }
    }

    let size = videoTrack.naturalSize

    let videolayer = CALayer()
    videolayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)

    let parentlayer = CALayer()
    parentlayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)
    parentlayer.addSublayer(videolayer)

    ////////////////////////////////////////////////////////////////////////////
    // this is the animation part
    var time = [0.00001, 3, 6, 9, 12] // I used this time array to determine the start time of each frame's animation. Each frame stays for 3 secs, that's why their difference is 3.

    var imgarray = [UIImage]()

    for image in 0..<5 {
        imgarray.append(UIImage(named: "\(image + 1).JPG")!)

        let nextPhoto = imgarray[image]

        let horizontalRatio = CGFloat(self.outputSize.width) / nextPhoto.size.width
        let verticalRatio = CGFloat(self.outputSize.height) / nextPhoto.size.height
        let aspectRatio = min(horizontalRatio, verticalRatio)
        let newSize: CGSize = CGSize(width: nextPhoto.size.width * aspectRatio, height: nextPhoto.size.height * aspectRatio)
        let x = newSize.width < self.outputSize.width ? (self.outputSize.width - newSize.width) / 2 : 0
        let y = newSize.height < self.outputSize.height ? (self.outputSize.height - newSize.height) / 2 : 0

        /// I showed 10 animations here. You can uncomment any of these and export a video to see the result.

        ///#1. left->right///
        let blackLayer = CALayer()
        blackLayer.frame = CGRect(x: -videoTrack.naturalSize.width, y: 0, width: videoTrack.naturalSize.width, height: videoTrack.naturalSize.height)
        blackLayer.backgroundColor = UIColor.black.cgColor

        let imageLayer = CALayer()
        imageLayer.frame = CGRect(x: x, y: y, width: newSize.width, height: newSize.height)
        imageLayer.contents = imgarray[image].cgImage
        blackLayer.addSublayer(imageLayer)

        let animation = CABasicAnimation()
        animation.keyPath = "position.x"
        animation.fromValue = -videoTrack.naturalSize.width
        animation.toValue = 2 * (videoTrack.naturalSize.width)
        animation.duration = 3
        animation.beginTime = CFTimeInterval(time[image])
        animation.fillMode = kCAFillModeForwards
        animation.isRemovedOnCompletion = false
        blackLayer.add(animation, forKey: "basic")

        ///#2. right->left///
//        let blackLayer = CALayer()
//        blackLayer.frame = CGRect(x: 2 * videoTrack.naturalSize.width, y: 0, width: videoTrack.naturalSize.width, height: videoTrack.naturalSize.height)
//        blackLayer.backgroundColor = UIColor.black.cgColor
//
//        let imageLayer = CALayer()
//        imageLayer.frame = CGRect(x: x, y: y, width: newSize.width, height: newSize.height)
//        imageLayer.contents = imgarray[image].cgImage
//        blackLayer.addSublayer(imageLayer)
//
//        let animation = CABasicAnimation()
//        animation.keyPath = "position.x"
//        animation.fromValue = 2 * (videoTrack.naturalSize.width)
//        animation.toValue = -videoTrack.naturalSize.width
//        animation.duration = 3
//        animation.beginTime = CFTimeInterval(time[image])
//        animation.fillMode = kCAFillModeForwards
//        animation.isRemovedOnCompletion = false
//        blackLayer.add(animation, forKey: "basic")

        ///#3. top->bottom///
//        let blackLayer = CALayer()
//        blackLayer.frame = CGRect(x: 0, y: 2 * videoTrack.naturalSize.height, width: videoTrack.naturalSize.width, height: videoTrack.naturalSize.height)
//        blackLayer.backgroundColor = UIColor.black.cgColor
//
//        let imageLayer = CALayer()
//        imageLayer.frame = CGRect(x: x, y: y, width: newSize.width, height: newSize.height)
//        imageLayer.contents = imgarray[image].cgImage
//        blackLayer.addSublayer(imageLayer)
//
//        let animation = CABasicAnimation()
//        animation.keyPath = "position.y"
//        animation.fromValue = 2 * videoTrack.naturalSize.height
//        animation.toValue = -videoTrack.naturalSize.height
//        animation.duration = 3
//        animation.beginTime = CFTimeInterval(time[image])
//        animation.fillMode = kCAFillModeForwards
//        animation.isRemovedOnCompletion = false
//        blackLayer.add(animation, forKey: "basic")

        ///#4. bottom->top///
//        let blackLayer = CALayer()
//        blackLayer.frame = CGRect(x: 0, y: -videoTrack.naturalSize.height, width: videoTrack.naturalSize.width, height: videoTrack.naturalSize.height)
//        blackLayer.backgroundColor = UIColor.black.cgColor
//
//        let imageLayer = CALayer()
//        imageLayer.frame = CGRect(x: x, y: y, width: newSize.width, height: newSize.height)
//        imageLayer.contents = imgarray[image].cgImage
//        blackLayer.addSublayer(imageLayer)
//
//        let animation = CABasicAnimation()
//        animation.keyPath = "position.y"
//        animation.fromValue = -videoTrack.naturalSize.height
//        animation.toValue = 2 * videoTrack.naturalSize.height
//        animation.duration = 3
//        animation.beginTime = CFTimeInterval(time[image])
//        animation.fillMode = kCAFillModeForwards
//        animation.isRemovedOnCompletion = false
//        blackLayer.add(animation, forKey: "basic")

        ///#5. opacity(1->0)(left->right)///
//        let blackLayer = CALayer()
//        blackLayer.frame = CGRect(x: -videoTrack.naturalSize.width, y: 0, width: videoTrack.naturalSize.width, height: videoTrack.naturalSize.height)
//        blackLayer.backgroundColor = UIColor.black.cgColor
//
//        let imageLayer = CALayer()
//        imageLayer.frame = CGRect(x: x, y: y, width: newSize.width, height: newSize.height)
//        imageLayer.contents = imgarray[image].cgImage
//        blackLayer.addSublayer(imageLayer)
//
//        let animation = CABasicAnimation()
//        animation.keyPath = "position.x"
//        animation.fromValue = -videoTrack.naturalSize.width
//        animation.toValue = 2 * (videoTrack.naturalSize.width)
//        animation.duration = 3
//        animation.beginTime = CFTimeInterval(time[image])
//        animation.fillMode = kCAFillModeForwards
//        animation.isRemovedOnCompletion = false
//        blackLayer.add(animation, forKey: "basic")
//
//        let fadeOutAnimation = CABasicAnimation(keyPath: "opacity")
//        fadeOutAnimation.fromValue = 1
//        fadeOutAnimation.toValue = 0
//        fadeOutAnimation.duration = 3
//        fadeOutAnimation.beginTime = CFTimeInterval(time[image])
//        fadeOutAnimation.isRemovedOnCompletion = false
//        blackLayer.add(fadeOutAnimation, forKey: "opacity")

        ///#6. opacity(1->0)(right->left)///
//        let blackLayer = CALayer()
//        blackLayer.frame = CGRect(x: 2 * videoTrack.naturalSize.width, y: 0, width: videoTrack.naturalSize.width, height: videoTrack.naturalSize.height)
//        blackLayer.backgroundColor = UIColor.black.cgColor
//
//        let imageLayer = CALayer()
//        imageLayer.frame = CGRect(x: x, y: y, width: newSize.width, height: newSize.height)
//        imageLayer.contents = imgarray[image].cgImage
//        blackLayer.addSublayer(imageLayer)
//
//        let animation = CABasicAnimation()
//        animation.keyPath = "position.x"
//        animation.fromValue = 2 * videoTrack.naturalSize.width
//        animation.toValue = -videoTrack.naturalSize.width
//        animation.duration = 3
//        animation.beginTime = CFTimeInterval(time[image])
//        animation.fillMode = kCAFillModeForwards
//        animation.isRemovedOnCompletion = false
//        blackLayer.add(animation, forKey: "basic")
//
//        let fadeOutAnimation = CABasicAnimation(keyPath: "opacity")
//        fadeOutAnimation.fromValue = 1
//        fadeOutAnimation.toValue = 0
//        fadeOutAnimation.duration = 3
//        fadeOutAnimation.beginTime = CFTimeInterval(time[image])
//        fadeOutAnimation.isRemovedOnCompletion = false
//        blackLayer.add(fadeOutAnimation, forKey: "opacity")

        ///#7. opacity(1->0)(top->bottom)///
//        let blackLayer = CALayer()
//        blackLayer.frame = CGRect(x: 0, y: 2 * videoTrack.naturalSize.height, width: videoTrack.naturalSize.width, height: videoTrack.naturalSize.height)
//        blackLayer.backgroundColor = UIColor.black.cgColor
//
//        let imageLayer = CALayer()
//        imageLayer.frame = CGRect(x: x, y: y, width: newSize.width, height: newSize.height)
//        imageLayer.contents = imgarray[image].cgImage
//        blackLayer.addSublayer(imageLayer)
//
//        let animation = CABasicAnimation()
//        animation.keyPath = "position.y"
//        animation.fromValue = 2 * videoTrack.naturalSize.height
//        animation.toValue = -videoTrack.naturalSize.height
//        animation.duration = 3
//        animation.beginTime = CFTimeInterval(time[image])
//        animation.fillMode = kCAFillModeForwards
//        animation.isRemovedOnCompletion = false
//        blackLayer.add(animation, forKey: "basic")
//
//        let fadeOutAnimation = CABasicAnimation(keyPath: "opacity")
//        fadeOutAnimation.fromValue = 1
//        fadeOutAnimation.toValue = 0
//        fadeOutAnimation.duration = 3
//        fadeOutAnimation.beginTime = CFTimeInterval(time[image])
//        fadeOutAnimation.isRemovedOnCompletion = false
//        blackLayer.add(fadeOutAnimation, forKey: "opacity")

        ///#8. opacity(1->0)(bottom->top)///
//        let blackLayer = CALayer()
//        blackLayer.frame = CGRect(x: 0, y: -videoTrack.naturalSize.height, width: videoTrack.naturalSize.width, height: videoTrack.naturalSize.height)
//        blackLayer.backgroundColor = UIColor.black.cgColor
//
//        let imageLayer = CALayer()
//        imageLayer.frame = CGRect(x: x, y: y, width: newSize.width, height: newSize.height)
//        imageLayer.contents = imgarray[image].cgImage
//        blackLayer.addSublayer(imageLayer)
//
//        let animation = CABasicAnimation()
//        animation.keyPath = "position.y"
//        animation.fromValue = -videoTrack.naturalSize.height
//        animation.toValue = 2 * videoTrack.naturalSize.height
//        animation.duration = 3
//        animation.beginTime = CFTimeInterval(time[image])
//        animation.fillMode = kCAFillModeForwards
//        animation.isRemovedOnCompletion = false
//        blackLayer.add(animation, forKey: "basic")
//
//        let fadeOutAnimation = CABasicAnimation(keyPath: "opacity")
//        fadeOutAnimation.fromValue = 1
//        fadeOutAnimation.toValue = 0
//        fadeOutAnimation.duration = 3
//        fadeOutAnimation.beginTime = CFTimeInterval(time[image])
//        fadeOutAnimation.isRemovedOnCompletion = false
//        blackLayer.add(fadeOutAnimation, forKey: "opacity")

        ///#9. scale(small->big->small)///
//        let blackLayer = CALayer()
//        blackLayer.frame = CGRect(x: 0, y: 0, width: videoTrack.naturalSize.width, height: videoTrack.naturalSize.height)
//        blackLayer.backgroundColor = UIColor.black.cgColor
//        blackLayer.opacity = 0
//
//        let imageLayer = CALayer()
//        imageLayer.frame = CGRect(x: x, y: y, width: newSize.width, height: newSize.height)
//        imageLayer.contents = imgarray[image].cgImage
//        blackLayer.addSublayer(imageLayer)
//
//        let scaleAnimation = CAKeyframeAnimation(keyPath: "transform.scale")
//        scaleAnimation.values = [0, 1.0, 0]
//        scaleAnimation.beginTime = CFTimeInterval(time[image])
//        scaleAnimation.duration = 3
//        scaleAnimation.isRemovedOnCompletion = false
//        blackLayer.add(scaleAnimation, forKey: "transform.scale")
//
//        let fadeInOutAnimation = CABasicAnimation(keyPath: "opacity")
//        fadeInOutAnimation.fromValue = 1
//        fadeInOutAnimation.toValue = 1
//        fadeInOutAnimation.duration = 3
//        fadeInOutAnimation.beginTime = CFTimeInterval(time[image])
//        fadeInOutAnimation.isRemovedOnCompletion = false
//        blackLayer.add(fadeInOutAnimation, forKey: "opacity")

        ///#10. scale(big->small->big)///
//        let blackLayer = CALayer()
//        blackLayer.frame = CGRect(x: 0, y: 0, width: videoTrack.naturalSize.width, height: videoTrack.naturalSize.height)
//        blackLayer.backgroundColor = UIColor.black.cgColor
//        blackLayer.opacity = 0
//
//        let imageLayer = CALayer()
//        imageLayer.frame = CGRect(x: x, y: y, width: newSize.width, height: newSize.height)
//        imageLayer.contents = imgarray[image].cgImage
//        blackLayer.addSublayer(imageLayer)
//
//        let scaleAnimation = CAKeyframeAnimation(keyPath: "transform.scale")
//        scaleAnimation.values = [1, 0, 1]
//        scaleAnimation.beginTime = CFTimeInterval(time[image])
//        scaleAnimation.duration = 3
//        scaleAnimation.isRemovedOnCompletion = false
//        blackLayer.add(scaleAnimation, forKey: "transform.scale")
//
//        let fadeOutAnimation = CABasicAnimation(keyPath: "opacity")
//        fadeOutAnimation.fromValue = 1
//        fadeOutAnimation.toValue = 1
//        fadeOutAnimation.duration = 3
//        fadeOutAnimation.beginTime = CFTimeInterval(time[image])
//        fadeOutAnimation.isRemovedOnCompletion = false
//        blackLayer.add(fadeOutAnimation, forKey: "opacity")

        parentlayer.addSublayer(blackLayer)
    }
    ////////////////////////////////////////////////////////////////////////////

    let layercomposition = AVMutableVideoComposition()
    layercomposition.frameDuration = CMTimeMake(1, 30)
    layercomposition.renderSize = size
    layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: parentlayer)

    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
    let videotrack = composition.tracks(withMediaType: AVMediaTypeVideo)[0] as AVAssetTrack
    let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
    instruction.layerInstructions = [layerinstruction]
    layercomposition.instructions = [instruction]

    let animatedVideoURL = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/video2.mp4")
    removeFileAtURLIfExists(url: animatedVideoURL)

    guard let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else { return }
    assetExport.videoComposition = layercomposition
    assetExport.outputFileType = AVFileTypeMPEG4
    assetExport.outputURL = animatedVideoURL as URL
    assetExport.exportAsynchronously(completionHandler: {
        switch assetExport.status {
        case AVAssetExportSessionStatus.failed:
            print("failed \(String(describing: assetExport.error))")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(String(describing: assetExport.error))")
        default:
            print("Exported")
        }
    })
}
```
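To check the result quickly, you can play the exported file back. A minimal sketch using AVKit (presentVideo(at:from:) is an assumed helper, not part of the answer above):

```swift
import UIKit
import AVKit

// Assumed helper: preview the exported Documents/video2.mp4 with
// AVPlayerViewController, presented from any view controller.
func presentVideo(at url: URL, from presenter: UIViewController) {
    let playerController = AVPlayerViewController()
    playerController.player = AVPlayer(url: url)
    presenter.present(playerController, animated: true) {
        playerController.player?.play()
    }
}
```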
Don't forget to import AVFoundation.
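One last note: the code above uses the Swift 3 spellings of the AVFoundation constants. If you build with Swift 4 or later, the compiler will ask for the renamed, namespaced versions (exact names depend on your Xcode/SDK version), roughly: AVMediaTypeVideo → AVMediaType.video, AVFileTypeMPEG4 → AVFileType.mp4, AVVideoCodecH264 → AVVideoCodecType.h264, kCMTimeZero → CMTime.zero, kCAFillModeForwards → CAMediaTimingFillMode.forwards, and CMTimeMake(3, 1) → CMTimeMake(value: 3, timescale: 1).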