
Swift 3: How do I add a watermark and text to a video? AVVideoCompositionCoreAnimationTool issues on iOS 10

This code used to work on iOS 9 for adding a watermark and text to a video, but since iOS 10 it no longer does. An iOS 10 bug has been filed with Apple, but there has been no response. I have not been able to implement any workaround for adding the watermark and text. With this code the video sometimes exports successfully, but most of the time it does not.

How should I use AVVideoCompositionCoreAnimationTool so that it works the way it did on iOS 9?

let videoComposition: AVMutableVideoComposition = AVMutableVideoComposition() 

videoComposition.frameDuration = CMTimeMake(1, 60) 
videoComposition.renderSize = CGSize(width: clipVideoTrack.naturalSize.height, height: clipVideoTrack.naturalSize.height) 


let instruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction() 

instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30)) // hardcoded to 60 seconds at timescale 30; asset.duration would cover the whole clip 

// transformer is applied to set the video in portrait otherwise it is rotated by 90 degrees 
let transformer: AVMutableVideoCompositionLayerInstruction = 
    AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack) 

let t1: CGAffineTransform = CGAffineTransform(translationX: clipVideoTrack.naturalSize.height, y: -(clipVideoTrack.naturalSize.width - clipVideoTrack.naturalSize.height)/2) 

let t2: CGAffineTransform = t1.rotated(by: CGFloat(M_PI_2)) 

let finalTransform: CGAffineTransform = t2 

transformer.setTransform(finalTransform, at: kCMTimeZero) 

instruction.layerInstructions = [transformer] 

videoComposition.instructions = [instruction] 



let mixComposition = AVMutableComposition() 
let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid) 


do { 
    try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, asset.duration), of: clipVideoTrack, at: kCMTimeZero) 
} catch { 
    print(error) 
} 


//Add watermark 


let myImage = UIImage(named: "logo") 

let aLayer = CALayer() 
aLayer.contents = myImage!.cgImage 
aLayer.frame = CGRect(x: (clipVideoTrack.naturalSize.height * (self.view.bounds.width - 45))/self.view.bounds.width, 
        y: (clipVideoTrack.naturalSize.height * (self.view.bounds.width - 40))/self.view.bounds.width, 
        width: (clipVideoTrack.naturalSize.height * 40)/self.view.bounds.width, 
        height: (clipVideoTrack.naturalSize.height * 40)/self.view.bounds.width) 

let titleLayer = CATextLayer() 
titleLayer.string = "text" 
titleLayer.font = "Helvetica" as CFTypeRef // CATextLayer.font takes a font name here; the size comes from fontSize below 
titleLayer.fontSize = clipVideoTrack.naturalSize.height/16 
titleLayer.shadowOpacity = 0.5 
titleLayer.alignmentMode = kCAAlignmentCenter 
titleLayer.frame = CGRect(x: 0, y: 0, width: clipVideoTrack.naturalSize.height, height: clipVideoTrack.naturalSize.height/6) 
titleLayer.display() 


let videoSize = asset.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize 
let parentLayer = CALayer() 
let videoLayer = CALayer() 
parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.height, height: videoSize.height) 
videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.height, height: videoSize.height) 

parentLayer.addSublayer(videoLayer) 
parentLayer.addSublayer(aLayer) 
parentLayer.addSublayer(titleLayer) 


videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer) 



do { try FileManager.default.removeItem(at: filePath) } 
catch let error as NSError { 
    NSLog("\(error), \(error.localizedDescription)") 
} 



let exportUrl: URL = filePath 
self.videoUrl = filePath as NSURL 


// Note: the session exports `asset` with the video composition; the mixComposition built above is never used. 
let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetMediumQuality) 

exporter!.videoComposition = videoComposition 
exporter!.outputFileType = AVFileTypeQuickTimeMovie 
exporter!.outputURL = URL(fileURLWithPath: exportUrl.path) 


exporter!.exportAsynchronously(completionHandler: { 

    DispatchQueue.main.async { 


     self.view.layer.addSublayer(self.avPlayerLayer) 

     let item = AVPlayerItem(url: exportUrl) 
     self.player.replaceCurrentItem(with: item) 

     if (self.player.currentItem != nil) { 
      print("Starting playback!") 
      self.player.play() 
     } 

    } 

}) 

Please note: if I remove the AVVideoCompositionCoreAnimationTool, the video is always exported, but then it has no watermark or text on it. How can I make this work so that AVVideoCompositionCoreAnimationTool does not conflict with AVAssetExportSession?

Some people have implemented a workaround with customVideoCompositorClass and the AVVideoCompositing protocol, but that seems a much heavier workaround than what used to just work. A minimal sketch of that approach is shown below.
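For reference, here is a minimal sketch of that custom-compositor approach, assuming a Core Image source-over composite and a hypothetical "logo" watermark asset; none of this is from the original question:

import UIKit 
import AVFoundation 
import CoreImage 

// Sketch only: renders each source frame with a watermark composited on top. 
class WatermarkCompositor: NSObject, AVVideoCompositing { 

    private let ciContext = CIContext() 
    // Hypothetical asset name; replace with your own watermark image. 
    private let watermark = CIImage(image: UIImage(named: "logo")!)! 

    let sourcePixelBufferAttributes: [String : Any]? = 
        [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)] 
    let requiredPixelBufferAttributesForRenderContext: [String : Any] = 
        [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)] 

    func renderContextChanged(_ newRenderContext: AVVideoCompositionRenderContext) { 
        // No render-context state is needed for this sketch. 
    } 

    func startRequest(_ request: AVAsynchronousVideoCompositionRequest) { 
        guard 
            let trackID = request.sourceTrackIDs.first?.int32Value, 
            let sourceBuffer = request.sourceFrame(byTrackID: trackID), 
            let outputBuffer = request.renderContext.newPixelBuffer() 
        else { 
            request.finish(with: NSError(domain: "WatermarkCompositor", code: -1, userInfo: nil)) 
            return 
        } 
        // Composite the watermark over the current frame with Core Image. 
        let frame = CIImage(cvPixelBuffer: sourceBuffer) 
        let filter = CIFilter(name: "CISourceOverCompositing")! 
        filter.setValue(frame, forKey: kCIInputBackgroundImageKey) 
        filter.setValue(watermark, forKey: kCIInputImageKey) 
        ciContext.render(filter.outputImage!, to: outputBuffer) 
        request.finish(withComposedVideoFrame: outputBuffer) 
    } 
} 

To try it, you would set videoComposition.customVideoCompositorClass = WatermarkCompositor.self and leave animationTool unset; the custom compositor takes over frame rendering, so the Core Animation tool no longer applies.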

Answers


I found this answer here and it is working for me. See whether it works for you too.

import UIKit 
import AssetsLibrary 
import AVFoundation 

enum QUWatermarkPosition { 
    case TopLeft 
    case TopRight 
    case BottomLeft 
    case BottomRight 
    case Default 
} 

class QUWatermarkManager: NSObject { 

    func watermark(video videoAsset:AVAsset, watermarkText text : String, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((status : AVAssetExportSessionStatus!, session: AVAssetExportSession!, outputURL : NSURL!) ->())?) { 
     self.watermark(video: videoAsset, watermarkText: text, imageName: nil, saveToLibrary: flag, watermarkPosition: position) { (status, session, outputURL) ->() in 
      completion!(status: status, session: session, outputURL: outputURL) 
     } 
    } 

    func watermark(video videoAsset:AVAsset, imageName name : String, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((status : AVAssetExportSessionStatus!, session: AVAssetExportSession!, outputURL : NSURL!) ->())?) { 
     self.watermark(video: videoAsset, watermarkText: nil, imageName: name, saveToLibrary: flag, watermarkPosition: position) { (status, session, outputURL) ->() in 
      completion!(status: status, session: session, outputURL: outputURL) 
     } 
    } 

    private func watermark(video videoAsset:AVAsset, watermarkText text : String!, imageName name : String!, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((status : AVAssetExportSessionStatus!, session: AVAssetExportSession!, outputURL : NSURL!) ->())?) { 

     dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), {() -> Void in 
      var mixComposition = AVMutableComposition() 

      var compositionVideoTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) 
      var clipVideoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack 
      compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), ofTrack: clipVideoTrack, atTime: kCMTimeZero, error: nil) 
      compositionVideoTrack.preferredTransform = clipVideoTrack.preferredTransform // the original bare expression here had no effect 

      let videoSize = clipVideoTrack.naturalSize 

      var parentLayer = CALayer() 
      var videoLayer = CALayer() 
      parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height) 
      videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height) 
      parentLayer.addSublayer(videoLayer) 

      if text != nil { 
       var titleLayer = CATextLayer() 
       titleLayer.backgroundColor = UIColor.redColor().CGColor 
       titleLayer.string = text 
       titleLayer.font = "Helvetica" 
       titleLayer.fontSize = 15 
       titleLayer.alignmentMode = kCAAlignmentCenter 
       titleLayer.bounds = CGRectMake(0, 0, videoSize.width, videoSize.height) 
       parentLayer.addSublayer(titleLayer) 
      } else if name != nil { 
       var watermarkImage = UIImage(named: name) 
       var imageLayer = CALayer() 
       imageLayer.contents = watermarkImage?.CGImage 

       var xPosition : CGFloat = 0.0 
       var yPosition : CGFloat = 0.0 
       let imageSize : CGFloat = 57.0 

       switch (position) { 
       case .TopLeft: 
        xPosition = 0 
        yPosition = 0 
        break 
       case .TopRight: 
        xPosition = videoSize.width - imageSize 
        yPosition = 0 
        break 
       case .BottomLeft: 
        xPosition = 0 
        yPosition = videoSize.height - imageSize 
        break 
       case .BottomRight, .Default: 
        xPosition = videoSize.width - imageSize 
        yPosition = videoSize.height - imageSize 
        break 
       } 


       imageLayer.frame = CGRectMake(xPosition, yPosition, imageSize, imageSize) 
       imageLayer.opacity = 0.65 
       parentLayer.addSublayer(imageLayer) 
      } 

      var videoComp = AVMutableVideoComposition() 
      videoComp.renderSize = videoSize 
      videoComp.frameDuration = CMTimeMake(1, 30) 
      videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, inLayer: parentLayer) 

      var instruction = AVMutableVideoCompositionInstruction() 
      instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration) 
      var videoTrack = mixComposition.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack 

      let layerInstruction = self.videoCompositionInstructionForTrack(compositionVideoTrack, asset: videoAsset) 

      instruction.layerInstructions = [layerInstruction] 
      videoComp.instructions = [instruction] 

      let documentDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0] as! String 
      var dateFormatter = NSDateFormatter() 
      dateFormatter.dateStyle = .LongStyle 
      dateFormatter.timeStyle = .ShortStyle 
      let date = dateFormatter.stringFromDate(NSDate()) 
      let savePath = documentDirectory.stringByAppendingPathComponent("watermarkVideo-\(date).mov") 
      let url = NSURL(fileURLWithPath: savePath) 

      let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) 
      exporter.outputURL = url 
      exporter.outputFileType = AVFileTypeQuickTimeMovie 
      exporter.shouldOptimizeForNetworkUse = true 
      exporter.videoComposition = videoComp 

      exporter.exportAsynchronouslyWithCompletionHandler() { 
       dispatch_async(dispatch_get_main_queue(), {() -> Void in 
        if exporter.status == AVAssetExportSessionStatus.Completed { 
         let outputURL = exporter.outputURL 
         if flag { 
          // Save to library 
          let library = ALAssetsLibrary() 
          if library.videoAtPathIsCompatibleWithSavedPhotosAlbum(outputURL) { 
           library.writeVideoAtPathToSavedPhotosAlbum(outputURL, 
            completionBlock: { (assetURL:NSURL!, error:NSError!) -> Void in 
             completion!(status: AVAssetExportSessionStatus.Completed, session: exporter, outputURL: outputURL) 
           }) 
          } 
         } else { 
          completion!(status: AVAssetExportSessionStatus.Completed, session: exporter, outputURL: outputURL) 
         } 

        } else { 
         // Error 
         completion!(status: exporter.status, session: exporter, outputURL: nil) 
        } 
       }) 
      } 
     }) 
    } 


    private func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) { 
     var assetOrientation = UIImageOrientation.Up 
     var isPortrait = false 
     if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 { 
      assetOrientation = .Right 
      isPortrait = true 
     } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 { 
      assetOrientation = .Left 
      isPortrait = true 
     } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 { 
      assetOrientation = .Up 
     } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 { 
      assetOrientation = .Down 
     } 
     return (assetOrientation, isPortrait) 
    } 

    private func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction { 
     let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track) 
     let assetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack 

     var transform = assetTrack.preferredTransform 
     let assetInfo = orientationFromTransform(transform) 

     var scaleToFitRatio = UIScreen.mainScreen().bounds.width/assetTrack.naturalSize.width 
     if assetInfo.isPortrait { 
      scaleToFitRatio = UIScreen.mainScreen().bounds.width/assetTrack.naturalSize.height 
      let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio) 
      instruction.setTransform(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor), 
       atTime: kCMTimeZero) 
     } else { 
      let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio) 
      var concat = CGAffineTransformConcat(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor), CGAffineTransformMakeTranslation(0, UIScreen.mainScreen().bounds.width/2)) 
      if assetInfo.orientation == .Down { 
       let fixUpsideDown = CGAffineTransformMakeRotation(CGFloat(M_PI)) 
       let windowBounds = UIScreen.mainScreen().bounds 
       let yFix = assetTrack.naturalSize.height + windowBounds.height 
       let centerFix = CGAffineTransformMakeTranslation(assetTrack.naturalSize.width, yFix) 
       concat = CGAffineTransformConcat(CGAffineTransformConcat(fixUpsideDown, centerFix), scaleFactor) 
      } 
      instruction.setTransform(concat, atTime: kCMTimeZero) 
     } 

     return instruction 
    } 
} 

@User511's answer, in Swift 3:

import UIKit 
import AssetsLibrary 
import AVFoundation 
import Photos 

enum QUWatermarkPosition { 
    case TopLeft 
    case TopRight 
    case BottomLeft 
    case BottomRight 
    case Default 
} 

class QUWatermarkManager: NSObject { 

    func watermark(video videoAsset:AVAsset, watermarkText text : String, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((_ status : AVAssetExportSessionStatus?, _ session: AVAssetExportSession?, _ outputURL : URL?) ->())?) { 
     self.watermark(video: videoAsset, watermarkText: text, imageName: nil, saveToLibrary: flag, watermarkPosition: position) { (status, session, outputURL) ->() in 
      completion!(status, session, outputURL) 
     } 
    } 

    func watermark(video videoAsset:AVAsset, imageName name : String, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((_ status : AVAssetExportSessionStatus?, _ session: AVAssetExportSession?, _ outputURL : URL?) ->())?) { 
     self.watermark(video: videoAsset, watermarkText: nil, imageName: name, saveToLibrary: flag, watermarkPosition: position) { (status, session, outputURL) ->() in 
      completion!(status, session, outputURL) 
     } 
    } 

    private func watermark(video videoAsset:AVAsset, watermarkText text : String!, imageName name : String!, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((_ status : AVAssetExportSessionStatus?, _ session: AVAssetExportSession?, _ outputURL : URL?) ->())?) { 
     DispatchQueue.global(qos: DispatchQoS.QoSClass.default).async { 

      let mixComposition = AVMutableComposition() 

      let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) 
      let clipVideoTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0] 
      do { 
       try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: clipVideoTrack, at: kCMTimeZero) 
      } 
      catch { 
       print(error.localizedDescription) 
      } 

      let videoSize = clipVideoTrack.naturalSize 

      let parentLayer = CALayer() 
      let videoLayer = CALayer() 
      parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height) 
      videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height) 
      parentLayer.addSublayer(videoLayer) 

      if text != nil { 
       let titleLayer = CATextLayer() 
       titleLayer.backgroundColor = UIColor.red.cgColor 
       titleLayer.string = text 
       titleLayer.font = "Helvetica" as CFTypeRef 
       titleLayer.fontSize = 15 
       titleLayer.alignmentMode = kCAAlignmentCenter 
       titleLayer.bounds = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height) 
       parentLayer.addSublayer(titleLayer) 
      } else if name != nil { 
       let watermarkImage = UIImage(named: name) 
       let imageLayer = CALayer() 
       imageLayer.contents = watermarkImage?.cgImage 

       var xPosition : CGFloat = 0.0 
       var yPosition : CGFloat = 0.0 
       let imageSize : CGFloat = 57.0 

       switch (position) { 
       case .TopLeft: 
        xPosition = 0 
        yPosition = 0 
        break 
       case .TopRight: 
        xPosition = videoSize.width - imageSize 
        yPosition = 0 
        break 
       case .BottomLeft: 
        xPosition = 0 
        yPosition = videoSize.height - imageSize 
        break 
       case .BottomRight, .Default: 
        xPosition = videoSize.width - imageSize 
        yPosition = videoSize.height - imageSize 
        break 
       } 


       imageLayer.frame = CGRect(x: xPosition, y: yPosition, width: imageSize, height: imageSize) 
       imageLayer.opacity = 0.65 
       parentLayer.addSublayer(imageLayer) 
      } 

      let videoComp = AVMutableVideoComposition() 
      videoComp.renderSize = videoSize 
      videoComp.frameDuration = CMTimeMake(1, 30) 
      videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer) 

      let instruction = AVMutableVideoCompositionInstruction() 
      instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration) 
      _ = mixComposition.tracks(withMediaType: AVMediaTypeVideo)[0] as AVAssetTrack 

      let layerInstruction = self.videoCompositionInstructionForTrack(track: compositionVideoTrack, asset: videoAsset) 

      instruction.layerInstructions = [layerInstruction] 
      videoComp.instructions = [instruction] 

      let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] 
      let dateFormatter = DateFormatter() 
      dateFormatter.dateStyle = .long 
      dateFormatter.timeStyle = .short 
      let date = dateFormatter.string(from: Date()) 
      let url = URL(fileURLWithPath: documentDirectory).appendingPathComponent("watermarkVideo-\(date).mov") 

      let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) 
      exporter?.outputURL = url 
      exporter?.outputFileType = AVFileTypeQuickTimeMovie 
      exporter?.shouldOptimizeForNetworkUse = true 
      exporter?.videoComposition = videoComp 

      exporter?.exportAsynchronously() { 
       DispatchQueue.main.async { 

        if exporter?.status == AVAssetExportSessionStatus.completed { 
         let outputURL = exporter?.outputURL 
         if flag { 
          // Save to library 

          if UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(outputURL!.path) { 
           PHPhotoLibrary.shared().performChanges({ 
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL!) 
           }) { saved, error in 
            if saved { 
             completion!(AVAssetExportSessionStatus.completed, exporter, outputURL) 
            } 
           } 
          } 

         } else { 
          completion!(AVAssetExportSessionStatus.completed, exporter, outputURL) 
         } 

        } else { 
         // Error 
         completion!(exporter?.status, exporter, nil) 
        } 
       } 
      } 
     } 
    } 


    private func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) { 
     var assetOrientation = UIImageOrientation.up 
     var isPortrait = false 
     if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 { 
      assetOrientation = .right 
      isPortrait = true 
     } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 { 
      assetOrientation = .left 
      isPortrait = true 
     } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 { 
      assetOrientation = .up 
     } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 { 
      assetOrientation = .down 
     } 
     return (assetOrientation, isPortrait) 
    } 

    private func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction { 
     let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track) 
     let assetTrack = asset.tracks(withMediaType: AVMediaTypeVideo)[0] 

     let transform = assetTrack.preferredTransform 
     let assetInfo = orientationFromTransform(transform: transform) 

     var scaleToFitRatio = UIScreen.main.bounds.width/assetTrack.naturalSize.width 
     if assetInfo.isPortrait { 
      scaleToFitRatio = UIScreen.main.bounds.width/assetTrack.naturalSize.height 
      let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio) 
      instruction.setTransform(assetTrack.preferredTransform.concatenating(scaleFactor), 
            at: kCMTimeZero) 
     } else { 
      let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio) 
      var concat = assetTrack.preferredTransform.concatenating(scaleFactor).concatenating(CGAffineTransform(translationX: 0, y: UIScreen.main.bounds.width/2)) 
      if assetInfo.orientation == .down { 
       let fixUpsideDown = CGAffineTransform(rotationAngle: CGFloat(Double.pi)) 
       let windowBounds = UIScreen.main.bounds 
       let yFix = assetTrack.naturalSize.height + windowBounds.height 
       let centerFix = CGAffineTransform(translationX: assetTrack.naturalSize.width, y: yFix) 
       concat = fixUpsideDown.concatenating(centerFix).concatenating(scaleFactor) 
      } 
      instruction.setTransform(concat, at: kCMTimeZero) 
     } 

     return instruction 
    } 
} 
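For completeness, a usage sketch for the Swift 3 class above (the videoURL value and the "logo" asset name are placeholders, not part of the answer):

let asset = AVAsset(url: videoURL) // videoURL: assumed file URL of the source video 
QUWatermarkManager().watermark(video: asset, 
         imageName: "logo", 
         saveToLibrary: true, 
         watermarkPosition: .BottomRight) { status, session, outputURL in 
    if status == .completed, let outputURL = outputURL { 
        print("Watermarked video exported to \(outputURL)") 
    } 
} 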

This solution works for me; it is super easy and the fastest:

func addWatermark(inputURL: URL, outputURL: URL, handler: @escaping (_ exportSession: AVAssetExportSession?) -> Void) { 
    let mixComposition = AVMutableComposition() 
    let asset = AVAsset(url: inputURL) 
    let videoTrack = asset.tracks(withMediaType: AVMediaType.video)[0] 
    let timerange = CMTimeRangeMake(kCMTimeZero, asset.duration) 

    // Note: this composition track is never exported; the export session below takes `asset` directly. 
    let compositionVideoTrack: AVMutableCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: CMPersistentTrackID(kCMPersistentTrackID_Invalid))! 

    do { 
        try compositionVideoTrack.insertTimeRange(timerange, of: videoTrack, at: kCMTimeZero) 
        compositionVideoTrack.preferredTransform = videoTrack.preferredTransform 
    } catch { 
        print(error) 
    } 

    // Each frame is composited with the watermark via a Core Image source-over filter. 
    let watermarkFilter = CIFilter(name: "CISourceOverCompositing")! 
    let watermarkImage = CIImage(image: UIImage(named: "waterMark")!) 
    let videoComposition = AVVideoComposition(asset: asset) { (filteringRequest) in 
        let source = filteringRequest.sourceImage.clampedToExtent() 
        watermarkFilter.setValue(source, forKey: "inputBackgroundImage") 
        // Place the watermark 2 pt in from the right edge of the frame. 
        let transform = CGAffineTransform(translationX: filteringRequest.sourceImage.extent.width - (watermarkImage?.extent.width)! - 2, y: 0) 
        watermarkFilter.setValue(watermarkImage?.transformed(by: transform), forKey: "inputImage") 
        filteringRequest.finish(with: watermarkFilter.outputImage!, context: nil) 
    } 

    guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPreset640x480) else { 
        handler(nil) 
        return 
    } 

    exportSession.outputURL = outputURL 
    exportSession.outputFileType = AVFileType.mp4 
    exportSession.shouldOptimizeForNetworkUse = true 
    exportSession.videoComposition = videoComposition 
    exportSession.exportAsynchronously { () -> Void in 
        handler(exportSession) 
    } 
}
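A usage sketch, with hypothetical file URLs (the handler may be called on a background thread):

let input = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("input.mp4") // hypothetical paths 
let output = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("watermarked.mp4") 

addWatermark(inputURL: input, outputURL: output) { exportSession in 
    guard let session = exportSession else { return } 
    if session.status == .completed { 
        print("Export finished: \(output)") 
    } else { 
        print("Export failed: \(String(describing: session.error))") 
    } 
} 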
