2017-08-10 25 views
10

AVMutableComposition वीडियो के आकार को क्यों बढ़ा रहा है? — iOS, स्विफ्ट, AVFoundation

मान लें कि हमारे पास दो वीडियो एसेट (AVAsset ऑब्जेक्ट) हैं — "खाली" और "मुख्य"। "मुख्य" सीमित लेकिन यादृच्छिक लंबाई (मान लें 2–5 मिनट) का वीडियो है, जबकि "खाली" हमेशा 4 सेकंड का वीडियो होता है। हम वीडियो को निम्न क्रम में मर्ज करना चाहते हैं:

खाली - मुख्य - खाली

// Create AVMutableComposition Object.This object will hold our multiple AVMutableCompositionTrack. 

    let mixComposition = AVMutableComposition() 

    // Desired output order: blank - main - blank.
    let assets = [blank, main, blank] 
    // Running end-time of everything inserted so far (next insertion point).
    var totalTime : CMTime = CMTimeMake(0, 0) 
    // Insertion point for the current asset on the composition timeline.
    var atTimeM: CMTime = CMTimeMake(0, 0) 

    Utils.log([blank.duration, main.duration]) 

    // VIDEO TRACK 
    // NOTE: a SINGLE mutable video track receives all three clips back-to-back.
    let videoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) 

    for (index,asset) in assets.enumerated() { 

     do { 

      if index == 0 { 
       atTimeM = kCMTimeZero 
      } else { 
       atTimeM = totalTime // <-- Use the total time for all the videos seen so far. 
      } 

      // Insert the asset's first video track, whole duration, at the running offset.
      try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, asset.duration), of: asset.tracks(withMediaType: AVMediaTypeVideo)[0], at: atTimeM) 

     } catch let error as NSError { 
      // Insertion failure is logged but NOT propagated; the loop keeps going,
      // so a failed clip silently shortens the output.
      Utils.log("error: \(error)") 
     } 

     totalTime = CMTimeAdd(totalTime, asset.duration) 
    } 

    // AUDIO TRACK 
    // Audio comes only from `main`, offset by one blank so it lines up with
    // main's video segment. The blanks contribute no audio.
    let audioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid) 
    do { 
     try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, main.duration), of: main.tracks(withMediaType: AVMediaTypeAudio)[0], at: blank.duration) 
    } catch _ { 
     completionHandler(nil, ErrorType(rawValue: "Unable to add audio in composition.")) 
     return 
    } 

    let outputURL = mainVideoObject.getDirectoryURL()?.appendingPathComponent("video-with-blank.mp4") 

    guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPreset1280x720) else { 
     completionHandler(nil, ErrorType(rawValue: "Unable to create export session.")) 
     return 
    } 

    // One instruction covering the full blank+main+blank timeline.
    let mainInstruction = AVMutableVideoCompositionInstruction() 

    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(blank.duration, CMTimeAdd(main.duration, blank.duration))) 

    // Fixing orientation 
    // NOTE(review): all three layer instructions below reference the SAME
    // composition track (videoTrack). AVFoundation pairs one layer instruction
    // per track, so presumably only the first takes effect — verify whether
    // separate composition tracks per clip were intended.
    let firstLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack) 
    let firstAssetTrack = blank.tracks(withMediaType: AVMediaTypeVideo)[0] 
    firstLayerInstruction.setTransform(firstAssetTrack.preferredTransform, at: kCMTimeZero) 
    firstLayerInstruction.setOpacity(0.0, at: blank.duration) 

    let secondLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack) 
    let secondAssetTrack = main.tracks(withMediaType: AVMediaTypeVideo)[0] 
    var isSecondAssetPortrait = false 
    let secondTransform = secondAssetTrack.preferredTransform 
    // 90° / -90° rotation matrices indicate a portrait recording.
    if (secondTransform.a == 0 && secondTransform.b == 1.0 && secondTransform.c == -1.0 && secondTransform.d == 0) { 
     isSecondAssetPortrait = true 
    } 
    if (secondTransform.a == 0 && secondTransform.b == -1.0 && secondTransform.c == 1.0 && secondTransform.d == 0) { 
     isSecondAssetPortrait = true 
    } 
    secondLayerInstruction.setTransform(secondAssetTrack.preferredTransform, at: blank.duration) 
    secondLayerInstruction.setOpacity(0.0, at: CMTimeAdd(blank.duration, main.duration)) 

    let thirdLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack) 
    let thirdAssetTrack = blank.tracks(withMediaType: AVMediaTypeVideo)[0] 
    thirdLayerInstruction.setTransform(thirdAssetTrack.preferredTransform, at: CMTimeAdd(blank.duration, main.duration)) 

    mainInstruction.layerInstructions = [firstLayerInstruction, secondLayerInstruction, thirdLayerInstruction] 

    // Swap width/height for portrait so the render size matches the rotated frame.
    var naturalSize = CGSize() 
    if(isSecondAssetPortrait) { 
     naturalSize = CGSize(width: secondAssetTrack.naturalSize.height, height: secondAssetTrack.naturalSize.width) 
    } else { 
     naturalSize = secondAssetTrack.naturalSize 
    } 

    let renderWidth = naturalSize.width 
    let renderHeight = naturalSize.height 

    let mainCompositionInst = AVMutableVideoComposition() 
    mainCompositionInst.instructions = [mainInstruction] 
    mainCompositionInst.frameDuration = CMTimeMake(1, 30) 
    mainCompositionInst.renderSize = CGSize(width: renderWidth, height: renderHeight) 

    exporter.outputURL = outputURL 
    exporter.outputFileType = AVFileTypeMPEG4 
    // NOTE(review): setting a videoComposition forces the exporter to decode and
    // re-render every frame instead of passing samples through — presumably this
    // re-encode (at the preset's bitrate) is what inflates the output size and
    // export time reported in the question; confirm by exporting once without it.
    exporter.videoComposition = mainCompositionInst 
    //exporter.shouldOptimizeForNetworkUse = true 

    exporter.exportAsynchronously { 
     if exporter.status == .completed { 
      completionHandler(AVAsset(url: outputURL!), nil) 
     } else { 
      completionHandler(nil, ErrorType(rawValue: "Unable to export video.")) 
      if let error = exporter.error { 
       Utils.log("Unable to export video. \(error)") 
      } 
     } 
    } 

यह मानते हुए कि 720p गुणवत्ता पर 5 मिनट का मूल रिकॉर्ड किया गया वीडियो लगभग 200 MB स्पेस लेता है, मुख्य वीडियो की शुरुआत और अंत में 4-सेकंड का खाली वीडियो जोड़ने से आकार में भारी बदलाव नहीं आना चाहिए, और प्रोसेसिंग भी बहुत तेज़ होनी चाहिए।

परिणाम हालांकि एक वीडियो है जो मूल वीडियो (इसलिए 400 - 500 एमबी) के आकार से 2 से 2.5x है और प्रक्रिया करने में बहुत लंबा समय लगता है।

कृपया सलाह,

धन्यवाद

+0

क्या आप कृपया मुझे उस खाली वीडियो फ़ाइल का नमूना भेज सकते हैं जिसे आप बीच में जोड़ने के लिए उपयोग करते हैं? – MinuMaster

उत्तर

1

यहाँ मैंने एक कस्टम क्लास तैयार की है जिसमें आप सिर्फ अपने वीडियो के नाम पास करते हैं और उन वीडियो को ऐप बंडल में रखते हैं। एक बार जब आप अपना ऐप चलाएंगे तो यह आपकी आवश्यकता के अनुसार एक नई वीडियो फ़ाइल उत्पन्न करेगा और उसे एप्लिकेशन की दस्तावेज़ निर्देशिका में छोड़ देगा।

स्विफ्ट 4 का उपयोग करते हुए मैंने यह डेमो तैयार किया है:

// 
// ViewController.swift 
// SOVideoMergingDemo 
// 
// Created by iOS Test User on 03/01/18. 
// Copyright © 2018 Test User. Ltd. All rights reserved. 
// 

import UIKit 
import AVFoundation 
import MediaPlayer 
import Photos 
import AssetsLibrary 
import AVKit 


class ViewController : UIViewController {

    //--------------------------------------------------
    //MARK:
    //MARK: - IBOutlets
    //--------------------------------------------------




    //--------------------------------------------------
    //MARK:
    //MARK: - Properties
    //--------------------------------------------------

    var videoUrls : [URL]  = []
    var arrVideoAsset : [AVAsset] = []          // assets to merge, in playback order
    let video1 = "1"                            // bundled resource name ("1.mp4")
    let video2 = "2"                            // bundled resource name ("2.mp4")
    let outPutVideo = "MergedVideo.mp4"         // file name of the exported merge

    let semaphore = DispatchSemaphore(value: 1)


    //--------------------------------------------------
    //MARK:
    //MARK: - Custom Methods
    //--------------------------------------------------

    /// Returns the file URL of an mp4 resource bundled with the app.
    /// Force-unwraps the path: a missing bundled file is a programmer error,
    /// so crashing early is acceptable for this demo.
    func getVideoURL(forVideo : String) -> URL {
     let videoPath = Bundle.main.path(forResource: forVideo, ofType:"mp4")
     let vidURL = URL(fileURLWithPath: videoPath!)
     return vidURL
    }

    //--------------------------------------------------

    /// Merges the first three assets of `arrVideoAsset` back-to-back
    /// ([0] → [1] → [2]) into one composition and exports it asynchronously
    /// to `<documents>/MergedVideo.mp4`.
    func mergeVideos(arrVideoAsset : [AVAsset]) {

     let mixComposition = AVMutableComposition()

     //Tracks to insert in Composition for Merging
     // One composition video track per clip, so each clip can carry its own
     // layer instruction (transform / opacity) in the video composition.
     let firstTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
     let secondTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
     let thirdTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)

     // Timeline offsets: clip N starts where clips 0..N-1 end.
     let firstDuration = arrVideoAsset[0].duration
     let secondDuration = arrVideoAsset[1].duration
     let thirdDuration = arrVideoAsset[2].duration
     let firstTwoDuration = CMTimeAdd(firstDuration, secondDuration)

     do {
      try firstTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, firstDuration), of: arrVideoAsset[0].tracks(withMediaType: .video)[0], at: kCMTimeZero)
     } catch _ {
      print("Failed to load first track")
     }

     do {
      try secondTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, secondDuration), of: arrVideoAsset[1].tracks(withMediaType: .video)[0], at: firstDuration)
     } catch _ {
      print("Failed to load second track")
     }

     do {
      // BUG FIX: the third clip must start after BOTH preceding clips.
      // The original inserted it at arrVideoAsset[1].duration (overlapping
      // clip 2) and sourced it from asset [0] instead of asset [2].
      try thirdTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, thirdDuration), of: arrVideoAsset[2].tracks(withMediaType: .video)[0], at: firstTwoDuration)
     } catch _ {
      print("Failed to load third track")
     }

     //This Instruciton is Created for Merging Video Tracks
     // A single instruction spanning the entire merged timeline.
     let compositionInstruction = AVMutableVideoCompositionInstruction()
     compositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstTwoDuration, thirdDuration))

     //Creating Layer Instruction for Videos
     // Each clip is hidden (opacity 0) the moment the next clip starts,
     // otherwise an earlier track would keep covering the later ones.
     let firstInstruction = videoCompositionInstructionForTrack(firstTrack!, asset: arrVideoAsset[0])
     firstInstruction.setOpacity(0.0, at: firstDuration)
     let secondInstruction = videoCompositionInstructionForTrack(secondTrack!, asset: arrVideoAsset[1])
     // BUG FIX: on the composition timeline the second clip ends at
     // firstDuration + secondDuration, not at secondDuration.
     secondInstruction.setOpacity(0.0, at: firstTwoDuration)
     let thirdInstruction = videoCompositionInstructionForTrack(thirdTrack!, asset: arrVideoAsset[2])

     compositionInstruction.layerInstructions = [firstInstruction, secondInstruction, thirdInstruction]

     //By Changing These Height and Width User can affect Size of Merged Video. Calucalte it Carefully and As per you needs
     // Render size: the smaller of the first two clips' dimensions, so neither
     // is upscaled.
     let height = (Float((firstTrack?.naturalSize.height)!) < Float((secondTrack?.naturalSize.height)!)) ? firstTrack?.naturalSize.height : secondTrack?.naturalSize.height

     let width = (Float((firstTrack?.naturalSize.width)!) < Float((secondTrack?.naturalSize.width)!)) ? firstTrack?.naturalSize.width : secondTrack?.naturalSize.width

     let mainComposition = AVMutableVideoComposition()
     mainComposition.instructions = [compositionInstruction]
     mainComposition.frameDuration = CMTimeMake(1, 30)   // 30 fps output
     mainComposition.renderSize = CGSize(width:width!,height: height!)

     // NOTE(review): a videoComposition forces a full re-render of every frame;
     // combined with AVAssetExportPresetHighestQuality this is presumably what
     // inflates output size — switch to a lower preset if file size matters.
     let exporter = AVAssetExportSession(asset:mixComposition, presetName: AVAssetExportPresetHighestQuality)
     exporter?.outputURL = URL(fileURLWithPath: getDocumentDirectoryPath() + "/" + outPutVideo)
     exporter?.outputFileType = AVFileType.mp4
     exporter?.shouldOptimizeForNetworkUse = true
     exporter?.videoComposition = mainComposition
     print(self.getDocumentDirectoryPath())

     exporter?.exportAsynchronously(completionHandler: {
      DispatchQueue.main.async {
       if exporter?.status == AVAssetExportSessionStatus.completed {
        // BUG FIX: the original read the exported file back into memory and
        // wrote the identical bytes to the same path — a pointless copy that
        // doubled I/O (and could spike memory for long videos). The exporter
        // has already written the file at outputURL.
        print("Merged video saved at \(String(describing: exporter?.outputURL))")

        //Uncomment This If you want to save video in Photos Library
//     PHPhotoLibrary.shared().performChanges({
//      PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: (exporter?.outputURL)!)
//     }, completionHandler: { (success, error) in
//      if success {
//       let fetchOptions = PHFetchOptions()
//       fetchOptions.sortDescriptors = [NSSortDescriptor.init(key:"creationDate", ascending: false)]
//       _ = PHAsset.fetchAssets(with: .video, options:fetchOptions).firstObject
//      } else {
//       print("Error in Saving File in Photo Libaray -> \(String(describing: error?.localizedDescription))")
//      }
//     })
       } else {
        print("Error -> \(String(describing: exporter?.error?.localizedDescription))")
       }
      }
     })

    }

    //--------------------------------------------------

    //This Methiod is Used to Make Layer Instruction for Particular Video
    /// Builds a layer instruction for `track`, applying the source asset's
    /// preferredTransform so recorded orientation is preserved.
    func videoCompositionInstructionForTrack(_ track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
     let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
     let assetTrack = asset.tracks(withMediaType: AVMediaType.video)[0]
     // Identity scale kept as an extension point for custom sizing.
     let scale : CGAffineTransform = CGAffineTransform(scaleX: 1, y:1)
     instruction.setTransform(assetTrack.preferredTransform.concatenating(scale), at: kCMTimeZero)
     return instruction
    }

    //--------------------------------------------------

    /// Returns the app's documents directory path (export destination).
    func getDocumentDirectoryPath() -> String {
     let arrPaths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
     return arrPaths[0]
    }

    //--------------------------------------------------
    //MARK:
    //MARK: - View Life Cycle Methods
    //--------------------------------------------------

    override func viewDidLoad() {
     super.viewDidLoad()
     // Do any additional setup after loading the view.

     //Prepare Video Assets: blank - main - blank ordering.
     arrVideoAsset.append(AVAsset(url:getVideoURL(forVideo:video1)))
     arrVideoAsset.append(AVAsset(url:getVideoURL(forVideo:video2)))
     arrVideoAsset.append(AVAsset(url:getVideoURL(forVideo:video1)))

     //Merge this Videos
     mergeVideos(arrVideoAsset:arrVideoAsset)
    }
}
संबंधित मुद्दे