मैं क्लिप की एक श्रृंखला रिकॉर्ड करना चाहता हूं जो एक वीडियो प्लेयर में या ffmpeg -f concat
के माध्यम से एक साथ निरंतर चलाई जा सके। (विषय: AVAssetWriter से निरंतर सेगमेंट रिकॉर्ड करना)
अभी किसी भी परिदृश्य में, मुझे प्रत्येक सेगमेंट जॉइन पॉइंट पर एक बहुत ही ध्यान देने योग्य ऑडियो हिचकी मिल रही है।
मेरी वर्तमान रणनीति 2 AssetWriter
उदाहरणों को बनाए रखना है। प्रत्येक कट-ऑफ पॉइंट पर, मैं एक नया लेखक शुरू करता हूं, उसके तैयार होने तक प्रतीक्षा करता हूं, फिर उसे नमूने देना शुरू करता हूं। जब वीडियो और ऑडियो नमूने समय के एक विशिष्ट बिंदु तक पूरे हो जाते हैं, तो मैं पिछले लेखक को बंद कर देता हूं।
निरंतर (बिना अंतराल वाली) क्लिप रिकॉर्डिंग प्राप्त करने के लिए मैं इसे कैसे संशोधित करूं? इस समस्या का मूल कारण क्या है?
import Foundation
import UIKit
import AVFoundation
// Records a continuous capture session as a series of back-to-back MP4 segments by
// rotating three AVAssetWriter slots: "next" (pre-warmed) -> "current" (receiving
// samples) -> "closing" (draining samples up to the cut point, then finalized).
//
// NOTE(review): even with the append-routing bug below fixed, naive concatenation of
// independently encoded AAC segments still produces an audible gap at each join:
// every AAC stream begins with encoder priming frames (~2112 samples of delay, see
// Apple TN2258), and audio sample buffers carry many PCM frames each, so a cut at an
// arbitrary `sampleTime` cannot fall exactly on a buffer boundary. Gapless joins
// require either splitting the boundary CMSampleBuffer or a container/player that
// honors edit lists — TODO confirm against the capture format in use.
class StreamController: UIViewController, AVCaptureAudioDataOutputSampleBufferDelegate, AVCaptureVideoDataOutputSampleBufferDelegate {
    @IBOutlet weak var previewView: UIView!

    // Writer being drained: samples stamped before `closingTime` still belong here.
    var closingVideoInput: AVAssetWriterInput?
    var closingAudioInput: AVAssetWriterInput?
    var closingAssetWriter: AVAssetWriter?

    // Writer currently receiving new samples.
    var currentVideoInput: AVAssetWriterInput?
    var currentAudioInput: AVAssetWriterInput?
    var currentAssetWriter: AVAssetWriter?

    // Pre-warmed writer; promoted to `current` on the first sample after it is ready.
    var nextVideoInput: AVAssetWriterInput?
    var nextAudioInput: AVAssetWriterInput?
    var nextAssetWriter: AVAssetWriter?

    var previewLayer: AVCaptureVideoPreviewLayer?
    var videoHelper: VideoHelper?

    // Wall-clock reference used only for log output.
    var startTime: NSTimeInterval = 0

    override func viewDidLoad() {
        super.viewDidLoad()
        startTime = NSDate().timeIntervalSince1970
        // Warm up the first writer before the capture session starts delivering buffers.
        createSegmentWriter()
        videoHelper = VideoHelper()
        videoHelper!.delegate = self
        videoHelper!.startSession()
        // Rotate writers every 5 seconds. String selectors are unchecked at compile
        // time — the method name must stay in sync with `createSegmentWriter`.
        NSTimer.scheduledTimerWithTimeInterval(5, target: self, selector: "createSegmentWriter", userInfo: nil, repeats: true)
    }

    // Builds the "next" writer (H.264 960x540 video + stereo 44.1 kHz AAC audio) and
    // kicks off `startWriting()` so it is ready by the time the timer-driven cut occurs.
    func createSegmentWriter() {
        print("Creating segment writer at t=\(NSDate().timeIntervalSince1970 - self.startTime)")
        nextAssetWriter = try! AVAssetWriter(URL: NSURL(fileURLWithPath: OutputFileNameHelper.instance.pathForOutput()), fileType: AVFileTypeMPEG4)
        nextAssetWriter!.shouldOptimizeForNetworkUse = true
        let videoSettings: [String:AnyObject] = [AVVideoCodecKey: AVVideoCodecH264, AVVideoWidthKey: 960, AVVideoHeightKey: 540]
        nextVideoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
        nextVideoInput!.expectsMediaDataInRealTime = true
        nextAssetWriter?.addInput(nextVideoInput!)
        let audioSettings: [String:AnyObject] = [
            AVFormatIDKey: NSNumber(unsignedInt: kAudioFormatMPEG4AAC),
            AVSampleRateKey: 44100.0,
            AVNumberOfChannelsKey: 2,
        ]
        nextAudioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
        nextAudioInput!.expectsMediaDataInRealTime = true
        nextAssetWriter?.addInput(nextAudioInput!)
        nextAssetWriter!.startWriting()
    }

    override func viewDidAppear(animated: Bool) {
        super.viewDidAppear(animated)
        previewLayer = AVCaptureVideoPreviewLayer(session: videoHelper!.captureSession)
        previewLayer!.frame = self.previewView.bounds
        previewLayer!.videoGravity = AVLayerVideoGravityResizeAspectFill
        // BUG FIX: the original compared the optional to nil (`!= nil`), which is true
        // even when the connection does NOT support orientation changes. Test the
        // boolean value itself.
        if previewLayer?.connection?.supportsVideoOrientation == true {
            previewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.LandscapeRight
        }
        self.previewView.layer.addSublayer(previewLayer!)
    }

    // Finalizes the closing writer once BOTH tracks have received their last pre-cut
    // sample. finishWritingWithCompletionHandler marks any unfinished inputs finished.
    func closeWriter() {
        if videoFinished && audioFinished {
            let outputFile = closingAssetWriter?.outputURL.pathComponents?.last
            closingAssetWriter?.finishWritingWithCompletionHandler() {
                let delta = NSDate().timeIntervalSince1970 - self.startTime
                print("segment \(outputFile) finished at t=\(delta)")
            }
            self.closingAudioInput = nil
            self.closingVideoInput = nil
            self.closingAssetWriter = nil
            audioFinished = false
            videoFinished = false
        }
    }

    func closingVideoFinished() {
        if closingVideoInput != nil {
            videoFinished = true
            closeWriter()
        }
    }

    func closingAudioFinished() {
        if closingAudioInput != nil {
            audioFinished = true
            closeWriter()
        }
    }

    // Presentation time of the cut: samples stamped earlier belong to the closing
    // writer, samples at/after it to the current writer.
    var closingTime: CMTime = kCMTimeZero
    var audioFinished = false
    var videoFinished = false

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef, fromConnection connection: AVCaptureConnection!) {
        let sampleTime: CMTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        if let nextWriter = nextAssetWriter {
            // Equivalent to the original `status.rawValue != 0`: startWriting() has
            // taken effect (status is no longer .Unknown), so rotate the slots.
            if nextWriter.status != .Unknown {
                print("Switching asset writers at t=\(NSDate().timeIntervalSince1970 - self.startTime)")
                closingAssetWriter = currentAssetWriter
                closingVideoInput = currentVideoInput
                closingAudioInput = currentAudioInput
                currentAssetWriter = nextAssetWriter
                currentVideoInput = nextVideoInput
                currentAudioInput = nextAudioInput
                nextAssetWriter = nil
                nextVideoInput = nil
                nextAudioInput = nil
                closingTime = sampleTime
                // The new segment's media timeline starts at the cut timestamp.
                currentAssetWriter!.startSessionAtSourceTime(sampleTime)
            }
        }
        if currentAssetWriter != nil {
            if let _ = captureOutput as? AVCaptureVideoDataOutput {
                if (CMTimeCompare(sampleTime, closingTime) < 0) {
                    if closingVideoInput?.readyForMoreMediaData == true {
                        closingVideoInput?.appendSampleBuffer(sampleBuffer)
                    }
                } else {
                    closingVideoFinished()
                    if currentVideoInput?.readyForMoreMediaData == true {
                        currentVideoInput?.appendSampleBuffer(sampleBuffer)
                    }
                }
            } else if let _ = captureOutput as? AVCaptureAudioDataOutput {
                if (CMTimeCompare(sampleTime, closingTime) < 0) {
                    // BUG FIX: pre-cut audio must go to the *closing* writer,
                    // mirroring the video branch above. The original appended these
                    // buffers to `currentAudioInput`, whose session starts at
                    // `closingTime` — so every segment lost its tail audio and the
                    // next segment began with out-of-session samples, producing an
                    // audible hiccup at each join.
                    if closingAudioInput?.readyForMoreMediaData == true {
                        closingAudioInput?.appendSampleBuffer(sampleBuffer)
                    }
                } else {
                    closingAudioFinished()
                    if currentAudioInput?.readyForMoreMediaData == true {
                        currentAudioInput?.appendSampleBuffer(sampleBuffer)
                    }
                }
            }
        }
    }

    override func shouldAutorotate() -> Bool {
        return true
    }

    override func supportedInterfaceOrientations() -> UIInterfaceOrientationMask {
        return [UIInterfaceOrientationMask.LandscapeRight]
    }
}
आप इसे अंततः कैसे हल कर पाए, इस पर कोई अपडेट? – Andy
मैं एक समान समस्या को हल करने की कोशिश कर रहा हूं http://stackoverflow.com/questions/43322157/split-cmsamplebufferref-containing-audio –