
How to correctly handle orientation when capturing video with AVAssetWriter on iOS

I'm building a sample application that records video using AVFoundation. The whole point of the exercise is to get finer-grained control over how the video is recorded. In my sample project I'm capturing video successfully, but I'm struggling to handle orientation correctly.

I've done a lot of searching on the web, and others suggest that instead of rotating frames in the capture view or capture session based on orientation, you should set the video to be rotated during playback. I have that working, but I wonder whether it will cause problems on other platforms such as Windows and Android.
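(For context, that playback-time approach amounts to tagging the track with a rotation transform rather than rotating pixels. A minimal sketch, assuming the same Swift 3-era AVFoundation API as the code below:)

import AVFoundation

// The transform is stored as track metadata (preferredTransform); players that
// honor it display the video rotated, but the encoded frames are untouched.
let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecH264,
                                    AVVideoWidthKey: 640,
                                    AVVideoHeightKey: 480]
let input = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
input.transform = CGAffineTransform(rotationAngle: CGFloat.pi / 2) // e.g. portrait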

Also, when I inspect the metadata of a recorded video, I can see that the width and height are not set appropriately for the orientation. That makes sense, because I'm only changing the presentation of the video, not its actual pixel resolution.
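(A quick way to see this for yourself; a sketch assuming outputUrl points at the recording produced by the code below:)

let asset = AVURLAsset(url: outputUrl)
if let track = asset.tracks(withMediaType: AVMediaTypeVideo).first {
    print("encoded size: \(track.naturalSize)")              // stays 640x480 in every orientation
    print("display transform: \(track.preferredTransform)")  // rotation applied only at playback
}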

My question is: how do I properly support portrait and landscape orientations and have them correctly reflected in the video file output? These videos need to play back correctly on every platform, so I suspect the recorded resolution is going to matter a great deal.

Below is the full source I have written so far. I would appreciate any advice you can offer.

import UIKit 
import AVFoundation 

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate { 

    //MARK: - Outlet 

    @IBOutlet weak var previewView: UIView! 
    @IBOutlet var playStopButton: UIButton! 

    //MARK: - Private Variables 

    // Note: sample-buffer delegate queues must be serial, so no .concurrent attribute here. 
    let videoQueue = DispatchQueue(label: "VideoQueue", qos: .background, autoreleaseFrequency: .workItem, target: nil) 
    private let captureSession = AVCaptureSession() 

    var outputUrl: URL { 
     if let url = _outputUrl { 
      return url 
     } 

     _outputUrl = outputDirectory.appendingPathComponent("video.mp4") 
     return _outputUrl! 
    } 

    private var _outputUrl: URL? 

    var outputDirectory: URL { 
     if let url = _outputDirectory { 
      return url 
     } 

     _outputDirectory = getDocumentsDirectory().appendingPathComponent("recording") 
     return _outputDirectory! 
    } 

    private var _outputDirectory: URL? 

    private var assetWriter: AVAssetWriter? 
    private var videoInput: AVAssetWriterInput? 
    private var audioInput: AVAssetWriterInput? 
    private var videoOutput: AVCaptureVideoDataOutput? 
    private var audioOutput: AVCaptureAudioDataOutput? 

    private var isRecording = false 
    private var isWriting = false 

    private var videoSize = CGSize(width: 640, height: 480) 

    //MARK: - View Life-cycle 

    override func viewDidLoad() { 
     super.viewDidLoad() 


     videoQueue.async { 

      do { 

       try self.configureCaptureSession() 
       try self.configureAssetWriter() 

       DispatchQueue.main.async { 
        self.configurePreview() 
       } 

      } catch { 

       DispatchQueue.main.async { 
        self.showAlert("Unable to configure video output") 
       } 
      } 
     } 
    } 

    override var preferredInterfaceOrientationForPresentation: UIInterfaceOrientation { 
     return .portrait 
    } 

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask { 
     return .portrait 
    } 

    //MARK: - Capture Session 

    private func configureCaptureSession() throws { 

     do { 

      // configure the session 
      if captureSession.canSetSessionPreset(AVCaptureSessionPreset640x480) { 
       captureSession.sessionPreset = AVCaptureSessionPreset640x480 
      } 

      // configure capture devices 
      let camDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo) 
      let micDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio) 

      let camInput = try AVCaptureDeviceInput(device: camDevice) 
      let micInput = try AVCaptureDeviceInput(device: micDevice) 

      if captureSession.canAddInput(camInput) { 
       captureSession.addInput(camInput) 
      } 

      if captureSession.canAddInput(micInput) { 
       captureSession.addInput(micInput) 
      } 

      // configure audio/video output 
      videoOutput = AVCaptureVideoDataOutput() 
      videoOutput?.alwaysDiscardsLateVideoFrames = false // TODO: is this necessary? 
      videoOutput?.setSampleBufferDelegate(self, queue: videoQueue) 

      if let v = videoOutput { 
       captureSession.addOutput(v) 
      } 

      audioOutput = AVCaptureAudioDataOutput() 
      audioOutput?.setSampleBufferDelegate(self, queue: videoQueue) 

      if let a = audioOutput { 
       captureSession.addOutput(a) 
      } 

      // configure audio session 
      let audioSession = AVAudioSession.sharedInstance() 
      try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord) 
      try audioSession.setActive(true) 

      var micPort: AVAudioSessionPortDescription? 

      if let inputs = audioSession.availableInputs { 
       for port in inputs { 
        if port.portType == AVAudioSessionPortBuiltInMic { 
         micPort = port 
         break 
        } 
       } 
      } 

      if let port = micPort, let dataSources = port.dataSources { 

       for source in dataSources { 
        if source.orientation == AVAudioSessionOrientationFront { 
         try audioSession.setPreferredInput(port) 
         break 
        } 
       } 
      } 

     } catch { 
      print("Failed to configure audio/video capture session") 
      throw error 
     } 
    } 

    private func configureAssetWriter() throws { 

     prepareVideoFile() 

     do { 

      assetWriter = try AVAssetWriter(url: outputUrl, fileType: AVFileTypeMPEG4) 

      guard let writer = assetWriter else { 
       print("Asset writer not created") 
       return 
      } 

      let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecH264, 
           AVVideoWidthKey: NSNumber(value: Float(videoSize.width)), 
           AVVideoHeightKey: NSNumber(value: Float(videoSize.height))] 

      videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings) 
      videoInput?.expectsMediaDataInRealTime = true 
      videoInput?.transform = getVideoTransform() 
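      // Note: this transform only tags the track for playback-time rotation; 
      // the encoded frames keep the width/height from videoSettings above. 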

      var channelLayout = AudioChannelLayout() 
      memset(&channelLayout, 0, MemoryLayout<AudioChannelLayout>.size) 
      channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo 

      // Attach the stereo layout built above via AVChannelLayoutKey. 
      let audioSettings: [String: Any] = [AVFormatIDKey: kAudioFormatMPEG4AAC, 
               AVSampleRateKey: 44100, 
               AVNumberOfChannelsKey: 2, 
               AVChannelLayoutKey: Data(bytes: &channelLayout, count: MemoryLayout<AudioChannelLayout>.size)] 

      audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings) 
      audioInput?.expectsMediaDataInRealTime = true 

      guard let vi = videoInput else { 
       print("Video input not configured") 
       return 
      } 

      guard let ai = audioInput else { 
       print("Audio input not configured") 
       return 
      } 

      if writer.canAdd(vi) { 
       writer.add(vi) 
      } 

      if writer.canAdd(ai) { 
       writer.add(ai) 
      } 

     } catch { 
      print("Failed to configure asset writer") 
      throw error 
     } 
    } 

    private func prepareVideoFile() { 

     if FileManager.default.fileExists(atPath: outputUrl.path) { 

      do { 
       try FileManager.default.removeItem(at: outputUrl) 
      } catch { 
       print("Unable to remove file at URL \(outputUrl)") 
      } 
     } 

     if !FileManager.default.fileExists(atPath: outputDirectory.path) { 

      do { 
       try FileManager.default.createDirectory(at: outputDirectory, withIntermediateDirectories: true, attributes: nil) 
      } catch { 
       print("Unable to create directory at URL \(outputDirectory)") 
      } 
     } 
    } 

    private func configurePreview() { 

     if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) { 
      previewLayer.frame = previewView.bounds 
      previewView.layer.addSublayer(previewLayer) 
     } 
    } 

    private func getVideoSize() -> CGSize { 

     if UIDevice.current.orientation == .landscapeLeft || UIDevice.current.orientation == .landscapeRight { 

      if videoSize.width > videoSize.height { 
       return videoSize 
      } else { 
       return CGSize(width: videoSize.height, height: videoSize.width) 
      } 

     } else { 

      if videoSize.width < videoSize.height { 
       return videoSize 
      } else { 
       return CGSize(width: videoSize.height, height: videoSize.width) 
      } 
     } 
    } 

    private func getVideoTransform() -> CGAffineTransform { 

     switch UIDevice.current.orientation { 

     case .portraitUpsideDown: 
      return CGAffineTransform(rotationAngle: -CGFloat.pi / 2) 

     case .landscapeLeft: 
      return CGAffineTransform(rotationAngle: -CGFloat.pi) // TODO: use 0 for the front-facing camera 

     case .landscapeRight: 
      return CGAffineTransform(rotationAngle: 0) // TODO: use -CGFloat.pi for the front-facing camera 

     default: // portrait 
      return CGAffineTransform(rotationAngle: CGFloat.pi / 2) 
     } 
    } 

    //MARK: - Controls 

    private func startRecording() { 

     videoQueue.async { 
      self.captureSession.startRunning() 
     } 

     isRecording = true 
     playStopButton.setTitle("Stop Recording", for: .normal) 
     print("Recording did start") 
    } 

    private func stopRecording() { 

     if !isRecording { 
      return 
     } 

     videoQueue.async { 

      // Mark the inputs finished before asking the writer to finish the file. 
      self.videoInput?.markAsFinished() 
      self.audioInput?.markAsFinished() 

      self.assetWriter?.finishWriting { 
       print("Asset writer did finish writing") 
       self.isWriting = false 
      } 

      self.captureSession.stopRunning() 
     } 

     isRecording = false 

     playStopButton.setTitle("Start Recording", for: .normal) 
     print("Recording did stop") 
    } 

    //MARK: - AVCaptureVideoDataOutputSampleBufferDelegate 

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) { 

     guard let w = assetWriter else { 
      print("Asset writer not configured") 
      return 
     } 

     guard let vo = videoOutput else { 
      print("Video output not configured") 
      return 
     } 

     guard let ao = audioOutput else { 
      print("Audio output not configured") 
      return 
     } 

     guard let vi = videoInput else { 
      print("Video input not configured") 
      return 
     } 

     guard let ai = audioInput else { 
      print("Audio input not configured") 
      return 
     } 

     let st = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) 

     print("Writer status \(w.status.rawValue)") 

     if let e = w.error { 
      print("Writer error \(e)") 
      stopRecording() 
      return 
     } 

     switch w.status { 

     case .unknown: 

      if !isWriting { 
       isWriting = true 
       w.startWriting() 
       w.startSession(atSourceTime: st) 
      } 

      return 

     case .completed: 
      print("Video writing completed") 
      return 

     case .cancelled: 
      print("Video writing cancelled") 
      return 

     case .failed: 
      print("Video writing failed") 
      return 

     default: 
      print("Video is writing") 
     } 

     if vo == captureOutput { 

      if !vi.append(sampleBuffer) { 
       print("Unable to write to video buffer") 
      } 

     } else if ao == captureOutput { 

      if !ai.append(sampleBuffer) { 
       print("Unable to write to audio buffer") 
      } 
     } 
    } 

    //MARK: Helpers 

    private func getDocumentsDirectory() -> URL { 
     let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask) 
     let documentsDirectory = paths[0] 
     return documentsDirectory 
    } 
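
    // showAlert(_:) is referenced above but was not part of the original post; 
    // a minimal sketch of an assumed implementation so the sample compiles: 
    private func showAlert(_ message: String) { 
     let alert = UIAlertController(title: nil, message: message, preferredStyle: .alert) 
     alert.addAction(UIAlertAction(title: "OK", style: .default, handler: nil)) 
     present(alert, animated: true, completion: nil) 
    } 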

    //MARK: Actions 

    @IBAction func startStopTapped(sender: AnyObject) { 

     if isRecording { 
      stopRecording() 
     } else { 
      startRecording() 
     } 
    } 
} 
Comments:

No information at all? Has nobody tried to do this? –

If you find a way, please let Apple know, they seem to get rotation wrong themselves sometimes :) – Mindaugas

Answer


I found the solution to my problem. The fix is to export the video with AVAssetExportSession, handling the video-size scaling and the rotation at export time rather than at capture time. I had to correct a scale-factor issue when going from the original video size down to the smaller 640x480 resolution, but at least my rotation problem is solved. The relevant pieces are getVideoComposition(asset:videoSize:) and export() in the updated code below.

import UIKit 
import AVFoundation 

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate { 

    //MARK: - Outlet 

    @IBOutlet weak var previewView: UIView! 
    @IBOutlet var playStopButton: UIButton! 

    //MARK: - Private Variables 

    // Note: sample-buffer delegate queues must be serial, so no .concurrent attribute here. 
    let videoQueue = DispatchQueue(label: "VideoQueue", qos: .background, autoreleaseFrequency: .workItem, target: nil) 
    private let captureSession = AVCaptureSession() 

    var outputUrl: URL { 
     if let url = _outputUrl { 
      return url 
     } 

     _outputUrl = outputDirectory.appendingPathComponent("video.mp4") 
     return _outputUrl! 
    } 

    private var _outputUrl: URL? 

    var exportUrl: URL { 
     if let url = _exportUrl { 
      return url 
     } 

     _exportUrl = outputDirectory.appendingPathComponent("video_encoded.mp4") 
     return _exportUrl! 
    } 

    private var _exportUrl: URL? 

    var outputDirectory: URL { 
     if let url = _outputDirectory { 
      return url 
     } 

     _outputDirectory = getDocumentsDirectory().appendingPathComponent("recording") 
     return _outputDirectory! 
    } 

    private var _outputDirectory: URL? 

    private var assetWriter: AVAssetWriter? 
    private var videoInput: AVAssetWriterInput? 
    private var audioInput: AVAssetWriterInput? 
    private var videoOutput: AVCaptureVideoDataOutput? 
    private var audioOutput: AVCaptureAudioDataOutput? 

    private var isRecording = false 
    private var isWriting = false 

    private var videoSize = CGSize(width: 640, height: 480) 
    private var exportPreset = AVAssetExportPreset640x480 

    //MARK: - View Life-cycle 

    override func viewDidLoad() { 
     super.viewDidLoad() 

     videoQueue.async { 

      do { 

       try self.configureCaptureSession() 

       DispatchQueue.main.sync { 
        self.configurePreview() 
       } 

      } catch { 

       DispatchQueue.main.async { 
        self.showAlert("Unable to configure capture session") 
       } 
      } 
     } 
    } 

    override var preferredInterfaceOrientationForPresentation: UIInterfaceOrientation { 
     return .portrait 
    } 

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask { 
     return .portrait 
    } 

    //MARK: - Capture Session 

    private func configureCaptureSession() throws { 

     do { 

      // configure capture devices 
      let camDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo) 
      let micDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio) 

      let camInput = try AVCaptureDeviceInput(device: camDevice) 
      let micInput = try AVCaptureDeviceInput(device: micDevice) 

      if captureSession.canAddInput(camInput) { 
       captureSession.addInput(camInput) 
      } 

      if captureSession.canAddInput(micInput) { 
       captureSession.addInput(micInput) 
      } 

      // configure audio/video output 
      videoOutput = AVCaptureVideoDataOutput() 
      videoOutput?.alwaysDiscardsLateVideoFrames = false // TODO: is this necessary? 
      videoOutput?.setSampleBufferDelegate(self, queue: videoQueue) 

      if let v = videoOutput { 
       captureSession.addOutput(v) 
      } 

      audioOutput = AVCaptureAudioDataOutput() 
      audioOutput?.setSampleBufferDelegate(self, queue: videoQueue) 

      if let a = audioOutput { 
       captureSession.addOutput(a) 
      } 

      // configure audio session 
      let audioSession = AVAudioSession.sharedInstance() 
      try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord) 
      try audioSession.setActive(true) 

      var micPort: AVAudioSessionPortDescription? 

      if let inputs = audioSession.availableInputs { 
       for port in inputs { 
        if port.portType == AVAudioSessionPortBuiltInMic { 
         micPort = port 
         break 
        } 
       } 
      } 

      if let port = micPort, let dataSources = port.dataSources { 

       for source in dataSources { 
        if source.orientation == AVAudioSessionOrientationFront { 
         try audioSession.setPreferredInput(port) 
         break 
        } 
       } 
      } 

     } catch { 
      print("Failed to configure audio/video capture session") 
      throw error 
     } 
    } 

    private func configureAssetWriter() throws { 

     prepareVideoFile() 

     do { 

      if assetWriter != nil { 
       assetWriter = nil 
       videoInput = nil 
       audioInput = nil 
      } 

      assetWriter = try AVAssetWriter(url: outputUrl, fileType: AVFileTypeMPEG4) 

      guard let writer = assetWriter else { 
       print("Asset writer not created") 
       return 
      } 

      let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecH264, 
           AVVideoWidthKey: NSNumber(value: Float(videoSize.width)), 
           AVVideoHeightKey: NSNumber(value: Float(videoSize.height))] 

      videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings) 
      videoInput?.expectsMediaDataInRealTime = true 
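      // No transform is set on the writer input in this version; rotation is 
      // baked into the pixels later, in export(), via the video composition. 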

      var channelLayout = AudioChannelLayout() 
      memset(&channelLayout, 0, MemoryLayout<AudioChannelLayout>.size) 
      channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo 

      // Attach the stereo layout built above via AVChannelLayoutKey. 
      let audioSettings: [String: Any] = [AVFormatIDKey: kAudioFormatMPEG4AAC, 
               AVSampleRateKey: 44100, 
               AVNumberOfChannelsKey: 2, 
               AVChannelLayoutKey: Data(bytes: &channelLayout, count: MemoryLayout<AudioChannelLayout>.size)] 

      audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings) 
      audioInput?.expectsMediaDataInRealTime = true 

      guard let vi = videoInput else { 
       print("Video input not configured") 
       return 
      } 

      guard let ai = audioInput else { 
       print("Audio input not configured") 
       return 
      } 

      if writer.canAdd(vi) { 
       writer.add(vi) 
      } 

      if writer.canAdd(ai) { 
       writer.add(ai) 
      } 

     } catch { 
      print("Failed to configure asset writer") 
      throw error 
     } 
    } 

    private func prepareVideoFile() { 

     if FileManager.default.fileExists(atPath: outputUrl.path) { 

      do { 
       try FileManager.default.removeItem(at: outputUrl) 
      } catch { 
       print("Unable to remove file at URL \(outputUrl)") 
      } 
     } 

     if !FileManager.default.fileExists(atPath: outputDirectory.path) { 

      do { 
       try FileManager.default.createDirectory(at: outputDirectory, withIntermediateDirectories: true, attributes: nil) 
      } catch { 
       print("Unable to create directory at URL \(outputDirectory)") 
      } 
     } 
    } 

    private func configurePreview() { 

     if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) { 
      previewLayer.frame = previewView.bounds 
      previewView.layer.addSublayer(previewLayer) 
     } 
    } 

    private func getVideoSize() -> CGSize { 

     if UIDevice.current.orientation == .landscapeLeft || UIDevice.current.orientation == .landscapeRight { 

      if videoSize.width > videoSize.height { 
       return videoSize 
      } else { 
       return CGSize(width: videoSize.height, height: videoSize.width) 
      } 

     } else { 

      if videoSize.width < videoSize.height { 
       return videoSize 
      } else { 
       return CGSize(width: videoSize.height, height: videoSize.width) 
      } 
     } 
    } 

    //MARK: - Controls 

    private func startRecording() { 

     videoQueue.async { 

      do { 
       try self.configureAssetWriter() 
       self.captureSession.startRunning() 

      } catch { 
       print("Unable to start recording") 
       DispatchQueue.main.async { self.showAlert("Unable to start recording") } 
      } 
     } 

     isRecording = true 
     playStopButton.setTitle("Stop Recording", for: .normal) 
     print("Recording did start") 
    } 

    private func stopRecording() { 

     if !isRecording { 
      return 
     } 

     videoQueue.async { 

      // Mark the inputs finished, and only export once the writer has 
      // actually finished writing the file. 
      self.videoInput?.markAsFinished() 
      self.audioInput?.markAsFinished() 

      self.assetWriter?.finishWriting { 
       print("Asset writer did finish writing") 
       self.isWriting = false 

       do { 
        try self.export() 
       } catch { 
        print("Export failed") 
        DispatchQueue.main.async { self.showAlert("Unable to export video") } 
       } 
      } 

      self.captureSession.stopRunning() 
     } 

     isRecording = false 

     playStopButton.setTitle("Start Recording", for: .normal) 
     print("Recording did stop") 
    } 

    //MARK: - AVCaptureVideoDataOutputSampleBufferDelegate 

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) { 

     guard let w = assetWriter else { 
      print("Asset writer not configured") 
      return 
     } 

     guard let vo = videoOutput else { 
      print("Video output not configured") 
      return 
     } 

     guard let ao = audioOutput else { 
      print("Audio output not configured") 
      return 
     } 

     guard let vi = videoInput else { 
      print("Video input not configured") 
      return 
     } 

     guard let ai = audioInput else { 
      print("Audio input not configured") 
      return 
     } 

     let st = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) 

     print("Writer status \(w.status.rawValue)") 

     if let e = w.error { 
      print("Writer error \(e)") 
      stopRecording() 
      return 
     } 

     switch w.status { 

     case .unknown: 

      if !isWriting { 
       isWriting = true 
       w.startWriting() 
       w.startSession(atSourceTime: st) 
      } 

      return 

     case .completed: 
      print("Video writing completed") 
      return 

     case .cancelled: 
      print("Video writing cancelled") 
      return 

     case .failed: 
      print("Video writing failed") 
      return 

     default: 
      print("Video is writing") 
     } 

     if vo == captureOutput { 

      if !vi.append(sampleBuffer) { 
       print("Unable to write to video buffer") 
      } 

     } else if ao == captureOutput { 

      if !ai.append(sampleBuffer) { 
       print("Unable to write to audio buffer") 
      } 
     } 
    } 

    //MARK: - Export 

    private func getVideoComposition(asset: AVAsset, videoSize: CGSize) -> AVMutableVideoComposition? { 

     guard let videoTrack = asset.tracks(withMediaType: AVMediaTypeVideo).first else { 
      print("Unable to get video tracks") 
      return nil 
     } 

     let videoComposition = AVMutableVideoComposition() 
     videoComposition.renderSize = videoSize 

     let seconds = Float64(1.0/videoTrack.nominalFrameRate) 
     videoComposition.frameDuration = CMTimeMakeWithSeconds(seconds, 600) 

     let layerInst = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack) 

     var transforms = asset.preferredTransform 

     // If the source track already carries a 90-degree rotation transform, 
     // skip the manual rotation below. 
     var isPortrait = true 

     if (transforms.a == 0.0 && transforms.b == 1.0 && transforms.c == -1.0 && transforms.d == 0.0) || 
      (transforms.a == 0.0 && transforms.b == -1.0 && transforms.c == 1.0 && transforms.d == 0.0) { 
      isPortrait = false 
     } 

     if isPortrait { 
      // Bake the rotation into the rendered pixels, then translate the frame 
      // back into view (rotating about the origin moves it out of the render area). 
      transforms = transforms.concatenating(CGAffineTransform(rotationAngle: CGFloat.pi / 2)) 
      transforms = transforms.concatenating(CGAffineTransform(translationX: videoSize.width, y: 0)) 
     } 

     layerInst.setTransform(transforms, at: kCMTimeZero) 

     let inst = AVMutableVideoCompositionInstruction() 
     inst.backgroundColor = UIColor.black.cgColor 
     inst.layerInstructions = [layerInst] 
     inst.timeRange = CMTimeRange(start: kCMTimeZero, duration: asset.duration) 

     videoComposition.instructions = [inst] 

     return videoComposition 

    } 

    private func export() throws { 

     let videoAsset = AVURLAsset(url: outputUrl) 

     if FileManager.default.fileExists(atPath: exportUrl.path) { 
      try FileManager.default.removeItem(at: exportUrl) 
     } 

     let videoSize = getVideoSize() 

     guard let encoder = AVAssetExportSession(asset: videoAsset, presetName: exportPreset) else { 
      print("Unable to create encoder") 
      return 
     } 

     guard let vidcomp = getVideoComposition(asset: videoAsset, videoSize: videoSize) else { 
      print("Unable to create video composition") 
      return 
     } 

     encoder.videoComposition = vidcomp 
     encoder.outputFileType = AVFileTypeMPEG4 // MP4 format 
     encoder.outputURL = exportUrl 
     encoder.shouldOptimizeForNetworkUse = true 

     encoder.exportAsynchronously { 
      if encoder.status == .completed { 
       print("Video exported successfully") 
      } else { 
       print("Export failed: \(String(describing: encoder.error))") 
      } 
     } 
    } 

    //MARK: Helpers 

    private func getDocumentsDirectory() -> URL { 
     let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask) 
     let documentsDirectory = paths[0] 
     return documentsDirectory 
    } 
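
    // showAlert(_:) is referenced above but was not part of the original post; 
    // a minimal sketch of an assumed implementation so the sample compiles: 
    private func showAlert(_ message: String) { 
     let alert = UIAlertController(title: nil, message: message, preferredStyle: .alert) 
     alert.addAction(UIAlertAction(title: "OK", style: .default, handler: nil)) 
     present(alert, animated: true, completion: nil) 
    } 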

    //MARK: Actions 

    @IBAction func startStopTapped(sender: AnyObject) { 

     if isRecording { 
      stopRecording() 
     } else { 
      startRecording() 
     } 
    } 
} 
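
Note the trade-off in this approach: the export re-encodes the frames so the rotation is baked into the pixels themselves, which means the width and height in the file's metadata match the displayed orientation and playback no longer depends on player-side transform support, at the cost of an extra encoding pass.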