
I am working on an application that lets users overlay a watermark on a video. AVAssetExportSession works the first time, but when I try again it fails and the video cannot be created. Here is my code for saving the video:

@IBAction func saveVideo(sender: AnyObject) { 

    self.videoAsset = AVAsset(URL: fileURL as NSURL!) 

    // Create the composition's video and audio tracks 
    let videoTrack: AVMutableCompositionTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) 
    let audioTrack:AVMutableCompositionTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) 

    do { 
     try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration), ofTrack: self.videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0] , atTime: kCMTimeZero) 
     try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration), ofTrack: self.videoAsset.tracksWithMediaType(AVMediaTypeAudio)[0] , atTime: kCMTimeZero) 
     print("Inserted time ranges just fine\n") 
    } catch let error as NSError { 
     print("Failed to insert video/audio tracks!!!!\n") 
     print(error.localizedDescription) 
    } 

    videoLayerIntruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack) 
    let videoAssetTrack: AVAssetTrack = self.videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0] 

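    // Determine the source video's orientation from its preferred transform 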
    var videoAssetOrientation_: UIImageOrientation = .Up 
    var isVideoAssetPortrait_: Bool = false 

    let videoTransform:CGAffineTransform = videoAssetTrack.preferredTransform 

    if videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0 { 
     videoAssetOrientation_ = .Right 
     isVideoAssetPortrait_ = true 
    } 
    if videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0 { 
     videoAssetOrientation_ = .Left 
     isVideoAssetPortrait_ = true 
    } 
    if videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0 { 
     videoAssetOrientation_ = .Up 
    } 
    if videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0 { 
     videoAssetOrientation_ = .Down 
    } 

    videoLayerIntruction.setTransform(videoAssetTrack.preferredTransform, atTime: kCMTimeZero) 

    mainInstruction.layerInstructions = [videoLayerIntruction] 
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration) 


    var naturalSize = CGSize() 

    if isVideoAssetPortrait_ { 
     naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width) 
    } else { 
     naturalSize = videoAssetTrack.naturalSize 
    } 

    renderWidth = naturalSize.width 
    renderHeight = naturalSize.height 

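    // Build the layer tree used by the Core Animation tool: video layer plus optional watermark overlay 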
    parentLayer.frame = CGRectMake(0, 0, renderWidth, renderHeight) 
    parentLayer.geometryFlipped = true 
    parentLayer.anchorPoint = CGPointMake(0.5, 0.5) 

    videoLayer.frame = CGRectMake(0, 0, renderWidth, renderHeight) 

    self.overlayLayer.frame = CGRectMake(self.renderWidth, self.renderHeight, self.newRatioWidth, self.newRatioHeight) 
    self.overlayLayer.addAnimation(self.animation, forKey: "contents") 
    self.overlayLayer.anchorPoint = CGPointMake(0.5, 0.5) 
    self.overlayLayer.contentsGravity = kCAGravityResizeAspect 

    parentLayer.addSublayer(videoLayer) 

    if addedOverlay == true { 
     parentLayer.addSublayer(overlayLayer) 
    } 

    mainCompositionInst.renderScale = 1.0 
    mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight) 
    mainCompositionInst.instructions = [mainInstruction] 
    mainCompositionInst.frameDuration = CMTimeMake(1, 30) 
    mainCompositionInst.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, inLayer: parentLayer) 

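    // Export the result to a uniquely named file in the temporary directory 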
    outputURL = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("CreatedVideo-\(NSUUID().UUIDString).mov") 

    let exporter: AVAssetExportSession = AVAssetExportSession(asset: videoAsset, presetName: AVAssetExportPresetHighestQuality)! 
    exporter.outputURL = outputURL 
    exporter.outputFileType = AVFileTypeQuickTimeMovie 
    exporter.shouldOptimizeForNetworkUse = false 
    exporter.videoComposition = mainCompositionInst 

    exporter.exportAsynchronouslyWithCompletionHandler({ 
     dispatch_async(dispatch_get_main_queue(), { 
      self.exportVideo(exporter) 
      switch exporter.status{ 
      case AVAssetExportSessionStatus.Failed: 
       print("FAILED EXPORT - \(exporter.error)\n") 
      case AVAssetExportSessionStatus.Cancelled: 
       print("canceled \(exporter.error)\n") 
      default: 
       print("COMPLETED EXPORT\n") 
      } 
     }) 
    }) 
} 

func exportVideo(sender: AVAssetExportSession) { 
    print("Asked to export\n") 
    PHPhotoLibrary.sharedPhotoLibrary().performChanges({ 
    PHAssetChangeRequest.creationRequestForAssetFromVideoAtFileURL(sender.outputURL!) 
     }, completionHandler: { success, error in 
      if success { 
       print("Success! Finished saving video.") 
      } else { 
       print("ERROR - " + (error?.localizedDescription)!) 
      } 
    }) 
} 
After I save a video with the custom overlay once, every subsequent attempt to save fails with the error below:

Optional(Error Domain=AVFoundationErrorDomain Code=-11841 "Operation Stopped" UserInfo={NSLocalizedDescription=Operation Stopped, NSLocalizedFailureReason=The video could not be composed.})

Does anyone know what could cause the composition to start failing after it works correctly the first time? Any advice would be appreciated!

As SeanLintern88 pointed out, the answer linked in the comments below resolved this.
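For anyone else hitting this: error -11841 corresponds to AVErrorInvalidVideoComposition, meaning the video composition handed to the exporter is not valid. Below is a minimal sketch of that approach, under two assumptions that go beyond what the question shows: the layer instruction is created from the composition's own track (videoTrack), as in the line quoted in the comments, and the composition objects (mixComposition, the instruction, the video composition) are rebuilt from scratch on every export instead of being reused instance properties, since a reused composition still contains the tracks from the previous run. The helper name buildComposition and the local-variable restructuring are illustrative only, not taken verbatim from the linked answer.

    import AVFoundation

    // Hypothetical helper: rebuilds the composition from scratch on every export.
    func buildComposition(videoAsset: AVAsset) throws -> (AVMutableComposition, AVMutableVideoComposition) {
        // Fresh objects every call; a reused AVMutableComposition still holds the
        // tracks and instructions inserted by the previous export.
        let mixComposition = AVMutableComposition()

        let videoTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
        let audioTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))

        let sourceVideo = videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0]
        let sourceAudio = videoAsset.tracksWithMediaType(AVMediaTypeAudio)[0]
        let fullRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration)

        try videoTrack.insertTimeRange(fullRange, ofTrack: sourceVideo, atTime: kCMTimeZero)
        try audioTrack.insertTimeRange(fullRange, ofTrack: sourceAudio, atTime: kCMTimeZero)

        // The layer instruction references the *composition* track, not the source asset's track.
        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
        layerInstruction.setTransform(sourceVideo.preferredTransform, atTime: kCMTimeZero)

        let mainInstruction = AVMutableVideoCompositionInstruction()
        mainInstruction.timeRange = fullRange
        mainInstruction.layerInstructions = [layerInstruction]

        let videoComposition = AVMutableVideoComposition()
        videoComposition.instructions = [mainInstruction]
        videoComposition.frameDuration = CMTimeMake(1, 30)
        videoComposition.renderSize = sourceVideo.naturalSize

        return (mixComposition, videoComposition)
    }

With a helper like this, saveVideo would call it once per export, keep the existing layer and animation-tool setup, and create the AVAssetExportSession from the returned mixComposition (rather than the original videoAsset) together with the returned video composition. Whether reuse of the instance-level objects is the actual cause here is an assumption, but it fits the works-once-then-fails symptom.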

+1

:http://stackoverflow.com/a/31146867/1638273 – SeanLintern88

+0

How on earth did I not see that? It makes me question my sanity and my ability to search for things.... Thank you! – riverhawk

+0

@riverhawk I'm facing the same problem. The solution provided by @SeanLintern88 is the same as what you already had: 'videoLayerIntruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)'. How did you solve the problem? –
