2012-08-27
11

I used the code below to add a video overlay and export the newly generated video to the documents directory. Strangely, though, the exported video comes out rotated by 90 degrees: AVMutableVideoComposition rotates video that was captured in portrait mode.

- (void)buildTransitionComposition:(AVMutableComposition *)composition andVideoComposition:(AVMutableVideoComposition *)videoComposition 
{ 
    CMTime nextClipStartTime = kCMTimeZero; 
    NSInteger i; 

    // Make transitionDuration no greater than half the shortest clip duration. 
    CMTime transitionDuration = self.transitionDuration; 
    for (i = 0; i < [_clips count]; i++) { 
     NSValue *clipTimeRange = [_clipTimeRanges objectAtIndex:i]; 
     if (clipTimeRange) { 
      CMTime halfClipDuration = [clipTimeRange CMTimeRangeValue].duration; 
      halfClipDuration.timescale *= 2; // You can halve a rational by doubling its denominator. 
      transitionDuration = CMTimeMinimum(transitionDuration, halfClipDuration); 
     } 
    } 

    // Add two video tracks and two audio tracks. 
    AVMutableCompositionTrack *compositionVideoTracks[2]; 
    AVMutableCompositionTrack *compositionAudioTracks[2]; 
    compositionVideoTracks[0] = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; 
    compositionVideoTracks[1] = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; 
    compositionAudioTracks[0] = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; 
    compositionAudioTracks[1] = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; 

    CMTimeRange *passThroughTimeRanges = alloca(sizeof(CMTimeRange) * [_clips count]); 
    CMTimeRange *transitionTimeRanges = alloca(sizeof(CMTimeRange) * [_clips count]); 

    // Place clips into alternating video & audio tracks in composition, overlapped by transitionDuration. 
    for (i = 0; i < [_clips count]; i++) { 
     NSInteger alternatingIndex = i % 2; // alternating targets: 0, 1, 0, 1, ... 
     AVURLAsset *asset = [_clips objectAtIndex:i]; 
     NSValue *clipTimeRange = [_clipTimeRanges objectAtIndex:i]; 
     CMTimeRange timeRangeInAsset; 
     if (clipTimeRange) 
      timeRangeInAsset = [clipTimeRange CMTimeRangeValue]; 
     else 
      timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [asset duration]); 

     AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 
     [compositionVideoTracks[alternatingIndex] insertTimeRange:timeRangeInAsset ofTrack:clipVideoTrack atTime:nextClipStartTime error:nil]; 

     /* 
     CGAffineTransform t = clipVideoTrack.preferredTransform; 
     NSLog(@"Transform1 : %@",t); 
     */ 
     AVAssetTrack *clipAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]; 
     [compositionAudioTracks[alternatingIndex] insertTimeRange:timeRangeInAsset ofTrack:clipAudioTrack atTime:nextClipStartTime error:nil]; 

     // Remember the time range in which this clip should pass through. 
     // Every clip after the first begins with a transition. 
     // Every clip before the last ends with a transition. 
     // Exclude those transitions from the pass through time ranges. 
     passThroughTimeRanges[i] = CMTimeRangeMake(nextClipStartTime, timeRangeInAsset.duration); 
     if (i > 0) { 
      passThroughTimeRanges[i].start = CMTimeAdd(passThroughTimeRanges[i].start, transitionDuration); 
      passThroughTimeRanges[i].duration = CMTimeSubtract(passThroughTimeRanges[i].duration, transitionDuration); 
     } 
     if (i+1 < [_clips count]) { 
      passThroughTimeRanges[i].duration = CMTimeSubtract(passThroughTimeRanges[i].duration, transitionDuration); 
     } 

     // The end of this clip will overlap the start of the next by transitionDuration. 
     // (Note: this arithmetic falls apart if timeRangeInAsset.duration < 2 * transitionDuration.) 
     nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration); 
     nextClipStartTime = CMTimeSubtract(nextClipStartTime, transitionDuration); 

     // Remember the time range for the transition to the next item. 
     transitionTimeRanges[i] = CMTimeRangeMake(nextClipStartTime, transitionDuration); 
    } 

    // Set up the video composition if we are to perform crossfade or push transitions between clips. 
    NSMutableArray *instructions = [NSMutableArray array]; 

    // Cycle between "pass through A", "transition from A to B", "pass through B", "transition from B to A". 
    for (i = 0; i < [_clips count]; i++) { 
     NSInteger alternatingIndex = i % 2; // alternating targets 

     // Pass through clip i. 
     AVMutableVideoCompositionInstruction *passThroughInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction]; 
     passThroughInstruction.timeRange = passThroughTimeRanges[i]; 
     AVMutableVideoCompositionLayerInstruction *passThroughLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[alternatingIndex]]; 
     /* 
     CGAffineTransform rotationTransform = CGAffineTransformMakeRotation(M_PI_2); 
     CGAffineTransform rotateTranslate = CGAffineTransformTranslate(rotationTransform,320,0); 
     [passThroughLayer setTransform:rotateTranslate atTime:kCMTimeZero]; 
     */ 
     passThroughInstruction.layerInstructions = [NSArray arrayWithObject:passThroughLayer]; 
     [instructions addObject:passThroughInstruction]; 

     if (i+1 < [_clips count]) { 
      // Add transition from clip i to clip i+1. 

      AVMutableVideoCompositionInstruction *transitionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction]; 
      transitionInstruction.timeRange = transitionTimeRanges[i]; 
      AVMutableVideoCompositionLayerInstruction *fromLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[alternatingIndex]]; 
      AVMutableVideoCompositionLayerInstruction *toLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[1-alternatingIndex]]; 

      if (self.transitionType == SimpleEditorTransitionTypeCrossFade) { 
       // Fade out the fromLayer by setting a ramp from 1.0 to 0.0. 
       [fromLayer setOpacityRampFromStartOpacity:1.0 toEndOpacity:0.0 timeRange:transitionTimeRanges[i]]; 
      } 
      else if (self.transitionType == SimpleEditorTransitionTypePush) { 
       // Set a transform ramp on fromLayer from identity to all the way left of the screen. 
       [fromLayer setTransformRampFromStartTransform:CGAffineTransformIdentity toEndTransform:CGAffineTransformMakeTranslation(-composition.naturalSize.width, 0.0) timeRange:transitionTimeRanges[i]]; 
       // Set a transform ramp on toLayer from all the way right of the screen to identity. 
       [toLayer setTransformRampFromStartTransform:CGAffineTransformMakeTranslation(+composition.naturalSize.width, 0.0) toEndTransform:CGAffineTransformIdentity timeRange:transitionTimeRanges[i]]; 
      } 

      transitionInstruction.layerInstructions = [NSArray arrayWithObjects:fromLayer, toLayer, nil]; 
      [instructions addObject:transitionInstruction]; 
     } 
    } 

    videoComposition.instructions = instructions; 


} 

I cannot export portrait video in its proper orientation, so please help. Thank you.

Answers

15

By default, when you export a video using AVAssetExportSession, the video is rotated from its original orientation. You have to apply a transform to set the exact orientation you want. Please try the code below, which does exactly that.

- (AVMutableVideoCompositionLayerInstruction *)layerInstructionAfterFixingOrientationForAsset:(AVAsset *)inAsset 
                        forTrack:(AVMutableCompositionTrack *)inTrack 
                         atTime:(CMTime)inTime 
{ 
    //FIXING ORIENTATION// 
    AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:inTrack]; 
    AVAssetTrack *videoAssetTrack = [[inAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 
    UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp; 
    BOOL isVideoAssetPortrait_ = NO; 
    CGAffineTransform videoTransform = videoAssetTrack.preferredTransform; 

    // Portrait
    if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) { videoAssetOrientation_ = UIImageOrientationRight; isVideoAssetPortrait_ = YES; }
    // PortraitUpsideDown
    if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) { videoAssetOrientation_ = UIImageOrientationLeft; isVideoAssetPortrait_ = YES; }
    // LandscapeRight
    if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) { videoAssetOrientation_ = UIImageOrientationUp; }
    // LandscapeLeft
    if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) { videoAssetOrientation_ = UIImageOrientationDown; }

    CGFloat FirstAssetScaleToFitRatio = 320.0/videoAssetTrack.naturalSize.width; 

    if(isVideoAssetPortrait_) { 
     FirstAssetScaleToFitRatio = 320.0/videoAssetTrack.naturalSize.height; 
     CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio); 
     [videolayerInstruction setTransform:CGAffineTransformConcat(videoAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero]; 
    }else{ 
     CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio); 
     [videolayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(videoAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 160)) atTime:kCMTimeZero]; 
    } 
    [videolayerInstruction setOpacity:0.0 atTime:inTime]; 
    return videolayerInstruction; 
} 

Hope this helps you out. The method is called like this:

AVAssetTrack *assetTrack = [[inAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 

AVMutableCompositionTrack *mutableTrack = [mergeComposition mutableTrackCompatibleWithTrack:assetTrack]; 

AVMutableVideoCompositionLayerInstruction *assetInstruction = [self layerInstructionAfterFixingOrientationForAsset:inAsset forTrack:mutableTrack atTime:videoTotalDuration]; 

Above is the code to call the method, where inAsset is your video asset, videoTotalDuration is your video's total duration as a CMTime, and mergeComposition is an object of the AVMutableComposition class.

Hope this helps.

EDIT: This is not a callback method or an event, so you have to call it yourself with the required parameters mentioned above.
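
For reference, here is a minimal sketch (not part of the original answer) of how the returned layer instruction could be wired into a video composition and an export session. The output URL, render size, and frame duration are assumptions; mergeComposition, assetInstruction, and videoTotalDuration come from the snippet above.

AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoTotalDuration);
mainInstruction.layerInstructions = [NSArray arrayWithObject:assetInstruction];

AVMutableVideoComposition *mainVideoComposition = [AVMutableVideoComposition videoComposition];
mainVideoComposition.instructions = [NSArray arrayWithObject:mainInstruction];
mainVideoComposition.frameDuration = CMTimeMake(1, 30);     // assumed 30 fps
mainVideoComposition.renderSize = CGSizeMake(320.0, 480.0); // assumed; the 320-point width matches the scaling above

AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mergeComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL = outputURL;                   // assumed NSURL in the documents directory
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.videoComposition = mainVideoComposition; // without this, the layer instruction is ignored
[exporter exportAsynchronouslyWithCompletionHandler:^{
    // check exporter.status and exporter.error here
}];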

+0

I tried this method, but it is not getting called at execution time. Could you please elaborate a bit more so it is usable? Thanks. :) – Dhruv

+0

My video turns completely black. Can you help me figure out why? – itsji10dra

+0

It would be great if you could provide the Swift 3 code. –

2

Use the method below to build an AVMutableVideoComposition that sets the correct orientation according to the video asset's orientation:

-(AVMutableVideoComposition *) getVideoComposition:(AVAsset *)asset 
{ 
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 
    AVMutableComposition *composition = [AVMutableComposition composition]; 
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition]; 
    CGSize videoSize = videoTrack.naturalSize; 
    BOOL isPortrait_ = [self isVideoPortrait:asset]; 
    if(isPortrait_) { 
     NSLog(@"video is portrait "); 
     videoSize = CGSizeMake(videoSize.height, videoSize.width); 
    } 
    composition.naturalSize  = videoSize; 
    videoComposition.renderSize = videoSize; 
    // videoComposition.renderSize = videoTrack.naturalSize; // 
    videoComposition.frameDuration = CMTimeMakeWithSeconds(1/videoTrack.nominalFrameRate, 600); 

    AVMutableCompositionTrack *compositionVideoTrack; 
    compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; 
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoTrack atTime:kCMTimeZero error:nil]; 
    AVMutableVideoCompositionLayerInstruction *layerInst; 
    layerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack]; 
    [layerInst setTransform:videoTrack.preferredTransform atTime:kCMTimeZero]; 
    AVMutableVideoCompositionInstruction *inst = [AVMutableVideoCompositionInstruction videoCompositionInstruction]; 
    inst.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration); 
    inst.layerInstructions = [NSArray arrayWithObject:layerInst]; 
    videoComposition.instructions = [NSArray arrayWithObject:inst]; 
    return videoComposition; 
} 


-(BOOL) isVideoPortrait:(AVAsset *)asset 
{ 
    BOOL isPortrait = FALSE; 
    NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo]; 
    if ([tracks count] > 0) { 
        AVAssetTrack *videoTrack = [tracks objectAtIndex:0]; 
        CGAffineTransform t = videoTrack.preferredTransform; 

        // Portrait 
        if (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) { 
            isPortrait = YES; 
        } 
        // PortraitUpsideDown 
        if (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) { 
            isPortrait = YES; 
        } 
        // LandscapeRight 
        if (t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0) { 
            isPortrait = NO; 
        } 
        // LandscapeLeft 
        if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) { 
            isPortrait = NO; 
        } 
    } 
    return isPortrait; 
} 
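
A minimal usage sketch, as an assumption rather than part of the original answer: because the layer instruction above references the asset's own video track (the AVMutableComposition created inside the method is never returned), the returned video composition is applied when exporting the original asset. outputURL is an assumed destination file URL.

AVMutableVideoComposition *videoComposition = [self getVideoComposition:asset];

AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
exportSession.outputURL = outputURL;                 // assumed file URL in the documents directory
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
exportSession.videoComposition = videoComposition;   // renderSize is already swapped for portrait assets
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    if (exportSession.status == AVAssetExportSessionStatusCompleted) {
        NSLog(@"Export finished");
    } else {
        NSLog(@"Export failed: %@", exportSession.error);
    }
}];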
13

Here is a slightly easier way if you simply want to keep the original rotation:

// Grab the source track from AVURLAsset for example. 
AVAssetTrack *assetVideoTrack = [asset tracksWithMediaType:AVMediaTypeVideo].lastObject; 

// Grab the composition video track from AVMutableComposition you already made. 
AVMutableCompositionTrack *compositionVideoTrack = [composition tracksWithMediaType:AVMediaTypeVideo].lastObject; 

// Apply the original transform.  
if (assetVideoTrack && compositionVideoTrack) { 
    [compositionVideoTrack setPreferredTransform:assetVideoTrack.preferredTransform]; 
} 

// Export... 
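
To round this out, a minimal sketch of the export step, under the assumption that a plain export with no video composition is used; the preset, output URL, and file type are all assumed.

AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
exportSession.outputURL = outputURL;                 // assumed destination NSURL
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
// No videoComposition is set here, so the composition track's preferredTransform
// (copied from the source track above) is what preserves the original rotation.
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    NSLog(@"Status: %ld, error: %@", (long)exportSession.status, exportSession.error);
}];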
+0

I wasted six hours on this! I can't believe it's this simple. I wish I had read this post first. – etayluz

+0

If I want to merge multiple videos into one, how can I keep each video's original orientation in the final video? –

+0

Somehow it's not working for me :( https://stackoverflow.com/questions/45127420/avmutablecomposition-orientation-layer-size-are-not-correct –

2

var assetVideoTrack = (sourceAsset.tracksWithMediaType(AVMediaTypeVideo)).last as! AVAssetTrack 

var compositionVideoTrack = (composition.tracksWithMediaType(AVMediaTypeVideo)).last as! AVMutableCompositionTrack 

if (assetVideoTrack.playable && compositionVideoTrack.playable) { 
    compositionVideoTrack.preferredTransform = assetVideoTrack.preferredTransform 
} 
+1

How do you initialize the composition? –

1

A Swift 2 answer — this works for me:

 

do { 
    let paths = NSSearchPathForDirectoriesInDomains( 
        NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomainMask.UserDomainMask, true) 
    let documentsDirectory: AnyObject = paths[0] 
    // this will be changed to accommodate dynamic videos 
    let dataPath = documentsDirectory.stringByAppendingPathComponent(videoFileName + ".MOV") 
    let videoAsset = AVURLAsset(URL: NSURL(fileURLWithPath: dataPath), options: nil) 
    let imgGenerator = AVAssetImageGenerator(asset: videoAsset) 
    imgGenerator.appliesPreferredTrackTransform = true 
    let cgImage = try imgGenerator.copyCGImageAtTime(CMTimeMake(0, 1), actualTime: nil) 
    let uiImage = UIImage(CGImage: cgImage) 

    videoThumb.image = uiImage 
} catch let err as NSError { 
    print("Error generating thumbnail: \(err)") 
} 
+0

imgGenerator.appliesPreferredTrackTransform = true –

+0

Awesome.. – JAck
