2017-05-30 2 views
0

私のアプリは、FaceTimeコールをシミュレートしながら、フロントフェイスカメラを使ってそれを使用している人々を同時に録画し、実際のFaceTimeコールのように画面にプレビューを表示します。その後、FaceTimeコールで「他人」として事前録画されたビデオを再生します。終了ボタンが押されたとき、またはメインムービーが終了したときに、フロントカメラからビデオを保存することになっています。前者の場合は完璧に動作しますが、メインのムービーが完了すると、ムービーの保存された出力には音声はありません。何が起こっているのでしょうか?音声はありませんiOSのAVCaptureセッション

#import "MovieView.h" 
#import <AudioToolbox/AudioToolbox.h> 

@implementation MovieView 
@synthesize selectedCountry, vImagePreview, playit; 

- (void)viewDidLoad { 
    [super viewDidLoad]; 
    NSDate *today = [NSDate date]; 
    NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init]; 
    [dateFormatter setDateFormat:@"MMM d hh:mm:ss a"]; 
    // display in 12HR/24HR (i.e. 11:25PM or 23:25) format according to User Settings 
    NSString *currentTime = [dateFormatter stringFromDate:today]; 

    self.navigationController.navigationBarHidden = YES; 
    [[UIApplication sharedApplication] setStatusBarStyle:UIStatusBarStyleDefault]; 
    [[UIApplication sharedApplication] setStatusBarHidden:NO]; 

    NSError* error4 = nil; 
    AVAudioSession* audioSession = [AVAudioSession sharedInstance]; 
    [audioSession setCategory:AVAudioSessionCategoryAmbient error:&error4]; 
    OSStatus propertySetError = 0; 
    UInt32 allowMixing = true; 
    propertySetError |= AudioSessionSetProperty(kAudioSessionProperty_OtherMixableAudioShouldDuck, sizeof(allowMixing), &allowMixing); 

    // Activate the audio session 
    error4 = nil; 
    if (![audioSession setActive:YES error:&error4]) { 
     NSLog(@"AVAudioSession setActive:YES failed: %@", [error4 localizedDescription]); 
    } 






    //tests 


    // Set audio session category to "play and record" 
     //endtests 

    //this is the end of recording the video and audio 
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); 
    NSString *documentsDirectoryPath = [paths objectAtIndex:0]; 

    NSString *proud = [[documentsDirectoryPath stringByAppendingPathComponent:@"proud"] stringByAppendingPathComponent:selectedCountry]; 

    NSURL *movieURL = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"BlueHarvest" ofType:@"mp4"]]; 
    player = 

    [[MPMoviePlayerController alloc] initWithContentURL: movieURL]; 
    player.useApplicationAudioSession=YES; 

    [player prepareToPlay]; 
    player.controlStyle = MPMovieControlStyleNone; 
    player.allowsAirPlay = NO; 
    player.scalingMode = MPMovieScalingModeFill; 

    player.view.frame = self.view.frame; 

    [self.view insertSubview:player.view belowSubview:vImagePreview]; 

    [player setFullscreen:YES animated:YES]; 

    // ... 

    [[NSNotificationCenter defaultCenter] 
    addObserver:self 
    selector:@selector(movieFinishedCallback:) 
    name:MPMoviePlayerPlaybackDidFinishNotification 
    object:player]; 

    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(exitedFullscreen:) name:MPMoviePlayerDidExitFullscreenNotification object:player]; 


    [[NSNotificationCenter defaultCenter] addObserver:self 
              selector:@selector(moviePlayerWillExitFullscreen:) 
               name:MPMoviePlayerWillExitFullscreenNotification 
               object:player]; 


    [player play]; 


    session = [[AVCaptureSession alloc] init]; 
    [session beginConfiguration]; 
    session.sessionPreset = AVCaptureSessionPresetMedium; 

    CALayer *viewLayer = self.vImagePreview.layer; 
    NSLog(@"viewLayer = %@", viewLayer); 

    self.captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session]; 
    self.captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; 
    self.captureVideoPreviewLayer.frame = self.vImagePreview.bounds; 
    [self.captureVideoPreviewLayer setCornerRadius:14]; 
    [self.captureVideoPreviewLayer setBorderWidth:3.0]; 
    [self.captureVideoPreviewLayer setBorderColor:[[UIColor whiteColor] CGColor]]; 
    self.captureVideoPreviewLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight; 
    [[vImagePreview layer] setCornerRadius:14]; 


    [[vImagePreview layer] setBorderWidth:3.0]; 


    [[vImagePreview layer] setBorderColor:[[UIColor whiteColor] CGColor]]; 
    [self.vImagePreview.layer addSublayer:self.captureVideoPreviewLayer]; 
    AVCaptureDevice *device = [self frontFacingCameraIfAvailable]; 
    NSError *error = nil; 
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error]; 
    if (!input) { 
     // Handle the error appropriately. 
     NSLog(@"ERROR: trying to open camera: %@", error); 
    } 

    AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio]; 
    NSError *error2 = nil; 
    AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error2]; 





    AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init]; 

    NSString *archives = [documentsDirectoryPath stringByAppendingPathComponent:@"archives"]; 
    NSString *editedfilename = [[selectedCountry lastPathComponent] stringByDeletingPathExtension]; 
    NSString *datestring = [[editedfilename stringByAppendingString:@" "] stringByAppendingString:currentTime]; 
    NSLog(@"%@", datestring); 
    NSString *outputpathofmovie = [[archives stringByAppendingPathComponent:datestring] stringByAppendingString:@".mp4"]; 
    NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputpathofmovie]; 
    [session addInput:input]; 
    [session addInput:audioInput]; 
    [session addOutput:movieFileOutput]; 
    AVCaptureConnection *videoConnection = nil; 

    for (AVCaptureConnection *connection in [movieFileOutput connections]) 
    { 
     NSLog(@"%@", connection); 
     for (AVCaptureInputPort *port in [connection inputPorts]) 
     { 
      NSLog(@"%@", port); 
      if ([[port mediaType] isEqual:AVMediaTypeVideo]) 
      { 
       videoConnection = connection; 
      } 
     } 
    } 

    if([videoConnection isVideoOrientationSupported]) // **Here it is, its always false** 
    { 
     [videoConnection setVideoOrientation:AVCaptureVideoOrientationLandscapeRight]; 
    } 

    [session commitConfiguration]; 
    [session startRunning]; 

    NSUserDefaults *userDefaults = [NSUserDefaults standardUserDefaults]; 




    [movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self]; 

    NSLog(@"OutputURL%@", outputURL); 

} 
-(void)viewDidLayoutSubviews { 
    [super viewDidLayoutSubviews]; 

    UIInterfaceOrientation orientation = [[UIApplication sharedApplication] statusBarOrientation]; 
    switch (orientation) { 
     case UIInterfaceOrientationPortrait: 
      [self.captureVideoPreviewLayer.connection setVideoOrientation:AVCaptureVideoOrientationPortrait]; 
      break; 
     case UIInterfaceOrientationPortraitUpsideDown: 
      [self.captureVideoPreviewLayer.connection setVideoOrientation:AVCaptureVideoOrientationPortraitUpsideDown]; 
      break; 
     case UIInterfaceOrientationLandscapeLeft: 
      [self.captureVideoPreviewLayer.connection setVideoOrientation:AVCaptureVideoOrientationLandscapeLeft]; 
      break; 
     case UIInterfaceOrientationLandscapeRight: 
      [self.captureVideoPreviewLayer.connection setVideoOrientation:AVCaptureVideoOrientationLandscapeRight]; 
      break; 
    } 
} 
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error { 
    //finished 
    NSLog(@"ErrorMessage%@", error); 
} 
-(IBAction)endcall { 

    [player stop]; 
    [session stopRunning]; 
} 
-(AVCaptureDevice *)frontFacingCameraIfAvailable 
{ 
    NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; 
    AVCaptureDevice *captureDevice = nil; 
    for (AVCaptureDevice *device in videoDevices) 
    { 
     if (device.position == AVCaptureDevicePositionFront) 
     { 
      captureDevice = device; 

      break; 
     } 
    } 

    // couldn't find one on the front, so just get the default video device. 
    if (! captureDevice) 
    { 
     captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; 

    } 

    return captureDevice; 
} 

- (void) movieFinishedCallback:(NSNotification*) aNotification {  
    NSLog(@"MovieDone"); 
    [player stop]; 
    [player.view removeFromSuperview]; 
    [[NSNotificationCenter defaultCenter] 
    removeObserver:self 
    name:MPMoviePlayerPlaybackDidFinishNotification 
    object:player]; 
    [session stopRunning]; 
    [self.navigationController popToRootViewControllerAnimated:NO]; 

} 

- (void) exitedFullscreen:(NSNotification*) aNotification { 
    NSLog(@"MovieDone"); 
    [player.view removeFromSuperview]; 
    [[NSNotificationCenter defaultCenter] 
    removeObserver:self 
    name:MPMoviePlayerDidExitFullscreenNotification 
    object:player]; 
} 
- (void)moviePlayerWillExitFullscreen:(NSNotification*) aNotification { 
    [player stop]; 
    [session stopRunning]; 
    [self dismissMoviePlayerViewControllerAnimated]; 
    [[NSNotificationCenter defaultCenter] removeObserver:self name:MPMoviePlayerWillExitFullscreenNotification object:player]; 
} 



@end 

答えて

0

が見つかりました。

movieFileOutput.movieFragmentInterval = kCMTimeInvalid; 
関連する問題