2011-02-02 4 views
0
>  **java -Xms64m -Xms64m VideoRecorder** 
>  - number of capture devices: 3 
>  Supported format : rgb, 24-bit, masks=3:2:1, pixelstride=-1, 
> linestride=-1, flip 
>  ped 
>  Track 0 is set to transmit as: 
>  RGB, 24-bit, Masks=3:2:1, PixelStride=-1, LineStride=-1, Flipped 
>  Start datasource handler 
>  Prefetch the processor 
>  processor started 
>  Exception in thread "VFW TransferDataThread" 
> java.lang.OutOfMemoryError: Java heap space 

ここで、キャプチャしたデータをファイルに書き込もうとしたコードは次のとおりです。

/* 
* VideoRecorder.java 
* 
* Created on Mar 16, 2004 
* 
*/ 
//package gov.nist.applet.phone.media.messaging; 

import java.io.File; 
import java.io.FileNotFoundException; 
import java.io.FileOutputStream; 
import java.io.IOException; 
import java.util.Vector; 

import javax.media.CaptureDeviceInfo; 
import javax.media.CaptureDeviceManager; 
import javax.media.ConfigureCompleteEvent; 
import javax.media.Controller; 
import javax.media.ControllerEvent; 
import javax.media.ControllerListener; 
import javax.media.EndOfMediaEvent; 
import javax.media.Format; 
import javax.media.IncompatibleSourceException; 
import javax.media.Manager; 
import javax.media.MediaLocator; 
import javax.media.MediaTimeSetEvent; 
import javax.media.PrefetchCompleteEvent; 
import javax.media.Processor; 
import javax.media.RealizeCompleteEvent; 
import javax.media.ResourceUnavailableEvent; 
import javax.media.SizeChangeEvent; 
import javax.media.StopAtTimeEvent; 
import javax.media.StopByRequestEvent; 
import javax.media.control.TrackControl; 
import javax.media.datasink.DataSinkErrorEvent; 
import javax.media.datasink.DataSinkEvent; 
import javax.media.datasink.DataSinkListener; 
import javax.media.datasink.EndOfStreamEvent; 
import javax.media.format.VideoFormat; 
import javax.media.protocol.ContentDescriptor; 
import javax.media.protocol.DataSource; 
import javax.media.protocol.FileTypeDescriptor; 

/** 
* Class allowing one to record some audio in a buffer 
* Play only MPEG_AUDIO and GSM audio data 
* With some minor modifications can play RAW data also 
* 
* @author Jean Deruelle <[email protected]> 
* 
* <a href="{@docRoot}/uncopyright.html">This code is in the public domain.</a> 
*/ 
public class VideoRecorder implements ControllerListener, DataSinkListener, Runnable {

    /** Processor that converts the captured stream into the chosen track format. */
    Processor p;
    /** Monitor used by waitForState() to block until a state transition completes. */
    Object waitSync = new Object();
    /** Cleared by controllerUpdate() when a requested state transition fails. */
    boolean stateTransitionOK = true;
    static boolean monitorOn = false;
    /** Locator of the first capture device advertising a VideoFormat, or null. */
    private MediaLocator videoLocator = null;
    /** True once recording has finished (end of media, stop request, or sink error). */
    boolean bufferingDone = false;
    /** In-memory sink that accumulates the recorded bytes. */
    RawDataSourceHandler handler = null;
    /** Background thread running the record loop; null when not recording. */
    Thread recorderThread = null;
    /** Capture DataSource built from videoLocator. */
    DataSource ds = null;

    /**
     * Enumerate the capture devices and remember the locator of the first
     * one that advertises at least one video format.
     */
    protected void initialize() {
        CaptureDeviceInfo videoCDI = null;
        Vector captureDevices = CaptureDeviceManager.getDeviceList(null);
        System.out.println("- number of capture devices: " + captureDevices.size());
        for (int i = 0; i < captureDevices.size(); i++) {
            CaptureDeviceInfo cdi = (CaptureDeviceInfo) captureDevices.elementAt(i);
            Format[] formatArray = cdi.getFormats();
            for (int j = 0; j < formatArray.length; j++) {
                // Keep the first device that supports any video format.
                if (formatArray[j] instanceof VideoFormat && videoCDI == null) {
                    videoCDI = cdi;
                }
            }
        }
        if (videoCDI != null)
            videoLocator = videoCDI.getLocator();
    }

    /**
     * Program each enabled track of the processor to the first supported
     * RGB video format; tracks with no usable format are disabled.
     * The processor's output content is set to RAW.
     */
    protected void setTrackFormat() {
        // Get the tracks from the processor.
        TrackControl[] tracks = p.getTrackControls();

        // Fix: the original only printed a warning and fell through,
        // which would NPE on tracks.length below.
        if (tracks == null || tracks.length < 1) {
            System.out.println("Couldn't find tracks in processor");
            return;
        }

        // RAW output: we hand the bytes to our own DataSource handler.
        p.setContentDescriptor(new ContentDescriptor(ContentDescriptor.RAW));

        for (int i = 0; i < tracks.length; i++) {
            if (!tracks[i].isEnabled()) {
                tracks[i].setEnabled(false);
                continue;
            }
            Format[] supported = tracks[i].getSupportedFormats();
            if (supported.length == 0) {
                // No supported format at all: drop the track.
                tracks[i].setEnabled(false);
                continue;
            }
            // Fix: reset per track — the original leaked the previous
            // track's choice into the next iteration.
            Format chosen = null;
            for (int j = 0; j < supported.length; j++) {
                System.out.println("Supported format : " + supported[j].toString().toLowerCase());
                if (supported[j] instanceof VideoFormat
                        && supported[j].toString().toLowerCase().indexOf("rgb") != -1) {
                    chosen = supported[j];
                    break;
                }
            }
            if (chosen != null) {
                tracks[i].setFormat(chosen);
                System.err.println("Track " + i + " is set to transmit as:");
                System.err.println(" " + chosen);
            } else {
                System.err.println("Track " + i + " is set to transmit as nothing");
            }
        }
    }

    /**
     * Given the capture locator, create a processor and hook up the output
     * DataSource from the processor to a customed DataSink.
     * @return false if something wrong happened
     */
    protected boolean record() {
        // Create a DataSource given the media locator.
        try {
            ds = Manager.createDataSource(videoLocator);
        } catch (Exception e) {
            System.err.println("Cannot create DataSource from: " + videoLocator);
            return false;
        }

        try {
            p = Manager.createProcessor(ds);
        } catch (Exception e) {
            System.err.println("Failed to create a processor from the given DataSource: " + e);
            return false;
        }

        p.addControllerListener(this);

        // Put the Processor into configured state.
        p.configure();
        if (!waitForState(Processor.Configured)) {
            System.err.println("Failed to configure the processor.");
            return false;
        }
        setTrackFormat();
        p.realize();
        if (!waitForState(Controller.Realized)) {
            System.err.println("Failed to realize the processor.");
            return false;
        }

        // Get the output DataSource from the processor and
        // hook it up to the RawDataSourceHandler.
        DataSource ods = p.getDataOutput();
        handler = new RawDataSourceHandler();
        handler.addDataSinkListener(this);

        System.err.println("Start datasource handler ");
        try {
            // Fix: the original set the source to ods, then immediately
            // overwrote it with the raw capture DataSource (ds), so the
            // handler recorded the unprocessed capture stream instead of
            // the processor's output.
            handler.setSource(ods);
            handler.start();
        } catch (IncompatibleSourceException e) {
            System.err.println("Cannot handle the output DataSource from the processor: " + ods);
            return false;
        }

        System.err.println("Prefetch the processor ");
        // Prefetch the processor.
        p.prefetch();
        if (!waitForState(Controller.Prefetched)) {
            System.err.println("Failed to prefetch the processor.");
            return false;
        }
        // Start the processor.
        p.start();
        System.err.println("processor started");

        return true;
    }

    /**
     * Block until the processor has transitioned to the given state.
     * @param state - the state to wait for
     * @return false if the transition failed.
     */
    protected boolean waitForState(int state) {
        synchronized (waitSync) {
            try {
                while (p.getState() < state && stateTransitionOK)
                    waitSync.wait();
            } catch (InterruptedException ie) {
                // Preserve the interrupt status instead of swallowing it.
                Thread.currentThread().interrupt();
            }
        }
        return stateTransitionOK;
    }

    /**
     * Stop the video recording.
     */
    public void stop() {
        p.stop();
        bufferingDone = true;
    }

    /**
     * Start the video recording on a background thread.
     */
    public void start() {
        initialize();
        if (recorderThread == null) {
            recorderThread = new Thread(this);
            // Fix: this is a video recorder, not a voice recorder.
            recorderThread.setName("Video Recorder Thread");
        }
        recorderThread.start();
    }

    /**
     * The recording process: records until bufferingDone is set
     * (by stop(), end of media, or a sink error), then cleans up.
     */
    public void run() {
        boolean succeeded = record();
        if (!succeeded)
            return;
        while (!bufferingDone) {
            try {
                // Fix: Thread.sleep is static; the original called it
                // through the recorderThread instance.
                Thread.sleep(1);
            } catch (InterruptedException ie) {
                ie.printStackTrace();
            }
        }
        // Give the pipeline a moment to drain before tearing it down.
        try {
            Thread.sleep(100);
        } catch (InterruptedException ie) {
            ie.printStackTrace();
        }
        // Clean up.
        stopDataSource();
        closeProcessorAndHandler();
        bufferingDone = false;
    }

    /** Stop and disconnect the capture DataSource, logging progress. */
    private void stopDataSource() {
        System.err.println("closing datasource");
        try {
            ds.stop();
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }
        ds.disconnect();
    }

    /** Close the processor and the sink handler and drop the recorder thread. */
    private void closeProcessorAndHandler() {
        System.err.println("closing processor");
        p.close();
        p.removeControllerListener(this);
        recorderThread = null;
        System.err.println("closing handler");
        handler.close();
        System.err.println("...done Buffering.");
    }

    /**
     * Controller Listener Method.
     * Allows one to know what happens on the recorder.
     * @param evt - event received
     */
    public void controllerUpdate(ControllerEvent evt) {
        if (evt instanceof ConfigureCompleteEvent
                || evt instanceof RealizeCompleteEvent
                || evt instanceof PrefetchCompleteEvent) {
            synchronized (waitSync) {
                stateTransitionOK = true;
                waitSync.notifyAll();
            }
        } else if (evt instanceof ResourceUnavailableEvent) {
            synchronized (waitSync) {
                stateTransitionOK = false;
                waitSync.notifyAll();
            }
        } else if (evt instanceof EndOfMediaEvent) {
            stopDataSource();
            System.err.println("closing controller");
            evt.getSourceController().close();
            closeProcessorAndHandler();
            bufferingDone = true;
        } else if (evt instanceof SizeChangeEvent) {
            // Nothing to do.
        } else if (evt instanceof MediaTimeSetEvent) {
            System.err.println("- mediaTime set: "
                    + ((MediaTimeSetEvent) evt).getMediaTime().getSeconds());
        } else if (evt instanceof StopAtTimeEvent) {
            System.err.println("- stop at time: "
                    + ((StopAtTimeEvent) evt).getMediaTime().getSeconds());
            stopDataSource();
            System.err.println("closing controller");
            evt.getSourceController().close();
            closeProcessorAndHandler();
            bufferingDone = true;
        } else if (evt instanceof StopByRequestEvent) {
            // bufferingDone was already set by stop(), which triggered this event.
            stopDataSource();
            System.err.println("closing controller");
            evt.getSourceController().close();
            closeProcessorAndHandler();
        }
    }

    /**
     * Get the recorded buffer.
     * @return the recording as an array of bytes
     */
    public byte[] getRecord() {
        return handler.getRecordBuffer();
    }

    /**
     * DataSink Listener.
     * @param evt - event received
     */
    public void dataSinkUpdate(DataSinkEvent evt) {
        if (evt instanceof EndOfStreamEvent) {
            bufferingDone = true;
            System.err.println("All done!");
            evt.getSourceDataSink().close();
        } else if (evt instanceof DataSinkErrorEvent) {
            bufferingDone = true;
            evt.getSourceDataSink().close();
        }
    }

    /**
     * Utility method to write a recorded buffer to a file.
     * @param data - the recorded bytes; ignored if null
     */
    private static void writeBufferToFile(byte[] data) {
        if (data == null) {
            return;
        }
        File f = new File("D://test.mov");
        FileOutputStream fos = null;
        try {
            fos = new FileOutputStream(f);
            fos.write(data);
        } catch (IOException ioe) {
            ioe.printStackTrace();
        } finally {
            // Fix: the original never closed the stream, and would NPE on
            // write() when the open failed.
            if (fos != null) {
                try {
                    fos.close();
                } catch (IOException ioe) {
                    ioe.printStackTrace();
                }
            }
        }
    }

    /**
     * Main program: record for ~10 seconds, then dump the buffer to disk.
     * @param args - unused
     */
    public static void main(String[] args) {
        VideoRecorder videoRecorder = new VideoRecorder();

        videoRecorder.start();
        // Fix: the original replaced videoRecorder.handler with a fresh
        // RawDataSourceHandler here, discarding the handler that was
        // actually recording, so getRecord() returned nothing useful.
        try {
            Thread.sleep(5000);
        } catch (InterruptedException ie) {
            ie.printStackTrace();
        }
        try {
            Thread.sleep(5000);
        } catch (InterruptedException ie) {
            ie.printStackTrace();
        }
        writeBufferToFile(videoRecorder.getRecord());
    }
}

回答

1

私なら個人的に RawDataSourceHandler の使用は避けます。これは基本的に、キャプチャしたデータをメモリ上の配列に書き込むだけなので、特に非圧縮の 24 ビット RGB を記録している場合、メモリ不足になるのはそのためです。

代わりに、一時ファイルを作成して DataSink でそこに記録し、あとでそのファイルの名前を変更することをお勧めします。権限の問題で JMF からファイルに書き込めない場合(ストリームでファイルに書き込むのに必要な権限は異なります)を除けば、少なくとも私はこれを推奨します。

File tempFile = File.createTempFile("something",".someFormat"); 
MediaLocator dest = new MediaLocator(tempFile.toURI().toURL()); 

DataSink sink = Manager.createDataSink(p.getDataOutput(),dest); 
sink.open(); 
sink.start(); 

p.start(); 

// Later: 
p.stop(); 
tempFile.renameTo(new File("SomeNewName.someFormat")); 

以上が私の提案する手順です。また、非圧縮 RGB よりも小さいフォーマットを使ってください(私なら H263 を提案します)。

また、ハンドラのソースに関して奇妙なコードがあります:

try { 
     handler.setSource(ods); // <-- You set the source once here 
    } catch (IncompatibleSourceException e) { 
     System.err.println("Cannot handle the output DataSource from the processor: " + ods); 
     //return false; 
    } 
    System.err.println("Start datasource handler "); 
    handler.addDataSinkListener(this); 
    try{ 
     handler.setSource(ds);  // <-- Then change it again quickly here to the raw output from the capture device! 
     handler.start(); 
    } 
    catch(IncompatibleSourceException ioe){ 
     ioe.printStackTrace(); 
    } 

最後に、プロセッサ/プレーヤーを閉じる際の自明でない注意点です。閉じる順序は次のとおりです: stop() → deallocate() → close()

状態は次のように進行する:

State: Controller.Running 
    stop() 
State: Controller.Prefetched 
    deallocate() 
State: Controller.Realized 
    close() 
State: Controller.Unrealized 

はい、先に述べたように、それぞれの状態に到達するまでプロセッサ/プレイヤーを待つ必要があります。このヒントを無視すると、特に Windows 上では信じられないほどメモリがリークします。JMF は正しく終了処理をしないとメモリをリークするのです。

これを自動的に処理するための私のコードを次に示します:

public class PlayerUtils { 

    /**
     * Shut a Player down cleanly in the order stop() -> deallocate() -> close(),
     * waiting for each state transition so JMF does not leak native resources.
     * Safe to call with null.
     */
    static public void cleanupPlayer(Player player) { 
     if (player != null) { 

      if (player.getState() == Player.Started) { 
       player.stop(); 
       waitForState(player, Player.Prefetched); 
      } 

      if (player.getState() == Player.Prefetched) { 
       player.deallocate(); 
       waitForState(player, Player.Realized); 
      } 

      player.close(); 
     } 
    } 

    /** Convenience overload for the MediaPlayer bean; delegates to the Player variant. */
    static public void cleanupPlayer(MediaPlayer player) { 
     if (player != null) { 
      cleanupPlayer(player.getPlayer()); 
     } 
    } 

    /**
     * Block until the player reaches the given state, or give up after 5 s.
     * Logs a SEVERE message if the state was not reached in time.
     */
    static private void waitForState(Player player, int state) { 
     // Fast abort 
     if (player.getState() == state) { 
      return; 
     } 

     // Fix: use the monotonic-ish millis clock directly instead of
     // allocating Date objects.
     long startTime = System.currentTimeMillis(); 

     long timeout = 5 * 1000; 

     final Object waitListener = new Object(); 

     ControllerListener cl = new ControllerListener() { 

      @Override 
      public void controllerUpdate(ControllerEvent ce) { 
       synchronized (waitListener) { 
        waitListener.notifyAll(); 
       } 
      } 
     }; 
     try { 
      player.addControllerListener(cl); 

      // Make sure we wake up every 500ms to check for timeouts and in case we miss a signal 
      synchronized (waitListener) { 
       while (player.getState() != state && System.currentTimeMillis() - startTime < timeout) { 
        try { 
         waitListener.wait(500); 
        } 
        catch (InterruptedException ex) { 
         // Fix: preserve the interrupt status and stop waiting,
         // rather than swallowing the interrupt and looping on.
         Thread.currentThread().interrupt(); 
         break; 
        } 
       } 
      } 
      // Fix: check the state itself — the original compared elapsed
      // time with '>', which could miss a timeout that ended the loop
      // at exactly the boundary.
      if (player.getState() != state) { 
       Logger.getLogger(PlayerUtils.class.getName()).log(Level.SEVERE, "Timed out waiting for state change from {0} to {1}", new Object[]{player.getState(), state}); 
      } 
     } 
     finally { 
      // No matter what else happens, we want to remove this 
      player.removeControllerListener(cl); 
     } 
    } 
} 
+0

かなり良い提案。私は確かにあなたのコードを通過し、私のプロジェクトでそれを実装するだろう! btwあなたの情報ページを読むと、それは私を笑顔にしました:) nice – maddy2012

+0

嬉しい私の小さな情報の宣伝は、自分自身以外の誰かを笑顔にしました。あなたが私の答えを気に入っているなら、私はアップボートか、答えの承認のいずれかに感謝します。 (チェックマーク)このサイトへの参加を促すので、有益な回答をすることをお勧めします。だからこそ習慣をつけてみてください。ありがとう! – SplinterReality

関連する問題