
I want to build a live camera filter like the Snapchat app. My app is based on the Google Vision Face Tracker sample, and I want to take a picture with the Android camera that includes the bitmap drawn on the face.

I have the following code in FaceGraphic.java:

package com.google.android.gms.samples.vision.face.facetracker; 

import android.graphics.Canvas; 
import android.graphics.Color; 
import android.graphics.Paint; 
import android.graphics.Bitmap; 
import android.graphics.BitmapFactory; 

import com.google.android.gms.samples.vision.face.facetracker.ui.camera.GraphicOverlay; 
import com.google.android.gms.vision.face.Face; 
import com.google.android.gms.vision.face.Landmark; 

/** 
* Graphic instance for rendering face position, orientation, and landmarks within an associated 
* graphic overlay view. 
*/ 
class FaceGraphic extends GraphicOverlay.Graphic { 
    private static final float FACE_POSITION_RADIUS = 10.0f; 
    private static final float ID_TEXT_SIZE = 40.0f; 
    private static final float ID_Y_OFFSET = 50.0f; 
    private static final float ID_X_OFFSET = -50.0f; 
    private static final float BOX_STROKE_WIDTH = 5.0f; 

    private static final int COLOR_CHOICES[] = { 
     Color.BLUE, 
     Color.CYAN, 
     Color.GREEN, 
     Color.MAGENTA, 
     Color.RED, 
     Color.WHITE, 
     Color.YELLOW 
    }; 
    private static int mCurrentColorIndex = 0; 

    private Paint mFacePositionPaint; 
    private Paint mIdPaint; 
    private Paint mBoxPaint; 

    private volatile Face mFace; 
    private int mFaceId; 
    private float mFaceHappiness; 

    private Bitmap bitmap; 
    private Bitmap sunglasses; 

    FaceGraphic(GraphicOverlay overlay) { 
     super(overlay); 

     mCurrentColorIndex = (mCurrentColorIndex + 1) % COLOR_CHOICES.length; 
     final int selectedColor = COLOR_CHOICES[mCurrentColorIndex]; 

     mFacePositionPaint = new Paint(); 
     mFacePositionPaint.setColor(selectedColor); 

     mIdPaint = new Paint(); 
     mIdPaint.setColor(selectedColor); 
     mIdPaint.setTextSize(ID_TEXT_SIZE); 

     mBoxPaint = new Paint(); 
     mBoxPaint.setColor(selectedColor); 
     mBoxPaint.setStyle(Paint.Style.STROKE); 
     mBoxPaint.setStrokeWidth(BOX_STROKE_WIDTH); 

     bitmap = BitmapFactory.decodeResource(getOverlay().getContext().getResources(), R.drawable.sunglasses); 
     sunglasses = bitmap; 

    } 

    void setId(int id) { 
     mFaceId = id; 
    } 


    /** 
    * Updates the face instance from the detection of the most recent frame. Invalidates the 
    * relevant portions of the overlay to trigger a redraw. 
    */ 
    void updateFace(Face face) { 
     mFace = face; 

     sunglasses = Bitmap.createScaledBitmap(bitmap, (int) scaleX(face.getWidth()), 
       (int) scaleY(((bitmap.getHeight() * face.getWidth())/bitmap.getWidth())), false); 

     postInvalidate(); 
    } 

    /** 
    * Draws the face annotations for position on the supplied canvas. 
    */ 
    @Override 
    public void draw(Canvas canvas) { 
     Face face = mFace; 

     if (face == null) { 
      return; 
     } 

     float x = translateX(face.getPosition().x + face.getWidth()/2); 
     float y = translateY(face.getPosition().y + face.getHeight()/2); 

     // Draws a bounding box around the face. 
     float xOffset = scaleX(face.getWidth()/2.0f); 
     float yOffset = scaleY(face.getHeight()/2.0f); 
     float left = x - xOffset; 
     float top = y - yOffset; 
     float right = x + xOffset; 
     float bottom = y + yOffset; 
     canvas.drawRect(left, top, right, bottom, mBoxPaint); 

     //Get the left eye to place the sunglasses over the eyes 
     float eyeY = top + sunglasses.getHeight()/2; 
     for(Landmark l : face.getLandmarks()){ 
      if(l.getType() == Landmark.LEFT_EYE){ 
       eyeY = l.getPosition().y + sunglasses.getHeight()/2; 
      } 
     } 
     canvas.drawBitmap(sunglasses, left, eyeY, new Paint()); 

    } 
} 
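
For reference, FaceGraphic is created and updated from FaceTrackerActivity through the sample's MultiProcessor factory. The wiring looks roughly like the sketch below (based on the stock Face Tracker sample, so it is only an approximation of what the activity contains; it uses com.google.android.gms.vision.Detector, MultiProcessor and Tracker) and shows where updateFace() and draw() get triggered from:

// Inside FaceTrackerActivity: factory that creates one tracker (and one FaceGraphic) per detected face.
private class GraphicFaceTrackerFactory implements MultiProcessor.Factory<Face> {
    @Override
    public Tracker<Face> create(Face face) {
        return new GraphicFaceTracker(mGraphicOverlay);
    }
}

// Tracker that forwards detection results to the FaceGraphic drawn on the GraphicOverlay.
private class GraphicFaceTracker extends Tracker<Face> {
    private GraphicOverlay mOverlay;
    private FaceGraphic mFaceGraphic;

    GraphicFaceTracker(GraphicOverlay overlay) {
        mOverlay = overlay;
        mFaceGraphic = new FaceGraphic(overlay);
    }

    @Override
    public void onNewItem(int faceId, Face item) {
        mFaceGraphic.setId(faceId);
    }

    @Override
    public void onUpdate(Detector.Detections<Face> detectionResults, Face face) {
        mOverlay.add(mFaceGraphic);
        mFaceGraphic.updateFace(face);   // the scaled sunglasses bitmap is refreshed here
    }

    @Override
    public void onMissing(Detector.Detections<Face> detectionResults) {
        mOverlay.remove(mFaceGraphic);
    }

    @Override
    public void onDone() {
        mOverlay.remove(mFaceGraphic);
    }
}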

In FaceTrackerActivity.java I created a function to take a picture:

findViewById(R.id.capture).setOnClickListener(new View.OnClickListener() { 
     @Override 
     public void onClick(View v) { 
      mCameraSource.takePicture(null, new CameraSource.PictureCallback() { 

       @Override 
       public void onPictureTaken(byte[] bytes) { 
        Log.d(TAG, "onPictureTaken - jpeg"); 
        capturePic(bytes); 
       } 

       private void capturePic(byte[] bytes) { 
        try { 
         String mainpath = getExternalStorageDirectory() + separator + "MaskIt" + separator + "images" + separator; 
         File basePath = new File(mainpath); 
         if (!basePath.exists()) 
          Log.d("CAPTURE_BASE_PATH", basePath.mkdirs() ? "Success": "Failed"); 
         File captureFile = new File(mainpath + "photo_" + getPhotoTime() + ".jpg"); 
         if (!captureFile.exists()) 
          Log.d("CAPTURE_FILE_PATH", captureFile.createNewFile() ? "Success": "Failed"); 
         FileOutputStream stream = new FileOutputStream(captureFile); 
         stream.write(bytes); 
         stream.flush(); 
         stream.close(); 
        } catch (IOException e) { 
         e.printStackTrace(); 
        } 
       } 

       private String getPhotoTime(){ 
        SimpleDateFormat sdf=new SimpleDateFormat("ddMMyy_hhmmss"); 
        return sdf.format(new Date()); 
       } 
      }); 
     } 
    }); 
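
Note that capturePic() only writes the JPEG file to external storage, so it will not show up in the gallery until the media store indexes it. A small helper along the following lines could be called right after stream.close(); this is only a sketch, and GalleryHelper / addToGallery are names made up here for illustration, not part of the sample:

import android.content.Context;
import android.media.MediaScannerConnection;
import android.net.Uri;
import android.util.Log;

import java.io.File;

// Hypothetical helper: asks the media scanner to index the freshly written
// JPEG so it appears in the Gallery app without a full media rescan.
final class GalleryHelper {
    private static final String TAG = "GalleryHelper";

    static void addToGallery(Context context, File imageFile) {
        MediaScannerConnection.scanFile(
                context,
                new String[]{imageFile.getAbsolutePath()},
                new String[]{"image/jpeg"},
                new MediaScannerConnection.OnScanCompletedListener() {
                    @Override
                    public void onScanCompleted(String path, Uri uri) {
                        Log.d(TAG, "Indexed " + path + " as " + uri);
                    }
                });
    }
}

Calling GalleryHelper.addToGallery(getApplicationContext(), captureFile) at the end of capturePic() would then make the saved photo visible in the gallery.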

When I open the app and grant it permission to access the camera, it detects a face and draws the bitmap (sunglasses) over it. I also added a capture button labeled "Take a Picture".

This is my main.xml:

<LinearLayout 
    xmlns:android="http://schemas.android.com/apk/res/android" 
    xmlns:app="http://schemas.android.com/apk/res-auto" 
    xmlns:tools="http://schemas.android.com/tools" 
    android:id="@+id/topLayout" 
    android:layout_width="match_parent" 
    android:layout_height="match_parent" 
    android:keepScreenOn="true" 
    android:weightSum="1" 
    android:orientation="vertical"> 

    <LinearLayout 
     android:orientation="horizontal" 
     android:layout_width="match_parent" 
     android:layout_height="64dp"> 

    <Button 
     android:text="Sunglasses" 
     android:layout_width="wrap_content" 
     android:layout_height="wrap_content" 
     android:id="@+id/sun" 
     android:layout_weight="1" 
     tools:ignore="HardcodedText" /> 

    <Button 
     android:text="Helmet" 
     android:layout_width="wrap_content" 
     android:layout_height="wrap_content" 
     android:id="@+id/helm" 
     android:layout_weight="1" 
     tools:ignore="HardcodedText" /> 

    <Button 
     android:text="Mustache" 
     android:layout_width="wrap_content" 
     android:layout_height="wrap_content" 
     android:id="@+id/must" 
     android:layout_weight="1" 
     tools:ignore="HardcodedText" /> 
    </LinearLayout> 
<com.google.android.gms.samples.vision.face.facetracker.ui.camera.CameraSourcePreview 
     android:id="@+id/preview" 
     android:layout_width="match_parent" 
     android:layout_height="0dp" 
     android:layout_weight="0.99" 
     android:weightSum="1"> 
    <com.google.android.gms.samples.vision.face.facetracker.ui.camera.GraphicOverlay 
     android:id="@+id/faceOverlay" 
     android:layout_width="match_parent" 
     android:layout_height="match_parent" 
     android:layout_weight="0.79" /> 

</com.google.android.gms.samples.vision.face.facetracker.ui.camera.CameraSourcePreview> 

    <Button 
     android:id="@+id/capture" 
     android:layout_width="match_parent" 
     android:layout_height="wrap_content" 
     android:text="Take a Picture" 
     tools:ignore="HardcodedText" /> 
    </LinearLayout> 

How can I get the bitmap that I draw in FaceGraphic into the captured photo? When I take a picture, I only get the plain camera photo without the bitmap. I want to capture the user's face together with the bitmap and save the result to the gallery. Sorry if this is unclear; I hope you understand my question. Thank you.
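
One way to think about it (a minimal sketch, not the sample's API): CameraSource.takePicture() returns only the raw camera JPEG, because the sunglasses are drawn on the GraphicOverlay view and never into the camera frames, so the overlay bitmap has to be composited onto the captured bytes manually before they are written to disk. In the sketch below, overlayX and overlayY are assumed to be the sunglasses position already converted from the overlay's coordinate space into the full-resolution picture's coordinate space; the two spaces differ in size and may be mirrored for the front camera, so a real implementation has to rescale them first.

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;

// Sketch of compositing the overlay bitmap onto the captured photo.
// All parameter names here are assumptions for illustration.
final class PhotoComposer {

    static Bitmap compose(byte[] jpegBytes, Bitmap sunglasses,
                          float overlayX, float overlayY) {
        // Decode the raw camera bytes into a mutable bitmap we can draw on.
        Bitmap photo = BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.length)
                .copy(Bitmap.Config.ARGB_8888, true);

        // Draw the same bitmap FaceGraphic renders, at the translated position.
        Canvas canvas = new Canvas(photo);
        canvas.drawBitmap(sunglasses, overlayX, overlayY, null);
        return photo;
    }
}

Inside capturePic(), the composed bitmap could then be written with photo.compress(Bitmap.CompressFormat.JPEG, 100, stream) in place of stream.write(bytes).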
