2016-08-31 13 views
0

私はAndroid Studioでカメラ機能の開発をしています。Camera2からImageReader経由でYUV_420_888形式の画像を取得しています。問題は、出力にこの奇妙なノイズ（正しい呼び方は分かりませんが）が現れることです。ここにlinkの画像があります。近くを見ると、グレーの濃淡が異なる6〜7個の円があります。私は白黒画像が必要なのでYチャンネルだけを使用しており、画像はまさにそのYチャンネルです。これらの「円」がエッジ検出アルゴリズムと干渉し、エッジ検出においてノイズとして現れるので、大きな問題です。また、このノイズは撮影面が平坦な場合にのみ表示されます。なぜこれが起こるのか、どのように修正すればよいのか、誰か教えてもらえますか?（Android Studio camera2 ノイズ除去）

編集: 私はコードについて何か知っていました。これは私がcamera2を動かそうとした元のアプリケーションです。

package com.example.nikola.camera2project; ― 私はいくつかのフレームに対して処理を行う必要があるため、このコードに変更を加える必要があります。

import android.hardware.camera2.CaptureResult; 
import android.support.v7.app.AppCompatActivity; 
import android.os.Bundle; 
import android.Manifest; 
import android.content.Context; 
import android.content.pm.PackageManager; 
import android.graphics.ImageFormat; 
import android.graphics.SurfaceTexture; 
import android.hardware.camera2.CameraAccessException; 
import android.hardware.camera2.CameraCaptureSession; 
import android.hardware.camera2.CameraCharacteristics; 
import android.hardware.camera2.CameraDevice; 
import android.hardware.camera2.CameraManager; 
import android.hardware.camera2.CameraMetadata; 
import android.hardware.camera2.CaptureRequest; 
import android.hardware.camera2.TotalCaptureResult; 
import android.hardware.camera2.params.StreamConfigurationMap; 
import android.media.Image; 
import android.media.ImageReader; 
import android.os.Bundle; 
import android.os.Environment; 
import android.os.Handler; 
import android.os.HandlerThread; 
import android.support.annotation.NonNull; 
import android.support.v4.app.ActivityCompat; 
import android.support.v7.app.AppCompatActivity; 
import android.util.Log; 
import android.util.Size; 
import android.util.SparseIntArray; 
import android.view.Surface; 
import android.view.SurfaceView; 
import android.view.TextureView; 
import android.view.View; 
import android.view.Window; 
import android.view.WindowManager; 
import android.widget.Button; 
import android.widget.Toast; 
import java.io.File; 
import java.io.FileNotFoundException; 
import java.io.FileOutputStream; 
import java.io.IOException; 
import java.io.OutputStream; 
import java.nio.ByteBuffer; 
import java.util.ArrayList; 
import java.util.Arrays; 
import java.util.List; 

/**
 * First version of the demo: streams the camera2 preview directly into a
 * TextureView. An ImageReader is created but never wired into the capture
 * session, so in this version no YUV frames reach application code.
 */
public class MainActivity extends AppCompatActivity {

    private static final String TAG = "AndroidCameraApi";
    private TextureView textureView;

    // Display rotation -> orientation (degrees). Not used by the preview path
    // below; kept for still-capture code that may be added later.
    private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
    static {
        ORIENTATIONS.append(Surface.ROTATION_0, 90);
        ORIENTATIONS.append(Surface.ROTATION_90, 0);
        ORIENTATIONS.append(Surface.ROTATION_180, 270);
        ORIENTATIONS.append(Surface.ROTATION_270, 180);
    }

    private String cameraId;
    protected CameraDevice cameraDevice;
    protected CameraCaptureSession cameraCaptureSessions;
    protected CaptureRequest.Builder captureRequestBuilder;
    private Size imageDimension;
    private ImageReader imageReader;
    private File file;
    private static final int REQUEST_CAMERA_PERMISSION = 200;

    // Camera/session callbacks are delivered on this background thread.
    private Handler mBackgroundHandler;
    private HandlerThread mBackgroundThread;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(R.layout.activity_main);
        textureView = (TextureView) findViewById(R.id.texture);
        assert textureView != null;
        textureView.setSurfaceTextureListener(textureListener);
        // NOTE(review): this reader is never added as a capture target in this
        // version, so it receives no frames; kept for parity with the second version.
        imageReader = ImageReader.newInstance(640, 480, ImageFormat.YUV_420_888, 1);
    }

    /** Opens the camera as soon as the TextureView's SurfaceTexture exists. */
    TextureView.SurfaceTextureListener textureListener = new TextureView.SurfaceTextureListener() {
        @Override
        public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
            openCamera();
        }

        @Override
        public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
            // Transform the captured image according to the new surface size if needed.
        }

        @Override
        public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
            return false;
        }

        @Override
        public void onSurfaceTextureUpdated(SurfaceTexture surface) {
        }
    };

    /** Starts the preview session once the camera device has opened. */
    private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(CameraDevice camera) {
            Log.e(TAG, "onOpened");
            cameraDevice = camera;
            createCameraPreview();
        }

        @Override
        public void onDisconnected(CameraDevice camera) {
            cameraDevice.close();
            cameraDevice = null; // BUG FIX: drop the stale reference, mirroring onError
        }

        @Override
        public void onError(CameraDevice camera, int error) {
            cameraDevice.close();
            cameraDevice = null;
        }
    };

    /** Spins up the thread that camera callbacks run on. */
    protected void startBackgroundThread() {
        mBackgroundThread = new HandlerThread("Camera Background");
        mBackgroundThread.start();
        mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
    }

    /** Stops the callback thread and waits for it to terminate. */
    protected void stopBackgroundThread() {
        mBackgroundThread.quitSafely();
        try {
            mBackgroundThread.join();
            mBackgroundThread = null;
            mBackgroundHandler = null;
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // BUG FIX: preserve interrupt status
            e.printStackTrace();
        }
    }

    /** Creates a repeating-preview capture session targeting the TextureView. */
    protected void createCameraPreview() {
        try {
            SurfaceTexture texture = textureView.getSurfaceTexture();
            assert texture != null;
            texture.setDefaultBufferSize(imageDimension.getWidth(), imageDimension.getHeight());
            Surface surface = new Surface(texture);
            captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            captureRequestBuilder.addTarget(surface);
            cameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
                    // The camera may have been closed while the session was configuring.
                    if (null == cameraDevice) {
                        return;
                    }
                    // When the session is ready, start displaying the preview.
                    cameraCaptureSessions = cameraCaptureSession;
                    updatePreview();
                }

                @Override
                public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
                    Toast.makeText(MainActivity.this, "Configuration change", Toast.LENGTH_SHORT).show();
                }
            }, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    /** Picks the first camera, requests permissions if needed, and opens it. */
    private void openCamera() {
        CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
        Log.e(TAG, "is camera open");
        try {
            cameraId = manager.getCameraIdList()[0];
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
            StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            assert map != null;
            imageDimension = map.getOutputSizes(SurfaceTexture.class)[0];
            // Ask the user to grant camera/storage permission if not yet granted;
            // onRequestPermissionsResult handles the reply.
            if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED && ActivityCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
                ActivityCompat.requestPermissions(MainActivity.this, new String[]{Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE}, REQUEST_CAMERA_PERMISSION);
                return;
            }
            manager.openCamera(cameraId, stateCallback, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
        Log.e(TAG, "openCamera X");
    }

    /** Issues the repeating preview request on the configured session. */
    protected void updatePreview() {
        if (null == cameraDevice) {
            Log.e(TAG, "updatePreview error, return");
            return; // BUG FIX: original fell through and would NPE on the builder below
        }
        captureRequestBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
        try {
            cameraCaptureSessions.setRepeatingRequest(captureRequestBuilder.build(), null, mBackgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    /** Releases the camera device and the ImageReader. */
    private void closeCamera() {
        if (null != cameraDevice) {
            cameraDevice.close();
            cameraDevice = null;
        }
        if (null != imageReader) {
            imageReader.close();
            imageReader = null;
        }
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults); // BUG FIX: forward to fragments
        if (requestCode == REQUEST_CAMERA_PERMISSION) {
            // BUG FIX: an empty grantResults (request cancelled) is treated as a
            // denial instead of crashing on grantResults[0].
            if (grantResults.length == 0 || grantResults[0] == PackageManager.PERMISSION_DENIED) {
                Toast.makeText(MainActivity.this, "Sorry!!!, you can't use this app without granting permission", Toast.LENGTH_LONG).show();
                finish();
            }
        }
    }

    @Override
    protected void onResume() {
        super.onResume();
        Log.e(TAG, "onResume");
        startBackgroundThread();
        if (textureView.isAvailable()) {
            openCamera();
        } else {
            textureView.setSurfaceTextureListener(textureListener);
        }
    }

    @Override
    protected void onPause() {
        Log.e(TAG, "onPause");
        closeCamera(); // BUG FIX: was commented out, leaking the camera while paused
        stopBackgroundThread();
        super.onPause();
    }
}

私は onCreate を次のように変更しました:

// NOTE(review): modified onCreate from the question's first edit — uses a
// SurfaceView/SurfaceHolder instead of a TextureView, and an ImageReader with a
// deeper queue (10 images) so frames can be consumed by a listener.
protected void onCreate(Bundle savedInstanceState) { 
     super.onCreate(savedInstanceState); 
     requestWindowFeature(Window.FEATURE_NO_TITLE); 
     getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, 
       WindowManager.LayoutParams.FLAG_FULLSCREEN); 
     setContentView(R.layout.activity_main); 
     // The camera is opened from surfaceCreated() once the holder is ready.
     surfaceView = (SurfaceView) findViewById(R.id.surfaceView); 
     assert surfaceView != null; 
     surfaceHolder = surfaceView.getHolder(); 
     surfaceHolder.addCallback(this); 
     // 480x640 YUV_420_888 frames; only the Y plane is used later.
     mImageReader = ImageReader.newInstance(480,640, ImageFormat.YUV_420_888,10); 
} 

// NOTE(review): snippet from the question's first edit — same device callback as
// above, but onOpened additionally installs an ImageReader listener that expands
// the Y (luma) plane of each YUV_420_888 frame into grayscale RGBA and draws it
// onto the SurfaceView. The snippet is truncated here (onDisconnected/onError
// and the closing "};" are omitted in the original post).
private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() { 
     @Override 
     public void onOpened(CameraDevice camera) { 
      //This is called when the camera is open 
      Log.e(TAG, "onOpened"); 
      cameraDevice = camera; 
      createCameraPreview(); 
      mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() { 
       @Override 
       public void onImageAvailable(ImageReader reader) { 
        // acquireLatestImage drops stale frames; may return null when none pending.
        Image image = reader.acquireLatestImage(); 
        if(image != null) { 
         // Plane 0 of YUV_420_888 is the Y (luma) plane — all we need for grayscale.
         ByteBuffer buffer0 = image.getPlanes()[0].getBuffer(); 
         byte[] Y = new byte[buffer0.remaining()]; 
         buffer0.get(Y); 

         //I do work here but this is for preview grayscale only 

         byte[] Bits = new byte[Y.length * 4]; //That's where the RGBA array goes. 

         // Replicate each luma byte into R, G and B; alpha forced opaque.
         for (int i = 0; i < Y.length; i++) { 
          Bits[i * 4] = 
            Bits[i * 4 + 1] = 
              Bits[i * 4 + 2] = Y[i]; 
          Bits[i * 4 + 3] = -1;//0xff, that's the alpha. 
         } 

         // NOTE(review): this assumes the Y plane's row stride equals the image
         // width; when rowStride > width the buffer is larger than width*height
         // and the copy below misbehaves — TODO confirm on target devices.
         Bitmap bm = Bitmap.createBitmap(image.getWidth(), image.getHeight(), Bitmap.Config.ARGB_8888); 
         bm.copyPixelsFromBuffer(ByteBuffer.wrap(Bits)); 
         // Scale to the SurfaceView height, keeping a 4:3 aspect ratio.
         Bitmap scaled = Bitmap.createScaledBitmap(bm, (int) (surfaceView.getHeight() * 1.33333), surfaceView.getHeight(), true); 
         Canvas c; 
         c = surfaceHolder.lockCanvas(); 
         c.drawBitmap(scaled, 0, 0, null); 
         surfaceHolder.unlockCanvasAndPost(c); 
         image.close(); 
        } 
       } 
      },mBackgroundHandler); 
     } 

/**
 * Builds a preview request that streams frames into the ImageReader and opens a
 * capture session targeting that same surface. Once configured, the session
 * starts the repeating preview via updatePreview().
 */
protected void createCameraPreview() {
    final Surface readerSurface = mImageReader.getSurface();
    CameraCaptureSession.StateCallback sessionCallback = new CameraCaptureSession.StateCallback() {
        @Override
        public void onConfigured(@NonNull CameraCaptureSession session) {
            if (cameraDevice == null) {
                return; // the camera was closed while the session was configuring
            }
            // When the session is ready, we start displaying the preview.
            cameraCaptureSessions = session;
            updatePreview();
        }

        @Override
        public void onConfigureFailed(@NonNull CameraCaptureSession session) {
            Toast.makeText(MainActivity.this, "Configuration change", Toast.LENGTH_SHORT).show();
        }
    };
    try {
        captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        captureRequestBuilder.addTarget(readerSurface);
        cameraDevice.createCaptureSession(Arrays.asList(readerSurface), sessionCallback, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}

残りの部分は同じです。これを表示すると、写真で見られるような結果になりますが、最初のプログラムのように YUV_420_888 を TextureView に直接表示した場合は、すべて問題ありません。何か考えはありますか?

EDIT 2: 全体のコード:

package com.example.nikola.camera2project; 


import android.graphics.Bitmap; 
import android.graphics.Canvas; 
import android.hardware.camera2.CaptureResult; 
import android.support.v7.app.AppCompatActivity; 
import android.os.Bundle; 
import android.Manifest; 
import android.content.Context; 
import android.content.pm.PackageManager; 
import android.graphics.ImageFormat; 
import android.graphics.SurfaceTexture; 
import android.hardware.camera2.CameraAccessException; 
import android.hardware.camera2.CameraCaptureSession; 
import android.hardware.camera2.CameraCharacteristics; 
import android.hardware.camera2.CameraDevice; 
import android.hardware.camera2.CameraManager; 
import android.hardware.camera2.CameraMetadata; 
import android.hardware.camera2.CaptureRequest; 
import android.hardware.camera2.TotalCaptureResult; 
import android.hardware.camera2.params.StreamConfigurationMap; 
import android.media.Image; 
import android.media.ImageReader; 
import android.os.Bundle; 
import android.os.Environment; 
import android.os.Handler; 
import android.os.HandlerThread; 
import android.support.annotation.NonNull; 
import android.support.v4.app.ActivityCompat; 
import android.support.v7.app.AppCompatActivity; 
import android.util.Log; 
import android.util.Size; 
import android.util.SparseIntArray; 
import android.view.Surface; 
import android.view.SurfaceHolder; 
import android.view.SurfaceView; 
import android.view.TextureView; 
import android.view.View; 
import android.view.Window; 
import android.view.WindowManager; 
import android.widget.Button; 
import android.widget.Toast; 
import java.io.File; 
import java.io.FileNotFoundException; 
import java.io.FileOutputStream; 
import java.io.IOException; 
import java.io.OutputStream; 
import java.nio.ByteBuffer; 
import java.util.ArrayList; 
import java.util.Arrays; 
import java.util.List; 

public class MainActivity extends AppCompatActivity implements SurfaceHolder.Callback { 

    private static final String TAG = "AndroidCameraApi"; 
    private TextureView textureView; 
    private static final SparseIntArray ORIENTATIONS = new SparseIntArray(); 
    static { 
     ORIENTATIONS.append(Surface.ROTATION_0, 90); 
     ORIENTATIONS.append(Surface.ROTATION_90, 0); 
     ORIENTATIONS.append(Surface.ROTATION_180, 270); 
     ORIENTATIONS.append(Surface.ROTATION_270, 180); 
    } 
    private String cameraId; 
    protected CameraDevice cameraDevice; 
    protected CameraCaptureSession cameraCaptureSessions; 
    protected CaptureRequest.Builder captureRequestBuilder; 
    private Size imageDimension; 
    private ImageReader imageReader; 
    private File file; 
    private static final int REQUEST_CAMERA_PERMISSION = 200; 

    SurfaceView surfaceView; 
    SurfaceHolder surfaceHolder; 
    private Handler mBackgroundHandler; 
    private HandlerThread mBackgroundThread; 
    @Override 
    protected void onCreate(Bundle savedInstanceState) { 
     super.onCreate(savedInstanceState); 
     requestWindowFeature(Window.FEATURE_NO_TITLE); 
     getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, 
       WindowManager.LayoutParams.FLAG_FULLSCREEN); 
     setContentView(R.layout.activity_main); 
     surfaceView = (SurfaceView) findViewById(R.id.surfaceView); 
     assert surfaceView != null; 
     surfaceHolder = surfaceView.getHolder(); 
     surfaceHolder.addCallback(this); 
     imageReader = ImageReader.newInstance(480,640, ImageFormat.YUV_420_888,10); 




    } 



    private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() { 
     @Override 
     public void onOpened(CameraDevice camera) { 
      //This is called when the camera is open 
      Log.e(TAG, "onOpened"); 
      cameraDevice = camera; 
      createCameraPreview(); 
      imageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() { 
       @Override 
       public void onImageAvailable(ImageReader reader) { 
        Image image = reader.acquireLatestImage(); 
        if(image != null) { 
         ByteBuffer buffer0 = image.getPlanes()[0].getBuffer(); 
         byte[] Y = new byte[buffer0.remaining()]; 
         buffer0.get(Y); 
         byte[] Bits = new byte[Y.length * 4]; //That's where the RGBA array goes. 
         for (int i = 0; i < Y.length; i++) { 
          Bits[i * 4] = 
            Bits[i * 4 + 1] = 
              Bits[i * 4 + 2] = Y[i]; 
          Bits[i * 4 + 3] = -1;//0xff, that's the alpha. 
         } 
         Bitmap bm = Bitmap.createBitmap(image.getWidth(), image.getHeight(), Bitmap.Config.ARGB_8888); 
         bm.copyPixelsFromBuffer(ByteBuffer.wrap(Bits)); 
         Bitmap scaled = Bitmap.createScaledBitmap(bm, (int) (surfaceView.getHeight() * 1.33333), surfaceView.getHeight(), true); 
         Canvas c; 
         c = surfaceHolder.lockCanvas(); 
         c.drawBitmap(scaled, 0, 0, null); 
         surfaceHolder.unlockCanvasAndPost(c); 
         image.close(); 



        } 

       } 
      },mBackgroundHandler); 

     } 
     @Override 
     public void onDisconnected(CameraDevice camera) { 
      cameraDevice.close(); 
     } 
     @Override 
     public void onError(CameraDevice camera, int error) { 
      cameraDevice.close(); 
      cameraDevice = null; 
     } 
    }; 
    protected void startBackgroundThread() { 
     mBackgroundThread = new HandlerThread("Camera Background"); 
     mBackgroundThread.start(); 
     mBackgroundHandler = new Handler(mBackgroundThread.getLooper()); 
    } 
    protected void stopBackgroundThread() { 
     mBackgroundThread.quitSafely(); 
     try { 
      mBackgroundThread.join(); 
      mBackgroundThread = null; 
      mBackgroundHandler = null; 
     } catch (InterruptedException e) { 
      e.printStackTrace(); 
     } 
    } 
    protected void createCameraPreview() { 
     try { 
      captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 
      captureRequestBuilder.addTarget(imageReader.getSurface()); 
      cameraDevice.createCaptureSession(Arrays.asList(imageReader.getSurface()), new CameraCaptureSession.StateCallback(){ 
       @Override 
       public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) { 
        //The camera is already closed 
        if (null == cameraDevice) { 
         return; 
        } 
        // When the session is ready, we start displaying the preview. 
        cameraCaptureSessions = cameraCaptureSession; 
        updatePreview(); 
       } 
       @Override 
       public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) { 
        Toast.makeText(MainActivity.this, "Configuration change", Toast.LENGTH_SHORT).show(); 
       } 
      }, null); 


     } catch (CameraAccessException e) { 
      e.printStackTrace(); 
     } 
    } 
    private void openCamera() { 
     CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE); 
     Log.e(TAG, "is camera open"); 
     try { 
      cameraId = manager.getCameraIdList()[0]; 
      CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId); 
      StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); 
      assert map != null; 
      imageDimension = map.getOutputSizes(SurfaceTexture.class)[0]; 
      // Add permission for camera and let user grant the permission 
      if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED && ActivityCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { 
       ActivityCompat.requestPermissions(MainActivity.this, new String[]{Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE}, REQUEST_CAMERA_PERMISSION); 
       return; 
      } 
      manager.openCamera(cameraId, stateCallback, null); 
     } catch (CameraAccessException e) { 
      e.printStackTrace(); 
     } 
     Log.e(TAG, "openCamera X"); 
    } 
    protected void updatePreview() { 
     if(null == cameraDevice) { 
      Log.e(TAG, "updatePreview error, return"); 
     } 
     captureRequestBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO); 
     try { 
      cameraCaptureSessions.setRepeatingRequest(captureRequestBuilder.build(), null, mBackgroundHandler); 
     } catch (CameraAccessException e) { 
      e.printStackTrace(); 
     } 
    } 

    @Override 
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) { 
     if (requestCode == REQUEST_CAMERA_PERMISSION) { 
      if (grantResults[0] == PackageManager.PERMISSION_DENIED) { 
       // close the app 
       Toast.makeText(MainActivity.this, "Sorry!!!, you can't use this app without granting permission", Toast.LENGTH_LONG).show(); 
       finish(); 
      } 
     } 
    } 
    @Override 
    protected void onResume() { 
     super.onResume(); 
     Log.e(TAG, "onResume"); 
     startBackgroundThread(); 
    } 
    @Override 
    protected void onPause() { 
     Log.e(TAG, "onPause"); 
     //closeCamera(); 
     stopBackgroundThread(); 
     super.onPause(); 
    } 




    @Override 
    public void surfaceCreated(SurfaceHolder holder) { 
     openCamera(); 
    } 

    @Override 
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { 

    } 

    @Override 
    public void surfaceDestroyed(SurfaceHolder holder) { 

    } 
} 

答えて

1

それは、お使いのカメラのハードウェアの制限が原因の量子化ノイズである可能性があります。撮影面が薄暗い場合、カメラはシーンをより見やすくするために、自動的に明るさ調整やホワイトバランスを適用することがあります。カメラに調整可能な絞り、調整可能なシャッター、または高解像度ADCがない場合、「Y」値にピクセルごとに整数係数を掛けるなどして、デジタルで調整を行います。その結果が量子化ノイズです。

AWB modeを調整すると、より良い結果が得られる場合があります。より良い照明が役立つかもしれません。または、エッジ検出アルゴリズムでしきい値を調整することもできます。

EDIT

addCallback() の呼び出しの直後に、surfaceHolder.setFormat(PixelFormat.RGBA_8888) の呼び出しを追加してください。

これは、表面のデフォルトの内部フォーマットの結果であり、実際にはARGB_8888よりも小さいものでした。 S6でも起こった。

+0

AWBモードを変更しようとしましたが、うまくいきません。私は私の電話(CM12モードでサムスン銀河S2)でこのコードを試した。また、私はカメラのための私のアプリを開くときは、画像がはるかに良いです。私のコードは、このコードに似ています。エッジに関する問題は、私が従いたいエッジは、それらのサークルと同じ派生を持つということです。 http://inducesmile.com/android/android-camera2-api-example-tutorial/ – Nikola010

+0

カメラアプリと同じ品質を自分のソフトウェアから得ることができるはずです。あなたのイメージファイルのバイトを見ると、間違いなく量子化が行われていますが、その理由は分かりません。私はYUV420_888がかなり良く見えると思っていたでしょうが、おそらくキャプチャパイプラインはあなたをそこに連れていくためにいくつかの変換を行わなければならず、いくつかの情報は失われました。私は他の利用可能なフォーマットを試みます。エッジ検出アルゴリズムを実行することができないかもしれませんが、少なくともそれらがTextureViewでどのように見えるか見ることができます。 – greeble31

+0

実際には、私は何かプログラムに間違っていたようですが、何も得られません。オリジナルのポストを編集してコードを追加します – Nikola010

関連する問題