Android Camera2.0: Taking Photos with ImageReader and Fixing the Image Orientation Problem

The previous article on implementing a camera preview and obtaining face key-point coordinates with the Android Camera2.0 API covered the basics of Camera2.0. To implement photo capture with the Camera2.0 API, we use ImageReader.
First, keep a reference to an ImageReader in the custom TextureView.SurfaceTextureListener implementation class:
private ImageReader mImageReader;
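For reference, here is a minimal sketch of how the surrounding class might look, assuming (as in the previous article) that the preview lives in a custom TextureView; the class name and exact field list are illustrative assumptions, not code from the original article.

public class CameraPreviewView extends TextureView
        implements TextureView.SurfaceTextureListener {

    private static final String TAG = "CameraPreviewView";

    private Context mContext;
    private CameraManager cameraManager;
    private CameraCharacteristics characteristics;
    private String mCameraId;
    private Size[] sizes;
    private Size mPreviewSize;
    private CameraDevice cameraDevice;
    private CameraCaptureSession captureSession;
    private CaptureRequest.Builder previewRequestBuilder;
    private CaptureRequest captureRequest;
    private Handler mBackgroundHandler;
    private boolean mFaceDetectSupported;
    private int mFaceDetectMode;

    // The field introduced above
    private ImageReader mImageReader;

    public CameraPreviewView(Context context) {
        super(context);
        mContext = context;
        setSurfaceTextureListener(this);
    }

    // onSurfaceTextureAvailable(), startPreview(), captureStillImage(), etc. follow below
}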

Next, initialize the ImageReader in the setUpCamera() method:
    /**
     * Set up the camera2.0 configuration
     */
    private void setUpCamera() {
        cameraManager = (CameraManager)mContext.getSystemService(Context.CAMERA_SERVICE);
        try{
            for (String id : cameraManager.getCameraIdList()) {

                // Get this camera's characteristics
                characteristics = cameraManager.getCameraCharacteristics(id);

                // Select the front-facing camera
                if (characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT) {
                    mCameraId = id ;

                    StreamConfigurationMap streamConfigurationMap = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

                    sizes = streamConfigurationMap.getOutputSizes(SurfaceHolder.class);

                    // Pick the preview size (here simply the first supported size)
                    mPreviewSize = sizes[0];

                    // Create the ImageReader that receives JPEG frames (buffer up to 2 images)
                    mImageReader = ImageReader.newInstance(mPreviewSize.getWidth(),
                            mPreviewSize.getHeight(),
                            ImageFormat.JPEG,
                            2 );
                    mImageReader.setOnImageAvailableListener(onImageAvailableListener , mBackgroundHandler);

                    // Query face-detection support: available modes and the maximum face count
                    int[] FD = characteristics.get(CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES);
                    int maxFD = characteristics.get(CameraCharacteristics.STATISTICS_INFO_MAX_FACE_COUNT);

                    if (FD.length > 0) {
                        List<Integer> fdList = new ArrayList<>();
                        for (int faceD : FD) {
                            fdList.add(faceD);
                            Log.e(TAG, "setUpCameraOutputs: FD type:" + Integer.toString(faceD));
                        }
                        Log.e(TAG, "setUpCameraOutputs: FD count" + Integer.toString(maxFD));

                        if (maxFD > 0) {
                            mFaceDetectSupported = true;
                            mFaceDetectMode = Collections.max(fdList);
                        }
                    }
                }
            }
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }
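The setOnImageAvailableListener() call above delivers callbacks on mBackgroundHandler, whose creation the snippets do not show. Below is a minimal sketch of the usual background-thread setup, assuming a HandlerThread; the thread and method names are my own, not from the original article.

    private HandlerThread mBackgroundThread;

    private void startBackgroundThread() {
        mBackgroundThread = new HandlerThread("CameraBackground");
        mBackgroundThread.start();
        mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
    }

    private void stopBackgroundThread() {
        if (mBackgroundThread == null) {
            return;
        }
        mBackgroundThread.quitSafely();
        try {
            mBackgroundThread.join();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        mBackgroundThread = null;
        mBackgroundHandler = null;
    }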

Next, in the startPreview() method, add the ImageReader's Surface as a target on the preview CaptureRequest.Builder:
previewRequestBuilder.addTarget(mImageReader.getSurface());
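Note that a Surface can only be used as a request target if it was also passed to createCaptureSession(). Here is a minimal sketch of what startPreview() might look like with the ImageReader wired in, assuming the preview Surface comes from the TextureView's SurfaceTexture; the exact structure of this method is an assumption, not the original code.

private void startPreview() {
    try {
        SurfaceTexture texture = getSurfaceTexture();
        texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
        Surface previewSurface = new Surface(texture);

        previewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        previewRequestBuilder.addTarget(previewSurface);
        // Also deliver every preview frame to the ImageReader so onImageAvailable fires
        previewRequestBuilder.addTarget(mImageReader.getSurface());

        // Every target Surface must be registered as a session output as well
        cameraDevice.createCaptureSession(
                Arrays.asList(previewSurface, mImageReader.getSurface()),
                new CameraCaptureSession.StateCallback() {
                    @Override
                    public void onConfigured(CameraCaptureSession session) {
                        captureSession = session;
                        try {
                            captureRequest = previewRequestBuilder.build();
                            captureSession.setRepeatingRequest(captureRequest, null, mBackgroundHandler);
                        } catch (CameraAccessException e) {
                            e.printStackTrace();
                        }
                    }

                    @Override
                    public void onConfigureFailed(CameraCaptureSession session) {
                        Log.e(TAG, "Failed to configure the capture session");
                    }
                }, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}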

Next, create a class implementing the ImageReader.OnImageAvailableListener interface. It retrieves the Image object for each frame and converts it into a Bitmap; because the raw image delivered by the camera differs from what is shown in the preview, the Bitmap is mirrored and rotated before it is saved.
    /**
     * OnImageAvailableListener implementation
     */

   private final ImageReader.OnImageAvailableListener onImageAvailableListener = new ImageReader.OnImageAvailableListener() {
        @Override
        public void onImageAvailable(ImageReader reader) {

            Image image = reader.acquireNextImage();

            ByteBuffer byteBuffer = image.getPlanes()[0].getBuffer();
            byte[] bytes = new byte[byteBuffer.remaining()];
            byteBuffer.get(bytes);

            try {
                Bitmap temp = BitmapFactory.decodeByteArray(bytes,0,bytes.length);
                // A square (width x width) canvas so the image still fits after the 90-degree rotation
                Bitmap newBitmap = Bitmap.createBitmap(mPreviewSize.getWidth(), mPreviewSize.getWidth(), temp.getConfig());

                Canvas canvas = new Canvas(newBitmap);
                Paint paint = new Paint();
                Matrix matrix = new Matrix();
                // Mirror horizontally, then rotate 90 degrees around the image center
                matrix.setScale(-1, 1);
                matrix.postTranslate(temp.getWidth(), 0);
                matrix.postRotate(90, temp.getWidth() / 2, temp.getHeight() / 2);
                // Shift the rotated image down so it sits inside the square canvas
                matrix.postTranslate(0, (temp.getWidth() - temp.getHeight()) / 2);

                canvas.drawBitmap(temp, matrix , paint );

                File newFile = new File( Environment.getExternalStorageDirectory() , "345.jpg");
                BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(newFile));
                newBitmap.compress(Bitmap.CompressFormat.JPEG, 100, bos);
                bos.flush();
                bos.close();
                temp.recycle();
                newBitmap.recycle();
            } catch (FileNotFoundException e) {
                e.printStackTrace();
            } catch (IOException e) {
                e.printStackTrace();
            } finally {
                image.close();
                byteBuffer.clear();
            }

        }
    };

With the ImageReader receiving each frame in real time, taking a photo only requires calling CameraCaptureSession's capture() method:
public void captureStillImage() {
        try {

            CaptureRequest.Builder captureStillBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
            captureStillBuilder.addTarget(mImageReader.getSurface());
            // Use continuous auto-focus for still capture
            captureStillBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
            // Enable auto-exposure with automatic flash
            captureStillBuilder.set(CaptureRequest.CONTROL_AE_MODE,
                    CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);

            // Ask the HAL to rotate the JPEG according to the current display rotation
            int rotation = ((Activity) mContext).getWindowManager().getDefaultDisplay().getRotation();
            captureStillBuilder.set(CaptureRequest.JPEG_ORIENTATION,
                    ORIENTATIONS.get(rotation));

            CameraCaptureSession.CaptureCallback captureCallback = new CameraCaptureSession.CaptureCallback() {
                @Override
                public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
                    super.onCaptureCompleted(session, request, result);

                    Toast.makeText(mContext,"Image Captured" , Toast.LENGTH_SHORT).show();
                }
            };

            // Pause the repeating preview, fire the still capture, then resume the preview
            captureSession.stopRepeating();
            captureSession.capture(captureStillBuilder.build(), captureCallback, null);
            captureSession.setRepeatingRequest(captureRequest, new CameraCaptureSession.CaptureCallback() {
                @Override
                public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
                    super.onCaptureStarted(session, request, timestamp, frameNumber);
                }
            },mBackgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }
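A possible call site, assuming the hosting Activity keeps a reference to the custom preview view; the view id and field name below are illustrative assumptions.

// In the hosting Activity, e.g. after setContentView()
Button captureButton = (Button) findViewById(R.id.btn_capture);
captureButton.setOnClickListener(new View.OnClickListener() {
    @Override
    public void onClick(View v) {
        mCameraPreviewView.captureStillImage();
    }
});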

The ORIENTATIONS mapping is defined as a member variable:
    // Map the display (Surface) rotation to the JPEG orientation in degrees
    private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
    static {
        ORIENTATIONS.append(Surface.ROTATION_0,90);
        ORIENTATIONS.append(Surface.ROTATION_90,0);
        ORIENTATIONS.append(Surface.ROTATION_180,270);
        ORIENTATIONS.append(Surface.ROTATION_270,180);
    }
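This fixed table only matches devices whose sensor happens to be mounted at the corresponding angle. As a more general alternative (not part of the original article), the JPEG orientation can be computed from CameraCharacteristics.SENSOR_ORIENTATION, along the lines of the example in the CaptureRequest.JPEG_ORIENTATION documentation; note that deviceOrientation here comes from an OrientationEventListener, not from the display rotation.

    // Compute the JPEG_ORIENTATION value from the sensor orientation and the device orientation
    private int getJpegOrientation(CameraCharacteristics c, int deviceOrientation) {
        if (deviceOrientation == OrientationEventListener.ORIENTATION_UNKNOWN) {
            return 0;
        }
        int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);

        // Round the device orientation to a multiple of 90 degrees
        deviceOrientation = (deviceOrientation + 45) / 90 * 90;

        // Front-facing cameras rotate in the opposite direction
        boolean facingFront = c.get(CameraCharacteristics.LENS_FACING)
                == CameraCharacteristics.LENS_FACING_FRONT;
        if (facingFront) {
            deviceOrientation = -deviceOrientation;
        }

        // Rotation needed to produce an upright JPEG, relative to the sensor orientation
        return (sensorOrientation + deviceOrientation + 360) % 360;
    }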