
Android camera2 API: obtaining the camera's raw YUV data for preview


References:

Android Camera (1): source code paths

What exactly does the YUV420 data obtained from android camera2 look like?

Camera2 YUV420_888

android-Camera2Basic source code logic walkthrough

Android Camera2 camera preview and frame data capture

Capturing raw camera data with Android Camera2 and previewing it manually

googlesamples/android-Camera2Basic

WuKaik/QuickCamera2

 

 

This demo shows how to use the camera2 API to obtain the camera's YUV data, convert it to a Bitmap, and render it onto a Surface through a Canvas.

It works in testing, but the performance does not meet my requirements.

In particular, the image is badly distorted after the YUV-to-Bitmap conversion, possibly because of the conversion algorithm or some other cause.

The purpose of this test is to obtain the camera's YUV data; the follow-up goal is to hand the YUV data to the native layer and use the NDK to process and preview it there (a rough sketch of such a bridge follows below).

Anyone interested is welcome to discuss.
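As an illustration of that native-layer goal, the sketch below declares a JNI bridge for handing the packed YUV buffer to NDK code. This is only an assumption of how it could look: the class name YuvNativeBridge, the method processFrame() and the library name "yuvproc" are hypothetical and are not part of this demo.

package com.example.camera2_yuv;

// Hypothetical JNI bridge (not part of the demo): passes the packed YUV buffer to native code.
public class YuvNativeBridge {

    static {
        // Assumes a native library named "yuvproc" built with the NDK.
        System.loadLibrary("yuvproc");
    }

    // Could be called from onImageAvailable() with the buffer produced by ImageUtil.
    public static native void processFrame(byte[] yuv, int width, int height);
}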

 

The main requirement is the Android camera permission. Add the following to AndroidManifest.xml:

<uses-permission android:name="android.permission.CAMERA"/>

If the demo does not show a camera image after it starts, check the permission: open System Settings -> Permission Management and verify that the demo has been granted the camera permission.
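On Android 6.0+ the permission also has to be granted at runtime. The demo's openCamera() already does this; the following is just a minimal, self-contained sketch of the same check using the support-library APIs (the class name PermissionHelper and the constant REQUEST_CODE_CAMERA are illustrative only).

import android.Manifest;
import android.content.pm.PackageManager;
import android.support.v4.app.ActivityCompat;
import android.support.v7.app.AppCompatActivity;

// Minimal runtime-permission sketch; mirrors the check in openCamera() below.
public class PermissionHelper {

    private static final int REQUEST_CODE_CAMERA = 1; // illustrative request code

    // Call from an Activity (e.g. in onCreate) before opening the camera.
    public static void ensureCameraPermission(AppCompatActivity activity) {
        if (ActivityCompat.checkSelfPermission(activity, Manifest.permission.CAMERA)
                != PackageManager.PERMISSION_GRANTED) {
            ActivityCompat.requestPermissions(activity,
                    new String[]{Manifest.permission.CAMERA}, REQUEST_CODE_CAMERA);
        }
    }
}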

 

The source code is listed below.

 

activity_main.xml

<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="vertical">

    <TextureView
        android:id="@+id/textureView"
        android:layout_width="match_parent"
        android:layout_height="match_parent" />

</LinearLayout>

 

MainActivity.java

package com.example.camera2_yuv;

import android.Manifest;
import android.content.pm.PackageManager;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.graphics.YuvImage;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
import android.view.TextureView;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

/**
 * Overview of the flow:
 *   Get the CameraManager.
 *   Call CameraManager.getCameraCharacteristics() to obtain the CameraCharacteristics.
 *   Call CameraManager.openCamera() and receive the CameraDevice in its callback.
 *   Use CameraDevice.createCaptureRequest() to get a CaptureRequest.Builder and build a CaptureRequest.
 *   Use CameraDevice.createCaptureSession() and receive the CameraCaptureSession in its callback.
 *   Send the CaptureRequest through the CameraCaptureSession: capture() sends a single request,
 *   setRepeatingRequest() keeps sending it.
 *   Receive the frame data in the ImageReader.OnImageAvailableListener callback.
 */
public class MainActivity extends AppCompatActivity
{
    public static final String TAG = MainActivity.class.getSimpleName();
    public static final int REQUEST_CODE_PREMISSION = 1;

    private TextureView textureView;
    private String mCameraId;     // camera id
    private Size mPreviewSize;    // preview size
    private Handler mHandler;
    private HandlerThread mHandlerThread;
    private CameraManager mCameraManager;
    private CameraDevice mCameraDevice;
    private CameraCaptureSession mCameraSession;
    private CaptureRequest mCaptureRequest;
    private CaptureRequest.Builder mCaptureBuilder;
    private ImageReader mReader;  // camera2 has no preview callback; the frame data comes from an ImageReader
    private Canvas canvas;
    private Paint mpaint;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mHandlerThread = new HandlerThread("QuickCamera");
        mHandlerThread.start();
        mHandler = new Handler(mHandlerThread.getLooper());
        mCameraManager = (CameraManager) getSystemService(CAMERA_SERVICE);
        textureView = findViewById(R.id.textureView);
        textureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
            @Override
            public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
                Log.d(TAG, "onSurfaceTextureAvailable: width:" + width + " height:" + height);
                // Choose a camera
                setCamera(width, height);
                // Open the camera
                openCamera();
            }

            @Override
            public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
            }

            @Override
            public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
                return false;
            }

            @Override
            public void onSurfaceTextureUpdated(SurfaceTexture surface) {
            }
        });
    }

    /**
     * Select the camera and the preview size.
     *
     * @param width
     * @param height
     */
    private void setCamera(int width, int height) {
        try {
            // Iterate over all cameras
            for (String cameraId : mCameraManager.getCameraIdList()) {
                CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics(cameraId);
                // Skip the front-facing camera (the back camera is used by default)
                if (characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT) {
                    continue;
                }
                // The StreamConfigurationMap describes all output formats and sizes the camera supports
                StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                // Choose a preview size based on the TextureView's size
                mPreviewSize = getOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height);
                Log.d(TAG, "Preview size: width:" + mPreviewSize.getWidth() + " height:" + mPreviewSize.getHeight());
                mCameraId = cameraId;
                Log.d(TAG, "Camera id: " + cameraId);
            }
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    /**
     * From the preview sizes the camera supports, pick one that is larger than
     * and closest to the requested width and height.
     *
     * @param sizeMap
     * @param width
     * @param height
     * @return
     */
    private Size getOptimalSize(Size[] sizeMap, int width, int height) {
        List<Size> sizeList = new ArrayList<>();
        for (Size option : sizeMap) {
            Log.d(TAG, "Supported size == width:" + option.getWidth() + " height:" + option.getHeight());
            if (width > height) {
                if (option.getWidth() > width && option.getHeight() > height) {
                    sizeList.add(option);
                }
            } else {
                if (option.getWidth() > height && option.getHeight() > width) {
                    sizeList.add(option);
                }
            }
        }
        if (sizeList.size() > 0) {
            return Collections.min(sizeList, new Comparator<Size>() {
                @Override
                public int compare(Size lhs, Size rhs) {
                    return Long.signum(lhs.getWidth() * lhs.getHeight() - rhs.getWidth() * rhs.getHeight());
                }
            });
        }
        if (sizeList.size() > 0) {
            return sizeList.get(0);
        }
        return sizeMap[0];
    }

    /**
     * Open the camera.
     */
    private void openCamera() {
        // Check the camera permission first
        try {
            if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
                //Toast.makeText(MainActivity.this, "must add camera premission", Toast.LENGTH_SHORT).show();
                // Request the runtime permission
                ActivityCompat.requestPermissions(MainActivity.this, new String[]{Manifest.permission.CAMERA},
                        REQUEST_CODE_PREMISSION);
                return;
            }
            // Open the camera. The first argument selects which camera, the second is the camera state
            // callback, and the third decides which thread runs the callback (null = the current thread).
            mCameraManager.openCamera(mCameraId, mDeviceStateCallback, mHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    /**
     * Camera device state callback.
     */
    private CameraDevice.StateCallback mDeviceStateCallback = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(@NonNull CameraDevice camera) {
            Log.d(TAG, "CameraDevice.StateCallback onOpened: ");
            mCameraDevice = camera;
            startPreview();
        }

        @Override
        public void onDisconnected(@NonNull CameraDevice camera) {
            Log.d(TAG, "CameraDevice.StateCallback onDisconnected: ");
        }

        @Override
        public void onError(@NonNull CameraDevice camera, int error) {
            Log.d(TAG, "CameraDevice.StateCallback onError: ");
        }
    };

    /**
     * Start the preview.
     * The TextureView displays the camera preview.
     * In Camera2, both preview and still-capture data are requested through a CameraCaptureSession.
     */
    private void startPreview() {
        SurfaceTexture surfaceTexture = textureView.getSurfaceTexture();
        // Set the TextureView buffer size
        surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
        // Surface used to display the data
        final Surface surface = new Surface(surfaceTexture);
        // Set up the paint
        mpaint = new Paint();
        mpaint.setColor(Color.BLUE);
        mpaint.setAntiAlias(true);           // anti-aliasing
        mpaint.setStyle(Paint.Style.STROKE); // outline only
        mpaint.setStrokeWidth(2f);           // stroke width
        try {
            // Create a preview request (other templates exist, e.g. still capture, recording)
            mCaptureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            // set(key, value) can configure exposure, auto-focus and so on, e.g.:
            //mCaptureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
            // Create the ImageReader (do not use JPEG here, it makes the preview stutter)
            mReader = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(), ImageFormat.YUV_420_888, 2);
            // Listen for new frames
            mReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
                @Override
                public void onImageAvailable(ImageReader reader) {
                    // acquireLatestImage() and close() must both be called, otherwise the preview stalls
                    Image image = reader.acquireLatestImage();
                    if (image == null) {
                        return;
                    }
                    Image.Plane[] planes = image.getPlanes();
//                    Log.d(TAG, "onImageAvailable: data size " + data.length);
                    Log.i(TAG, "image format: " + image.getFormat() + "===========================================================");
                    // Dump the three planes of the image
                    for (int i = 0; i < planes.length; i++) {
                        Log.i(TAG, "pixelStride  " + planes[i].getPixelStride());
                        Log.i(TAG, "rowStride   " + planes[i].getRowStride());
                        Log.i(TAG, "width  " + image.getWidth());
                        Log.i(TAG, "height  " + image.getHeight());
                        Log.i(TAG, "Finished reading data from plane  " + i);
                    }
                    int n_image_size = image.getWidth() * image.getHeight() * 3 / 2;
                    final byte[] yuv420pbuf = new byte[n_image_size];
                    // Pack the YUV planes into a single buffer
                    System.arraycopy(ImageUtil.getBytesFromImageAsType(image, 2), 0, yuv420pbuf, 0, n_image_size);
                    canvas = surface.lockCanvas(new Rect(0, 0, mPreviewSize.getWidth(), mPreviewSize.getHeight()));
                    if (canvas != null) {
                        canvas.drawRect(new Rect(0, 0, mPreviewSize.getWidth(), mPreviewSize.getHeight()), mpaint);
                        // Note: the width/height are hard-coded to 1440x1080 here
                        Bitmap dd = nv21ToBitmap(yuv420pbuf, 1440, 1080);
                        Bitmap cc = scaleBitmap(dd, 0.5f);
                        Matrix matrix = GetRotatematrix(cc.getWidth(), cc.getHeight(), 90, false);
                        // Draw the scaled bitmap (dd has already been recycled inside scaleBitmap())
                        canvas.drawBitmap(cc, matrix, mpaint);
                        surface.unlockCanvasAndPost(canvas);
                        Log.i(TAG, "canvas drawRect");
                    } else {
                        Log.i(TAG, "canvas failed");
                    }
                    image.close();
                }
            }, mHandler);
            // Set the output target for the frame data
//            mCaptureBuilder.addTarget(surface);
            mCaptureBuilder.addTarget(mReader.getSurface());  // the frames are pulled out of camera2 through the ImageReader
            // Create the capture session
//            mCameraDevice.createCaptureSession(Arrays.asList(surface, mReader.getSurface()), mCaptureStateCallback, mHandler);
            mCameraDevice.createCaptureSession(Arrays.asList(mReader.getSurface()), mCaptureStateCallback, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    private CameraCaptureSession.StateCallback mCaptureStateCallback = new CameraCaptureSession.StateCallback() {
        @Override
        public void onConfigured(@NonNull CameraCaptureSession session) {
            // Build the capture request
            mCaptureRequest = mCaptureBuilder.build();
            mCameraSession = session;  // once the session is ready, start the preview
            try {
                // Repeat the request so the preview keeps receiving frames
                mCameraSession.setRepeatingRequest(mCaptureRequest, null, mHandler);
            } catch (CameraAccessException e) {
                e.printStackTrace();
            }
        }

        @Override
        public void onConfigureFailed(@NonNull CameraCaptureSession session) {
        }
    };

    @Override
    protected void onPause() {
        super.onPause();
        // Remember to release everything on exit
        if (mCameraSession != null) {
            mCameraSession.close();
            mCameraSession = null;
        }
        if (mCameraDevice != null) {
            mCameraDevice.close();
            mCameraDevice = null;
        }
        if (mReader != null) {
            mReader.close();
            mReader = null;
        }
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        if (requestCode == REQUEST_CODE_PREMISSION) {
            if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                openCamera();
            }
        }
    }

    public static Bitmap nv21ToBitmap(byte[] nv21, int width, int height) {
        Bitmap bitmap = null;
        try {
            YuvImage image = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
            ByteArrayOutputStream stream = new ByteArrayOutputStream();
            image.compressToJpeg(new Rect(0, 0, width, height), 80, stream);
            bitmap = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
            stream.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return bitmap;
    }

    public Bitmap scaleBitmap(Bitmap origin, float scale) {
        if (origin == null) {
            return null;
        }
        int height = origin.getHeight();
        int width = origin.getWidth();
        Matrix matrix = new Matrix();
        matrix.postScale(scale, scale); // post-multiply
        Bitmap newBM = Bitmap.createBitmap(origin, 0, 0, width, height, matrix, false);
        if (!origin.isRecycled()) {
            origin.recycle();
        }
        return newBM;
    }

    public static Matrix GetRotatematrix(final int width, final int height, final int roatateDegree, final boolean translate) {
        Matrix matrix = new Matrix();
        RectF original_rect = new RectF(0, 0, width, height);
        float centerX = original_rect.centerX();
        float centerY = original_rect.centerY();
        if (translate) {
            matrix.postTranslate(width, 0);
        }
        if (roatateDegree == 90 || roatateDegree == 270) {
            float scale = Math.max((float) height / width, (float) width / height);
            matrix.postScale(scale, scale, centerX, centerY);
            matrix.postRotate(roatateDegree, centerX, centerY);
        } else if (roatateDegree == 180) {
            matrix.postRotate(180, centerX, centerY);
        }
        return matrix;
    }
}

 

ImageUtil.java

package com.example.camera2_yuv;

import android.graphics.ImageFormat;
import android.media.Image;
import android.util.Log;

import java.nio.ByteBuffer;

/**
 * yuv420p:  yyyyyyyyuuvv
 * yuv420sp: yyyyyyyyuvuv
 * nv21:     yyyyyyyyvuvu
 */
public class ImageUtil {

    public static final int YUV420P = 0;
    public static final int YUV420SP = 1;
    public static final int NV21 = 2;
    private static final String TAG = "ImageUtil";

    /**
     * The comments inside this method use 640x480 as an example.
     * The Image's crop rect is not taken into account.
     */
    public static byte[] getBytesFromImageAsType(Image image, int type) {
        try {
            // Source planes; for YUV formats planes.length == 3.
            // The actual data in plane[i] may satisfy byte[].length <= capacity (total buffer size).
            final Image.Plane[] planes = image.getPlanes();
            // Valid data width: in general image width <= rowStride, which is also why
            // byte[].length <= capacity, so only the width portion is copied.
            int width = image.getWidth();
            int height = image.getHeight();
            // Destination for the packed YUV data: 1.5x the pixel count, since Y:U:V is 4:1:1.
            byte[] yuvBytes = new byte[width * height * ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888) / 8];
            // Write position in the destination array.
            int dstIndex = 0;
            // Temporary storage for the U and V data.
            byte uBytes[] = new byte[width * height / 4];
            byte vBytes[] = new byte[width * height / 4];
            int uIndex = 0;
            int vIndex = 0;
            int pixelsStride, rowStride;
            for (int i = 0; i < planes.length; i++) {
                pixelsStride = planes[i].getPixelStride();
                rowStride = planes[i].getRowStride();
                ByteBuffer buffer = planes[i].getBuffer();
                // If pixelsStride == 2, the Y buffer length is typically 640*480 and the U/V buffers 640*480/2-1.
                // Y data is contiguous; with interleaved U/V only every other byte is a valid sample.
                byte[] bytes = new byte[buffer.capacity()];
                buffer.get(bytes);
                int srcIndex = 0;
                if (i == 0) {
                    // Copy the valid region of every Y row directly into the destination
                    // (it could also be stored in a temporary buffer and copied in the next step).
                    for (int j = 0; j < height; j++) {
                        System.arraycopy(bytes, srcIndex, yuvBytes, dstIndex, width);
                        srcIndex += rowStride;
                        dstIndex += width;
                    }
                } else if (i == 1) {
                    // Extract the U samples according to pixelStride
                    for (int j = 0; j < height / 2; j++) {
                        for (int k = 0; k < width / 2; k++) {
                            uBytes[uIndex++] = bytes[srcIndex];
                            srcIndex += pixelsStride;
                        }
                        if (pixelsStride == 2) {
                            srcIndex += rowStride - width;
                        } else if (pixelsStride == 1) {
                            srcIndex += rowStride - width / 2;
                        }
                    }
                } else if (i == 2) {
                    // Extract the V samples according to pixelStride
                    for (int j = 0; j < height / 2; j++) {
                        for (int k = 0; k < width / 2; k++) {
                            vBytes[vIndex++] = bytes[srcIndex];
                            srcIndex += pixelsStride;
                        }
                        if (pixelsStride == 2) {
                            srcIndex += rowStride - width;
                        } else if (pixelsStride == 1) {
                            srcIndex += rowStride - width / 2;
                        }
                    }
                }
            }
            image.close();
            // Pack the chroma planes according to the requested output type
            switch (type) {
                case YUV420P:
                    System.arraycopy(uBytes, 0, yuvBytes, dstIndex, uBytes.length);
                    System.arraycopy(vBytes, 0, yuvBytes, dstIndex + uBytes.length, vBytes.length);
                    break;
                case YUV420SP:
                    for (int i = 0; i < vBytes.length; i++) {
                        yuvBytes[dstIndex++] = uBytes[i];
                        yuvBytes[dstIndex++] = vBytes[i];
                    }
                    break;
                case NV21:
                    for (int i = 0; i < vBytes.length; i++) {
                        yuvBytes[dstIndex++] = vBytes[i];
                        yuvBytes[dstIndex++] = uBytes[i];
                    }
                    break;
            }
            return yuvBytes;
        } catch (final Exception e) {
            if (image != null) {
                image.close();
            }
            Log.i(TAG, e.toString());
        }
        return null;
    }
}
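For reference, a minimal usage sketch of this helper from the ImageReader callback, following the same pattern as MainActivity but using the named constant ImageUtil.NV21 instead of the literal 2. It assumes android.media.Image and android.media.ImageReader are imported as in MainActivity above; note that getBytesFromImageAsType() closes the Image itself.

// Usage sketch (same ImageReader setup as in MainActivity).
ImageReader.OnImageAvailableListener listener = new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader reader) {
        Image image = reader.acquireLatestImage();
        if (image == null) {
            return; // no frame available yet
        }
        // Pack the three YUV_420_888 planes into a single NV21 buffer.
        byte[] nv21 = ImageUtil.getBytesFromImageAsType(image, ImageUtil.NV21);
        // getBytesFromImageAsType() already closes the image; nv21 can now be
        // rendered or handed to native code.
    }
};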

 

 

 

 
