1. About CameraX
• CameraX is integrated with Lifecycle, which makes it easy for developers to manage the camera's lifecycle; compared with Camera2 the code is much simpler.
• CameraX is implemented on top of the Camera2 API and is compatible with most devices on the market.
• Through extensions, developers can use the same features as the device's built-in camera app (e.g. portrait/bokeh, night mode, filters, beauty); see the sketch after this list.
• CameraX requires a minimum API level of 21 and Android Studio 3.6 or later.
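A hedged sketch of the extensions bullet: it relies on the separate androidx.camera:camera-extensions artifact (not in the dependency list below) and its alpha-era extender classes (HdrImageCaptureExtender, BokehImageCaptureExtender, NightImageCaptureExtender, etc.); availability has to be checked per device before enabling.

// Sketch only: assumes the androidx.camera:camera-extensions dependency and its alpha-era extender API.
ImageCapture.Builder builder = new ImageCapture.Builder();
HdrImageCaptureExtender hdrExtender = HdrImageCaptureExtender.create(builder);
CameraSelector cameraSelector = CameraSelector.DEFAULT_BACK_CAMERA;
if (hdrExtender.isExtensionAvailable(cameraSelector)) {
    // Only enable the extension when the device's vendor library supports it
    hdrExtender.enableExtension(cameraSelector);
}
// Build the ImageCapture use case with the extension applied (if it was enabled)
ImageCapture imageCapture = builder.build();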
2. Gradle setup for CameraX
In the module's build.gradle file:
android {
    ...
    compileOptions {
        sourceCompatibility JavaVersion.VERSION_1_8
        targetCompatibility JavaVersion.VERSION_1_8
    }
}

dependencies {
    def camerax_version = "1.0.0-beta07"
    // CameraX core library using camera2 implementation
    implementation "androidx.camera:camera-camera2:$camerax_version"
    // CameraX Lifecycle Library
    implementation "androidx.camera:camera-lifecycle:$camerax_version"
    // CameraX View class
    implementation "androidx.camera:camera-view:1.0.0-alpha14"
}
The camera requires permissions: CameraX needs android.permission.CAMERA declared in the manifest and granted at runtime before the camera can be opened.
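A minimal sketch of requesting the permission, assuming the capture Activity handles the result itself (the request code is arbitrary):

// In AndroidManifest.xml: <uses-permission android:name="android.permission.CAMERA" />
private static final int REQUEST_CODE_CAMERA = 10;

private boolean hasCameraPermission() {
    return ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
            == PackageManager.PERMISSION_GRANTED;
}

private void requestCameraPermission() {
    ActivityCompat.requestPermissions(this,
            new String[]{Manifest.permission.CAMERA}, REQUEST_CODE_CAMERA);
}

@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
                                       @NonNull int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    if (requestCode == REQUEST_CODE_CAMERA && grantResults.length > 0
            && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
        // Permission granted: it is now safe to bind the camera use cases
    }
}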
3. Preview and photo capture with CameraView
<androidx.camera.view.CameraView
    android:id="@+id/view_finder"
    android:layout_width="match_parent"
    android:layout_height="match_parent" />
private CameraView mViewFinder;

@Override
protected void onCreate(Bundle savedInstanceState) {
    ...
    mViewFinder = findViewById(R.id.view_finder);
    mViewFinder.bindToLifecycle(this);
    mViewFinder.setCaptureMode(CameraView.CaptureMode.IMAGE);
    // Take a picture when the capture button is clicked (btn_capture is a placeholder id)
    findViewById(R.id.btn_capture).setOnClickListener(v -> takePicture());
}

@SuppressLint("RestrictedApi")
private void takePicture() {
    final File file = new File(getExternalFilesDir(null), "wt.jpg");
    Log.e("TAG", file.toString());
    ImageCapture.OutputFileOptions outputFileOptions =
            new ImageCapture.OutputFileOptions.Builder(file).build();
    mViewFinder.takePicture(outputFileOptions, ContextCompat.getMainExecutor(getApplicationContext()),
            new ImageCapture.OnImageSavedCallback() {
                @Override
                public void onImageSaved(@NonNull ImageCapture.OutputFileResults outputFileResults) {
                    Uri savedUri = outputFileResults.getSavedUri();
                    if (savedUri == null) {
                        savedUri = Uri.fromFile(file);
                    }
                    onFileSaved(savedUri);
                }

                @Override
                public void onError(@NonNull ImageCaptureException exception) {
                    Log.e("TAG", "Image capture failed", exception);
                }
            });
}
// Add the saved file to the system media store so it shows up in the gallery
private void onFileSaved(Uri savedUri) {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) {
        sendBroadcast(new Intent(Camera.ACTION_NEW_PICTURE, savedUri));
    }
    String mimeTypeFromExtension = MimeTypeMap.getSingleton().getMimeTypeFromExtension(
            MimeTypeMap.getFileExtensionFromUrl(savedUri.getPath()));
    MediaScannerConnection.scanFile(getApplicationContext(),
            new String[]{new File(savedUri.getPath()).getAbsolutePath()},
            new String[]{mimeTypeFromExtension},
            new MediaScannerConnection.OnScanCompletedListener() {
                @Override
                public void onScanCompleted(String path, Uri uri) {
                    Log.d("TAG", "Image capture scanned into media store: " + uri);
                }
            });
}
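Instead of saving to an app-private file and scanning it afterwards, ImageCapture.OutputFileOptions can also write straight into MediaStore through a ContentResolver. A hedged sketch (the display name is illustrative, and the call otherwise mirrors the capture code above):

// Sketch: save the capture directly into MediaStore so no MediaScanner pass is needed.
ContentValues contentValues = new ContentValues();
contentValues.put(MediaStore.MediaColumns.DISPLAY_NAME, "wt_" + System.currentTimeMillis());
contentValues.put(MediaStore.MediaColumns.MIME_TYPE, "image/jpeg");

ImageCapture.OutputFileOptions outputFileOptions =
        new ImageCapture.OutputFileOptions.Builder(
                getContentResolver(),
                MediaStore.Images.Media.EXTERNAL_CONTENT_URI,
                contentValues)
                .build();

mViewFinder.takePicture(outputFileOptions, ContextCompat.getMainExecutor(this),
        new ImageCapture.OnImageSavedCallback() {
            @Override
            public void onImageSaved(@NonNull ImageCapture.OutputFileResults outputFileResults) {
                // The returned Uri already points into MediaStore
                Log.d("TAG", "Saved to MediaStore: " + outputFileResults.getSavedUri());
            }

            @Override
            public void onError(@NonNull ImageCaptureException exception) {
                Log.e("TAG", "Capture failed", exception);
            }
        });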
4. Processing the preview data
<androidx.camera.view.PreviewView
    android:id="@+id/viewFinder"
    android:layout_width="match_parent"
    android:layout_height="match_parent" />
private PreviewView mViewFinder;

@Override
protected void onCreate(Bundle savedInstanceState) {
    ...
    mViewFinder = findViewById(R.id.viewFinder);
    startCamera();
}

private ListenableFuture<ProcessCameraProvider> cameraProviderFuture;

private void startCamera() {
    cameraProviderFuture = ProcessCameraProvider.getInstance(this);
    cameraProviderFuture.addListener(new Runnable() {
        @Override
        public void run() {
            try {
                ProcessCameraProvider processCameraProvider = cameraProviderFuture.get();

                // Preview use case: render camera frames into the PreviewView
                Preview preview = new Preview.Builder().build();
                preview.setSurfaceProvider(mViewFinder.createSurfaceProvider());

                // ImageCapture use case for taking photos
                ImageCapture imageCapture = new ImageCapture.Builder().build();

                // Keep the capture rotation in sync with the device orientation
                OrientationEventListener orientationEventListener = new OrientationEventListener(PreActivity.this) {
                    @Override
                    public void onOrientationChanged(int orientation) {
                        int rotation;
                        if (orientation >= 45 && orientation < 135) {
                            rotation = Surface.ROTATION_270;
                        } else if (orientation >= 135 && orientation < 225) {
                            rotation = Surface.ROTATION_180;
                        } else if (orientation >= 225 && orientation < 315) {
                            rotation = Surface.ROTATION_90;
                        } else {
                            rotation = Surface.ROTATION_0;
                        }
                        imageCapture.setTargetRotation(rotation);
                    }
                };
                orientationEventListener.enable();

                // ImageAnalysis use case: delivers frames to the analyzer on a background executor
                ImageAnalysis imageAnalysis = new ImageAnalysis.Builder()
                        .setTargetAspectRatio(AspectRatio.RATIO_16_9)
                        .build();
                ExecutorService executorService = Executors.newSingleThreadExecutor();
                imageAnalysis.setAnalyzer(executorService, new LuminosityAnalyzer());

                // Rebind all use cases to this Activity's lifecycle
                processCameraProvider.unbindAll();
                processCameraProvider.bindToLifecycle(PreActivity.this,
                        CameraSelector.DEFAULT_BACK_CAMERA, preview, imageCapture, imageAnalysis);
            } catch (ExecutionException | InterruptedException e) {
                e.printStackTrace();
            }
        }
    }, ContextCompat.getMainExecutor(this));
}
private class LuminosityAnalyzer implements ImageAnalysis.Analyzer {
    @Override
    public void analyze(@NonNull ImageProxy image) {
        @SuppressLint("UnsafeExperimentalUsageError")
        Image data = image.getImage(); // underlying android.media.Image (not used below)

        // The frame arrives as YUV_420_888; copy it into a single semi-planar byte array
        ImageProxy.PlaneProxy[] planes = image.getPlanes();
        byte[] dataFromImage = new byte[image.getWidth() * image.getHeight() * 3 / 2];

        // Copy the Y plane (assumes rowStride == width, i.e. no row padding)
        ByteBuffer yBuffer = planes[0].getBuffer();
        int yLen = image.getWidth() * image.getHeight();
        yBuffer.get(dataFromImage, 0, yLen);

        // Interleave the U and V samples after the Y data
        ByteBuffer uBuffer = planes[1].getBuffer();
        ByteBuffer vBuffer = planes[2].getBuffer();
        int pixelStride = planes[1].getPixelStride();
        for (int i = 0; i < uBuffer.remaining(); i += pixelStride) {
            dataFromImage[yLen++] = uBuffer.get(i);
            dataFromImage[yLen++] = vBuffer.get(i);
        }

        // dataFromImage now holds the preview frame data
        image.close();
    }
}
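Given the class name LuminosityAnalyzer, a natural thing to compute from each frame is its average brightness. A minimal sketch under the assumption that plane 0 of the YUV_420_888 frame is the Y (luma) plane; the helper name is illustrative and is not part of the code above:

// Sketch: average luminance of a frame, computed from its Y (luma) plane.
private static double averageLuminance(@NonNull ImageProxy image) {
    ByteBuffer yBuffer = image.getPlanes()[0].getBuffer();
    yBuffer.rewind();                 // read from the start of the plane
    byte[] yBytes = new byte[yBuffer.remaining()];
    yBuffer.get(yBytes);              // copy the Y plane into a byte array
    yBuffer.rewind();                 // leave the buffer position untouched for other readers
    long sum = 0;
    for (byte b : yBytes) {
        sum += b & 0xFF;              // treat each luma sample as unsigned 0..255
    }
    return yBytes.length == 0 ? 0 : (double) sum / yBytes.length;
}

For example, calling Log.d("TAG", "average luma: " + averageLuminance(image)) inside analyze(), before image.close(), logs one brightness value per analyzed frame.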