Ich benutze Android Kamera 2 API, um Kamera Vorschau als Byte-Array zu erhalten, so dass ich es später in Unity3d fortfahren könnte. Allerdings habe ich auf meinem Galaxy Note4 einige ernsthafte Performance-Probleme festgestellt. Ich habe zwischen 200-300ms (dann 13, 12 ms und wieder 200-300) Zeit zwischen Aufnahmeanforderungen.Kamera 2 Anfrage Leistungsprobleme
Aus meinen Experimenten sieht es so aus, als ob eine Capture-Anfrage sehr langsam ist oder der Image Loader Callback. Hast du eine Idee, wie ich das verbessern könnte? Code unten. Ich wäre dankbar für irgendwelche technischen Informationen, die ich vielleicht vermisse.
package com.aind.mfs.cam;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.YuvImage;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Handler;
import android.os.HandlerThread;
import android.support.annotation.NonNull;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
import com.unity3d.player.UnityPlayer;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
public class NativeCamera {
private static final int REQUEST_CAMERA_PERMISSION = 200;
private static final String TAG = "CameraJavaMFS";
public static NativeCamera instance;
public static String gameObjectTargetName;
private static final long MICRO_SECOND = 1000;
private static final long MILLI_SECOND = MICRO_SECOND * 1000;
private static final long ONE_SECOND = MILLI_SECOND * 1000;
private String _cameraId;
private CameraManager _cameraManager;
private CameraDevice _cameraDevice;
private CameraCaptureSession _cameraCaptureSessions;
private CaptureRequest.Builder _captureRequestBuilder;
private ImageReader _imageReader;
private int _targetWidth;
private int _targetHeight;
private int _prevWidth;
private int _prevHeight;
private long _sensorFrameDuration = ONE_SECOND/33;
private long _exposureTime = ONE_SECOND/33;
private HandlerThread _backgroundThread;
private Handler _backgroundHandler;
public byte[] bytes;
public int getPreviewSizeWidth() {
return _prevWidth;
}
public int getPreviewSizeHeight() {
return _prevHeight;
}
public static void init(String gameObjectName) {
gameObjectTargetName = gameObjectName;
instance = new NativeCamera();
}
public void startCamera(int width, int height) {
_targetWidth = width;
_targetHeight = height;
openCamera();
}
public void stopCamera() {
if (null != _cameraDevice) {
_cameraDevice.close();
_cameraDevice = null;
}
if (null != _imageReader) {
_imageReader.close();
_imageReader = null;
closeBackgoundThread();
}
}
private void openBackgroundThread() {
_backgroundThread = new HandlerThread("Camera2 background thread");
_backgroundThread.start();
_backgroundHandler = new Handler(_backgroundThread.getLooper());
}
private void closeBackgoundThread() {
_backgroundThread.quitSafely();
try {
_backgroundThread.join();
_backgroundThread = null;
_backgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
private void openCamera() {
_cameraManager = (CameraManager) App.context().getSystemService(Context.CAMERA_SERVICE);
try {
_cameraId = _cameraManager.getCameraIdList()[0];
_cameraManager.openCamera(_cameraId, stateCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void setupCamera() {
try {
CameraCharacteristics characteristics = _cameraManager.getCameraCharacteristics(_cameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size[] previewSizes = map.getOutputSizes(ImageFormat.YUV_420_888);
Size optimalSize = getOptimalSize(_targetWidth, _targetHeight, previewSizes);
_prevWidth = optimalSize.getWidth();
_prevHeight = optimalSize.getHeight();
_imageReader = ImageReader.newInstance(_prevWidth, _prevHeight, ImageFormat.YUV_420_888, 1);
_imageReader.setOnImageAvailableListener(onImageListener, null);
assert map != null;
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private Size getOptimalSize(int width, int height, Size[] sizes) {
if (_cameraDevice == null) {
Log.e(TAG, "CameraDevice is NULL");
return null;
}
final double ASPECT_TOLERANCE = 0.1;
double targetRatio = (double) width/height;
if (sizes == null) {
Log.e(TAG, "Camera preview sizes is NULL");
return null;
}
Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
int targetWidth = width;
for (Size size : sizes) {
double ratio = (double) size.getWidth()/size.getHeight();
Log.i("Unity", "RES: size=" + size.getWidth() + "/" + size.getHeight() + "/ Aspect Ratio: " + ratio + "target width: " + width + "target height: " + height);
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE)
continue;
if (Math.abs(size.getWidth() - targetWidth) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.getWidth() - targetWidth);
}
}
if (optimalSize == null) {
minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.getWidth() - targetWidth) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.getWidth() - targetWidth);
}
}
}
Log.i("Unity", "optimal size=" + optimalSize.getWidth() + "/" + optimalSize.getHeight() + "/ Aspect Ratio: " + (double) optimalSize.getWidth()/optimalSize.getHeight());
return optimalSize;
}
private byte[] data0, data1, data2;
private Image image;
private ByteArrayOutputStream outputStream;
private YuvImage yuvImage;
private long previusTime;
private Image.Plane[] planes;
private final ImageReader.OnImageAvailableListener onImageListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
image = null;
outputStream = new ByteArrayOutputStream();
try {
image = reader.acquireLatestImage();
if (image == null) {
return;
}
ByteBuffer bufferY = image.getPlanes()[0].getBuffer();
data0 = new byte[bufferY.remaining()];
bufferY.get(data0);
ByteBuffer bufferU = image.getPlanes()[1].getBuffer();
data1 = new byte[bufferU.remaining()];
bufferU.get(data1);
ByteBuffer bufferV = image.getPlanes()[2].getBuffer();
data2 = new byte[bufferV.remaining()];
bufferV.get(data2);
try {
outputStream.write(data0);
outputStream.write(data1);
outputStream.write(data2);
} catch (IOException e) {
throw new RuntimeException(e);
}
yuvImage = new YuvImage(outputStream.toByteArray(), ImageFormat.NV21, image.getWidth(), image.getHeight(), null);
bytes = yuvImage.getYuvData();
} finally {
if (image != null) {
image.close();
}
UnityPlayer.UnitySendMessage(gameObjectTargetName, "GetBuffer", "");
}
}
};
private CameraCaptureSession.CaptureCallback CaptureCallback = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
NewFrameAvaiableHandler();
}
};
private void NewFrameAvaiableHandler() {
long startTime = previusTime;
previusTime = System.currentTimeMillis();
Log.e(TAG, "Image reader time elapsed: " + (previusTime - startTime));
}
private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
_cameraDevice = camera;
setupCamera();
UnityPlayer.UnitySendMessage(gameObjectTargetName, "OnCameraReadyHandler", "");
createCameraPreview();
openBackgroundThread();
}
@Override
public void onDisconnected(CameraDevice camera) {
_cameraDevice.close();
}
@Override
public void onError(CameraDevice camera, int error) {
_cameraDevice.close();
_cameraDevice = null;
}
};
private void createCameraPreview() {
try {
Surface readerSurface = _imageReader.getSurface();
List surfaces = new ArrayList<>();
surfaces.add(readerSurface);
_captureRequestBuilder = _cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
_captureRequestBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_TORCH);
_captureRequestBuilder.addTarget(readerSurface);
_cameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
if (null == _cameraDevice) {
return;
}
_cameraCaptureSessions = cameraCaptureSession;
configurePreview();
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void configurePreview() {
if (null == _cameraDevice) {
Log.e(TAG, "UpdatePreview error, CameraDevice is null");
}
_captureRequestBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_OFF);
//_captureRequestBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, _exposureTime);
//_captureRequestBuilder.set(CaptureRequest.SENSOR_FRAME_DURATION, _sensorFrameDuration);
try {
_cameraCaptureSessions.setRepeatingRequest(_captureRequestBuilder.build(), CaptureCallback, _backgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
}
Der Code von onImageAvailable() führt zu viel Overhead. Beachten Sie, dass es tatsächlich falsch ist und auf einem Gerät fehlschlägt, das eine andere interne Pixelanordnung verwendet. –
Das Kommentieren dieses ganzen Abschnitts hat das Problem nicht gelöst. – Szejp
Welche Vorschaugröße haben Sie gewählt? –