Note: all the information in my post applies only to a Samsung Galaxy S7 device. I do not know how emulators and other devices behave.
In onImageAvailable I convert the incoming Image to an NV21 byte[].
Here is the code that does the conversion. It works when the imgYUV420 U and V planes have pixelStride = 1 (as on the emulator) or pixelStride = 2 (as on a Nexus):
private byte[] convertYUV420ToNV21_ALL_PLANES(Image imgYUV420) {
    assert (imgYUV420.getFormat() == ImageFormat.YUV_420_888);
    Log.d(TAG, "image: " + imgYUV420.getWidth() + "x" + imgYUV420.getHeight() + " " + imgYUV420.getFormat());
    Log.d(TAG, "planes: " + imgYUV420.getPlanes().length);
    for (int nplane = 0; nplane < imgYUV420.getPlanes().length; nplane++) {
        Log.d(TAG, "plane[" + nplane + "]: length " + imgYUV420.getPlanes()[nplane].getBuffer().remaining()
                + ", strides: " + imgYUV420.getPlanes()[nplane].getPixelStride()
                + " " + imgYUV420.getPlanes()[nplane].getRowStride());
    }
    byte[] rez = new byte[imgYUV420.getWidth() * imgYUV420.getHeight() * 3 / 2];
    ByteBuffer buffer0 = imgYUV420.getPlanes()[0].getBuffer();
    ByteBuffer buffer1 = imgYUV420.getPlanes()[1].getBuffer();
    ByteBuffer buffer2 = imgYUV420.getPlanes()[2].getBuffer();
    int n = 0;
    // Y plane: copy byte by byte (pixelStride must be 1; rowStride == width is assumed here)
    assert (imgYUV420.getPlanes()[0].getPixelStride() == 1);
    for (int row = 0; row < imgYUV420.getHeight(); row++) {
        for (int col = 0; col < imgYUV420.getWidth(); col++) {
            rez[n++] = buffer0.get();
        }
    }
    // chroma planes: one sample from each plane per 2x2 block,
    // skipping the interleaved bytes when pixelStride == 2
    assert (imgYUV420.getPlanes()[2].getPixelStride() == imgYUV420.getPlanes()[1].getPixelStride());
    int stride = imgYUV420.getPlanes()[1].getPixelStride();
    for (int row = 0; row < imgYUV420.getHeight(); row += 2) {
        for (int col = 0; col < imgYUV420.getWidth(); col += 2) {
            rez[n++] = buffer1.get();
            rez[n++] = buffer2.get();
            for (int skip = 1; skip < stride; skip++) {
                if (buffer1.remaining() > 0) {
                    buffer1.get();
                }
                if (buffer2.remaining() > 0) {
                    buffer2.get();
                }
            }
        }
    }
    Log.w(TAG, "total: " + rez.length);
    return rez;
}
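For context, here is a minimal sketch of how the result can be consumed inside onImageAvailable (reader is the callback's ImageReader). YuvImage, Rect and ByteArrayOutputStream come from android.graphics and java.io; the JPEG quality of 90 and the output handling are illustrative assumptions, not part of the original code:

Image image = reader.acquireLatestImage();
if (image != null) {
    try {
        byte[] nv21 = convertYUV420ToNV21_ALL_PLANES(image);
        // wrap the NV21 bytes so the standard Android JPEG encoder can consume them
        YuvImage yuv = new YuvImage(nv21, ImageFormat.NV21, image.getWidth(), image.getHeight(), null);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        yuv.compressToJpeg(new Rect(0, 0, image.getWidth(), image.getHeight()), 90, out);
        byte[] jpegBytes = out.toByteArray();
        // ... pass jpegBytes on to whatever consumes the frame ...
    } finally {
        image.close();
    }
}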
Optimized Java code is available here.
As you can see, it is easy to change this code to produce a rotated image in a single pass:
private byte[] rotateYUV420ToNV21(Image imgYUV420) {
    Log.d(TAG, "image: " + imgYUV420.getWidth() + "x" + imgYUV420.getHeight() + " " + imgYUV420.getFormat());
    Log.d(TAG, "planes: " + imgYUV420.getPlanes().length);
    for (int nplane = 0; nplane < imgYUV420.getPlanes().length; nplane++) {
        Log.d(TAG, "plane[" + nplane + "]: length " + imgYUV420.getPlanes()[nplane].getBuffer().remaining()
                + ", strides: " + imgYUV420.getPlanes()[nplane].getPixelStride()
                + " " + imgYUV420.getPlanes()[nplane].getRowStride());
    }
    byte[] rez = new byte[imgYUV420.getWidth() * imgYUV420.getHeight() * 3 / 2];
    ByteBuffer buffer0 = imgYUV420.getPlanes()[0].getBuffer();
    ByteBuffer buffer1 = imgYUV420.getPlanes()[1].getBuffer();
    ByteBuffer buffer2 = imgYUV420.getPlanes()[2].getBuffer();
    // the output is rotated by 90°, so its width equals the source height
    int width = imgYUV420.getHeight();
    assert (imgYUV420.getPlanes()[0].getPixelStride() == 1);
    for (int row = imgYUV420.getHeight() - 1; row >= 0; row--) {
        for (int col = 0; col < imgYUV420.getWidth(); col++) {
            rez[col * width + row] = buffer0.get();
        }
    }
    int uv_offset = imgYUV420.getWidth() * imgYUV420.getHeight();
    assert (imgYUV420.getPlanes()[2].getPixelStride() == imgYUV420.getPlanes()[1].getPixelStride());
    int stride = imgYUV420.getPlanes()[1].getPixelStride();
    for (int row = imgYUV420.getHeight() - 2; row >= 0; row -= 2) {
        for (int col = 0; col < imgYUV420.getWidth(); col += 2) {
            rez[uv_offset + col / 2 * width + row] = buffer1.get();
            rez[uv_offset + col / 2 * width + row + 1] = buffer2.get();
            for (int skip = 1; skip < stride; skip++) {
                if (buffer1.remaining() > 0) {
                    buffer1.get();
                }
                if (buffer2.remaining() > 0) {
                    buffer2.get();
                }
            }
        }
    }
    Log.w(TAG, "total rotated: " + rez.length);
    return rez;
}
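Keep in mind that the rotated frame has swapped dimensions. A small sketch of the difference, again assuming an Image named image and the illustrative YuvImage/JPEG sink from above:

byte[] rotated = rotateYUV420ToNV21(image);
// dimensions swap after the 90° rotation
int rotatedWidth = image.getHeight();
int rotatedHeight = image.getWidth();
YuvImage yuv = new YuvImage(rotated, ImageFormat.NV21, rotatedWidth, rotatedHeight, null);
ByteArrayOutputStream out = new ByteArrayOutputStream();
yuv.compressToJpeg(new Rect(0, 0, rotatedWidth, rotatedHeight), 90, out);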
I sincerely recommend the site http://rawpixels.net/ to see the actual structure of your raw images.
With OpenCV and the Android Camera2 API this task is very fast: you don't need the YUV420-to-NV21 Java conversion at all, and with OpenCV the conversion is about 4x faster:
Java side:
// Starts the built-in camera with the Camera2 API
public void startCamera() {
    CameraManager manager = (CameraManager) AppData.getAppContext().getSystemService(Context.CAMERA_SERVICE);
    try {
        String pickedCamera = getCamera(manager);
        manager.openCamera(pickedCamera, cameraStateCallback, null);
        // create an ImageReader that delivers frames in the YUV_420_888 format
        mImageReader = ImageReader.newInstance(mWidth, mHeight, ImageFormat.YUV_420_888, 4);
        mImageReader.setOnImageAvailableListener(onImageAvailableListener, null);
        Log.d(TAG, "imageReader created");
    } catch (CameraAccessException e) {
        Log.e(TAG, e.getMessage());
    }
}
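startCamera() refers to a cameraStateCallback and a getCamera() helper that are not shown here, and the ImageReader surface still has to be attached to a capture session. This is one possible minimal sketch of that wiring; the TEMPLATE_PREVIEW template, the null handlers and the error handling are my assumptions, not part of the original answer (needs android.hardware.camera2.* and java.util.Collections):

protected CameraDevice.StateCallback cameraStateCallback = new CameraDevice.StateCallback() {
    @Override
    public void onOpened(CameraDevice camera) {
        try {
            // stream preview-template frames into the ImageReader's surface
            final CaptureRequest.Builder builder = camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            builder.addTarget(mImageReader.getSurface());
            camera.createCaptureSession(Collections.singletonList(mImageReader.getSurface()),
                    new CameraCaptureSession.StateCallback() {
                        @Override
                        public void onConfigured(CameraCaptureSession session) {
                            try {
                                session.setRepeatingRequest(builder.build(), null, null);
                            } catch (CameraAccessException e) {
                                Log.e(TAG, e.getMessage());
                            }
                        }
                        @Override
                        public void onConfigureFailed(CameraCaptureSession session) {
                            Log.e(TAG, "capture session configuration failed");
                        }
                    }, null);
        } catch (CameraAccessException e) {
            Log.e(TAG, e.getMessage());
        }
    }
    @Override
    public void onDisconnected(CameraDevice camera) { camera.close(); }
    @Override
    public void onError(CameraDevice camera, int error) { camera.close(); }
};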
// Listens for frames and sends them to be processed
protected ImageReader.OnImageAvailableListener onImageAvailableListener = new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader reader) {
        Image image = null;
        try {
            image = reader.acquireLatestImage();
            // grab the Y plane only; the native side treats the frame as grayscale
            ByteBuffer buffer = image.getPlanes()[0].getBuffer();
            byte[] frameData = new byte[buffer.remaining()];
            buffer.get(frameData);
            // native processing (the JNI method below also expects width, height and rotation)
            processAndRotateFrame(frameData);
        } catch (Exception e) {
            Log.e(TAG, "imageReader exception: " + e.getMessage());
        } finally {
            // close the Image exactly once so the ImageReader can reuse the buffer
            if (image != null) {
                image.close();
            }
        }
    }
};
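The listener above hands the frame to a native routine, but the Java-side declaration of that method is not shown in the original. Here is a possible declaration inferred from the JNI symbol in the next snippet; the package and class names come from that symbol, while the library name is an assumption:

package com.android.mvf;

public class Utils {
    static {
        // library name is an assumption; use your actual ndk-build/CMake target name
        System.loadLibrary("native-lib");
    }
    // must match the JNI entry point in the native snippet below (jobject => instance method)
    public native int ProccessAndRotateFrame(int width, int height, byte[] frame, int rotation);
}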
Native side (C++, built with ndk-build or CMake):
#include <jni.h>
#include <opencv2/core.hpp>

using namespace cv;

void rotateMat(cv::Mat &matImage, int rotFlag); // defined below

extern "C" JNIEXPORT jint JNICALL
Java_com_android_mvf_Utils_ProccessAndRotateFrame(JNIEnv *env, jobject object,
        jint width, jint height, jbyteArray frame, jint rotation) {
    // load data from the Java side
    jbyte *pFrameData = env->GetByteArrayElements(frame, 0);
    // wrap the raw bytes in a Mat without copying; the Y plane is a single 8-bit channel
    Mat mGray(height, width, CV_8UC1, (unsigned char *) pFrameData);
    // rotate image
    rotateMat(mGray, rotation);
    // your_function is a placeholder for whatever processing you run on the frame
    int objects = your_function(env, mGray);
    env->ReleaseByteArrayElements(frame, pFrameData, 0);
    return objects;
}
void rotateMat(cv::Mat &matImage, int rotFlag) {
    if (rotFlag != 0 && rotFlag != 360) {
        if (rotFlag == 90) {
            cv::transpose(matImage, matImage);
            cv::flip(matImage, matImage, 1);
        } else if (rotFlag == 270 || rotFlag == -90) {
            cv::transpose(matImage, matImage);
            cv::flip(matImage, matImage, 0);
        } else if (rotFlag == 180) {
            cv::flip(matImage, matImage, -1);
        }
    }
}
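As a side note, recent OpenCV versions (3.x and later) ship a built-in cv::rotate that covers the same three 90-degree cases, so rotateMat could be written as the sketch below. This is an equivalent alternative, not part of the original code:

// alternative rotateMat using cv::rotate (equivalent to the transpose/flip pairs above)
void rotateMat(cv::Mat &matImage, int rotFlag) {
    if (rotFlag == 90) {
        cv::rotate(matImage, matImage, cv::ROTATE_90_CLOCKWISE);
    } else if (rotFlag == 180) {
        cv::rotate(matImage, matImage, cv::ROTATE_180);
    } else if (rotFlag == 270 || rotFlag == -90) {
        cv::rotate(matImage, matImage, cv::ROTATE_90_COUNTERCLOCKWISE);
    }
    // any other value (0, 360, ...) leaves the Mat untouched, as in the original
}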