From e4f4699031802497819691d7680dfa4a82ad8e8c Mon Sep 17 00:00:00 2001
From: Yuxi Sun
Date: Sat, 27 May 2017 08:35:47 +0000
Subject: PD#130174 Using optimized MJPEG decode function

Since Android N has switched the libyuv and libjpeg-turbo branches to
their optimized versions, the camera HAL can use the optimized MJPEG
decode function.

Change-Id: Icdaefe64ca63a844d1b9326e127ba4c9ff3442ac
---
diff --git a/v3/fake-pipeline2/Sensor.cpp b/v3/fake-pipeline2/Sensor.cpp
index 73c3c9a..950a5db 100644
--- a/v3/fake-pipeline2/Sensor.cpp
+++ b/v3/fake-pipeline2/Sensor.cpp
@@ -2009,38 +2009,6 @@ void Sensor::captureRGB(uint8_t *img, uint32_t gain, uint32_t stride) {
                 ALOGE("new buffer failed!\n");
                 return;
             }
-#if ANDROID_PLATFORM_SDK_VERSION > 23
-            uint8_t *vBuffer = new uint8_t[width * height / 4];
-            if (vBuffer == NULL)
-                ALOGE("alloc temperary v buffer failed\n");
-            uint8_t *uBuffer = new uint8_t[width * height / 4];
-            if (uBuffer == NULL)
-                ALOGE("alloc temperary u buffer failed\n");
-
-            if (ConvertToI420(src, vinfo->picture.buf.bytesused, tmp_buffer, width, uBuffer, (width + 1) / 2,
-                              vBuffer, (width + 1) / 2, 0, 0, width, height,
-                              width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
-                DBG_LOGA("Decode MJPEG frame failed\n");
-                putback_picture_frame(vinfo);
-                usleep(5000);
-                delete vBuffer;
-                delete uBuffer;
-            } else {
-
-                uint8_t *pUVBuffer = tmp_buffer + width * height;
-                for (int i = 0; i < width * height / 4; i++) {
-                    *pUVBuffer++ = *(vBuffer + i);
-                    *pUVBuffer++ = *(uBuffer + i);
-                }
-
-                delete vBuffer;
-                delete uBuffer;
-                nv21_to_rgb24(tmp_buffer,img,width,height);
-                if (tmp_buffer != NULL)
-                    delete [] tmp_buffer;
-                break;
-            }
-#else
             if (ConvertMjpegToNV21(src, vinfo->picture.buf.bytesused, tmp_buffer,
                 width, tmp_buffer + width * height, (width + 1) / 2, width, height,
                 width, height, libyuv::FOURCC_MJPG) != 0) {
@@ -2053,7 +2021,6 @@ void Sensor::captureRGB(uint8_t *img, uint32_t gain, uint32_t stride) {
                 delete [] tmp_buffer;
                 break;
             }
-#endif
         } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
             if (vinfo->picture.buf.length == vinfo->picture.buf.bytesused) {
                 yuyv422_to_rgb24(src,img,width,height);
@@ -2302,30 +2269,6 @@ void Sensor::captureNV21(StreamBuffer b, uint32_t gain) {
             uint32_t width = vinfo->preview.format.fmt.pix.width;
             uint32_t height = vinfo->preview.format.fmt.pix.height;
             memset(mTemp_buffer, 0 , width * height * 3/2);
-#if ANDROID_PLATFORM_SDK_VERSION > 23
-            uint8_t *vBuffer = new uint8_t[width * height / 4];
-            if (vBuffer == NULL)
-                ALOGE("alloc temperary v buffer failed\n");
-            uint8_t *uBuffer = new uint8_t[width * height / 4];
-            if (uBuffer == NULL)
-                ALOGE("alloc temperary u buffer failed\n");
-
-            if (ConvertToI420(src, vinfo->preview.buf.bytesused, mTemp_buffer, width, uBuffer, (width + 1) / 2,
-                              vBuffer, (width + 1) / 2, 0, 0, width, height,
-                              width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
-                DBG_LOGA("Decode MJPEG frame failed\n");
-                putback_frame(vinfo);
-                ALOGE("%s , %d , Decode MJPEG frame failed \n", __FUNCTION__ , __LINE__);
-                continue;
-            }
-            uint8_t *pUVBuffer = mTemp_buffer + width * height;
-            for (int i = 0; i < width * height / 4; i++) {
-                *pUVBuffer++ = *(vBuffer + i);
-                *pUVBuffer++ = *(uBuffer + i);
-            }
-            delete vBuffer;
-            delete uBuffer;
-#else
             if (ConvertMjpegToNV21(src, vinfo->preview.buf.bytesused, mTemp_buffer,
                 width, mTemp_buffer + width * height, (width + 1) / 2, width, height,
                 width, height, libyuv::FOURCC_MJPG) != 0) {
@@ -2333,7 +2276,6 @@ void Sensor::captureNV21(StreamBuffer b, uint32_t gain) {
                 ALOGE("%s , %d , Decode MJPEG frame failed \n", __FUNCTION__ , __LINE__);
                 continue;
             }
-#endif
             if ((width == b.width) && (height == b.height)) {
                 memcpy(b.img, mTemp_buffer, b.width * b.height * 3/2);
                 mKernelBuffer = b.img;
--
cgit
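
Background for reviewers: the removed SDK>23 branch and the retained ConvertMjpegToNV21() call perform the same conversion, MJPEG -> I420 -> NV21 (a full Y plane followed by an interleaved V/U plane). Below is a minimal, self-contained sketch of that conversion using libyuv's generic ConvertToI420() entry point, mirroring the removed branch. The name MjpegToNV21Sketch and its plain-buffer interface are made up for illustration only, and this is not the body of the HAL's ConvertMjpegToNV21() helper, which is not part of this diff; it assumes even width/height and a caller-provided destination of width * height * 3 / 2 bytes.

// Sketch only: mirrors the removed SDK>23 branch, NOT the HAL's
// ConvertMjpegToNV21() implementation.
#include <stddef.h>
#include <stdint.h>
#include <vector>
#include "libyuv/convert.h"
#include "libyuv/rotate.h"
#include "libyuv/video_common.h"

static int MjpegToNV21Sketch(const uint8_t *mjpeg, size_t mjpeg_size,
                             uint8_t *dst, int width, int height) {
    // Temporary planar chroma buffers for the I420 intermediate.
    std::vector<uint8_t> u_plane((width / 2) * (height / 2));
    std::vector<uint8_t> v_plane((width / 2) * (height / 2));

    // Decode MJPEG to I420: luma goes straight into dst, chroma into the
    // temporary planes. Same libyuv call the removed branch used.
    if (libyuv::ConvertToI420(mjpeg, mjpeg_size,
                              dst, width,
                              u_plane.data(), (width + 1) / 2,
                              v_plane.data(), (width + 1) / 2,
                              0, 0, width, height,
                              width, height,
                              libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
        return -1;  // decode failed, e.g. truncated frame
    }

    // NV21 chroma is interleaved VU (V first), directly after the Y plane.
    uint8_t *uv = dst + width * height;
    for (int i = 0; i < (width / 2) * (height / 2); i++) {
        *uv++ = v_plane[i];
        *uv++ = u_plane[i];
    }
    return 0;
}

A call site would use this sketch the same way the patched code uses ConvertMjpegToNV21(), e.g. MjpegToNV21Sketch(src, vinfo->preview.buf.bytesused, mTemp_buffer, width, height).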