author | shanghai engineers <yuxi.sun@droid09.amlogic.com> | 2014-11-06 12:12:37 (GMT) |
---|---|---|
committer | shanghai engineers <yuxi.sun@droid09.amlogic.com> | 2014-11-07 06:02:25 (GMT) |
commit | 54dab6270b2c656909a92b2606dafd1ffda1a997 (patch) | |
tree | 43864456094ca4a4c6b0ac015b160e5fd9f2550d | |
parent | 0303373167fd9ae1040878b1b456fd53d79a00f6 (diff) | |
download | camera-54dab6270b2c656909a92b2606dafd1ffda1a997.zip camera-54dab6270b2c656909a92b2606dafd1ffda1a997.tar.gz camera-54dab6270b2c656909a92b2606dafd1ffda1a997.tar.bz2 |
Scale down frames from a larger frame when there are multiple streams
Currently only NV21 and YV12 scaling are supported
Change-Id: Ic03519c335a82a48f3ff0eb94cfe76ab1db952a2
Signed-off-by: shanghai engineers <yuxi.sun@droid09.amlogic.com>
-rwxr-xr-x | v3/Android.mk | 1 | ||||
-rwxr-xr-x | v3/EmulatedFakeCamera3.cpp | 7 | ||||
-rwxr-xr-x | v3/fake-pipeline2/Sensor.cpp | 196 | ||||
-rwxr-xr-x | v3/fake-pipeline2/Sensor.h | 4 |
4 files changed, 173 insertions, 35 deletions
diff --git a/v3/Android.mk b/v3/Android.mk index 56580d6..d735144 100755 --- a/v3/Android.mk +++ b/v3/Android.mk @@ -98,6 +98,7 @@ LOCAL_SRC_FILES := \ fake-pipeline2/Scene.cpp \ fake-pipeline2/Sensor.cpp \ fake-pipeline2/JpegCompressor.cpp \ + fake-pipeline2/NV12_resize.c\ EmulatedCamera3.cpp \ EmulatedFakeCamera3.cpp \ EmulatedFakeCamera3Info.cpp \ diff --git a/v3/EmulatedFakeCamera3.cpp b/v3/EmulatedFakeCamera3.cpp index 6cb97e7..a01a3e5 100755 --- a/v3/EmulatedFakeCamera3.cpp +++ b/v3/EmulatedFakeCamera3.cpp @@ -64,6 +64,7 @@ const int32_t EmulatedFakeCamera3::kAvailableFormats[] = { // These are handled by YCbCr_420_888 HAL_PIXEL_FORMAT_YV12, HAL_PIXEL_FORMAT_YCrCb_420_SP, + HAL_PIXEL_FORMAT_YCbCr_422_I, HAL_PIXEL_FORMAT_YCbCr_420_888 }; @@ -823,7 +824,7 @@ const camera_metadata_t* EmulatedFakeCamera3::constructDefaultRequestSettings( settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1); static const int32_t aeTargetFpsRange[2] = { - 5, 20 + 5,15 }; settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2); @@ -1095,6 +1096,8 @@ status_t EmulatedFakeCamera3::processCaptureRequest( destBuf.stride = srcBuf.stream->width; // TODO: query from gralloc destBuf.buffer = srcBuf.buffer; + //ALOGI("%s, i:%d format for this usage: %d x %d, usage %x, format=%x, returned\n", + // __FUNCTION__, i, destBuf.width, destBuf.height, privBuffer->usage, privBuffer->format); if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) { needJpeg = true; memset(&info,0,sizeof(struct ExifInfo)); @@ -1630,7 +1633,7 @@ status_t EmulatedFakeCamera3::constructStaticInfo() { static const int32_t availableTargetFpsRanges[] = { - 5, 20, + 5, 15, }; info.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, availableTargetFpsRanges, diff --git a/v3/fake-pipeline2/Sensor.cpp b/v3/fake-pipeline2/Sensor.cpp index dcc9082..b63ebdb 100755 --- a/v3/fake-pipeline2/Sensor.cpp +++ b/v3/fake-pipeline2/Sensor.cpp @@ -33,6 +33,8 @@ #include <cstdlib> #include 
"system/camera_metadata.h" #include "libyuv.h" +#include "NV12_resize.h" +#include "libyuv/scale.h" #define ARRAY_SIZE(x) (sizeof((x))/sizeof(((x)[0]))) @@ -961,10 +963,10 @@ bool Sensor::threadLoop() { mNextCapturedBuffers->push_back(bAux); break; case HAL_PIXEL_FORMAT_YCrCb_420_SP: - captureNV21(b.img, gain, b.stride); + captureNV21(b, gain); break; case HAL_PIXEL_FORMAT_YV12: - captureYV12(b.img, gain, b.stride); + captureYV12(b, gain); break; case HAL_PIXEL_FORMAT_YCbCr_422_I: captureYUYV(b.img, gain, b.stride); @@ -1580,7 +1582,7 @@ void Sensor::YUYVToYV12(uint8_t *src, uint8_t *dst, int width, int height) } -void Sensor::captureNV21(uint8_t *img, uint32_t gain, uint32_t stride) { +void Sensor::captureNV21(StreamBuffer b, uint32_t gain) { #if 0 float totalGain = gain/100.0 * kBaseGainFactor; // Using fixed-point math with 6 bits of fractional precision. @@ -1637,21 +1639,96 @@ void Sensor::captureNV21(uint8_t *img, uint32_t gain, uint32_t stride) { uint8_t *src; if (mKernelBuffer) { src = mKernelBuffer; - if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) - memcpy(img, src, vinfo->preview.buf.length); - else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) { + if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) { + ALOGI("Sclale NV21 frame down \n"); + //memcpy(b.img, src, 200 * 100 * 3 / 2 /*vinfo->preview.buf.length*/); + structConvImage input = {vinfo->preview.format.fmt.pix.width, + vinfo->preview.format.fmt.pix.height, + vinfo->preview.format.fmt.pix.width, + IC_FORMAT_YCbCr420_lp, + (mmByte *) src, + (mmByte *) src + vinfo->preview.format.fmt.pix.width * vinfo->preview.format.fmt.pix.height, + 0}; + + structConvImage output = {b.width, + b.height, + b.width, + IC_FORMAT_YCbCr420_lp, + (mmByte *) b.img, + (mmByte *) b.img + b.width * b.height, + 0}; + + if (!VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0)) + ALOGE("Sclale NV21 frame down failed!\n"); + } else if 
(vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) { int width = vinfo->preview.format.fmt.pix.width; int height = vinfo->preview.format.fmt.pix.height; - YUYVToNV21(src, img, width, height); - } - else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) { + uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2]; + + if ( tmp_buffer == NULL) { + ALOGE("new buffer failed!\n"); + return; + } + + YUYVToNV21(src, tmp_buffer, width, height); + + structConvImage input = {width, + height, + width, + IC_FORMAT_YCbCr420_lp, + (mmByte *) tmp_buffer, + (mmByte *) tmp_buffer + width * height, + 0}; + + structConvImage output = {b.width, + b.height, + b.width, + IC_FORMAT_YCbCr420_lp, + (mmByte *) b.img, + (mmByte *) b.img + b.width * b.height, + 0}; + + if (!VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0)) + ALOGE("Sclale NV21 frame down failed!\n"); + + delete [] tmp_buffer; + } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) { int width = vinfo->preview.format.fmt.pix.width; int height = vinfo->preview.format.fmt.pix.height; - if (ConvertMjpegToNV21(src, vinfo->preview.buf.bytesused, img, - width, img + width * height, (width + 1) / 2, width, + + uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2]; + + if ( tmp_buffer == NULL) { + ALOGE("new buffer failed!\n"); + return; + } + + if (ConvertMjpegToNV21(src, vinfo->preview.buf.bytesused, tmp_buffer, + width, tmp_buffer + width * height, (width + 1) / 2, width, height, width, height, libyuv::FOURCC_MJPG) != 0) { DBG_LOGA("Decode MJPEG frame failed\n"); } + + structConvImage input = {width, + height, + width, + IC_FORMAT_YCbCr420_lp, + (mmByte *) tmp_buffer, + (mmByte *) tmp_buffer + width * height, + 0}; + + structConvImage output = {b.width, + b.height, + b.width, + IC_FORMAT_YCbCr420_lp, + (mmByte *) b.img, + (mmByte *) b.img + b.width * b.height, + 0}; + + if (!VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0)) + ALOGE("Sclale NV21 frame down 
failed!\n"); + + delete [] tmp_buffer; } else { ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat); } @@ -1663,17 +1740,17 @@ void Sensor::captureNV21(uint8_t *img, uint32_t gain, uint32_t stride) { if (NULL == src) continue; if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) - memcpy(img, src, vinfo->preview.buf.length); + memcpy(b.img, src, vinfo->preview.buf.length); else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) { int width = vinfo->preview.format.fmt.pix.width; int height = vinfo->preview.format.fmt.pix.height; - YUYVToNV21(src, img, width, height); + YUYVToNV21(src, b.img, width, height); } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) { int width = vinfo->preview.format.fmt.pix.width; int height = vinfo->preview.format.fmt.pix.height; - if (ConvertMjpegToNV21(src, vinfo->preview.buf.bytesused, img, - width, img + width * height, (width + 1) / 2, width, + if (ConvertMjpegToNV21(src, vinfo->preview.buf.bytesused, b.img, + width, b.img + width * height, (width + 1) / 2, width, height, width, height, libyuv::FOURCC_MJPG) != 0) { DBG_LOGA("Decode MJPEG frame failed\n"); } @@ -1688,7 +1765,7 @@ void Sensor::captureNV21(uint8_t *img, uint32_t gain, uint32_t stride) { ALOGVV("NV21 sensor image captured"); } -void Sensor::captureYV12(uint8_t *img, uint32_t gain, uint32_t stride) { +void Sensor::captureYV12(StreamBuffer b, uint32_t gain) { #if 0 float totalGain = gain/100.0 * kBaseGainFactor; // Using fixed-point math with 6 bits of fractional precision. 
@@ -1745,21 +1822,76 @@ void Sensor::captureYV12(uint8_t *img, uint32_t gain, uint32_t stride) { uint8_t *src; if (mKernelBuffer) { src = mKernelBuffer; - if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) - memcpy(img, src, vinfo->preview.buf.length); - else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) { + if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) { + //memcpy(b.img, src, 200 * 100 * 3 / 2 /*vinfo->preview.buf.length*/); + ALOGI("Sclale YV12 frame down \n"); + int width = vinfo->preview.format.fmt.pix.width; int height = vinfo->preview.format.fmt.pix.height; - YUYVToYV12(src, img, width, height); - } - else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) { + int ret = libyuv::I420Scale(src, width, + src + width * height, width / 2, + src + width * height + width * height / 4, width / 2, + width, height, + b.img, b.width, + b.img + b.width * b.height, b.width / 2, + b.img + b.width * b.height + b.width * b.height / 4, b.width / 2, + b.width, b.height, + libyuv::kFilterNone); + if (ret < 0) + ALOGE("Sclale YV12 frame down failed!\n"); + } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) { int width = vinfo->preview.format.fmt.pix.width; int height = vinfo->preview.format.fmt.pix.height; - if (ConvertToI420(src, vinfo->preview.buf.bytesused, img, width, img + width * height + width * height / 4, (width + 1) / 2, - img + width * height, (width + 1) / 2, 0, 0, width, height, + uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2]; + + if ( tmp_buffer == NULL) { + ALOGE("new buffer failed!\n"); + return; + } + + YUYVToYV12(src, tmp_buffer, width, height); + + int ret = libyuv::I420Scale(tmp_buffer, width, + tmp_buffer + width * height, width / 2, + tmp_buffer + width * height + width * height / 4, width / 2, + width, height, + b.img, b.width, + b.img + b.width * b.height, b.width / 2, + b.img + b.width * b.height + b.width * b.height / 4, b.width / 2, + 
b.width, b.height, + libyuv::kFilterNone); + if (ret < 0) + ALOGE("Sclale YV12 frame down failed!\n"); + delete [] tmp_buffer; + } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) { + int width = vinfo->preview.format.fmt.pix.width; + int height = vinfo->preview.format.fmt.pix.height; + uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2]; + + if ( tmp_buffer == NULL) { + ALOGE("new buffer failed!\n"); + return; + } + + if (ConvertToI420(src, vinfo->preview.buf.bytesused, tmp_buffer, width, tmp_buffer + width * height + width * height / 4, (width + 1) / 2, + tmp_buffer + width * height, (width + 1) / 2, 0, 0, width, height, width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) { DBG_LOGA("Decode MJPEG frame failed\n"); } + + int ret = libyuv::I420Scale(tmp_buffer, width, + tmp_buffer + width * height, width / 2, + tmp_buffer + width * height + width * height / 4, width / 2, + width, height, + b.img, b.width, + b.img + b.width * b.height, b.width / 2, + b.img + b.width * b.height + b.width * b.height / 4, b.width / 2, + b.width, b.height, + libyuv::kFilterNone); + if (ret < 0) + ALOGE("Sclale YV12 frame down failed!\n"); + + delete [] tmp_buffer; } else { ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat); } @@ -1771,19 +1903,19 @@ void Sensor::captureYV12(uint8_t *img, uint32_t gain, uint32_t stride) { if (NULL == src) continue; if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) { - memcpy(img, src, vinfo->preview.buf.length); + memcpy(b.img, src, vinfo->preview.buf.length); } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) { int width = vinfo->preview.format.fmt.pix.width; int height = vinfo->preview.format.fmt.pix.height; - YUYVToYV12(src, img, width, height); + YUYVToYV12(src, b.img, width, height); } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) { int width = vinfo->preview.format.fmt.pix.width; int height = 
vinfo->preview.format.fmt.pix.height; - if (ConvertToI420(src, vinfo->preview.buf.bytesused, img, width, img + width * height + width * height / 4, (width + 1) / 2, - img + width * height, (width + 1) / 2, 0, 0, width, height, + if (ConvertToI420(src, vinfo->preview.buf.bytesused, b.img, width, b.img + width * height + width * height / 4, (width + 1) / 2, + b.img + width * height, (width + 1) / 2, 0, 0, width, height, width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) { DBG_LOGA("Decode MJPEG frame failed\n"); } @@ -1855,9 +1987,11 @@ void Sensor::captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride) { uint8_t *src; if (mKernelBuffer) { src = mKernelBuffer; - if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) - memcpy(img, src, vinfo->preview.buf.length); - else + if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) { + //TODO YUYV scale + //memcpy(img, src, vinfo->preview.buf.length); + + } else ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat); return ; diff --git a/v3/fake-pipeline2/Sensor.h b/v3/fake-pipeline2/Sensor.h index 7a9f474..c67897f 100755 --- a/v3/fake-pipeline2/Sensor.h +++ b/v3/fake-pipeline2/Sensor.h @@ -329,8 +329,8 @@ class Sensor: private Thread, public virtual RefBase { void captureRaw(uint8_t *img, uint32_t gain, uint32_t stride); void captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride); void captureRGB(uint8_t *img, uint32_t gain, uint32_t stride); - void captureNV21(uint8_t *img, uint32_t gain, uint32_t stride); - void captureYV12(uint8_t *img, uint32_t gain, uint32_t stride); + void captureNV21(StreamBuffer b, uint32_t gain); + void captureYV12(StreamBuffer b, uint32_t gain); void captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride); void YUYVToNV21(uint8_t *src, uint8_t *dst, int width, int height); void YUYVToYV12(uint8_t *src, uint8_t *dst, int width, int height); |