From 2da2a289c3092d38c17fd79401a9e54006aa58e3 Mon Sep 17 00:00:00 2001 From: Guosong Zhou Date: Wed, 15 Jun 2016 10:31:43 +0000 Subject: PD#127061: camera: unify android_5 and android_6 camera hal branch Change-Id: I3ebc0b701aebdc2b36e58ad7eec43fa3fd6c40cf --- diff --git a/Android.mk b/Android.mk index d561cc2..b7b86c2 100755..100644 --- a/Android.mk +++ b/Android.mk @@ -176,7 +176,14 @@ ifeq ($(BOARD_HAVE_HW_JPEGENC),true) LOCAL_CFLAGS += -DAMLOGIC_HW_JPEGENC endif +LOCAL_KK=0 ifeq ($(GPU_TYPE),t83x) +LOCAL_KK:=1 +endif +ifeq ($(GPU_ARCH),midgard) +LOCAL_KK:=1 +endif +ifeq ($(LOCAL_KK),1) LOCAL_CFLAGS += -DMALI_AFBC_GRALLOC=1 else LOCAL_CFLAGS += -DMALI_AFBC_GRALLOC=0 diff --git a/v3/Android.mk b/v3/Android.mk index a64c131..335bbd3 100644 --- a/v3/Android.mk +++ b/v3/Android.mk @@ -20,6 +20,7 @@ include $(CLEAR_VARS) LOCAL_MODULE_RELATIVE_PATH := hw LOCAL_CFLAGS += -fno-short-enums -DQEMU_HARDWARE LOCAL_CFLAGS += -Wno-unused-parameter -Wno-missing-field-initializers +LOCAL_CFLAGS += -DPLATFORM_SDK_VERSION=$(PLATFORM_SDK_VERSION) ######################################################################################################## CAMHAL_GIT_VERSION="$(shell cd $(LOCAL_PATH);git log | grep commit -m 1 | cut -d' ' -f 2)" @@ -66,7 +67,14 @@ LOCAL_SHARED_LIBRARIES += \ LOCAL_STATIC_LIBRARIES := \ libyuv_static \ +LOCAL_KK=0 ifeq ($(GPU_TYPE),t83x) +LOCAL_KK:=1 +endif +ifeq ($(GPU_ARCH),midgard) +LOCAL_KK:=1 +endif +ifeq ($(LOCAL_KK),1) LOCAL_CFLAGS += -DMALI_AFBC_GRALLOC=1 else LOCAL_CFLAGS += -DMALI_AFBC_GRALLOC=0 diff --git a/v3/EmulatedFakeCamera3.cpp b/v3/EmulatedFakeCamera3.cpp index a23fa1d..2967de3 100644 --- a/v3/EmulatedFakeCamera3.cpp +++ b/v3/EmulatedFakeCamera3.cpp @@ -1712,8 +1712,10 @@ status_t EmulatedFakeCamera3::constructStaticInfo() { lensPosition[1] = 20; lensPosition[2] = 0; } +#if PLATFORM_SDK_VERSION <= 22 info.update(ANDROID_LENS_POSITION, lensPosition, sizeof(lensPosition)/ sizeof(float)); +#endif static const uint8_t lensCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED; info.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,&lensCalibration,1); @@ -2200,7 +2202,7 @@ status_t EmulatedFakeCamera3::doFakeAE(CameraMetadata &settings) { // OK for AUTO modes break; default: - ALOGE("%s: Emulator doesn't support AE mode %d", + ALOGVV("%s: Emulator doesn't support AE mode %d", __FUNCTION__, aeMode); return BAD_VALUE; } @@ -2776,7 +2778,7 @@ bool EmulatedFakeCamera3::ReadoutThread::threadLoop() { return false; } - CAMHAL_LOGDB("Sensor done with readout for frame %d, captured at %lld ", + ALOGVV("Sensor done with readout for frame %d, captured at %lld ", mCurrentRequest.frameNumber, captureTime); // Check if we need to JPEG encode a buffer, and send it for async diff --git a/v3/fake-pipeline2/Sensor.cpp b/v3/fake-pipeline2/Sensor.cpp index d94da95..8554c01 100644 --- a/v3/fake-pipeline2/Sensor.cpp +++ b/v3/fake-pipeline2/Sensor.cpp @@ -194,11 +194,14 @@ Sensor::Sensor(): mFrameNumber(0), mCapturedBuffers(NULL), mListener(NULL), + mTemp_buffer(NULL), mExitSensorThread(false), mIoctlSupport(0), msupportrotate(0), mTimeOutCount(0), mWait(false), + mPre_width(0), + mPre_height(0), mScene(kResolution[0], kResolution[1], kElectronsPerLuxSecond) { @@ -326,6 +329,31 @@ status_t Sensor::setOutputFormat(int width, int height, int pixelformat, bool is } } + if (NULL == mTemp_buffer) { + mPre_width = vinfo->preview.format.fmt.pix.width; + mPre_height = vinfo->preview.format.fmt.pix.height; + DBG_LOGB("setOutputFormat :: pre_width = %d, pre_height = %d \n" , 
mPre_width , mPre_height); + mTemp_buffer = new uint8_t[mPre_width * mPre_height * 3 / 2]; + if (mTemp_buffer == NULL) { + ALOGE("first time allocate mTemp_buffer failed !"); + return -1; + } + } + + if ((mPre_width != vinfo->preview.format.fmt.pix.width) && (mPre_height != vinfo->preview.format.fmt.pix.height)) { + if (mTemp_buffer) { + delete [] mTemp_buffer; + mTemp_buffer = NULL; + } + mPre_width = vinfo->preview.format.fmt.pix.width; + mPre_height = vinfo->preview.format.fmt.pix.height; + mTemp_buffer = new uint8_t[mPre_width * mPre_height * 3 / 2]; + if (mTemp_buffer == NULL) { + ALOGE("allocate mTemp_buffer failed !"); + return -1; + } + } + return OK; } @@ -472,6 +500,12 @@ status_t Sensor::shutDown() { free(vinfo); vinfo = NULL; } + + if (mTemp_buffer) { + delete [] mTemp_buffer; + mTemp_buffer = NULL; + } + ALOGD("%s: Exit", __FUNCTION__); return res; } @@ -1202,9 +1236,11 @@ int Sensor::captureNewImage() { i, b.streamId, b.width, b.height, b.format, b.stride, b.buffer, b.img); switch (b.format) { +#if PLATFORM_SDK_VERSION <= 22 case HAL_PIXEL_FORMAT_RAW_SENSOR: captureRaw(b.img, gain, b.stride); break; +#endif case HAL_PIXEL_FORMAT_RGB_888: captureRGB(b.img, gain, b.stride); break; @@ -1227,7 +1263,6 @@ int Sensor::captureNewImage() { if (pixelfmt == V4L2_PIX_FMT_YVU420) { pixelfmt = HAL_PIXEL_FORMAT_YV12; } else if (pixelfmt == V4L2_PIX_FMT_NV21) { - DBG_LOGA(""); pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP; } else if (pixelfmt == V4L2_PIX_FMT_YUYV) { pixelfmt = HAL_PIXEL_FORMAT_YCbCr_422_I; @@ -1583,7 +1618,7 @@ int Sensor::getStreamConfigurationDurations(uint32_t picSizes[], int64_t duratio duration[count+0] = (int64_t)(picSizes[size-4]); duration[count+1] = (int64_t)(picSizes[size-3]); duration[count+2] = (int64_t)(picSizes[size-2]); - duration[count+3] = (int64_t)66666666L;//(int64_t)(framerate), here we can get frame interval from camera driver + duration[count+3] = (int64_t)((1.0/framerate) * 1000000000); j++; } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS){ temp_rate = fival.discrete.denominator/fival.discrete.numerator; @@ -1592,7 +1627,7 @@ int Sensor::getStreamConfigurationDurations(uint32_t picSizes[], int64_t duratio duration[count+0] = (int64_t)picSizes[size-4]; duration[count+1] = (int64_t)picSizes[size-3]; duration[count+2] = (int64_t)picSizes[size-2]; - duration[count+3] = (int64_t)66666666L;//(int64_t)(framerate), here we can get frame interval from camera driver + duration[count+3] = (int64_t)((1.0/framerate) * 1000000000); j++; } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE){ temp_rate = fival.discrete.denominator/fival.discrete.numerator; @@ -1601,7 +1636,7 @@ int Sensor::getStreamConfigurationDurations(uint32_t picSizes[], int64_t duratio duration[count+0] = (int64_t)picSizes[size-4]; duration[count+1] = (int64_t)picSizes[size-3]; duration[count+2] = (int64_t)picSizes[size-2]; - duration[count+3] = (int64_t)66666666L;//(int64_t)(framerate), here we can get frame interval from camera driver + duration[count+3] = (int64_t)((1.0/framerate) * 1000000000); j++; } } else { @@ -2154,21 +2189,43 @@ void Sensor::captureNV21(StreamBuffer b, uint32_t gain) { } mKernelBuffer = b.img; } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) { - int width = vinfo->preview.format.fmt.pix.width; - int height = vinfo->preview.format.fmt.pix.height; - YUYVToNV21(src, b.img, width, height); - mKernelBuffer = b.img; + uint32_t width = vinfo->preview.format.fmt.pix.width; + uint32_t height = vinfo->preview.format.fmt.pix.height; + memset(mTemp_buffer, 
0 , width * height * 3/2); + YUYVToNV21(src, mTemp_buffer, width, height); + if ((width == b.width) && (height == b.height)) { + memcpy(b.img, mTemp_buffer, b.width * b.height * 3/2); + mKernelBuffer = b.img; + } else { + if ((b.height % 2) != 0) { + DBG_LOGB("%d , b.height = %d", __LINE__, b.height); + b.height = b.height - 1; + } + ReSizeNV21(vinfo, mTemp_buffer, b.img, b.width, b.height); + mKernelBuffer = mTemp_buffer; + } } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) { - int width = vinfo->preview.format.fmt.pix.width; - int height = vinfo->preview.format.fmt.pix.height; - if (ConvertMjpegToNV21(src, vinfo->preview.buf.bytesused, b.img, - width, b.img + width * height, (width + 1) / 2, width, + uint32_t width = vinfo->preview.format.fmt.pix.width; + uint32_t height = vinfo->preview.format.fmt.pix.height; + memset(mTemp_buffer, 0 , width * height * 3/2); + if (ConvertMjpegToNV21(src, vinfo->preview.buf.bytesused, mTemp_buffer, + width, mTemp_buffer + width * height, (width + 1) / 2, width, height, width, height, libyuv::FOURCC_MJPG) != 0) { putback_frame(vinfo); - DBG_LOGA("Decode MJPEG frame failed\n"); + ALOGE("%s , %d , Decode MJPEG frame failed \n", __FUNCTION__ , __LINE__); continue; } - mKernelBuffer = b.img; + if ((width == b.width) && (height == b.height)) { + memcpy(b.img, mTemp_buffer, b.width * b.height * 3/2); + mKernelBuffer = b.img; + } else { + if ((b.height % 2) != 0) { + DBG_LOGB("%d, b.height = %d", __LINE__, b.height); + b.height = b.height - 1; + } + ReSizeNV21(vinfo, mTemp_buffer, b.img, b.width, b.height); + mKernelBuffer = mTemp_buffer; + } } break; diff --git a/v3/fake-pipeline2/Sensor.h b/v3/fake-pipeline2/Sensor.h index b55c499..d84d951 100644 --- a/v3/fake-pipeline2/Sensor.h +++ b/v3/fake-pipeline2/Sensor.h @@ -325,8 +325,7 @@ class Sensor: private Thread, public virtual RefBase { uint8_t *mKernelBuffer; uintptr_t mKernelPhysAddr; uint32_t mFrameNumber; - int mRotateValue; - + int mRotateValue; // End of control parameters int mEV; @@ -340,6 +339,7 @@ class Sensor: private Thread, public virtual RefBase { SensorListener *mListener; // End of readout variables + uint8_t *mTemp_buffer; bool mExitSensorThread; // Time of sensor startup, used for simulation zero-time point @@ -359,6 +359,8 @@ class Sensor: private Thread, public virtual RefBase { unsigned int msupportrotate; uint32_t mTimeOutCount; bool mWait; + uint32_t mPre_width; + uint32_t mPre_height; /** * Inherited Thread virtual overrides, and members only used by the -- cgit
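
Note on the getStreamConfigurationDurations() hunks above: the patch replaces the hard-coded per-mode duration of 66666666 ns (a fixed 15 fps assumption) with a value derived from the frame rate the V4L2 driver reports for each mode. A minimal sketch of that conversion follows; it is not the HAL's exact code, and it assumes "framerate" is the frames-per-second value computed from fival.discrete.denominator / fival.discrete.numerator as in the diff.

    #include <cstdint>

    // Convert a driver-reported frame rate (fps) into the nanosecond frame
    // duration that getStreamConfigurationDurations() stores per mode.
    // Falls back to the previous hard-coded value if the rate is invalid.
    static int64_t frameDurationNs(double framerate) {
        if (framerate <= 0.0)
            return 66666666LL;  // old fixed default, i.e. 15 fps
        return static_cast<int64_t>((1.0 / framerate) * 1000000000.0);
    }

For example, a 30 fps mode now reports 33333333 ns rather than 66666666 ns, so such modes are no longer advertised to the framework with a 15 fps minimum frame duration.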