Merge "Camera3: Refactor stream code to be DRY" into jb-mr2-dev
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index ecae3d3..4a34233 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -575,8 +575,8 @@
         return BAD_VALUE;
     }
     if (sessionId != mAudioSessionId) {
-        AudioSystem::releaseAudioSessionId(mAudioSessionId);
         AudioSystem::acquireAudioSessionId(sessionId);
+        AudioSystem::releaseAudioSessionId(mAudioSessionId);
         mAudioSessionId = sessionId;
     }
     return NO_ERROR;
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 058852e..994d3f4 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -3131,7 +3131,8 @@
 
     int32_t render;
     if (mCodec->mNativeWindow != NULL
-            && msg->findInt32("render", &render) && render != 0) {
+            && msg->findInt32("render", &render) && render != 0
+            && (info->mData == NULL || info->mData->size() != 0)) {
         // The client wants this buffer to be rendered.
 
         status_t err;
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 714da55..f412dc8 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -1656,7 +1656,7 @@
         return -EACCES;
     }
 
-    if (render) {
+    if (render && (info->mData == NULL || info->mData->size() != 0)) {
         info->mNotify->setInt32("render", true);
 
         if (mSoftRenderer != NULL) {
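Note on the two libstagefright hunks above: both add the same guard, so a buffer flagged for rendering is queued to the native window only if it actually carries data, and zero-length outputs (for example, an empty EOS-only buffer) are dropped instead of rendered. A minimal sketch of that predicate, assuming the android namespace, <media/stagefright/foundation/ABuffer.h>, and a hypothetical helper name:

    // Illustrative only -- not part of the patch. info->mData in ACodec and
    // MediaCodec is an sp<ABuffer>; a NULL buffer is still renderable, an
    // empty (size() == 0) buffer is not.
    static bool shouldRender(bool clientWantsRender, const sp<ABuffer> &data) {
        return clientWantsRender && (data == NULL || data->size() != 0);
    }
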
diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp
index dd37283..5e88102 100644
--- a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp
+++ b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp
@@ -26,6 +26,7 @@
 #include "../CameraDeviceBase.h"
 #include "../Camera2Client.h"
 
+#define ALIGN(x, mask) ( ((x) + (mask) - 1) & ~((mask) - 1) )
 
 namespace android {
 namespace camera2 {
@@ -64,6 +65,14 @@
         return INVALID_OPERATION;
     }
 
+    // If possible, use the flexible YUV format
+    int32_t callbackFormat = params.previewFormat;
+    if (params.fastInfo.useFlexibleYuv &&
+            (params.previewFormat == HAL_PIXEL_FORMAT_YCrCb_420_SP ||
+             params.previewFormat == HAL_PIXEL_FORMAT_YV12) ) {
+        callbackFormat = HAL_PIXEL_FORMAT_YCbCr_420_888;
+    }
+
     if (mCallbackConsumer == 0) {
         // Create CPU buffer queue endpoint
         mCallbackConsumer = new CpuConsumer(kCallbackHeapCount);
@@ -86,12 +95,12 @@
         }
         if (currentWidth != (uint32_t)params.previewWidth ||
                 currentHeight != (uint32_t)params.previewHeight ||
-                currentFormat != (uint32_t)params.previewFormat) {
+                currentFormat != (uint32_t)callbackFormat) {
             // Since size should only change while preview is not running,
             // assuming that all existing use of old callback stream is
             // completed.
-            ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed",
-                __FUNCTION__, mId, mCallbackStreamId);
+            ALOGV("%s: Camera %d: Deleting stream %d since the buffer "
+                    "parameters changed", __FUNCTION__, mId, mCallbackStreamId);
             res = device->deleteStream(mCallbackStreamId);
             if (res != OK) {
                 ALOGE("%s: Camera %d: Unable to delete old output stream "
@@ -104,12 +113,12 @@
     }
 
     if (mCallbackStreamId == NO_STREAM) {
-        ALOGV("Creating callback stream: %d %d format 0x%x",
+        ALOGV("Creating callback stream: %d x %d, format 0x%x, API format 0x%x",
                 params.previewWidth, params.previewHeight,
-                params.previewFormat);
+                callbackFormat, params.previewFormat);
         res = device->createStream(mCallbackWindow,
                 params.previewWidth, params.previewHeight,
-                params.previewFormat, 0, &mCallbackStreamId);
+                callbackFormat, 0, &mCallbackStreamId);
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create output stream for callbacks: "
                     "%s (%d)", __FUNCTION__, mId,
@@ -220,6 +229,8 @@
     ALOGV("%s: Camera %d: Preview callback available", __FUNCTION__,
             mId);
 
+    bool useFlexibleYuv = false;
+    int32_t previewFormat = 0;
     {
         SharedParameters::Lock l(client->getParameters());
 
@@ -246,10 +257,18 @@
             return OK;
         }
 
-        if (imgBuffer.format != l.mParameters.previewFormat) {
+        previewFormat = l.mParameters.previewFormat;
+        useFlexibleYuv = l.mParameters.fastInfo.useFlexibleYuv &&
+                (previewFormat == HAL_PIXEL_FORMAT_YCrCb_420_SP ||
+                 previewFormat == HAL_PIXEL_FORMAT_YV12);
+
+        int32_t expectedFormat = useFlexibleYuv ?
+                HAL_PIXEL_FORMAT_YCbCr_420_888 : previewFormat;
+
+        if (imgBuffer.format != expectedFormat) {
             ALOGE("%s: Camera %d: Unexpected format for callback: "
-                    "%x, expected %x", __FUNCTION__, mId,
-                    imgBuffer.format, l.mParameters.previewFormat);
+                    "0x%x, expected 0x%x", __FUNCTION__, mId,
+                    imgBuffer.format, expectedFormat);
             mCallbackConsumer->unlockBuffer(imgBuffer);
             return INVALID_OPERATION;
         }
@@ -262,9 +281,28 @@
         }
     }
 
+    uint32_t destYStride = 0;
+    uint32_t destCStride = 0;
+    if (useFlexibleYuv) {
+        if (previewFormat == HAL_PIXEL_FORMAT_YV12) {
+            // Strides must align to 16 for YV12
+            destYStride = ALIGN(imgBuffer.width, 16);
+            destCStride = ALIGN(destYStride / 2, 16);
+        } else {
+            // No padding for NV21
+            ALOG_ASSERT(previewFormat == HAL_PIXEL_FORMAT_YCrCb_420_SP,
+                    "Unexpected preview format 0x%x", previewFormat);
+            destYStride = imgBuffer.width;
+            destCStride = destYStride / 2;
+        }
+    } else {
+        destYStride = imgBuffer.stride;
+        // don't care about cStride
+    }
+
     size_t bufferSize = Camera2Client::calculateBufferSize(
             imgBuffer.width, imgBuffer.height,
-            imgBuffer.format, imgBuffer.stride);
+            previewFormat, destYStride);
     size_t currentBufferSize = (mCallbackHeap == 0) ?
             0 : (mCallbackHeap->mHeap->getSize() / kCallbackHeapCount);
     if (bufferSize != currentBufferSize) {
@@ -294,7 +332,7 @@
     mCallbackHeapHead = (mCallbackHeapHead + 1) & kCallbackHeapCount;
     mCallbackHeapFree--;
 
-    // TODO: Get rid of this memcpy by passing the gralloc queue all the way
+    // TODO: Get rid of this copy by passing the gralloc queue all the way
     // to app
 
     ssize_t offset;
@@ -303,7 +341,20 @@
             mCallbackHeap->mBuffers[heapIdx]->getMemory(&offset,
                     &size);
     uint8_t *data = (uint8_t*)heap->getBase() + offset;
-    memcpy(data, imgBuffer.data, bufferSize);
+
+    if (!useFlexibleYuv) {
+        // Can just memcpy when HAL format matches API format
+        memcpy(data, imgBuffer.data, bufferSize);
+    } else {
+        res = convertFromFlexibleYuv(previewFormat, data, imgBuffer,
+                destYStride, destCStride);
+        if (res != OK) {
+            ALOGE("%s: Camera %d: Can't convert between 0x%x and 0x%x formats!",
+                    __FUNCTION__, mId, imgBuffer.format, previewFormat);
+            mCallbackConsumer->unlockBuffer(imgBuffer);
+            return BAD_VALUE;
+        }
+    }
 
     ALOGV("%s: Freeing buffer", __FUNCTION__);
     mCallbackConsumer->unlockBuffer(imgBuffer);
@@ -328,5 +379,98 @@
     return OK;
 }
 
+status_t CallbackProcessor::convertFromFlexibleYuv(int32_t previewFormat,
+        uint8_t *dst,
+        const CpuConsumer::LockedBuffer &src,
+        uint32_t dstYStride,
+        uint32_t dstCStride) const {
+
+    if (previewFormat != HAL_PIXEL_FORMAT_YCrCb_420_SP &&
+            previewFormat != HAL_PIXEL_FORMAT_YV12) {
+        ALOGE("%s: Camera %d: Unexpected preview format when using "
+                "flexible YUV: 0x%x", __FUNCTION__, mId, previewFormat);
+        return INVALID_OPERATION;
+    }
+
+    // Copy Y plane, adjusting for stride
+    const uint8_t *ySrc = src.data;
+    uint8_t *yDst = dst;
+    for (size_t row = 0; row < src.height; row++) {
+        memcpy(yDst, ySrc, src.width);
+        ySrc += src.stride;
+        yDst += dstYStride;
+    }
+
+    // Copy/swizzle chroma planes, 4:2:0 subsampling
+    const uint8_t *uSrc = src.dataCb;
+    const uint8_t *vSrc = src.dataCr;
+    size_t chromaHeight = src.height / 2;
+    size_t chromaWidth = src.width / 2;
+    ssize_t chromaGap = src.chromaStride -
+            (chromaWidth * src.chromaStep);
+    size_t dstChromaGap = dstCStride - chromaWidth;
+
+    if (previewFormat == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
+        // Flexible YUV chroma to NV21 chroma
+        uint8_t *vuDst = yDst;
+        // Check for shortcuts
+        if (uSrc == vSrc + 1 && src.chromaStep == 2) {
+            // Source has semiplanar CrCb chroma layout, can copy by rows
+            for (size_t row = 0; row < chromaHeight; row++) {
+                memcpy(vuDst, vSrc, src.width);
+                vuDst += src.width;
+                vSrc += src.chromaStride;
+            }
+        } else {
+            // Generic copy, always works but not very efficient
+            for (size_t row = 0; row < chromaHeight; row++) {
+                for (size_t col = 0; col < chromaWidth; col++) {
+                    *(vuDst++) = *vSrc;
+                    *(vuDst++) = *uSrc;
+                    vSrc += src.chromaStep;
+                    uSrc += src.chromaStep;
+                }
+                vSrc += chromaGap;
+                uSrc += chromaGap;
+            }
+        }
+    } else {
+        // flexible YUV chroma to YV12 chroma
+        ALOG_ASSERT(previewFormat == HAL_PIXEL_FORMAT_YV12,
+                "Unexpected preview format 0x%x", previewFormat);
+        uint8_t *vDst = yDst;
+        uint8_t *uDst = yDst + chromaHeight * dstCStride;
+        if (src.chromaStep == 1) {
+            // Source has planar chroma layout, can copy by row
+            for (size_t row = 0; row < chromaHeight; row++) {
+                memcpy(vDst, vSrc, chromaWidth);
+                vDst += dstCStride;
+                vSrc += src.chromaStride;
+            }
+            for (size_t row = 0; row < chromaHeight; row++) {
+                memcpy(uDst, uSrc, chromaWidth);
+                uDst += dstCStride;
+                uSrc += src.chromaStride;
+            }
+        } else {
+            // Generic copy, always works but not very efficient
+            for (size_t row = 0; row < chromaHeight; row++) {
+                for (size_t col = 0; col < chromaWidth; col++) {
+                    *(vDst++) = *vSrc;
+                    *(uDst++) = *uSrc;
+                    vSrc += src.chromaStep;
+                    uSrc += src.chromaStep;
+                }
+                vSrc += chromaGap;
+                uSrc += chromaGap;
+                vDst += dstChromaGap;
+                uDst += dstChromaGap;
+            }
+        }
+    }
+
+    return OK;
+}
+
 }; // namespace camera2
 }; // namespace android
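A quick note on the stride handling in processNewCallback() above (illustrative arithmetic, not part of the patch): for a YV12 callback buffer the destination strides must be 16-byte aligned, which is what the ALIGN macro added at the top of the file computes. For a hypothetical 200x160 callback buffer:

    destYStride = ALIGN(200, 16)     = 208
    destCStride = ALIGN(208 / 2, 16) = 112
    YV12 buffer size = destYStride * 160 + 2 * destCStride * (160 / 2)
                     = 33280 + 17920 = 51200 bytes

For the NV21 path no padding is applied, so destYStride is simply the buffer width and each interleaved VU chroma row is width bytes long.
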
diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.h b/services/camera/libcameraservice/camera2/CallbackProcessor.h
index 1c40a03..d851a84 100644
--- a/services/camera/libcameraservice/camera2/CallbackProcessor.h
+++ b/services/camera/libcameraservice/camera2/CallbackProcessor.h
@@ -77,6 +77,13 @@
     status_t processNewCallback(sp<Camera2Client> &client);
     // Used when shutting down
     status_t discardNewCallback();
+
+    // Convert from flexible YUV to NV21 or YV12
+    status_t convertFromFlexibleYuv(int32_t previewFormat,
+            uint8_t *dst,
+            const CpuConsumer::LockedBuffer &src,
+            uint32_t dstYStride,
+            uint32_t dstCStride) const;
 };
 
 
diff --git a/services/camera/libcameraservice/camera2/Parameters.cpp b/services/camera/libcameraservice/camera2/Parameters.cpp
index b26cd09..49fc3d8 100644
--- a/services/camera/libcameraservice/camera2/Parameters.cpp
+++ b/services/camera/libcameraservice/camera2/Parameters.cpp
@@ -152,7 +152,16 @@
                 supportedPreviewFormats +=
                     CameraParameters::PIXEL_FORMAT_RGBA8888;
                 break;
+            case HAL_PIXEL_FORMAT_YCbCr_420_888:
+                // Flexible YUV allows both YV12 and NV21
+                supportedPreviewFormats +=
+                    CameraParameters::PIXEL_FORMAT_YUV420P;
+                supportedPreviewFormats += ",";
+                supportedPreviewFormats +=
+                    CameraParameters::PIXEL_FORMAT_YUV420SP;
+                break;
             // Not advertizing JPEG, RAW_SENSOR, etc, for preview formats
+            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
             case HAL_PIXEL_FORMAT_RAW_SENSOR:
             case HAL_PIXEL_FORMAT_BLOB:
                 addComma = false;
@@ -863,6 +872,11 @@
         staticInfo(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS);
     if (!availableFocalLengths.count) return NO_INIT;
 
+    camera_metadata_ro_entry_t availableFormats =
+        staticInfo(ANDROID_SCALER_AVAILABLE_FORMATS);
+    if (!availableFormats.count) return NO_INIT;
+
+
     if (sceneModeOverrides.count > 0) {
         // sceneModeOverrides is defined to have 3 entries for each scene mode,
         // which are AE, AWB, and AF override modes the HAL wants for that scene
@@ -940,6 +954,17 @@
         }
     }
 
+    // Check if the HAL supports HAL_PIXEL_FORMAT_YCbCr_420_888
+    fastInfo.useFlexibleYuv = false;
+    for (size_t i = 0; i < availableFormats.count; i++) {
+        if (availableFormats.data.i32[i] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
+            fastInfo.useFlexibleYuv = true;
+            break;
+        }
+    }
+    ALOGV("Camera %d: Flexible YUV %s supported",
+            cameraId, fastInfo.useFlexibleYuv ? "is" : "is not");
+
     return OK;
 }
 
@@ -1085,15 +1110,24 @@
         }
         camera_metadata_ro_entry_t availableFormats =
             staticInfo(ANDROID_SCALER_AVAILABLE_FORMATS);
-        for (i = 0; i < availableFormats.count; i++) {
-            if (availableFormats.data.i32[i] == validatedParams.previewFormat)
-                break;
-        }
-        if (i == availableFormats.count) {
-            ALOGE("%s: Requested preview format %s (0x%x) is not supported",
-                    __FUNCTION__, newParams.getPreviewFormat(),
-                    validatedParams.previewFormat);
-            return BAD_VALUE;
+        // If using flexible YUV, always support NV21/YV12. Otherwise, check
+        // HAL's list.
+        if (! (fastInfo.useFlexibleYuv &&
+                (validatedParams.previewFormat ==
+                        HAL_PIXEL_FORMAT_YCrCb_420_SP ||
+                 validatedParams.previewFormat ==
+                        HAL_PIXEL_FORMAT_YV12) ) ) {
+            // Not using flexible YUV format, so check explicitly
+            for (i = 0; i < availableFormats.count; i++) {
+                if (availableFormats.data.i32[i] ==
+                        validatedParams.previewFormat) break;
+            }
+            if (i == availableFormats.count) {
+                ALOGE("%s: Requested preview format %s (0x%x) is not supported",
+                        __FUNCTION__, newParams.getPreviewFormat(),
+                        validatedParams.previewFormat);
+                return BAD_VALUE;
+            }
         }
     }
 
@@ -1810,13 +1844,14 @@
             CropRegion::OUTPUT_PREVIEW     |
             CropRegion::OUTPUT_VIDEO       |
             CropRegion::OUTPUT_PICTURE    ));
-    int32_t reqCropRegion[3] = {
+    int32_t reqCropRegion[4] = {
         static_cast<int32_t>(crop.left),
         static_cast<int32_t>(crop.top),
-        static_cast<int32_t>(crop.width)
+        static_cast<int32_t>(crop.width),
+        static_cast<int32_t>(crop.height)
     };
     res = request->update(ANDROID_SCALER_CROP_REGION,
-            reqCropRegion, 3);
+            reqCropRegion, 4);
     if (res != OK) return res;
 
     uint8_t reqVstabMode = videoStabilization ?
@@ -2524,27 +2559,24 @@
      * stream cropping.
      */
     if (quirks.meteringCropRegion) {
-        /**
-         * All streams are the same in height, so narrower aspect ratios will
-         * get cropped on the sides.  First find the largest (widest) aspect
-         * ratio, then calculate the crop of the still FOV based on that.
-         */
-        float cropAspect = arrayAspect;
-        float aspects[] = {
-            stillAspect,
-            static_cast<float>(previewWidth) / previewHeight,
-            static_cast<float>(videoWidth) / videoHeight
-        };
-        for (size_t i = 0; i < sizeof(aspects)/sizeof(aspects[0]); i++) {
-            if (cropAspect < aspects[i]) cropAspect = aspects[i];
+        // Use max of preview and video as first crop
+        float previewAspect = static_cast<float>(previewWidth) / previewHeight;
+        float videoAspect = static_cast<float>(videoWidth) / videoHeight;
+        if (videoAspect > previewAspect) {
+            previewAspect = videoAspect;
         }
-        ALOGV("Widest crop aspect: %f", cropAspect);
-        // Horizontal crop of still is done based on fitting in the widest
-        // aspect ratio
-        horizCropFactor = stillAspect / cropAspect;
-        // Vertical crop is a function of the array aspect ratio and the
-        // widest aspect ratio.
-        vertCropFactor = arrayAspect / cropAspect;
+        // First crop sensor to preview aspect ratio
+        if (arrayAspect < previewAspect) {
+            vertCropFactor = arrayAspect / previewAspect;
+        } else {
+            horizCropFactor = previewAspect / arrayAspect;
+        }
+        // Second crop to still aspect ratio
+        if (stillAspect < previewAspect) {
+            horizCropFactor *= stillAspect / previewAspect;
+        } else {
+            vertCropFactor *= previewAspect / stillAspect;
+        }
     } else {
         /**
          * Crop are just a function of just the still/array relative aspect
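A worked example of the reworked meteringCropRegion math above, assuming a 4:3 sensor array, a 16:9 preview (or video) stream, and a 4:3 still size: previewAspect becomes max(preview, video) = 16/9 and arrayAspect = 4/3, so the first step crops vertically with vertCropFactor = (4/3) / (16/9) = 0.75; the still's 4:3 aspect is narrower than the 16:9 preview, so the second step multiplies horizCropFactor by (4/3) / (16/9) = 0.75 as well. The resulting still FOV is 0.75 x 0.75 of the active array: the sensor is first cropped to the preview aspect, then the still is cropped to fit inside that preview FOV.
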
diff --git a/services/camera/libcameraservice/camera2/Parameters.h b/services/camera/libcameraservice/camera2/Parameters.h
index 6d85037..b994ec9 100644
--- a/services/camera/libcameraservice/camera2/Parameters.h
+++ b/services/camera/libcameraservice/camera2/Parameters.h
@@ -184,6 +184,7 @@
         };
         DefaultKeyedVector<uint8_t, OverrideModes> sceneModeOverrides;
         float minFocalLength;
+        bool useFlexibleYuv;
     } fastInfo;
 
     // Quirks information; these are short-lived flags to enable workarounds for
diff --git a/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp b/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp
index bbcccaf..a2c97d4 100644
--- a/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp
@@ -264,11 +264,6 @@
         return res;
     }
 
-    res = setTransformLocked(0);
-    if (res != OK) {
-        return res;
-    }
-
     if (mMaxSize == 0) {
         // For buffers of known size
         res = native_window_set_buffers_geometry(mConsumer.get(),