merge in jb-mr2-release history after reset to jb-mr2-dev
diff --git a/include/media/stagefright/MediaCodec.h b/include/media/stagefright/MediaCodec.h
index a06a8e1..76aa503 100644
--- a/include/media/stagefright/MediaCodec.h
+++ b/include/media/stagefright/MediaCodec.h
@@ -177,6 +177,8 @@
kFlagDequeueOutputPending = 32,
kFlagIsSecure = 64,
kFlagSawMediaServerDie = 128,
+ kFlagIsEncoder = 256,
+ kFlagGatherCodecSpecificData = 512,
};
struct BufferInfo {
@@ -244,6 +246,8 @@
status_t onSetParameters(const sp<AMessage> &params);
+ status_t amendOutputFormatWithCodecSpecificData(const sp<ABuffer> &buffer);
+
DISALLOW_EVIL_CONSTRUCTORS(MediaCodec);
};
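
The two new flags continue MediaCodec's power-of-two flag scheme, so they can be OR-ed into mFlags and tested or cleared with bitwise operations. Below is a minimal standalone sketch of that pattern (illustrative only, not the actual MediaCodec code; main() and the log text are invented):

// Sketch: power-of-two flags combined in a single bitmask, as MediaCodec does
// with mFlags. Values mirror the enum above; everything else is illustrative.
#include <cstdint>
#include <cstdio>

enum {
    kFlagIsEncoder               = 256,   // 1 << 8
    kFlagGatherCodecSpecificData = 512,   // 1 << 9
};

int main() {
    uint32_t flags = 0;

    flags |= kFlagIsEncoder;                    // set on configure(..., CONFIGURE_FLAG_ENCODE)
    flags |= kFlagGatherCodecSpecificData;      // set when an encoder reports a format change

    if (flags & kFlagGatherCodecSpecificData) {
        printf("holding format-changed notification until csd arrives\n");
        flags &= ~kFlagGatherCodecSpecificData; // cleared on the first output buffer
    }
    return 0;
}
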
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 6d952c3..058852e 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -1470,24 +1470,47 @@
&format, sizeof(format));
}
+static const struct VideoCodingMapEntry {
+ const char *mMime;
+ OMX_VIDEO_CODINGTYPE mVideoCodingType;
+} kVideoCodingMapEntry[] = {
+ { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC },
+ { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 },
+ { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 },
+ { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 },
+ { MEDIA_MIMETYPE_VIDEO_VPX, OMX_VIDEO_CodingVPX },
+};
+
static status_t GetVideoCodingTypeFromMime(
const char *mime, OMX_VIDEO_CODINGTYPE *codingType) {
- if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) {
- *codingType = OMX_VIDEO_CodingAVC;
- } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime)) {
- *codingType = OMX_VIDEO_CodingMPEG4;
- } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) {
- *codingType = OMX_VIDEO_CodingH263;
- } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG2, mime)) {
- *codingType = OMX_VIDEO_CodingMPEG2;
- } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_VPX, mime)) {
- *codingType = OMX_VIDEO_CodingVPX;
- } else {
- *codingType = OMX_VIDEO_CodingUnused;
- return ERROR_UNSUPPORTED;
+ for (size_t i = 0;
+ i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
+ ++i) {
+ if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) {
+ *codingType = kVideoCodingMapEntry[i].mVideoCodingType;
+ return OK;
+ }
}
- return OK;
+ *codingType = OMX_VIDEO_CodingUnused;
+
+ return ERROR_UNSUPPORTED;
+}
+
+static status_t GetMimeTypeForVideoCoding(
+ OMX_VIDEO_CODINGTYPE codingType, AString *mime) {
+ for (size_t i = 0;
+ i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
+ ++i) {
+ if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) {
+ *mime = kVideoCodingMapEntry[i].mMime;
+ return OK;
+ }
+ }
+
+ mime->clear();
+
+ return ERROR_UNSUPPORTED;
}
status_t ACodec::setupVideoDecoder(
@@ -2227,49 +2250,61 @@
{
OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video;
- notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);
- notify->setInt32("width", videoDef->nFrameWidth);
- notify->setInt32("height", videoDef->nFrameHeight);
- notify->setInt32("stride", videoDef->nStride);
- notify->setInt32("slice-height", videoDef->nSliceHeight);
- notify->setInt32("color-format", videoDef->eColorFormat);
-
- OMX_CONFIG_RECTTYPE rect;
- InitOMXParams(&rect);
- rect.nPortIndex = kPortIndexOutput;
-
- if (mOMX->getConfig(
- mNode, OMX_IndexConfigCommonOutputCrop,
- &rect, sizeof(rect)) != OK) {
- rect.nLeft = 0;
- rect.nTop = 0;
- rect.nWidth = videoDef->nFrameWidth;
- rect.nHeight = videoDef->nFrameHeight;
+ AString mime;
+ if (!mIsEncoder) {
+ notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);
+ } else if (GetMimeTypeForVideoCoding(
+ videoDef->eCompressionFormat, &mime) != OK) {
+ notify->setString("mime", "application/octet-stream");
+ } else {
+ notify->setString("mime", mime.c_str());
}
- CHECK_GE(rect.nLeft, 0);
- CHECK_GE(rect.nTop, 0);
- CHECK_GE(rect.nWidth, 0u);
- CHECK_GE(rect.nHeight, 0u);
- CHECK_LE(rect.nLeft + rect.nWidth - 1, videoDef->nFrameWidth);
- CHECK_LE(rect.nTop + rect.nHeight - 1, videoDef->nFrameHeight);
+ notify->setInt32("width", videoDef->nFrameWidth);
+ notify->setInt32("height", videoDef->nFrameHeight);
- notify->setRect(
- "crop",
- rect.nLeft,
- rect.nTop,
- rect.nLeft + rect.nWidth - 1,
- rect.nTop + rect.nHeight - 1);
+ if (!mIsEncoder) {
+ notify->setInt32("stride", videoDef->nStride);
+ notify->setInt32("slice-height", videoDef->nSliceHeight);
+ notify->setInt32("color-format", videoDef->eColorFormat);
- if (mNativeWindow != NULL) {
- android_native_rect_t crop;
- crop.left = rect.nLeft;
- crop.top = rect.nTop;
- crop.right = rect.nLeft + rect.nWidth;
- crop.bottom = rect.nTop + rect.nHeight;
+ OMX_CONFIG_RECTTYPE rect;
+ InitOMXParams(&rect);
+ rect.nPortIndex = kPortIndexOutput;
- CHECK_EQ(0, native_window_set_crop(
- mNativeWindow.get(), &crop));
+ if (mOMX->getConfig(
+ mNode, OMX_IndexConfigCommonOutputCrop,
+ &rect, sizeof(rect)) != OK) {
+ rect.nLeft = 0;
+ rect.nTop = 0;
+ rect.nWidth = videoDef->nFrameWidth;
+ rect.nHeight = videoDef->nFrameHeight;
+ }
+
+ CHECK_GE(rect.nLeft, 0);
+ CHECK_GE(rect.nTop, 0);
+ CHECK_GE(rect.nWidth, 0u);
+ CHECK_GE(rect.nHeight, 0u);
+ CHECK_LE(rect.nLeft + rect.nWidth - 1, videoDef->nFrameWidth);
+ CHECK_LE(rect.nTop + rect.nHeight - 1, videoDef->nFrameHeight);
+
+ notify->setRect(
+ "crop",
+ rect.nLeft,
+ rect.nTop,
+ rect.nLeft + rect.nWidth - 1,
+ rect.nTop + rect.nHeight - 1);
+
+ if (mNativeWindow != NULL) {
+ android_native_rect_t crop;
+ crop.left = rect.nLeft;
+ crop.top = rect.nTop;
+ crop.right = rect.nLeft + rect.nWidth;
+ crop.bottom = rect.nTop + rect.nHeight;
+
+ CHECK_EQ(0, native_window_set_crop(
+ mNativeWindow.get(), &crop));
+ }
}
break;
}
@@ -2277,41 +2312,108 @@
case OMX_PortDomainAudio:
{
OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio;
- CHECK_EQ((int)audioDef->eEncoding, (int)OMX_AUDIO_CodingPCM);
- OMX_AUDIO_PARAM_PCMMODETYPE params;
- InitOMXParams(&params);
- params.nPortIndex = kPortIndexOutput;
+ switch (audioDef->eEncoding) {
+ case OMX_AUDIO_CodingPCM:
+ {
+ OMX_AUDIO_PARAM_PCMMODETYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = kPortIndexOutput;
- CHECK_EQ(mOMX->getParameter(
- mNode, OMX_IndexParamAudioPcm,
- &params, sizeof(params)),
- (status_t)OK);
+ CHECK_EQ(mOMX->getParameter(
+ mNode, OMX_IndexParamAudioPcm,
+ &params, sizeof(params)),
+ (status_t)OK);
- CHECK(params.nChannels == 1 || params.bInterleaved);
- CHECK_EQ(params.nBitPerSample, 16u);
- CHECK_EQ((int)params.eNumData, (int)OMX_NumericalDataSigned);
- CHECK_EQ((int)params.ePCMMode, (int)OMX_AUDIO_PCMModeLinear);
+ CHECK(params.nChannels == 1 || params.bInterleaved);
+ CHECK_EQ(params.nBitPerSample, 16u);
+ CHECK_EQ((int)params.eNumData, (int)OMX_NumericalDataSigned);
+ CHECK_EQ((int)params.ePCMMode, (int)OMX_AUDIO_PCMModeLinear);
- notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
- notify->setInt32("channel-count", params.nChannels);
- notify->setInt32("sample-rate", params.nSamplingRate);
- if (mEncoderDelay + mEncoderPadding) {
- size_t frameSize = params.nChannels * sizeof(int16_t);
- if (mSkipCutBuffer != NULL) {
- size_t prevbufsize = mSkipCutBuffer->size();
- if (prevbufsize != 0) {
- ALOGW("Replacing SkipCutBuffer holding %d bytes", prevbufsize);
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
+ notify->setInt32("channel-count", params.nChannels);
+ notify->setInt32("sample-rate", params.nSamplingRate);
+ if (mEncoderDelay + mEncoderPadding) {
+ size_t frameSize = params.nChannels * sizeof(int16_t);
+ if (mSkipCutBuffer != NULL) {
+ size_t prevbufsize = mSkipCutBuffer->size();
+ if (prevbufsize != 0) {
+ ALOGW("Replacing SkipCutBuffer holding %d bytes", prevbufsize);
+ }
+ }
+ mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay * frameSize,
+ mEncoderPadding * frameSize);
}
+
+ if (mChannelMaskPresent) {
+ notify->setInt32("channel-mask", mChannelMask);
+ }
+ break;
}
- mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay * frameSize,
- mEncoderPadding * frameSize);
- }
- if (mChannelMaskPresent) {
- notify->setInt32("channel-mask", mChannelMask);
- }
+ case OMX_AUDIO_CodingAAC:
+ {
+ OMX_AUDIO_PARAM_AACPROFILETYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = kPortIndexOutput;
+ CHECK_EQ(mOMX->getParameter(
+ mNode, OMX_IndexParamAudioAac,
+ &params, sizeof(params)),
+ (status_t)OK);
+
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
+ notify->setInt32("channel-count", params.nChannels);
+ notify->setInt32("sample-rate", params.nSampleRate);
+ break;
+ }
+
+ case OMX_AUDIO_CodingAMR:
+ {
+ OMX_AUDIO_PARAM_AMRTYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = kPortIndexOutput;
+
+ CHECK_EQ(mOMX->getParameter(
+ mNode, OMX_IndexParamAudioAmr,
+ &params, sizeof(params)),
+ (status_t)OK);
+
+ notify->setInt32("channel-count", 1);
+ if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) {
+ notify->setString(
+ "mime", MEDIA_MIMETYPE_AUDIO_AMR_WB);
+
+ notify->setInt32("sample-rate", 16000);
+ } else {
+ notify->setString(
+ "mime", MEDIA_MIMETYPE_AUDIO_AMR_NB);
+
+ notify->setInt32("sample-rate", 8000);
+ }
+ break;
+ }
+
+ case OMX_AUDIO_CodingFLAC:
+ {
+ OMX_AUDIO_PARAM_FLACTYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = kPortIndexOutput;
+
+ CHECK_EQ(mOMX->getParameter(
+ mNode, OMX_IndexParamAudioFlac,
+ &params, sizeof(params)),
+ (status_t)OK);
+
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC);
+ notify->setInt32("channel-count", params.nChannels);
+ notify->setInt32("sample-rate", params.nSampleRate);
+ break;
+ }
+
+ default:
+ TRESPASS();
+ }
break;
}
@@ -2957,7 +3059,7 @@
break;
}
- if (!mCodec->mIsEncoder && !mCodec->mSentFormat) {
+ if (!mCodec->mSentFormat) {
mCodec->sendFormatChange();
}
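
The refactor above replaces the MIME-to-OMX if/else chain with a single table that can be searched in either direction, which is what lets the encoder path in sendFormatChange() map eCompressionFormat back to a MIME string. A standalone sketch of the same table-driven two-way lookup, using stand-in enum values and MIME strings rather than the real OMX/stagefright definitions:

// Sketch: one static table, two lookup directions (MIME -> coding, coding -> MIME).
#include <strings.h>   // strcasecmp
#include <cstddef>
#include <cstdio>

enum VideoCoding { CodingUnused, CodingAVC, CodingMPEG4 };

static const struct { const char *mime; VideoCoding coding; } kMap[] = {
    { "video/avc",     CodingAVC },
    { "video/mp4v-es", CodingMPEG4 },
};

static bool codingForMime(const char *mime, VideoCoding *out) {
    for (size_t i = 0; i < sizeof(kMap) / sizeof(kMap[0]); ++i) {
        if (!strcasecmp(mime, kMap[i].mime)) { *out = kMap[i].coding; return true; }
    }
    *out = CodingUnused;
    return false;
}

static bool mimeForCoding(VideoCoding coding, const char **out) {
    for (size_t i = 0; i < sizeof(kMap) / sizeof(kMap[0]); ++i) {
        if (coding == kMap[i].coding) { *out = kMap[i].mime; return true; }
    }
    *out = NULL;
    return false;
}

int main() {
    VideoCoding c;
    const char *m;
    printf("forward:  %d (avc=%d)\n", codingForMime("VIDEO/AVC", &c), c == CodingAVC);
    printf("backward: %d (%s)\n", mimeForCoding(CodingMPEG4, &m), m);
    return 0;
}
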
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index ae7bb17..714da55 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -31,10 +31,13 @@
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/ACodec.h>
#include <media/stagefright/BufferProducerWrapper.h>
+#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/NativeWindowWrapper.h>
+#include "include/avc_utils.h"
+
namespace android {
// static
@@ -741,8 +744,16 @@
}
mOutputFormat = msg;
- mFlags |= kFlagOutputFormatChanged;
- postActivityNotificationIfPossible();
+
+ if (mFlags & kFlagIsEncoder) {
+ // Before we announce the format change we should
+ // collect codec specific data and amend the output
+ // format as necessary.
+ mFlags |= kFlagGatherCodecSpecificData;
+ } else {
+ mFlags |= kFlagOutputFormatChanged;
+ postActivityNotificationIfPossible();
+ }
break;
}
@@ -812,6 +823,25 @@
buffer->meta()->setInt32("omxFlags", omxFlags);
+ if (mFlags & kFlagGatherCodecSpecificData) {
+ // This is the very first output buffer after a
+ // format change was signalled, it'll either contain
+ // the one piece of codec specific data we can expect
+ // or there won't be codec specific data.
+ if (omxFlags & OMX_BUFFERFLAG_CODECCONFIG) {
+ status_t err =
+ amendOutputFormatWithCodecSpecificData(buffer);
+
+ if (err != OK) {
+ ALOGE("Codec spit out malformed codec "
+ "specific data!");
+ }
+ }
+
+ mFlags &= ~kFlagGatherCodecSpecificData;
+ mFlags |= kFlagOutputFormatChanged;
+ }
+
if (mFlags & kFlagDequeueOutputPending) {
CHECK(handleDequeueOutputBuffer(mDequeueOutputReplyID));
@@ -955,6 +985,7 @@
if (flags & CONFIGURE_FLAG_ENCODE) {
format->setInt32("encoder", true);
+ mFlags |= kFlagIsEncoder;
}
extractCSD(format);
@@ -1413,6 +1444,8 @@
mFlags &= ~kFlagOutputFormatChanged;
mFlags &= ~kFlagOutputBuffersChanged;
mFlags &= ~kFlagStickyError;
+ mFlags &= ~kFlagIsEncoder;
+ mFlags &= ~kFlagGatherCodecSpecificData;
mActivityNotify.clear();
}
@@ -1720,4 +1753,45 @@
return OK;
}
+status_t MediaCodec::amendOutputFormatWithCodecSpecificData(
+ const sp<ABuffer> &buffer) {
+ AString mime;
+ CHECK(mOutputFormat->findString("mime", &mime));
+
+ if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_AVC)) {
+ // Codec specific data should be SPS and PPS in a single buffer,
+ // each prefixed by a startcode (0x00 0x00 0x00 0x01).
+ // We separate the two and put them into the output format
+ // under the keys "csd-0" and "csd-1".
+
+ unsigned csdIndex = 0;
+
+ const uint8_t *data = buffer->data();
+ size_t size = buffer->size();
+
+ const uint8_t *nalStart;
+ size_t nalSize;
+ while (getNextNALUnit(&data, &size, &nalStart, &nalSize, true) == OK) {
+ sp<ABuffer> csd = new ABuffer(nalSize + 4);
+ memcpy(csd->data(), "\x00\x00\x00\x01", 4);
+ memcpy(csd->data() + 4, nalStart, nalSize);
+
+ mOutputFormat->setBuffer(
+ StringPrintf("csd-%u", csdIndex).c_str(), csd);
+
+ ++csdIndex;
+ }
+
+ if (csdIndex != 2) {
+ return ERROR_MALFORMED;
+ }
+ } else {
+ // For everything else we just stash the codec specific data into
+ // the output format as a single piece of csd under "csd-0".
+ mOutputFormat->setBuffer("csd-0", buffer);
+ }
+
+ return OK;
+}
+
} // namespace android
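
amendOutputFormatWithCodecSpecificData() expects the AVC codec-config buffer to hold exactly two NAL units (SPS then PPS), each preceded by a 00 00 00 01 start code, and stores them as "csd-0" and "csd-1". A standalone sketch of that splitting step with a simplified scanner (the real code uses getNextNALUnit() from avc_utils; the helper below is hypothetical):

// Sketch: split a codec-config blob at its 4-byte start codes, keeping the
// start code on each piece, as the csd-0/csd-1 amendment above does.
#include <vector>
#include <cstdint>
#include <cstdio>

typedef std::vector<uint8_t> Buffer;

static std::vector<Buffer> splitByStartCode(const uint8_t *data, size_t size) {
    std::vector<size_t> starts;
    for (size_t i = 0; i + 4 <= size; ++i) {
        if (data[i] == 0 && data[i+1] == 0 && data[i+2] == 0 && data[i+3] == 1) {
            starts.push_back(i);
        }
    }
    std::vector<Buffer> pieces;
    for (size_t k = 0; k < starts.size(); ++k) {
        size_t begin = starts[k];
        size_t end = (k + 1 < starts.size()) ? starts[k + 1] : size;
        pieces.push_back(Buffer(data + begin, data + end));
    }
    return pieces;
}

int main() {
    // Fabricated example bytes: one "SPS-like" and one "PPS-like" unit.
    const uint8_t csd[] = { 0,0,0,1, 0x67, 0x42, 0,0,0,1, 0x68, 0xCE };
    std::vector<Buffer> pieces = splitByStartCode(csd, sizeof(csd));
    printf("csd pieces: %zu (a well-formed AVC config yields 2)\n", pieces.size());
    return 0;
}
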
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index 3c84703..3479553 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -25,10 +25,12 @@
camera2/JpegCompressor.cpp \
camera2/CaptureSequencer.cpp \
camera2/ProFrameProcessor.cpp \
+ camera2/ZslProcessor3.cpp \
camera3/Camera3Stream.cpp \
camera3/Camera3InputStream.cpp \
camera3/Camera3OutputStream.cpp \
- camera3/Camera3ZslStream.cpp
+ camera3/Camera3ZslStream.cpp \
+ gui/RingBufferConsumer.cpp \
LOCAL_SHARED_LIBRARIES:= \
libui \
diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp
index eae7461..48f3606 100644
--- a/services/camera/libcameraservice/Camera2Client.cpp
+++ b/services/camera/libcameraservice/Camera2Client.cpp
@@ -28,6 +28,9 @@
#include "Camera2Device.h"
#include "Camera3Device.h"
+#include "camera2/ZslProcessor.h"
+#include "camera2/ZslProcessor3.h"
+
#define ALOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__);
#define ALOG2(...) ALOGD_IF(gLogLevel >= 2, __VA_ARGS__);
@@ -51,12 +54,13 @@
int deviceVersion):
Camera2ClientBase(cameraService, cameraClient, clientPackageName,
cameraId, cameraFacing, clientPid, clientUid, servicePid),
- mParameters(cameraId, cameraFacing)
+ mParameters(cameraId, cameraFacing),
+ mDeviceVersion(deviceVersion)
{
ATRACE_CALL();
ALOGI("Camera %d: Opened", cameraId);
- switch (deviceVersion) {
+ switch (mDeviceVersion) {
case CAMERA_DEVICE_API_VERSION_2_0:
mDevice = new Camera2Device(cameraId);
break;
@@ -65,7 +69,7 @@
break;
default:
ALOGE("Camera %d: Unknown HAL device version %d",
- cameraId, deviceVersion);
+ cameraId, mDeviceVersion);
mDevice = NULL;
break;
}
@@ -114,10 +118,27 @@
mCameraId);
mJpegProcessor->run(threadName.string());
- mZslProcessor = new ZslProcessor(this, mCaptureSequencer);
+ switch (mDeviceVersion) {
+ case CAMERA_DEVICE_API_VERSION_2_0: {
+ sp<ZslProcessor> zslProc =
+ new ZslProcessor(this, mCaptureSequencer);
+ mZslProcessor = zslProc;
+ mZslProcessorThread = zslProc;
+ break;
+ }
+ case CAMERA_DEVICE_API_VERSION_3_0:{
+ sp<ZslProcessor3> zslProc =
+ new ZslProcessor3(this, mCaptureSequencer);
+ mZslProcessor = zslProc;
+ mZslProcessorThread = zslProc;
+ break;
+ }
+ default:
+ break;
+ }
threadName = String8::format("C2-%d-ZslProc",
mCameraId);
- mZslProcessor->run(threadName.string());
+ mZslProcessorThread->run(threadName.string());
mCallbackProcessor = new CallbackProcessor(this);
threadName = String8::format("C2-%d-CallbkProc",
@@ -393,7 +414,7 @@
mFrameProcessor->requestExit();
mCaptureSequencer->requestExit();
mJpegProcessor->requestExit();
- mZslProcessor->requestExit();
+ mZslProcessorThread->requestExit();
mCallbackProcessor->requestExit();
ALOGV("Camera %d: Waiting for threads", mCameraId);
@@ -401,7 +422,7 @@
mFrameProcessor->join();
mCaptureSequencer->join();
mJpegProcessor->join();
- mZslProcessor->join();
+ mZslProcessorThread->join();
mCallbackProcessor->join();
ALOGV("Camera %d: Disconnecting device", mCameraId);
diff --git a/services/camera/libcameraservice/Camera2Client.h b/services/camera/libcameraservice/Camera2Client.h
index 713fab3..af72ab2 100644
--- a/services/camera/libcameraservice/Camera2Client.h
+++ b/services/camera/libcameraservice/Camera2Client.h
@@ -23,7 +23,7 @@
#include "camera2/FrameProcessor.h"
#include "camera2/StreamingProcessor.h"
#include "camera2/JpegProcessor.h"
-#include "camera2/ZslProcessor.h"
+#include "camera2/ZslProcessorInterface.h"
#include "camera2/CaptureSequencer.h"
#include "camera2/CallbackProcessor.h"
#include "Camera2ClientBase.h"
@@ -154,6 +154,7 @@
void setPreviewCallbackFlagL(Parameters &params, int flag);
status_t updateRequests(Parameters &params);
+ int mDeviceVersion;
// Used with stream IDs
static const int NO_STREAM = -1;
@@ -173,7 +174,8 @@
sp<camera2::CaptureSequencer> mCaptureSequencer;
sp<camera2::JpegProcessor> mJpegProcessor;
- sp<camera2::ZslProcessor> mZslProcessor;
+ sp<camera2::ZslProcessorInterface> mZslProcessor;
+ sp<Thread> mZslProcessorThread;
/** Notification-related members */
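
Because ZslProcessor and ZslProcessor3 share only ZslProcessorInterface and the Thread base class, Camera2Client now keeps two strong pointers to the same object: one typed as the interface for queue operations and one typed as Thread for run()/requestExit()/join(). A stand-in sketch of that dual-handle idea, using std::shared_ptr in place of android::sp and invented class names:

// Sketch: one concrete object reachable through two differently-typed handles.
#include <memory>
#include <cstdio>

struct Thread   { virtual ~Thread() {} void run() { printf("thread started\n"); } };
struct ZslIface { virtual ~ZslIface() {} virtual void clearQueue() = 0; };

struct ZslProcV2 : Thread, ZslIface { void clearQueue() { printf("v2: clear queue\n"); } };
struct ZslProcV3 : Thread, ZslIface { void clearQueue() { printf("v3: clear queue\n"); } };

int main() {
    const int deviceVersion = 3;              // pretend this came from the HAL

    std::shared_ptr<ZslIface> proc;           // ~ mZslProcessor
    std::shared_ptr<Thread>   procThread;     // ~ mZslProcessorThread

    if (deviceVersion >= 3) {
        std::shared_ptr<ZslProcV3> p(new ZslProcV3());
        proc = p; procThread = p;
    } else {
        std::shared_ptr<ZslProcV2> p(new ZslProcV2());
        proc = p; procThread = p;
    }

    procThread->run();      // lifecycle calls go through the Thread handle
    proc->clearQueue();     // processing calls go through the interface handle
    return 0;
}
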
diff --git a/services/camera/libcameraservice/Camera3Device.cpp b/services/camera/libcameraservice/Camera3Device.cpp
index 1433108..5f87e8b 100644
--- a/services/camera/libcameraservice/Camera3Device.cpp
+++ b/services/camera/libcameraservice/Camera3Device.cpp
@@ -42,6 +42,7 @@
#include <utils/Timers.h>
#include "Camera3Device.h"
#include "camera3/Camera3OutputStream.h"
+#include "camera3/Camera3InputStream.h"
using namespace android::camera3;
@@ -392,6 +393,144 @@
return mRequestThread->waitUntilRequestProcessed(requestId, timeout);
}
+status_t Camera3Device::createInputStream(
+ uint32_t width, uint32_t height, int format, int *id) {
+ ATRACE_CALL();
+ Mutex::Autolock l(mLock);
+
+ status_t res;
+ bool wasActive = false;
+
+ switch (mStatus) {
+ case STATUS_ERROR:
+ ALOGE("%s: Device has encountered a serious error", __FUNCTION__);
+ return INVALID_OPERATION;
+ case STATUS_UNINITIALIZED:
+ ALOGE("%s: Device not initialized", __FUNCTION__);
+ return INVALID_OPERATION;
+ case STATUS_IDLE:
+ // OK
+ break;
+ case STATUS_ACTIVE:
+ ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__);
+ mRequestThread->setPaused(true);
+ res = waitUntilDrainedLocked();
+ if (res != OK) {
+ ALOGE("%s: Can't pause captures to reconfigure streams!",
+ __FUNCTION__);
+ mStatus = STATUS_ERROR;
+ return res;
+ }
+ wasActive = true;
+ break;
+ default:
+ ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus);
+ return INVALID_OPERATION;
+ }
+ assert(mStatus == STATUS_IDLE);
+
+ if (mInputStream != 0) {
+ ALOGE("%s: Cannot create more than 1 input stream", __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+
+ sp<Camera3InputStream> newStream = new Camera3InputStream(mNextStreamId,
+ width, height, format);
+
+ mInputStream = newStream;
+
+ *id = mNextStreamId++;
+
+ // Continue captures if active at start
+ if (wasActive) {
+ ALOGV("%s: Restarting activity to reconfigure streams", __FUNCTION__);
+ res = configureStreamsLocked();
+ if (res != OK) {
+ ALOGE("%s: Can't reconfigure device for new stream %d: %s (%d)",
+ __FUNCTION__, mNextStreamId, strerror(-res), res);
+ return res;
+ }
+ mRequestThread->setPaused(false);
+ }
+
+ return OK;
+}
+
+
+status_t Camera3Device::createZslStream(
+ uint32_t width, uint32_t height,
+ int depth,
+ /*out*/
+ int *id,
+ sp<Camera3ZslStream>* zslStream) {
+ ATRACE_CALL();
+ Mutex::Autolock l(mLock);
+
+ status_t res;
+ bool wasActive = false;
+
+ switch (mStatus) {
+ case STATUS_ERROR:
+ ALOGE("%s: Device has encountered a serious error", __FUNCTION__);
+ return INVALID_OPERATION;
+ case STATUS_UNINITIALIZED:
+ ALOGE("%s: Device not initialized", __FUNCTION__);
+ return INVALID_OPERATION;
+ case STATUS_IDLE:
+ // OK
+ break;
+ case STATUS_ACTIVE:
+ ALOGV("%s: Stopping activity to reconfigure streams", __FUNCTION__);
+ mRequestThread->setPaused(true);
+ res = waitUntilDrainedLocked();
+ if (res != OK) {
+ ALOGE("%s: Can't pause captures to reconfigure streams!",
+ __FUNCTION__);
+ mStatus = STATUS_ERROR;
+ return res;
+ }
+ wasActive = true;
+ break;
+ default:
+ ALOGE("%s: Unexpected status: %d", __FUNCTION__, mStatus);
+ return INVALID_OPERATION;
+ }
+ assert(mStatus == STATUS_IDLE);
+
+ if (mInputStream != 0) {
+ ALOGE("%s: Cannot create more than 1 input stream", __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+
+ sp<Camera3ZslStream> newStream = new Camera3ZslStream(mNextStreamId,
+ width, height, depth);
+
+ res = mOutputStreams.add(mNextStreamId, newStream);
+ if (res < 0) {
+ ALOGE("%s: Can't add new stream to set: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ mInputStream = newStream;
+
+ *id = mNextStreamId++;
+ *zslStream = newStream;
+
+ // Continue captures if active at start
+ if (wasActive) {
+ ALOGV("%s: Restarting activity to reconfigure streams", __FUNCTION__);
+ res = configureStreamsLocked();
+ if (res != OK) {
+ ALOGE("%s: Can't reconfigure device for new stream %d: %s (%d)",
+ __FUNCTION__, mNextStreamId, strerror(-res), res);
+ return res;
+ }
+ mRequestThread->setPaused(false);
+ }
+
+ return OK;
+}
+
status_t Camera3Device::createStream(sp<ANativeWindow> consumer,
uint32_t width, uint32_t height, int format, size_t size, int *id) {
ATRACE_CALL();
@@ -545,7 +684,7 @@
return INVALID_OPERATION;
}
- sp<Camera3Stream> deletedStream;
+ sp<Camera3StreamInterface> deletedStream;
if (mInputStream != NULL && id == mInputStream->getId()) {
deletedStream = mInputStream;
mInputStream.clear();
@@ -836,7 +975,8 @@
streams.data.u8[i]);
return NULL;
}
- sp<Camera3OutputStream> stream = mOutputStreams.editValueAt(idx);
+ sp<Camera3OutputStreamInterface> stream =
+ mOutputStreams.editValueAt(idx);
// Lazy completion of stream configuration (allocation/registration)
// on first use
@@ -885,6 +1025,15 @@
}
for (size_t i = 0; i < mOutputStreams.size(); i++) {
+
+ // Don't configure bidi streams twice, nor add them twice to the list
+ if (mOutputStreams[i].get() ==
+ static_cast<Camera3StreamInterface*>(mInputStream.get())) {
+
+ config.num_streams--;
+ continue;
+ }
+
camera3_stream_t *outputStream;
outputStream = mOutputStreams.editValueAt(i)->startConfiguration();
if (outputStream == NULL) {
@@ -1471,7 +1620,7 @@
if (nextRequest->mInputStream != NULL) {
request.input_buffer = &inputBuffer;
- res = nextRequest->mInputStream->getBuffer(&inputBuffer);
+ res = nextRequest->mInputStream->getInputBuffer(&inputBuffer);
if (res != OK) {
SET_ERR("RequestThread: Can't get input buffer, skipping request:"
" %s (%d)", strerror(-res), res);
@@ -1558,6 +1707,23 @@
mLatestRequestSignal.signal();
}
+ // Return input buffer back to framework
+ if (request.input_buffer != NULL) {
+ Camera3Stream *stream =
+ Camera3Stream::cast(request.input_buffer->stream);
+ res = stream->returnInputBuffer(*(request.input_buffer));
+ // Note: stream may be deallocated at this point, if this buffer was the
+ // last reference to it.
+ if (res != OK) {
+ ALOGE("%s: RequestThread: Can't return input buffer for frame %d to"
+ " its stream:%s (%d)", __FUNCTION__,
+ request.frame_number, strerror(-res), res);
+ // TODO: Report error upstream
+ }
+ }
+
+
+
return true;
}
@@ -1571,7 +1737,7 @@
}
if (request.input_buffer != NULL) {
request.input_buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
- nextRequest->mInputStream->returnBuffer(*(request.input_buffer), 0);
+ nextRequest->mInputStream->returnInputBuffer(*(request.input_buffer));
}
for (size_t i = 0; i < request.num_output_buffers; i++) {
outputBuffers.editItemAt(i).status = CAMERA3_BUFFER_STATUS_ERROR;
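
Both createInputStream() and createZslStream() follow the same sequence: if the device is ACTIVE, pause the request thread and drain in-flight requests; register the new stream while idle; then reconfigure and resume only if captures were running beforehand. A standalone sketch of that pause/modify/reconfigure/resume flow with stand-in types (not the Camera3Device API):

// Sketch: make a configuration change only while idle, restoring activity after.
#include <cstdio>

enum Status { IDLE, ACTIVE, ERROR_STATE };

struct Device {
    Status status;
    bool paused;

    bool waitUntilDrained() { printf("draining in-flight requests\n"); return true; }
    bool configureStreams() { printf("reconfiguring streams\n"); return true; }

    bool addStream() {
        bool wasActive = false;
        if (status == ACTIVE) {
            paused = true;                              // stop issuing new requests
            if (!waitUntilDrained()) { status = ERROR_STATE; return false; }
            wasActive = true;
        }

        printf("registering new stream\n");             // safe: device is idle here

        if (wasActive) {
            if (!configureStreams()) return false;
            paused = false;                             // resume captures
        }
        return true;
    }
};

int main() {
    Device d = { ACTIVE, false };
    return d.addStream() ? 0 : 1;
}
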
diff --git a/services/camera/libcameraservice/Camera3Device.h b/services/camera/libcameraservice/Camera3Device.h
index 6cad08e..7a8c22a 100644
--- a/services/camera/libcameraservice/Camera3Device.h
+++ b/services/camera/libcameraservice/Camera3Device.h
@@ -26,6 +26,7 @@
#include "CameraDeviceBase.h"
#include "camera3/Camera3Stream.h"
#include "camera3/Camera3OutputStream.h"
+#include "camera3/Camera3ZslStream.h"
#include "hardware/camera3.h"
@@ -82,6 +83,15 @@
virtual status_t createStream(sp<ANativeWindow> consumer,
uint32_t width, uint32_t height, int format, size_t size,
int *id);
+ virtual status_t createInputStream(
+ uint32_t width, uint32_t height, int format,
+ int *id);
+ virtual status_t createZslStream(
+ uint32_t width, uint32_t height,
+ int depth,
+ /*out*/
+ int *id,
+ sp<camera3::Camera3ZslStream>* zslStream);
virtual status_t createReprocessStreamFromStream(int outputId, int *id);
virtual status_t getStreamInfo(int id,
@@ -133,14 +143,15 @@
String8 mErrorCause;
// Mapping of stream IDs to stream instances
- typedef KeyedVector<int, sp<camera3::Camera3OutputStream> > StreamSet;
+ typedef KeyedVector<int, sp<camera3::Camera3OutputStreamInterface> >
+ StreamSet;
StreamSet mOutputStreams;
sp<camera3::Camera3Stream> mInputStream;
int mNextStreamId;
// Need to hold on to stream references until configure completes.
- Vector<sp<camera3::Camera3Stream> > mDeletedStreams;
+ Vector<sp<camera3::Camera3StreamInterface> > mDeletedStreams;
/**** End scope for mLock ****/
@@ -148,7 +159,8 @@
public:
CameraMetadata mSettings;
sp<camera3::Camera3Stream> mInputStream;
- Vector<sp<camera3::Camera3Stream> > mOutputStreams;
+ Vector<sp<camera3::Camera3OutputStreamInterface> >
+ mOutputStreams;
};
typedef List<sp<CaptureRequest> > RequestList;
diff --git a/services/camera/libcameraservice/camera2/CaptureSequencer.cpp b/services/camera/libcameraservice/camera2/CaptureSequencer.cpp
index ee03329..266e516 100644
--- a/services/camera/libcameraservice/camera2/CaptureSequencer.cpp
+++ b/services/camera/libcameraservice/camera2/CaptureSequencer.cpp
@@ -27,6 +27,7 @@
#include "../Camera2Device.h"
#include "../Camera2Client.h"
#include "Parameters.h"
+#include "ZslProcessorInterface.h"
namespace android {
namespace camera2 {
@@ -54,7 +55,7 @@
ALOGV("%s: Exit", __FUNCTION__);
}
-void CaptureSequencer::setZslProcessor(wp<ZslProcessor> processor) {
+void CaptureSequencer::setZslProcessor(wp<ZslProcessorInterface> processor) {
Mutex::Autolock l(mInputMutex);
mZslProcessor = processor;
}
@@ -265,8 +266,10 @@
res = INVALID_OPERATION;
}
}
- sp<ZslProcessor> processor = mZslProcessor.promote();
+ sp<ZslProcessorInterface> processor = mZslProcessor.promote();
if (processor != 0) {
+ ALOGV("%s: Memory optimization, clearing ZSL queue",
+ __FUNCTION__);
processor->clearZslQueue();
}
@@ -324,7 +327,7 @@
sp<Camera2Client> &client) {
ALOGV("%s", __FUNCTION__);
status_t res;
- sp<ZslProcessor> processor = mZslProcessor.promote();
+ sp<ZslProcessorInterface> processor = mZslProcessor.promote();
if (processor == 0) {
ALOGE("%s: No ZSL queue to use!", __FUNCTION__);
return DONE;
diff --git a/services/camera/libcameraservice/camera2/CaptureSequencer.h b/services/camera/libcameraservice/camera2/CaptureSequencer.h
index 7db8007..76750aa 100644
--- a/services/camera/libcameraservice/camera2/CaptureSequencer.h
+++ b/services/camera/libcameraservice/camera2/CaptureSequencer.h
@@ -33,7 +33,7 @@
namespace camera2 {
-class ZslProcessor;
+class ZslProcessorInterface;
class BurstCapture;
/**
@@ -48,7 +48,7 @@
~CaptureSequencer();
// Get reference to the ZslProcessor, which holds the ZSL buffers and frames
- void setZslProcessor(wp<ZslProcessor> processor);
+ void setZslProcessor(wp<ZslProcessorInterface> processor);
// Begin still image capture
status_t startCapture(int msgType);
@@ -105,7 +105,7 @@
static const int kMaxTimeoutsForCaptureEnd = 40; // 4 sec
wp<Camera2Client> mClient;
- wp<ZslProcessor> mZslProcessor;
+ wp<ZslProcessorInterface> mZslProcessor;
sp<BurstCapture> mBurstCapture;
enum CaptureState {
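
CaptureSequencer holds only a weak reference to the ZSL processor and promotes it before each use, so the sequencer never extends the processor's lifetime on its own. A stand-in sketch of that promote-before-use pattern, with std::weak_ptr/std::shared_ptr standing in for android::wp/sp and invented type names:

// Sketch: promote a weak reference, bail out gracefully if the target is gone.
#include <memory>
#include <cstdio>

struct ZslProcessorLike {
    virtual ~ZslProcessorLike() {}
    virtual void clearZslQueue() = 0;
};

struct FakeProcessor : ZslProcessorLike {
    void clearZslQueue() { printf("clearing ZSL queue\n"); }
};

static void onCaptureDone(const std::weak_ptr<ZslProcessorLike> &weakProc) {
    std::shared_ptr<ZslProcessorLike> proc = weakProc.lock();   // ~ wp<>::promote()
    if (proc) {
        proc->clearZslQueue();
    } else {
        printf("processor already gone, nothing to clear\n");
    }
}

int main() {
    std::shared_ptr<ZslProcessorLike> proc(new FakeProcessor());
    std::weak_ptr<ZslProcessorLike> weakProc = proc;   // what the sequencer stores

    onCaptureDone(weakProc);    // promotion succeeds
    proc.reset();               // last strong reference dropped elsewhere
    onCaptureDone(weakProc);    // promotion fails, handled cleanly
    return 0;
}
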
diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.cpp b/services/camera/libcameraservice/camera2/ZslProcessor.cpp
index 2c12fb0..94059cd 100644
--- a/services/camera/libcameraservice/camera2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/camera2/ZslProcessor.cpp
@@ -248,11 +248,6 @@
return mZslStreamId;
}
-int ZslProcessor::getReprocessStreamId() const {
- Mutex::Autolock l(mInputMutex);
- return mZslReprocessStreamId;
-}
-
status_t ZslProcessor::pushToReprocess(int32_t requestId) {
ALOGV("%s: Send in reprocess request with id %d",
__FUNCTION__, requestId);
diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.h b/services/camera/libcameraservice/camera2/ZslProcessor.h
index ee3bcd6..27b597e 100644
--- a/services/camera/libcameraservice/camera2/ZslProcessor.h
+++ b/services/camera/libcameraservice/camera2/ZslProcessor.h
@@ -28,6 +28,7 @@
#include "camera/CameraMetadata.h"
#include "Camera2Heap.h"
#include "../CameraDeviceBase.h"
+#include "ZslProcessorInterface.h"
namespace android {
@@ -44,7 +45,8 @@
virtual public Thread,
virtual public BufferItemConsumer::FrameAvailableListener,
virtual public FrameProcessor::FilteredListener,
- virtual public CameraDeviceBase::BufferReleasedListener {
+ virtual public CameraDeviceBase::BufferReleasedListener,
+ public ZslProcessorInterface {
public:
ZslProcessor(sp<Camera2Client> client, wp<CaptureSequencer> sequencer);
~ZslProcessor();
@@ -56,10 +58,15 @@
virtual void onBufferReleased(buffer_handle_t *handle);
+ /**
+ ****************************************
+ * ZslProcessorInterface implementation *
+ ****************************************
+ */
+
status_t updateStream(const Parameters &params);
status_t deleteStream();
int getStreamId() const;
- int getReprocessStreamId() const;
status_t pushToReprocess(int32_t requestId);
status_t clearZslQueue();
diff --git a/services/camera/libcameraservice/camera2/ZslProcessor3.cpp b/services/camera/libcameraservice/camera2/ZslProcessor3.cpp
new file mode 100644
index 0000000..88bcefb
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/ZslProcessor3.cpp
@@ -0,0 +1,442 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera2-ZslProcessor3"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+//#define LOG_NNDEBUG 0
+
+#ifdef LOG_NNDEBUG
+#define ALOGVV(...) ALOGV(__VA_ARGS__)
+#else
+#define ALOGVV(...) ((void)0)
+#endif
+
+#include <utils/Log.h>
+#include <utils/Trace.h>
+
+#include "ZslProcessor3.h"
+#include <gui/Surface.h>
+#include "../CameraDeviceBase.h"
+#include "../Camera3Device.h"
+#include "../Camera2Client.h"
+
+
+namespace android {
+namespace camera2 {
+
+ZslProcessor3::ZslProcessor3(
+ sp<Camera2Client> client,
+ wp<CaptureSequencer> sequencer):
+ Thread(false),
+ mState(RUNNING),
+ mClient(client),
+ mSequencer(sequencer),
+ mId(client->getCameraId()),
+ mZslStreamId(NO_STREAM),
+ mFrameListHead(0),
+ mZslQueueHead(0),
+ mZslQueueTail(0) {
+ mZslQueue.insertAt(0, kZslBufferDepth);
+ mFrameList.insertAt(0, kFrameListDepth);
+ sp<CaptureSequencer> captureSequencer = mSequencer.promote();
+ if (captureSequencer != 0) captureSequencer->setZslProcessor(this);
+}
+
+ZslProcessor3::~ZslProcessor3() {
+ ALOGV("%s: Exit", __FUNCTION__);
+ deleteStream();
+}
+
+void ZslProcessor3::onFrameAvailable(int32_t /*frameId*/,
+ const CameraMetadata &frame) {
+ Mutex::Autolock l(mInputMutex);
+ camera_metadata_ro_entry_t entry;
+ entry = frame.find(ANDROID_SENSOR_TIMESTAMP);
+ nsecs_t timestamp = entry.data.i64[0];
+ (void)timestamp;
+ ALOGVV("Got preview metadata for timestamp %lld", timestamp);
+
+ if (mState != RUNNING) return;
+
+ mFrameList.editItemAt(mFrameListHead) = frame;
+ mFrameListHead = (mFrameListHead + 1) % kFrameListDepth;
+}
+
+status_t ZslProcessor3::updateStream(const Parameters &params) {
+ ATRACE_CALL();
+ ALOGV("%s: Configuring ZSL streams", __FUNCTION__);
+ status_t res;
+
+ Mutex::Autolock l(mInputMutex);
+
+ sp<Camera2Client> client = mClient.promote();
+ if (client == 0) {
+ ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
+ return INVALID_OPERATION;
+ }
+ sp<Camera3Device> device =
+ static_cast<Camera3Device*>(client->getCameraDevice().get());
+ if (device == 0) {
+ ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
+ return INVALID_OPERATION;
+ }
+
+ if (mZslStreamId != NO_STREAM) {
+ // Check if stream parameters have to change
+ uint32_t currentWidth, currentHeight;
+ res = device->getStreamInfo(mZslStreamId,
+ &currentWidth, &currentHeight, 0);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Error querying capture output stream info: "
+ "%s (%d)", __FUNCTION__,
+ client->getCameraId(), strerror(-res), res);
+ return res;
+ }
+ if (currentWidth != (uint32_t)params.fastInfo.arrayWidth ||
+ currentHeight != (uint32_t)params.fastInfo.arrayHeight) {
+ ALOGV("%s: Camera %d: Deleting stream %d since the buffer "
+ "dimensions changed",
+ __FUNCTION__, client->getCameraId(), mZslStreamId);
+ res = device->deleteStream(mZslStreamId);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Unable to delete old output stream "
+ "for ZSL: %s (%d)", __FUNCTION__,
+ client->getCameraId(), strerror(-res), res);
+ return res;
+ }
+ mZslStreamId = NO_STREAM;
+ }
+ }
+
+ if (mZslStreamId == NO_STREAM) {
+ // Create stream for HAL production
+ // TODO: Sort out better way to select resolution for ZSL
+
+ // Note that format specified internally in Camera3ZslStream
+ res = device->createZslStream(
+ params.fastInfo.arrayWidth, params.fastInfo.arrayHeight,
+ kZslBufferDepth,
+ &mZslStreamId,
+ &mZslStream);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Can't create ZSL stream: "
+ "%s (%d)", __FUNCTION__, client->getCameraId(),
+ strerror(-res), res);
+ return res;
+ }
+ }
+ client->registerFrameListener(Camera2Client::kPreviewRequestIdStart,
+ Camera2Client::kPreviewRequestIdEnd,
+ this);
+
+ return OK;
+}
+
+status_t ZslProcessor3::deleteStream() {
+ ATRACE_CALL();
+ status_t res;
+
+ Mutex::Autolock l(mInputMutex);
+
+ if (mZslStreamId != NO_STREAM) {
+ sp<Camera2Client> client = mClient.promote();
+ if (client == 0) {
+ ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
+ return INVALID_OPERATION;
+ }
+
+ sp<Camera3Device> device =
+ reinterpret_cast<Camera3Device*>(client->getCameraDevice().get());
+ if (device == 0) {
+ ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
+ return INVALID_OPERATION;
+ }
+
+ res = device->deleteStream(mZslStreamId);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Cannot delete ZSL output stream %d: "
+ "%s (%d)", __FUNCTION__, client->getCameraId(),
+ mZslStreamId, strerror(-res), res);
+ return res;
+ }
+
+ mZslStreamId = NO_STREAM;
+ }
+ return OK;
+}
+
+int ZslProcessor3::getStreamId() const {
+ Mutex::Autolock l(mInputMutex);
+ return mZslStreamId;
+}
+
+status_t ZslProcessor3::pushToReprocess(int32_t requestId) {
+ ALOGV("%s: Send in reprocess request with id %d",
+ __FUNCTION__, requestId);
+ Mutex::Autolock l(mInputMutex);
+ status_t res;
+ sp<Camera2Client> client = mClient.promote();
+
+ if (client == 0) {
+ ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
+ return INVALID_OPERATION;
+ }
+
+ IF_ALOGV() {
+ dumpZslQueue(-1);
+ }
+
+ size_t metadataIdx;
+ nsecs_t candidateTimestamp = getCandidateTimestampLocked(&metadataIdx);
+
+ if (candidateTimestamp == -1) {
+ ALOGE("%s: Could not find good candidate for ZSL reprocessing",
+ __FUNCTION__);
+ return NOT_ENOUGH_DATA;
+ }
+
+ res = mZslStream->enqueueInputBufferByTimestamp(candidateTimestamp,
+ /*actualTimestamp*/NULL);
+
+ if (res == mZslStream->NO_BUFFER_AVAILABLE) {
+ ALOGV("%s: No ZSL buffers yet", __FUNCTION__);
+ return NOT_ENOUGH_DATA;
+ } else if (res != OK) {
+ ALOGE("%s: Unable to push buffer for reprocessing: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+
+ {
+ CameraMetadata request = mFrameList[metadataIdx];
+
+ // Verify that the frame is reasonable for reprocessing
+
+ camera_metadata_entry_t entry;
+ entry = request.find(ANDROID_CONTROL_AE_STATE);
+ if (entry.count == 0) {
+ ALOGE("%s: ZSL queue frame has no AE state field!",
+ __FUNCTION__);
+ return BAD_VALUE;
+ }
+ if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED &&
+ entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) {
+ ALOGV("%s: ZSL queue frame AE state is %d, need full capture",
+ __FUNCTION__, entry.data.u8[0]);
+ return NOT_ENOUGH_DATA;
+ }
+
+ uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS;
+ res = request.update(ANDROID_REQUEST_TYPE,
+ &requestType, 1);
+ uint8_t inputStreams[1] =
+ { static_cast<uint8_t>(mZslStreamId) };
+ if (res == OK) request.update(ANDROID_REQUEST_INPUT_STREAMS,
+ inputStreams, 1);
+ // TODO: Shouldn't we also update the latest preview frame?
+ uint8_t outputStreams[1] =
+ { static_cast<uint8_t>(client->getCaptureStreamId()) };
+ if (res == OK) request.update(ANDROID_REQUEST_OUTPUT_STREAMS,
+ outputStreams, 1);
+ res = request.update(ANDROID_REQUEST_ID,
+ &requestId, 1);
+
+ if (res != OK ) {
+ ALOGE("%s: Unable to update frame to a reprocess request",
+ __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+
+ res = client->stopStream();
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Unable to stop preview for ZSL capture: "
+ "%s (%d)",
+ __FUNCTION__, client->getCameraId(), strerror(-res), res);
+ return INVALID_OPERATION;
+ }
+
+ // Update JPEG settings
+ {
+ SharedParameters::Lock l(client->getParameters());
+ res = l.mParameters.updateRequestJpeg(&request);
+ if (res != OK) {
+ ALOGE("%s: Camera %d: Unable to update JPEG entries of ZSL "
+ "capture request: %s (%d)", __FUNCTION__,
+ client->getCameraId(),
+ strerror(-res), res);
+ return res;
+ }
+ }
+
+ mLatestCapturedRequest = request;
+ res = client->getCameraDevice()->capture(request);
+ if (res != OK ) {
+ ALOGE("%s: Unable to send ZSL reprocess request to capture: %s"
+ " (%d)", __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+
+ mState = LOCKED;
+ }
+
+ return OK;
+}
+
+status_t ZslProcessor3::clearZslQueue() {
+ Mutex::Autolock l(mInputMutex);
+ // If in middle of capture, can't clear out queue
+ if (mState == LOCKED) return OK;
+
+ return clearZslQueueLocked();
+}
+
+status_t ZslProcessor3::clearZslQueueLocked() {
+ return mZslStream->clearInputRingBuffer();
+}
+
+void ZslProcessor3::dump(int fd, const Vector<String16>& /*args*/) const {
+ Mutex::Autolock l(mInputMutex);
+ if (!mLatestCapturedRequest.isEmpty()) {
+ String8 result(" Latest ZSL capture request:\n");
+ write(fd, result.string(), result.size());
+ mLatestCapturedRequest.dump(fd, 2, 6);
+ } else {
+ String8 result(" Latest ZSL capture request: none yet\n");
+ write(fd, result.string(), result.size());
+ }
+ dumpZslQueue(fd);
+}
+
+bool ZslProcessor3::threadLoop() {
+ // TODO: remove dependency on thread
+ return true;
+}
+
+void ZslProcessor3::dumpZslQueue(int fd) const {
+ String8 header("ZSL queue contents:");
+ String8 indent(" ");
+ ALOGV("%s", header.string());
+ if (fd != -1) {
+ header = indent + header + "\n";
+ write(fd, header.string(), header.size());
+ }
+ for (size_t i = 0; i < mZslQueue.size(); i++) {
+ const ZslPair &queueEntry = mZslQueue[i];
+ nsecs_t bufferTimestamp = queueEntry.buffer.mTimestamp;
+ camera_metadata_ro_entry_t entry;
+ nsecs_t frameTimestamp = 0;
+ int frameAeState = -1;
+ if (!queueEntry.frame.isEmpty()) {
+ entry = queueEntry.frame.find(ANDROID_SENSOR_TIMESTAMP);
+ if (entry.count > 0) frameTimestamp = entry.data.i64[0];
+ entry = queueEntry.frame.find(ANDROID_CONTROL_AE_STATE);
+ if (entry.count > 0) frameAeState = entry.data.u8[0];
+ }
+ String8 result =
+ String8::format(" %d: b: %lld\tf: %lld, AE state: %d", i,
+ bufferTimestamp, frameTimestamp, frameAeState);
+ ALOGV("%s", result.string());
+ if (fd != -1) {
+ result = indent + result + "\n";
+ write(fd, result.string(), result.size());
+ }
+
+ }
+}
+
+nsecs_t ZslProcessor3::getCandidateTimestampLocked(size_t* metadataIdx) const {
+ /**
+ * Find the smallest timestamp we know about so far
+ * - ensure that aeState is either converged or locked
+ */
+
+ size_t idx = 0;
+ nsecs_t minTimestamp = -1;
+ for (size_t j = 0; j < mFrameList.size(); j++) {
+ const CameraMetadata &frame = mFrameList[j];
+ if (!frame.isEmpty()) {
+ camera_metadata_ro_entry_t entry;
+ entry = frame.find(ANDROID_SENSOR_TIMESTAMP);
+ if (entry.count == 0) {
+ ALOGE("%s: Can't find timestamp in frame!",
+ __FUNCTION__);
+ continue;
+ }
+ nsecs_t frameTimestamp = entry.data.i64[0];
+ if (minTimestamp > frameTimestamp) {
+
+ entry = frame.find(ANDROID_CONTROL_AE_STATE);
+ if (entry.count == 0) {
+ ALOGW("%s: ZSL queue frame has no AE state field!",
+ __FUNCTION__);
+ continue;
+ }
+ if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED &&
+ entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) {
+ ALOGVV("%s: ZSL queue frame AE state is %d, need "
+ "full capture", __FUNCTION__, entry.data.u8[0]);
+ continue;
+ }
+
+ minTimestamp = frameTimestamp;
+ idx = j;
+ }
+ }
+ }
+
+ if (metadataIdx) {
+ *metadataIdx = idx;
+ }
+
+ return minTimestamp;
+}
+
+void ZslProcessor3::onBufferAcquired(const BufferInfo& /*bufferInfo*/) {
+ // Intentionally left empty
+ // Although theoretically we could use this to get better dump info
+}
+
+void ZslProcessor3::onBufferReleased(const BufferInfo& bufferInfo) {
+ Mutex::Autolock l(mInputMutex);
+
+ // ignore output buffers
+ if (bufferInfo.mOutput) {
+ return;
+ }
+
+ // TODO: Verify that the buffer is in our queue by looking at timestamp
+ // theoretically unnecessary unless we change the following assumptions:
+ // -- only 1 buffer reprocessed at a time (which is the case now)
+
+ // Erase entire ZSL queue since we've now completed the capture and preview
+ // is stopped.
+ //
+ // We need to guarantee that if we do two back-to-back captures,
+ // the second won't use a buffer that's older/the same as the first, which
+ // is theoretically possible if we don't clear out the queue and the
+ // selection criteria is something like 'newest'. Clearing out the queue
+ // on a completed capture ensures we'll only use new data.
+ ALOGV("%s: Memory optimization, clearing ZSL queue",
+ __FUNCTION__);
+ clearZslQueueLocked();
+
+ // Required so we accept more ZSL requests
+ mState = RUNNING;
+}
+
+}; // namespace camera2
+}; // namespace android
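
getCandidateTimestampLocked() above scans the retained frame metadata for the smallest (oldest) sensor timestamp whose AE state is CONVERGED or LOCKED; frames whose auto-exposure is still searching are skipped so the reprocessed ZSL shot has usable exposure. A standalone sketch of that selection policy with stand-in types and AE-state codes (not the camera metadata API):

// Sketch: pick the oldest frame that has acceptable auto-exposure state.
#include <vector>
#include <cstdint>
#include <cstdio>

struct Frame {
    int64_t timestampNs;
    int aeState;        // stand-in codes: 0 = searching, 1 = converged, 2 = locked
};

static int64_t candidateTimestamp(const std::vector<Frame> &frames, size_t *metadataIdx) {
    int64_t best = -1;
    for (size_t i = 0; i < frames.size(); ++i) {
        const Frame &f = frames[i];
        if (f.aeState != 1 && f.aeState != 2) continue;     // need converged/locked AE
        if (best == -1 || f.timestampNs < best) {
            best = f.timestampNs;
            if (metadataIdx) *metadataIdx = i;
        }
    }
    return best;    // -1 means no usable candidate
}

int main() {
    std::vector<Frame> frames;
    Frame a = { 300, 0 };   // AE still searching: skipped
    Frame b = { 200, 1 };   // oldest usable frame
    Frame c = { 250, 2 };
    frames.push_back(a); frames.push_back(b); frames.push_back(c);

    size_t idx = 0;
    long long ts = (long long)candidateTimestamp(frames, &idx);
    printf("candidate ts=%lld at index %zu\n", ts, idx);
    return 0;
}
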
diff --git a/services/camera/libcameraservice/camera2/ZslProcessor3.h b/services/camera/libcameraservice/camera2/ZslProcessor3.h
new file mode 100644
index 0000000..cb98b99
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/ZslProcessor3.h
@@ -0,0 +1,137 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSOR3_H
+#define ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSOR3_H
+
+#include <utils/Thread.h>
+#include <utils/String16.h>
+#include <utils/Vector.h>
+#include <utils/Mutex.h>
+#include <utils/Condition.h>
+#include <gui/BufferItemConsumer.h>
+#include "Parameters.h"
+#include "FrameProcessor.h"
+#include "camera/CameraMetadata.h"
+#include "Camera2Heap.h"
+#include "../CameraDeviceBase.h"
+#include "ZslProcessorInterface.h"
+#include "../camera3/Camera3ZslStream.h"
+
+namespace android {
+
+class Camera2Client;
+
+namespace camera2 {
+
+class CaptureSequencer;
+
+/***
+ * ZSL queue processing
+ */
+class ZslProcessor3 :
+ public ZslProcessorInterface,
+ public camera3::Camera3StreamBufferListener,
+ virtual public Thread,
+ virtual public FrameProcessor::FilteredListener {
+ public:
+ ZslProcessor3(sp<Camera2Client> client, wp<CaptureSequencer> sequencer);
+ ~ZslProcessor3();
+
+ // From FrameProcessor
+ virtual void onFrameAvailable(int32_t frameId, const CameraMetadata &frame);
+
+ /**
+ ****************************************
+ * ZslProcessorInterface implementation *
+ ****************************************
+ */
+
+ virtual status_t updateStream(const Parameters &params);
+ virtual status_t deleteStream();
+ virtual int getStreamId() const;
+
+ virtual status_t pushToReprocess(int32_t requestId);
+ virtual status_t clearZslQueue();
+
+ void dump(int fd, const Vector<String16>& args) const;
+
+ protected:
+ /**
+ **********************************************
+ * Camera3StreamBufferListener implementation *
+ **********************************************
+ */
+ typedef camera3::Camera3StreamBufferListener::BufferInfo BufferInfo;
+ // Buffer was acquired by the HAL
+ virtual void onBufferAcquired(const BufferInfo& bufferInfo);
+ // Buffer was released by the HAL
+ virtual void onBufferReleased(const BufferInfo& bufferInfo);
+
+ private:
+ static const nsecs_t kWaitDuration = 10000000; // 10 ms
+
+ enum {
+ RUNNING,
+ LOCKED
+ } mState;
+
+ wp<Camera2Client> mClient;
+ wp<CaptureSequencer> mSequencer;
+
+ const int mId;
+
+ mutable Mutex mInputMutex;
+
+ enum {
+ NO_STREAM = -1
+ };
+
+ int mZslStreamId;
+ sp<camera3::Camera3ZslStream> mZslStream;
+
+ struct ZslPair {
+ BufferItemConsumer::BufferItem buffer;
+ CameraMetadata frame;
+ };
+
+ static const size_t kZslBufferDepth = 4;
+ static const size_t kFrameListDepth = kZslBufferDepth * 2;
+ Vector<CameraMetadata> mFrameList;
+ size_t mFrameListHead;
+
+ ZslPair mNextPair;
+
+ Vector<ZslPair> mZslQueue;
+ size_t mZslQueueHead;
+ size_t mZslQueueTail;
+
+ CameraMetadata mLatestCapturedRequest;
+
+ virtual bool threadLoop();
+
+ status_t clearZslQueueLocked();
+
+ void dumpZslQueue(int id) const;
+
+ nsecs_t getCandidateTimestampLocked(size_t* metadataIdx) const;
+};
+
+
+}; //namespace camera2
+}; //namespace android
+
+#endif
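
The frame list and ZSL queue declared above are fixed-depth ring buffers: the vectors are preallocated with insertAt(0, depth) and a head index wraps with modulo arithmetic, silently overwriting the oldest entry. A standalone sketch of that ring pattern with a stand-in Metadata type:

// Sketch: preallocated vector used as a ring; the head wraps and overwrites.
#include <vector>
#include <string>
#include <cstdio>

typedef std::string Metadata;   // stand-in for CameraMetadata

struct FrameRing {
    std::vector<Metadata> items;
    size_t head;

    explicit FrameRing(size_t depth) : items(depth), head(0) {}

    void push(const Metadata &m) {
        items[head] = m;                    // overwrite the oldest slot
        head = (head + 1) % items.size();   // same wrap as mFrameListHead above
    }
};

int main() {
    FrameRing ring(3);
    const char *frames[] = { "f1", "f2", "f3", "f4" };
    for (size_t i = 0; i < 4; ++i) {
        ring.push(frames[i]);
    }
    for (size_t i = 0; i < ring.items.size(); ++i) {
        printf("slot %zu: %s\n", i, ring.items[i].c_str());   // "f4" replaced "f1"
    }
    return 0;
}
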
diff --git a/services/camera/libcameraservice/camera2/ZslProcessorInterface.h b/services/camera/libcameraservice/camera2/ZslProcessorInterface.h
new file mode 100644
index 0000000..183c0c2
--- /dev/null
+++ b/services/camera/libcameraservice/camera2/ZslProcessorInterface.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSORINTERFACE_H
+#define ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSORINTERFACE_H
+
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+
+namespace android {
+namespace camera2 {
+
+class Parameters;
+
+class ZslProcessorInterface : virtual public RefBase {
+public:
+
+ // Get ID for use with android.request.outputStreams / inputStreams
+ virtual int getStreamId() const = 0;
+
+ // Update the streams by recreating them if the size/format has changed
+ virtual status_t updateStream(const Parameters& params) = 0;
+
+ // Delete the underlying CameraDevice streams
+ virtual status_t deleteStream() = 0;
+
+ /**
+ * Submits a ZSL capture request (id = requestId)
+ *
+ * An appropriate ZSL buffer is selected by the closest timestamp,
+ * then we push that buffer to be reprocessed by the HAL.
+ * A capture request is created and submitted on behalf of the client.
+ */
+ virtual status_t pushToReprocess(int32_t requestId) = 0;
+
+ // Flush the ZSL buffer queue, freeing up all the buffers
+ virtual status_t clearZslQueue() = 0;
+
+ // (Debugging only) Dump the current state to the specified file descriptor
+ virtual void dump(int fd, const Vector<String16>& args) const = 0;
+};
+
+}; //namespace camera2
+}; //namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/camera3/Camera3InputStream.cpp b/services/camera/libcameraservice/camera3/Camera3InputStream.cpp
index 8a48ee5..c7dd12a 100644
--- a/services/camera/libcameraservice/camera3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/camera3/Camera3InputStream.cpp
@@ -18,6 +18,9 @@
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
+// This is needed for stdint.h to define INT64_MAX in C++
+#define __STDC_LIMIT_MACROS
+
#include <utils/Log.h>
#include <utils/Trace.h>
#include "Camera3InputStream.h"
@@ -28,38 +31,262 @@
Camera3InputStream::Camera3InputStream(int id,
uint32_t width, uint32_t height, int format) :
- Camera3Stream(id, CAMERA3_STREAM_INPUT, width, height, 0, format) {
+ Camera3Stream(id, CAMERA3_STREAM_INPUT, width, height, 0, format),
+ mTotalBufferCount(0),
+ mDequeuedBufferCount(0),
+ mFrameCount(0),
+ mLastTimestamp(0) {
+ mCombinedFence = new Fence();
+
+ if (format == HAL_PIXEL_FORMAT_BLOB) {
+ ALOGE("%s: Bad format, BLOB not supported", __FUNCTION__);
+ mState = STATE_ERROR;
+ }
}
-status_t Camera3InputStream::getBufferLocked(camera3_stream_buffer *buffer) {
- (void) buffer;
- ALOGE("%s: Not implemented", __FUNCTION__);
- return INVALID_OPERATION;
+Camera3InputStream::~Camera3InputStream() {
+ disconnectLocked();
}
-status_t Camera3InputStream::returnBufferLocked(
- const camera3_stream_buffer &buffer,
- nsecs_t timestamp) {
- (void) timestamp;
- (void) buffer;
- ALOGE("%s: Not implemented", __FUNCTION__);
- return INVALID_OPERATION;
+status_t Camera3InputStream::getInputBufferLocked(
+ camera3_stream_buffer *buffer) {
+ ATRACE_CALL();
+ status_t res;
+
+ // FIXME: will not work in (re-)registration
+ if (mState == STATE_IN_CONFIG || mState == STATE_IN_RECONFIG) {
+ ALOGE("%s: Stream %d: Buffer registration for input streams"
+ " not implemented (state %d)",
+ __FUNCTION__, mId, mState);
+ return INVALID_OPERATION;
+ }
+
+ // Allow acquire during IN_[RE]CONFIG for registration
+ if (mState != STATE_CONFIGURED &&
+ mState != STATE_IN_CONFIG && mState != STATE_IN_RECONFIG) {
+ ALOGE("%s: Stream %d: Can't get buffers in unconfigured state %d",
+ __FUNCTION__, mId, mState);
+ return INVALID_OPERATION;
+ }
+
+ // Only limit acquire amount when fully configured
+ if (mState == STATE_CONFIGURED &&
+ mDequeuedBufferCount == camera3_stream::max_buffers) {
+ ALOGE("%s: Stream %d: Already acquired maximum number of simultaneous"
+ " buffers (%d)", __FUNCTION__, mId,
+ camera3_stream::max_buffers);
+ return INVALID_OPERATION;
+ }
+
+ ANativeWindowBuffer* anb;
+ int fenceFd;
+
+ assert(mConsumer != 0);
+
+ BufferItem bufferItem;
+ res = mConsumer->acquireBuffer(&bufferItem, /*waitForFence*/false);
+
+ if (res != OK) {
+ ALOGE("%s: Stream %d: Can't acquire next output buffer: %s (%d)",
+ __FUNCTION__, mId, strerror(-res), res);
+ return res;
+ }
+
+ anb = bufferItem.mGraphicBuffer->getNativeBuffer();
+ assert(anb != NULL);
+ fenceFd = bufferItem.mFence->dup();
+ /**
+ * FenceFD now owned by HAL except in case of error,
+ * in which case we reassign it to acquire_fence
+ */
+
+ // Handing out a raw pointer to this object. Increment internal refcount.
+ incStrong(this);
+ buffer->stream = this;
+ buffer->buffer = &(anb->handle);
+ buffer->acquire_fence = fenceFd;
+ buffer->release_fence = -1;
+ buffer->status = CAMERA3_BUFFER_STATUS_OK;
+
+ mDequeuedBufferCount++;
+
+ mBuffersInFlight.push_back(bufferItem);
+
+ return OK;
+}
+
+status_t Camera3InputStream::returnInputBufferLocked(
+ const camera3_stream_buffer &buffer) {
+ ATRACE_CALL();
+ status_t res;
+
+ // returnBuffer may be called from a raw pointer, not a sp<>, and we'll be
+ // decrementing the internal refcount next. In case this is the last ref, we
+ // might get destructed on the decStrong(), so keep an sp around until the
+ // end of the call - otherwise have to sprinkle the decStrong on all exit
+ // points.
+ sp<Camera3InputStream> keepAlive(this);
+ decStrong(this);
+
+ // Allow buffers to be returned in the error state, to allow for disconnect
+ // and in the in-config states for registration
+ if (mState == STATE_CONSTRUCTED) {
+ ALOGE("%s: Stream %d: Can't return buffers in unconfigured state %d",
+ __FUNCTION__, mId, mState);
+ return INVALID_OPERATION;
+ }
+ if (mDequeuedBufferCount == 0) {
+ ALOGE("%s: Stream %d: No buffers outstanding to return", __FUNCTION__,
+ mId);
+ return INVALID_OPERATION;
+ }
+
+ bool bufferFound = false;
+ BufferItem bufferItem;
+ {
+ // Find the buffer we are returning
+ Vector<BufferItem>::iterator it, end;
+ for (it = mBuffersInFlight.begin(), end = mBuffersInFlight.end();
+ it != end;
+ ++it) {
+
+ const BufferItem& tmp = *it;
+ ANativeWindowBuffer *anb = tmp.mGraphicBuffer->getNativeBuffer();
+ if (anb != NULL && &(anb->handle) == buffer.buffer) {
+ bufferFound = true;
+ bufferItem = tmp;
+ mBuffersInFlight.erase(it);
+ mDequeuedBufferCount--;
+ }
+ }
+ }
+ if (!bufferFound) {
+ ALOGE("%s: Stream %d: Can't return buffer that wasn't sent to HAL",
+ __FUNCTION__, mId);
+ return INVALID_OPERATION;
+ }
+
+ if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) {
+ if (buffer.release_fence != -1) {
+ ALOGE("%s: Stream %d: HAL should not set release_fence(%d) when "
+ "there is an error", __FUNCTION__, mId, buffer.release_fence);
+ close(buffer.release_fence);
+ }
+
+ /**
+ * Reassign release fence as the acquire fence in case of error
+ */
+ const_cast<camera3_stream_buffer*>(&buffer)->release_fence =
+ buffer.acquire_fence;
+ }
+
+ /**
+ * Unconditionally return buffer to the buffer queue.
+ * - Fwk takes over the release_fence ownership
+ */
+ sp<Fence> releaseFence = new Fence(buffer.release_fence);
+ res = mConsumer->releaseBuffer(bufferItem, releaseFence);
+ if (res != OK) {
+ ALOGE("%s: Stream %d: Error releasing buffer back to buffer queue:"
+ " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
+ return res;
+ }
+
+ mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence);
+
+ mBufferReturnedSignal.signal();
+
+ return OK;
+
}
bool Camera3InputStream::hasOutstandingBuffersLocked() const {
- ALOGE("%s: Not implemented", __FUNCTION__);
+ nsecs_t signalTime = mCombinedFence->getSignalTime();
+ ALOGV("%s: Stream %d: Has %d outstanding buffers,"
+ " buffer signal time is %lld",
+ __FUNCTION__, mId, mDequeuedBufferCount, signalTime);
+ if (mDequeuedBufferCount > 0 || signalTime == INT64_MAX) {
+ return true;
+ }
return false;
}
status_t Camera3InputStream::waitUntilIdle(nsecs_t timeout) {
- (void) timeout;
- ALOGE("%s: Not implemented", __FUNCTION__);
- return INVALID_OPERATION;
+ status_t res;
+ {
+ Mutex::Autolock l(mLock);
+ while (mDequeuedBufferCount > 0) {
+ if (timeout != TIMEOUT_NEVER) {
+ nsecs_t startTime = systemTime();
+ res = mBufferReturnedSignal.waitRelative(mLock, timeout);
+ if (res == TIMED_OUT) {
+ return res;
+ } else if (res != OK) {
+ ALOGE("%s: Error waiting for outstanding buffers: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ nsecs_t deltaTime = systemTime() - startTime;
+ if (timeout <= deltaTime) {
+ timeout = 0;
+ } else {
+ timeout -= deltaTime;
+ }
+ } else {
+ res = mBufferReturnedSignal.wait(mLock);
+ if (res != OK) {
+ ALOGE("%s: Error waiting for outstanding buffers: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ }
+ }
+ }
+
+ // No lock
+
+ unsigned int timeoutMs;
+ if (timeout == TIMEOUT_NEVER) {
+ timeoutMs = Fence::TIMEOUT_NEVER;
+ } else if (timeout == 0) {
+ timeoutMs = 0;
+ } else {
+ // Round up to wait at least 1 ms
+ timeoutMs = (timeout + 999999) / 1000000;
+ }
+
+ return mCombinedFence->wait(timeoutMs);
+}
+
+size_t Camera3InputStream::getBufferCountLocked() {
+ return mTotalBufferCount;
}
status_t Camera3InputStream::disconnectLocked() {
- ALOGE("%s: Not implemented", __FUNCTION__);
- return INVALID_OPERATION;
+ switch (mState) {
+ case STATE_IN_RECONFIG:
+ case STATE_CONFIGURED:
+ // OK
+ break;
+ default:
+ // No connection, nothing to do
+ return OK;
+ }
+
+ if (mDequeuedBufferCount > 0) {
+ ALOGE("%s: Can't disconnect with %d buffers still acquired!",
+ __FUNCTION__, mDequeuedBufferCount);
+ return INVALID_OPERATION;
+ }
+
+ assert(mBuffersInFlight.size() == 0);
+
+ /**
+ * no-op since we can't disconnect the producer from the consumer-side
+ */
+
+ mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG : STATE_CONSTRUCTED;
+ return OK;
}
sp<IGraphicBufferProducer> Camera3InputStream::getProducerInterface() const {
@@ -67,9 +294,71 @@
}
void Camera3InputStream::dump(int fd, const Vector<String16> &args) const {
- (void) fd;
(void) args;
- ALOGE("%s: Not implemented", __FUNCTION__);
+ String8 lines;
+ lines.appendFormat(" Stream[%d]: Input\n", mId);
+ lines.appendFormat(" State: %d\n", mState);
+ lines.appendFormat(" Dims: %d x %d, format 0x%x\n",
+ camera3_stream::width, camera3_stream::height,
+ camera3_stream::format);
+ lines.appendFormat(" Max size: %d\n", mMaxSize);
+ lines.appendFormat(" Usage: %d, max HAL buffers: %d\n",
+ camera3_stream::usage, camera3_stream::max_buffers);
+ lines.appendFormat(" Frames produced: %d, last timestamp: %lld ns\n",
+ mFrameCount, mLastTimestamp);
+ lines.appendFormat(" Total buffers: %d, currently acquired: %d\n",
+ mTotalBufferCount, mDequeuedBufferCount);
+ write(fd, lines.string(), lines.size());
+}
+
+status_t Camera3InputStream::configureQueueLocked() {
+ status_t res;
+
+ switch (mState) {
+ case STATE_IN_RECONFIG:
+ res = disconnectLocked();
+ if (res != OK) {
+ return res;
+ }
+ break;
+ case STATE_IN_CONFIG:
+ // OK
+ break;
+ default:
+ ALOGE("%s: Bad state: %d", __FUNCTION__, mState);
+ return INVALID_OPERATION;
+ }
+
+ assert(mMaxSize == 0);
+ assert(camera3_stream::format != HAL_PIXEL_FORMAT_BLOB);
+
+ mTotalBufferCount = BufferQueue::MIN_UNDEQUEUED_BUFFERS +
+ camera3_stream::max_buffers;
+ mDequeuedBufferCount = 0;
+ mFrameCount = 0;
+
+ if (mConsumer.get() == 0) {
+ mConsumer = new BufferItemConsumer(camera3_stream::usage,
+ mTotalBufferCount,
+ /*synchronousMode*/true);
+ mConsumer->setName(String8::format("Camera3-InputStream-%d", mId));
+ }
+
+ res = mConsumer->setDefaultBufferSize(camera3_stream::width,
+ camera3_stream::height);
+ if (res != OK) {
+ ALOGE("%s: Stream %d: Could not set buffer dimensions %dx%d",
+ __FUNCTION__, mId, camera3_stream::width, camera3_stream::height);
+ return res;
+ }
+ res = mConsumer->setDefaultBufferFormat(camera3_stream::format);
+ if (res != OK) {
+ ALOGE("%s: Stream %d: Could not set buffer format %d",
+ __FUNCTION__, mId, camera3_stream::format);
+ return res;
+ }
+
+ return OK;
}
}; // namespace camera3
diff --git a/services/camera/libcameraservice/camera3/Camera3InputStream.h b/services/camera/libcameraservice/camera3/Camera3InputStream.h
index c4b5dd9..fd9f464 100644
--- a/services/camera/libcameraservice/camera3/Camera3InputStream.h
+++ b/services/camera/libcameraservice/camera3/Camera3InputStream.h
@@ -29,6 +29,10 @@
/**
* A class for managing a single stream of input data to the camera device.
+ *
+ * This class serves as a consumer adapter for the HAL, and will consume the
+ * buffers by feeding them into the HAL, as well as releasing the buffers
+ * back to the buffer queue once the HAL is done with them.
*/
class Camera3InputStream : public Camera3Stream {
public:
@@ -36,6 +40,7 @@
* Set up a stream for formats that have fixed size, such as RAW and YUV.
*/
Camera3InputStream(int id, uint32_t width, uint32_t height, int format);
+ ~Camera3InputStream();
virtual status_t waitUntilIdle(nsecs_t timeout);
virtual void dump(int fd, const Vector<String16> &args) const;
@@ -49,18 +54,32 @@
private:
+ typedef BufferItemConsumer::BufferItem BufferItem;
+
sp<BufferItemConsumer> mConsumer;
+ Vector<BufferItem> mBuffersInFlight;
+ size_t mTotalBufferCount;
+ size_t mDequeuedBufferCount;
+ Condition mBufferReturnedSignal;
+ uint32_t mFrameCount;
+ nsecs_t mLastTimestamp;
+
+ // The merged release fence for all returned buffers
+ sp<Fence> mCombinedFence;
/**
* Camera3Stream interface
*/
- virtual status_t getBufferLocked(camera3_stream_buffer *buffer);
- virtual status_t returnBufferLocked(const camera3_stream_buffer &buffer,
- nsecs_t timestamp);
+ virtual status_t getInputBufferLocked(camera3_stream_buffer *buffer);
+ virtual status_t returnInputBufferLocked(
+ const camera3_stream_buffer &buffer);
virtual bool hasOutstandingBuffersLocked() const;
virtual status_t disconnectLocked();
+ virtual status_t configureQueueLocked();
+ virtual size_t getBufferCountLocked();
+
}; // class Camera3InputStream
}; // namespace camera3
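For illustration (not part of the patch): a hedged sketch of how device-level code might drive the input-side entry points declared above on an already configured Camera3InputStream. The helper name, the stream handle, and the elided request submission are assumptions for the example.

    #include "Camera3InputStream.h"

    using namespace android;
    using namespace android::camera3;

    // Hand one input buffer to the HAL and return it afterwards. Assumes the
    // stream has already gone through start/finishConfiguration().
    static status_t cycleOneInputBuffer(const sp<Camera3InputStream>& stream) {
        camera3_stream_buffer halBuffer;
        status_t res = stream->getInputBuffer(&halBuffer);
        if (res != OK) {
            return res;
        }
        // ... attach halBuffer to a reprocess request and wait for the HAL ...
        // Returning the buffer releases it back to the BufferItemConsumer and
        // merges its release fence into the stream's combined fence.
        return stream->returnInputBuffer(halBuffer);
    }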
diff --git a/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp b/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp
index 276b940..ec8cf0d 100644
--- a/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp
@@ -298,7 +298,7 @@
switch (mState) {
case STATE_IN_RECONFIG:
- res = disconnect();
+ res = disconnectLocked();
if (res != OK) {
return res;
}
diff --git a/services/camera/libcameraservice/camera3/Camera3OutputStream.h b/services/camera/libcameraservice/camera3/Camera3OutputStream.h
index d331a94..2464dce 100644
--- a/services/camera/libcameraservice/camera3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/camera3/Camera3OutputStream.h
@@ -21,6 +21,7 @@
#include <gui/Surface.h>
#include "Camera3Stream.h"
+#include "Camera3OutputStreamInterface.h"
namespace android {
@@ -29,7 +30,9 @@
/**
* A class for managing a single stream of output data from the camera device.
*/
-class Camera3OutputStream : public Camera3Stream {
+class Camera3OutputStream :
+ public Camera3Stream,
+ public Camera3OutputStreamInterface {
public:
/**
* Set up a stream for formats that have 2 dimensions, such as RAW and YUV.
diff --git a/services/camera/libcameraservice/camera3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/camera3/Camera3OutputStreamInterface.h
new file mode 100644
index 0000000..aae72cf
--- /dev/null
+++ b/services/camera/libcameraservice/camera3/Camera3OutputStreamInterface.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA3_OUTPUT_STREAM_INTERFACE_H
+#define ANDROID_SERVERS_CAMERA3_OUTPUT_STREAM_INTERFACE_H
+
+#include "Camera3StreamInterface.h"
+
+namespace android {
+
+namespace camera3 {
+
+/**
+ * An interface for managing a single stream of output data from the camera
+ * device.
+ */
+class Camera3OutputStreamInterface : public virtual Camera3StreamInterface {
+ public:
+ /**
+ * Set the transform on the output stream; one of the
+ * HAL_TRANSFORM_* / NATIVE_WINDOW_TRANSFORM_* constants.
+ */
+ virtual status_t setTransform(int transform) = 0;
+};
+
+} // namespace camera3
+
+} // namespace android
+
+#endif
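For illustration (not part of the patch): setTransform() takes the standard window-transform constants, so a caller that wants a 90-degree rotation on an output stream might do something like the sketch below. The helper and the stream handle are assumptions; NATIVE_WINDOW_TRANSFORM_ROT_90 comes from <system/window.h>.

    #include <system/window.h>
    #include "Camera3OutputStreamInterface.h"

    using namespace android;
    using namespace android::camera3;

    // Rotate the stream's output buffers by 90 degrees.
    static status_t rotateOutput(const sp<Camera3OutputStreamInterface>& stream) {
        return stream->setTransform(NATIVE_WINDOW_TRANSFORM_ROT_90);
    }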
diff --git a/services/camera/libcameraservice/camera3/Camera3Stream.cpp b/services/camera/libcameraservice/camera3/Camera3Stream.cpp
index cf3072b..f137227 100644
--- a/services/camera/libcameraservice/camera3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/camera3/Camera3Stream.cpp
@@ -178,14 +178,75 @@
status_t Camera3Stream::getBuffer(camera3_stream_buffer *buffer) {
ATRACE_CALL();
Mutex::Autolock l(mLock);
- return getBufferLocked(buffer);
+
+ status_t res = getBufferLocked(buffer);
+ if (res == OK) {
+ fireBufferListenersLocked(*buffer, /*acquired*/true, /*output*/true);
+ }
+
+ return res;
}
status_t Camera3Stream::returnBuffer(const camera3_stream_buffer &buffer,
nsecs_t timestamp) {
ATRACE_CALL();
Mutex::Autolock l(mLock);
- return returnBufferLocked(buffer, timestamp);
+
+ status_t res = returnBufferLocked(buffer, timestamp);
+ if (res == OK) {
+ fireBufferListenersLocked(buffer, /*acquired*/false, /*output*/true);
+ }
+
+ return res;
+}
+
+status_t Camera3Stream::getInputBuffer(camera3_stream_buffer *buffer) {
+ ATRACE_CALL();
+ Mutex::Autolock l(mLock);
+
+ status_t res = getInputBufferLocked(buffer);
+ if (res == OK) {
+ fireBufferListenersLocked(*buffer, /*acquired*/true, /*output*/false);
+ }
+
+ return res;
+}
+
+status_t Camera3Stream::returnInputBuffer(const camera3_stream_buffer &buffer) {
+ ATRACE_CALL();
+ Mutex::Autolock l(mLock);
+
+ status_t res = returnInputBufferLocked(buffer);
+ if (res == OK) {
+ fireBufferListenersLocked(buffer, /*acquired*/false, /*output*/false);
+ }
+ return res;
+}
+
+void Camera3Stream::fireBufferListenersLocked(
+ const camera3_stream_buffer& /*buffer*/, bool acquired, bool output) {
+ List<wp<Camera3StreamBufferListener> >::iterator it, end;
+
+ // TODO: finish implementing
+
+ Camera3StreamBufferListener::BufferInfo info =
+ Camera3StreamBufferListener::BufferInfo();
+ info.mOutput = output;
+ // TODO: rest of fields
+
+ for (it = mBufferListenerList.begin(), end = mBufferListenerList.end();
+ it != end;
+ ++it) {
+
+ sp<Camera3StreamBufferListener> listener = it->promote();
+ if (listener != 0) {
+ if (acquired) {
+ listener->onBufferAcquired(info);
+ } else {
+ listener->onBufferReleased(info);
+ }
+ }
+ }
}
bool Camera3Stream::hasOutstandingBuffers() const {
@@ -259,6 +320,55 @@
return res;
}
+status_t Camera3Stream::getBufferLocked(camera3_stream_buffer *) {
+ ALOGE("%s: This type of stream does not support output", __FUNCTION__);
+ return INVALID_OPERATION;
+}
+status_t Camera3Stream::returnBufferLocked(const camera3_stream_buffer &,
+ nsecs_t) {
+ ALOGE("%s: This type of stream does not support output", __FUNCTION__);
+ return INVALID_OPERATION;
+}
+status_t Camera3Stream::getInputBufferLocked(camera3_stream_buffer *) {
+ ALOGE("%s: This type of stream does not support input", __FUNCTION__);
+ return INVALID_OPERATION;
+}
+status_t Camera3Stream::returnInputBufferLocked(
+ const camera3_stream_buffer &) {
+ ALOGE("%s: This type of stream does not support input", __FUNCTION__);
+ return INVALID_OPERATION;
+}
+
+void Camera3Stream::addBufferListener(
+ wp<Camera3StreamBufferListener> listener) {
+ Mutex::Autolock l(mLock);
+ mBufferListenerList.push_back(listener);
+}
+
+void Camera3Stream::removeBufferListener(
+ const sp<Camera3StreamBufferListener>& listener) {
+ Mutex::Autolock l(mLock);
+
+ bool erased = false;
+ List<wp<Camera3StreamBufferListener> >::iterator it, end;
+ for (it = mBufferListenerList.begin(), end = mBufferListenerList.end();
+ it != end;
+ ) {
+
+ if (*it == listener) {
+ it = mBufferListenerList.erase(it);
+ erased = true;
+ } else {
+ ++it;
+ }
+ }
+
+ if (!erased) {
+ ALOGW("%s: Could not find listener to remove, already removed",
+ __FUNCTION__);
+ }
+}
+
}; // namespace camera3
}; // namespace android
diff --git a/services/camera/libcameraservice/camera3/Camera3Stream.h b/services/camera/libcameraservice/camera3/Camera3Stream.h
index 2364cfd..d992cfe 100644
--- a/services/camera/libcameraservice/camera3/Camera3Stream.h
+++ b/services/camera/libcameraservice/camera3/Camera3Stream.h
@@ -21,9 +21,13 @@
#include <utils/RefBase.h>
#include <utils/String8.h>
#include <utils/String16.h>
+#include <utils/List.h>
#include "hardware/camera3.h"
+#include "Camera3StreamBufferListener.h"
+#include "Camera3StreamInterface.h"
+
namespace android {
namespace camera3 {
@@ -81,7 +85,8 @@
*/
class Camera3Stream :
protected camera3_stream,
- public LightRefBase<Camera3Stream> {
+ public virtual Camera3StreamInterface,
+ public virtual RefBase {
public:
virtual ~Camera3Stream();
@@ -157,6 +162,25 @@
nsecs_t timestamp);
/**
+ * Fill in the camera3_stream_buffer with the next valid buffer for this
+ * stream, to hand over to the HAL.
+ *
+ * This method may only be called once finishConfiguration has been called.
+ * For bidirectional streams, this method applies to the input-side
+ * buffers.
+ *
+ */
+ status_t getInputBuffer(camera3_stream_buffer *buffer);
+
+ /**
+ * Return a buffer to the stream after use by the HAL.
+ *
+ * This method may only be called for buffers provided by getInputBuffer().
+ * For bidirectional streams, this method applies to the input-side buffers.
+ */
+ status_t returnInputBuffer(const camera3_stream_buffer &buffer);
+
+ /**
* Whether any of the stream's buffers are currently in use by the HAL,
* including buffers that have been returned but not yet had their
* release fence signaled.
@@ -186,6 +210,11 @@
*/
virtual void dump(int fd, const Vector<String16> &args) const = 0;
+ void addBufferListener(
+ wp<Camera3StreamBufferListener> listener);
+ void removeBufferListener(
+ const sp<Camera3StreamBufferListener>& listener);
+
protected:
const int mId;
const String8 mName;
@@ -215,9 +244,12 @@
// cast to camera3_stream*, implementations must increment the
// refcount of the stream manually in getBufferLocked, and decrement it in
// returnBufferLocked.
- virtual status_t getBufferLocked(camera3_stream_buffer *buffer) = 0;
+ virtual status_t getBufferLocked(camera3_stream_buffer *buffer);
virtual status_t returnBufferLocked(const camera3_stream_buffer &buffer,
- nsecs_t timestamp) = 0;
+ nsecs_t timestamp);
+ virtual status_t getInputBufferLocked(camera3_stream_buffer *buffer);
+ virtual status_t returnInputBufferLocked(
+ const camera3_stream_buffer &buffer);
virtual bool hasOutstandingBuffersLocked() const = 0;
virtual status_t disconnectLocked() = 0;
@@ -239,6 +271,10 @@
// Gets all buffers from endpoint and registers them with the HAL.
status_t registerBuffersLocked(camera3_device *hal3Device);
+ void fireBufferListenersLocked(const camera3_stream_buffer& buffer,
+ bool acquired, bool output);
+ List<wp<Camera3StreamBufferListener> > mBufferListenerList;
+
}; // class Camera3Stream
}; // namespace camera3
diff --git a/services/camera/libcameraservice/camera3/Camera3StreamBufferListener.h b/services/camera/libcameraservice/camera3/Camera3StreamBufferListener.h
new file mode 100644
index 0000000..62ea6c0
--- /dev/null
+++ b/services/camera/libcameraservice/camera3/Camera3StreamBufferListener.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA3_STREAMBUFFERLISTENER_H
+#define ANDROID_SERVERS_CAMERA3_STREAMBUFFERLISTENER_H
+
+#include <gui/Surface.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+namespace camera3 {
+
+class Camera3StreamBufferListener : public virtual RefBase {
+public:
+
+ struct BufferInfo {
+ bool mOutput; // if false then input buffer
+ Rect mCrop;
+ uint32_t mTransform;
+ uint32_t mScalingMode;
+ int64_t mTimestamp;
+ uint64_t mFrameNumber;
+ };
+
+ // Buffer was acquired by the HAL
+ virtual void onBufferAcquired(const BufferInfo& bufferInfo) = 0;
+ // Buffer was released by the HAL
+ virtual void onBufferReleased(const BufferInfo& bufferInfo) = 0;
+};
+
+}; //namespace camera3
+}; //namespace android
+
+#endif
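For illustration (not part of the patch): a hedged sketch of a listener that tracks how many buffers the HAL currently holds, intended to be attached via Camera3Stream::addBufferListener(). The class name, counting logic, and log messages are made up for the example; only the two callbacks defined above are assumed.

    #define LOG_TAG "BufferUseCounter"
    #include <utils/Log.h>
    #include <utils/Mutex.h>
    #include "Camera3StreamBufferListener.h"

    namespace android {
    namespace camera3 {

    // Counts buffers currently held by the HAL across acquire/release callbacks.
    class BufferUseCounter : public Camera3StreamBufferListener {
      public:
        BufferUseCounter() : mAcquiredCount(0) {}

        virtual void onBufferAcquired(const BufferInfo& bufferInfo) {
            Mutex::Autolock l(mLock);
            mAcquiredCount++;
            ALOGV("%s buffer acquired, %d now with HAL",
                  bufferInfo.mOutput ? "Output" : "Input", mAcquiredCount);
        }

        virtual void onBufferReleased(const BufferInfo& bufferInfo) {
            Mutex::Autolock l(mLock);
            mAcquiredCount--;
            ALOGV("%s buffer released, %d still with HAL",
                  bufferInfo.mOutput ? "Output" : "Input", mAcquiredCount);
        }

      private:
        Mutex mLock;
        int mAcquiredCount;
    };

    }; // namespace camera3
    }; // namespace android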
diff --git a/services/camera/libcameraservice/camera3/Camera3StreamInterface.h b/services/camera/libcameraservice/camera3/Camera3StreamInterface.h
new file mode 100644
index 0000000..4768536
--- /dev/null
+++ b/services/camera/libcameraservice/camera3/Camera3StreamInterface.h
@@ -0,0 +1,162 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA3_STREAM_INTERFACE_H
+#define ANDROID_SERVERS_CAMERA3_STREAM_INTERFACE_H
+
+#include <utils/RefBase.h>
+#include "Camera3StreamBufferListener.h"
+
+struct camera3_stream_buffer;
+
+namespace android {
+
+namespace camera3 {
+
+/**
+ * An interface for managing a single stream of input and/or output data from
+ * the camera device.
+ */
+class Camera3StreamInterface : public virtual RefBase {
+ public:
+ /**
+ * Get the stream's ID
+ */
+ virtual int getId() const = 0;
+
+ /**
+ * Get the stream's dimensions and format
+ */
+ virtual uint32_t getWidth() const = 0;
+ virtual uint32_t getHeight() const = 0;
+ virtual int getFormat() const = 0;
+
+ /**
+ * Start the stream configuration process. Returns a handle to the stream's
+ * information to be passed into the HAL device's configure_streams call.
+ *
+ * Until finishConfiguration() is called, no other methods on the stream may
+ * be called. The usage and max_buffers fields of camera3_stream may be
+ * modified between start/finishConfiguration, but may not be changed after
+ * that. The priv field of camera3_stream may be modified at any time after
+ * startConfiguration.
+ *
+ * Returns NULL in case of error starting configuration.
+ */
+ virtual camera3_stream* startConfiguration() = 0;
+
+ /**
+ * Check if the stream is mid-configuration (start has been called, but not
+ * finish). Used for lazy completion of configuration.
+ */
+ virtual bool isConfiguring() const = 0;
+
+ /**
+ * Completes the stream configuration process. During this call, the stream
+ * may call the device's register_stream_buffers() method. The stream
+ * information structure returned by startConfiguration() may no longer be
+ * modified after this call, but can still be read until the destruction of
+ * the stream.
+ *
+ * Returns:
+ * OK on a successful configuration
+ * NO_INIT in case of a serious error from the HAL device
+ * NO_MEMORY in case of an error registering buffers
+ * INVALID_OPERATION in case connecting to the consumer failed
+ */
+ virtual status_t finishConfiguration(camera3_device *hal3Device) = 0;
+
+ /**
+ * Fill in the camera3_stream_buffer with the next valid buffer for this
+ * stream, to hand over to the HAL.
+ *
+ * This method may only be called once finishConfiguration has been called.
+ * For bidirectional streams, this method applies to the output-side
+ * buffers.
+ *
+ */
+ virtual status_t getBuffer(camera3_stream_buffer *buffer) = 0;
+
+ /**
+ * Return a buffer to the stream after use by the HAL.
+ *
+ * This method may only be called for buffers provided by getBuffer().
+ * For bidirectional streams, this method applies to the output-side buffers
+ */
+ virtual status_t returnBuffer(const camera3_stream_buffer &buffer,
+ nsecs_t timestamp) = 0;
+
+ /**
+ * Fill in the camera3_stream_buffer with the next valid buffer for this
+ * stream, to hand over to the HAL.
+ *
+ * This method may only be called once finishConfiguration has been called.
+ * For bidirectional streams, this method applies to the input-side
+ * buffers.
+ *
+ */
+ virtual status_t getInputBuffer(camera3_stream_buffer *buffer) = 0;
+
+ /**
+ * Return a buffer to the stream after use by the HAL.
+ *
+ * This method may only be called for buffers provided by getInputBuffer().
+ * For bidirectional streams, this method applies to the input-side buffers.
+ */
+ virtual status_t returnInputBuffer(const camera3_stream_buffer &buffer) = 0;
+
+ /**
+ * Whether any of the stream's buffers are currently in use by the HAL,
+ * including buffers that have been returned but not yet had their
+ * release fence signaled.
+ */
+ virtual bool hasOutstandingBuffers() const = 0;
+
+ enum {
+ TIMEOUT_NEVER = -1
+ };
+ /**
+ * Wait until the HAL is done with all of this stream's buffers, including
+ * signalling all release fences. Returns TIMED_OUT if the timeout is
+ * exceeded, OK on success. Pass in TIMEOUT_NEVER for timeout to indicate
+ * an indefinite wait.
+ */
+ virtual status_t waitUntilIdle(nsecs_t timeout) = 0;
+
+ /**
+ * Disconnect stream from its non-HAL endpoint. After this,
+ * start/finishConfiguration must be called before the stream can be used
+ * again. This cannot be called if the stream has outstanding dequeued
+ * buffers.
+ */
+ virtual status_t disconnect() = 0;
+
+ /**
+ * Debug dump of the stream's state.
+ */
+ virtual void dump(int fd, const Vector<String16> &args) const = 0;
+
+ virtual void addBufferListener(
+ wp<Camera3StreamBufferListener> listener) = 0;
+ virtual void removeBufferListener(
+ const sp<Camera3StreamBufferListener>& listener) = 0;
+};
+
+} // namespace camera3
+
+} // namespace android
+
+#endif
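For illustration (not part of the patch): a hedged sketch of the configuration lifecycle the comments above describe, for a single stream. The surrounding Camera3Device bookkeeping is omitted, the helper name is made up, and the error codes are simplified to early returns.

    #include <utils/Errors.h>
    #include "hardware/camera3.h"
    #include "Camera3StreamInterface.h"

    using namespace android;
    using namespace android::camera3;

    // Configure a single stream against the HAL device.
    static status_t configureOneStream(camera3_device *hal3Device,
                                       const sp<Camera3StreamInterface>& stream) {
        camera3_stream *halStream = stream->startConfiguration();
        if (halStream == NULL) {
            return INVALID_OPERATION;
        }

        // The HAL may adjust the stream's usage and max_buffers during this call.
        camera3_stream_configuration config;
        config.num_streams = 1;
        config.streams = &halStream;
        if (hal3Device->ops->configure_streams(hal3Device, &config) != 0) {
            return UNKNOWN_ERROR;
        }

        // Registers the stream's buffers with the HAL; getBuffer/returnBuffer
        // may only be used after this succeeds.
        return stream->finishConfiguration(hal3Device);
    }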
diff --git a/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp b/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp
index e8a5ca6..0345d5b 100644
--- a/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp
+++ b/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp
@@ -18,70 +18,652 @@
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
+// This is needed for stdint.h to define INT64_MAX in C++
+#define __STDC_LIMIT_MACROS
+
#include <utils/Log.h>
#include <utils/Trace.h>
#include "Camera3ZslStream.h"
+#ifndef container_of
+#define container_of(ptr, type, member) \
+ (type *)((char*)(ptr) - offsetof(type, member))
+#endif
+
+typedef android::RingBufferConsumer::PinnedBufferItem PinnedBufferItem;
+
namespace android {
namespace camera3 {
+namespace {
+struct TimestampFinder : public RingBufferConsumer::RingBufferComparator {
+ typedef RingBufferConsumer::BufferInfo BufferInfo;
+
+ enum {
+ SELECT_I1 = -1,
+ SELECT_I2 = 1,
+ SELECT_NEITHER = 0,
+ };
+
+ TimestampFinder(nsecs_t timestamp) : mTimestamp(timestamp) {}
+ ~TimestampFinder() {}
+
+ template <typename T>
+ static void swap(T& a, T& b) {
+ T tmp = a;
+ a = b;
+ b = tmp;
+ }
+
+ /**
+ * Try to find the best candidate for a ZSL buffer.
+ * Match priority from best to worst:
+ * 1) Timestamps match.
+ * 2) Timestamp is closest to the needle (and lower).
+ * 3) Timestamp is closest to the needle (and higher).
+ *
+ */
+ virtual int compare(const BufferInfo *i1,
+ const BufferInfo *i2) const {
+ // Try to select non-null object first.
+ if (i1 == NULL) {
+ return SELECT_I2;
+ } else if (i2 == NULL) {
+ return SELECT_I1;
+ }
+
+ // Best result: timestamp is identical
+ if (i1->mTimestamp == mTimestamp) {
+ return SELECT_I1;
+ } else if (i2->mTimestamp == mTimestamp) {
+ return SELECT_I2;
+ }
+
+ const BufferInfo* infoPtrs[2] = {
+ i1,
+ i2
+ };
+ int infoSelectors[2] = {
+ SELECT_I1,
+ SELECT_I2
+ };
+
+ // Order i1,i2 so that always i1.timestamp < i2.timestamp
+ if (i1->mTimestamp > i2->mTimestamp) {
+ swap(infoPtrs[0], infoPtrs[1]);
+ swap(infoSelectors[0], infoSelectors[1]);
+ }
+
+ // Second best: closest (lower) timestamp
+ if (infoPtrs[1]->mTimestamp < mTimestamp) {
+ return infoSelectors[1];
+ } else if (infoPtrs[0]->mTimestamp < mTimestamp) {
+ return infoSelectors[0];
+ }
+
+ // Worst: closest (higher) timestamp
+ return infoSelectors[0];
+
+ /**
+ * The above cases should cover all the possibilities,
+ * and we get an 'empty' result only if the ring buffer
+ * was empty itself
+ */
+ }
+
+ const nsecs_t mTimestamp;
+}; // struct TimestampFinder
+} // namespace anonymous
+
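For illustration (not part of the patch): the comparator above prefers an exact timestamp match, then any earlier timestamp (taking the closest one), and only then the closest later timestamp. A standalone restatement of that priority, independent of the RingBufferConsumer types; the timestamps in main() are made up.

    #include <cassert>
    #include <cstddef>
    #include <stdint.h>
    #include <vector>

    typedef int64_t nsecs_t;

    // Pick the timestamp a ZSL lookup would choose for 'needle': exact match
    // first, otherwise the closest earlier frame, otherwise the closest later one.
    static nsecs_t pickCandidate(const std::vector<nsecs_t>& timestamps,
                                 nsecs_t needle) {
        nsecs_t best = timestamps[0];
        for (size_t i = 1; i < timestamps.size(); ++i) {
            nsecs_t cur = timestamps[i];
            if (best == needle) {
                break;                              // exact match already found
            } else if (cur == needle) {
                best = cur;                         // exact match wins outright
            } else if (cur < needle) {
                // Earlier frames beat later ones; among earlier, the newest wins.
                if (best > needle || cur > best) {
                    best = cur;
                }
            } else if (best > needle && cur < best) {
                best = cur;                         // both later: keep the closer one
            }
        }
        return best;
    }

    int main() {
        nsecs_t raw[] = { 100, 200, 300 };
        std::vector<nsecs_t> ts(raw, raw + 3);
        assert(pickCandidate(ts, 200) == 200);  // exact match
        assert(pickCandidate(ts, 290) == 200);  // earlier beats a closer later frame
        assert(pickCandidate(ts, 50) == 100);   // nothing earlier: closest later frame
        return 0;
    }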
Camera3ZslStream::Camera3ZslStream(int id, uint32_t width, uint32_t height,
int depth) :
Camera3Stream(id, CAMERA3_STREAM_BIDIRECTIONAL, width, height, 0,
HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED),
- mDepth(depth) {
+ mDepth(depth),
+ mProducer(new RingBufferConsumer(GRALLOC_USAGE_HW_CAMERA_ZSL,
+ depth)),
+ mConsumer(new Surface(mProducer->getProducerInterface())),
+ //mTransform(0),
+ mTotalBufferCount(0),
+ mDequeuedBufferCount(0),
+ mFrameCount(0),
+ mLastTimestamp(0),
+ mCombinedFence(new Fence()) {
+}
+
+Camera3ZslStream::~Camera3ZslStream() {
+ disconnectLocked();
}
status_t Camera3ZslStream::getBufferLocked(camera3_stream_buffer *buffer) {
- (void) buffer;
- ALOGE("%s: Not implemented", __FUNCTION__);
- return INVALID_OPERATION;
+ // same as output stream code
+ ATRACE_CALL();
+ status_t res;
+
+ // Allow dequeue during IN_[RE]CONFIG for registration
+ if (mState != STATE_CONFIGURED &&
+ mState != STATE_IN_CONFIG && mState != STATE_IN_RECONFIG) {
+ ALOGE("%s: Stream %d: Can't get buffers in unconfigured state %d",
+ __FUNCTION__, mId, mState);
+ return INVALID_OPERATION;
+ }
+
+ // Only limit dequeue amount when fully configured
+ if (mState == STATE_CONFIGURED &&
+ mDequeuedBufferCount == camera3_stream::max_buffers) {
+ ALOGE("%s: Stream %d: Already dequeued maximum number of simultaneous"
+ " buffers (%d)", __FUNCTION__, mId,
+ camera3_stream::max_buffers);
+ return INVALID_OPERATION;
+ }
+
+ ANativeWindowBuffer* anb;
+ int fenceFd;
+
+ res = mConsumer->dequeueBuffer(mConsumer.get(), &anb, &fenceFd);
+ if (res != OK) {
+ ALOGE("%s: Stream %d: Can't dequeue next output buffer: %s (%d)",
+ __FUNCTION__, mId, strerror(-res), res);
+ return res;
+ }
+
+ // Handing out a raw pointer to this object. Increment internal refcount.
+ incStrong(this);
+ buffer->stream = this;
+ buffer->buffer = &(anb->handle);
+ buffer->acquire_fence = fenceFd;
+ buffer->release_fence = -1;
+ buffer->status = CAMERA3_BUFFER_STATUS_OK;
+
+ mDequeuedBufferCount++;
+
+ return OK;
}
status_t Camera3ZslStream::returnBufferLocked(
const camera3_stream_buffer &buffer,
nsecs_t timestamp) {
- (void) buffer;
- (void) timestamp;
- ALOGE("%s: Not implemented", __FUNCTION__);
- return INVALID_OPERATION;
+ // same as output stream code
+ ATRACE_CALL();
+ status_t res;
+
+ // returnBuffer may be called from a raw pointer, not a sp<>, and we'll be
+ // decrementing the internal refcount next. In case this is the last ref, we
+ // might get destructed on the decStrong(), so keep an sp around until the
+ // end of the call - otherwise have to sprinkle the decStrong on all exit
+ // points.
+ sp<Camera3ZslStream> keepAlive(this);
+ decStrong(this);
+
+ // Allow buffers to be returned in the error state, to allow for disconnect
+ // and in the in-config states for registration
+ if (mState == STATE_CONSTRUCTED) {
+ ALOGE("%s: Stream %d: Can't return buffers in unconfigured state %d",
+ __FUNCTION__, mId, mState);
+ return INVALID_OPERATION;
+ }
+ if (mDequeuedBufferCount == 0) {
+ ALOGE("%s: Stream %d: No buffers outstanding to return", __FUNCTION__,
+ mId);
+ return INVALID_OPERATION;
+ }
+ if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) {
+ res = mConsumer->cancelBuffer(mConsumer.get(),
+ container_of(buffer.buffer, ANativeWindowBuffer, handle),
+ buffer.release_fence);
+ if (res != OK) {
+ ALOGE("%s: Stream %d: Error cancelling buffer to native window:"
+ " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
+ return res;
+ }
+ } else {
+ res = native_window_set_buffers_timestamp(mConsumer.get(), timestamp);
+ if (res != OK) {
+ ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
+ __FUNCTION__, mId, strerror(-res), res);
+ return res;
+ }
+
+ sp<Fence> releaseFence = new Fence(buffer.release_fence);
+ int anwReleaseFence = releaseFence->dup();
+
+ res = mConsumer->queueBuffer(mConsumer.get(),
+ container_of(buffer.buffer, ANativeWindowBuffer, handle),
+ anwReleaseFence);
+ if (res != OK) {
+ ALOGE("%s: Stream %d: Error queueing buffer to native window: %s (%d)",
+ __FUNCTION__, mId, strerror(-res), res);
+ close(anwReleaseFence);
+ return res;
+ }
+
+ mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence);
+ }
+
+ mDequeuedBufferCount--;
+ mBufferReturnedSignal.signal();
+ mLastTimestamp = timestamp;
+
+ return OK;
}
bool Camera3ZslStream::hasOutstandingBuffersLocked() const {
- ALOGE("%s: Not implemented", __FUNCTION__);
+ // same as output stream
+ nsecs_t signalTime = mCombinedFence->getSignalTime();
+ ALOGV("%s: Stream %d: Has %d outstanding buffers,"
+ " buffer signal time is %lld",
+ __FUNCTION__, mId, mDequeuedBufferCount, signalTime);
+ if (mDequeuedBufferCount > 0 || signalTime == INT64_MAX) {
+ return true;
+ }
return false;
}
status_t Camera3ZslStream::waitUntilIdle(nsecs_t timeout) {
- (void) timeout;
- ALOGE("%s: Not implemented", __FUNCTION__);
- return INVALID_OPERATION;
+ // same as output stream
+ status_t res;
+ {
+ Mutex::Autolock l(mLock);
+ while (mDequeuedBufferCount > 0) {
+ if (timeout != TIMEOUT_NEVER) {
+ nsecs_t startTime = systemTime();
+ res = mBufferReturnedSignal.waitRelative(mLock, timeout);
+ if (res == TIMED_OUT) {
+ return res;
+ } else if (res != OK) {
+ ALOGE("%s: Error waiting for outstanding buffers: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ nsecs_t deltaTime = systemTime() - startTime;
+ if (timeout <= deltaTime) {
+ timeout = 0;
+ } else {
+ timeout -= deltaTime;
+ }
+ } else {
+ res = mBufferReturnedSignal.wait(mLock);
+ if (res != OK) {
+ ALOGE("%s: Error waiting for outstanding buffers: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ }
+ }
+ }
+
+ // No lock
+
+ unsigned int timeoutMs;
+ if (timeout == TIMEOUT_NEVER) {
+ timeoutMs = Fence::TIMEOUT_NEVER;
+ } else if (timeout == 0) {
+ timeoutMs = 0;
+ } else {
+ // Round up to wait at least 1 ms
+ timeoutMs = (timeout + 999999) / 1000000;
+ }
+
+ return mCombinedFence->wait(timeoutMs);
+}
+
+status_t Camera3ZslStream::configureQueueLocked() {
+ status_t res;
+
+ switch (mState) {
+ case STATE_IN_RECONFIG:
+ res = disconnectLocked();
+ if (res != OK) {
+ return res;
+ }
+ break;
+ case STATE_IN_CONFIG:
+ // OK
+ break;
+ default:
+ ALOGE("%s: Bad state: %d", __FUNCTION__, mState);
+ return INVALID_OPERATION;
+ }
+
+ // Configure consumer-side ANativeWindow interface
+ res = native_window_api_connect(mConsumer.get(),
+ NATIVE_WINDOW_API_CAMERA);
+ if (res != OK) {
+ ALOGE("%s: Unable to connect to native window for stream %d",
+ __FUNCTION__, mId);
+ return res;
+ }
+
+ res = native_window_set_usage(mConsumer.get(), camera3_stream::usage);
+ if (res != OK) {
+ ALOGE("%s: Unable to configure usage %08x for stream %d",
+ __FUNCTION__, camera3_stream::usage, mId);
+ return res;
+ }
+
+ res = native_window_set_scaling_mode(mConsumer.get(),
+ NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
+ if (res != OK) {
+ ALOGE("%s: Unable to configure stream scaling: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+
+ if (mMaxSize == 0) {
+ // For buffers of known size
+ res = native_window_set_buffers_geometry(mConsumer.get(),
+ camera3_stream::width, camera3_stream::height,
+ camera3_stream::format);
+ } else {
+ // For buffers with bounded size
+ res = native_window_set_buffers_geometry(mConsumer.get(),
+ mMaxSize, 1,
+ camera3_stream::format);
+ }
+ if (res != OK) {
+ ALOGE("%s: Unable to configure stream buffer geometry"
+ " %d x %d, format %x for stream %d",
+ __FUNCTION__, camera3_stream::width, camera3_stream::height,
+ camera3_stream::format, mId);
+ return res;
+ }
+
+ int maxConsumerBuffers;
+ res = mConsumer->query(mConsumer.get(),
+ NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers);
+ if (res != OK) {
+ ALOGE("%s: Unable to query consumer undequeued"
+ " buffer count for stream %d", __FUNCTION__, mId);
+ return res;
+ }
+
+ ALOGV("%s: Consumer wants %d buffers", __FUNCTION__,
+ maxConsumerBuffers);
+
+ mTotalBufferCount = maxConsumerBuffers + camera3_stream::max_buffers;
+ mDequeuedBufferCount = 0;
+ mFrameCount = 0;
+ mLastTimestamp = 0;
+
+ res = native_window_set_buffer_count(mConsumer.get(),
+ mTotalBufferCount);
+ if (res != OK) {
+ ALOGE("%s: Unable to set buffer count for stream %d",
+ __FUNCTION__, mId);
+ return res;
+ }
+
+ return OK;
+}
+
+size_t Camera3ZslStream::getBufferCountLocked() {
+ return mTotalBufferCount;
}
status_t Camera3ZslStream::disconnectLocked() {
- ALOGE("%s: Not implemented", __FUNCTION__);
- return INVALID_OPERATION;
+ status_t res;
+
+ switch (mState) {
+ case STATE_IN_RECONFIG:
+ case STATE_CONFIGURED:
+ // OK
+ break;
+ default:
+ // No connection, nothing to do
+ return OK;
+ }
+
+ if (mDequeuedBufferCount > 0) {
+ ALOGE("%s: Can't disconnect with %d buffers still dequeued!",
+ __FUNCTION__, mDequeuedBufferCount);
+ return INVALID_OPERATION;
+ }
+
+ res = native_window_api_disconnect(mConsumer.get(), NATIVE_WINDOW_API_CAMERA);
+
+ /**
+ * This is not an error. If the client's calling process dies, the window
+ * will also die, and all calls to it will return DEAD_OBJECT, so it is
+ * already "disconnected"
+ */
+ if (res == DEAD_OBJECT) {
+ ALOGW("%s: While disconnecting stream %d from native window, the"
+ " native window died from under us", __FUNCTION__, mId);
+ }
+ else if (res != OK) {
+ ALOGE("%s: Unable to disconnect stream %d from native window (error %d %s)",
+ __FUNCTION__, mId, res, strerror(-res));
+ mState = STATE_ERROR;
+ return res;
+ }
+
+ mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG : STATE_CONSTRUCTED;
+ return OK;
}
-status_t Camera3ZslStream::getInputBuffer(camera3_stream_buffer *buffer,
- nsecs_t timestamp) {
- (void) buffer;
- (void) timestamp;
- ALOGE("%s: Not implemented", __FUNCTION__);
- return INVALID_OPERATION;
+status_t Camera3ZslStream::getInputBufferLocked(camera3_stream_buffer *buffer) {
+ ATRACE_CALL();
+
+ // TODO: potentially register from inputBufferLocked
+ // this should be ok, registerBuffersLocked only calls getBuffer for now
+ // register in output mode instead of input mode for ZSL streams.
+ if (mState == STATE_IN_CONFIG || mState == STATE_IN_RECONFIG) {
+ ALOGE("%s: Stream %d: Buffer registration for input streams"
+ " not implemented (state %d)",
+ __FUNCTION__, mId, mState);
+ return INVALID_OPERATION;
+ }
+
+ // Allow dequeue during IN_[RE]CONFIG for registration
+ if (mState != STATE_CONFIGURED &&
+ mState != STATE_IN_CONFIG && mState != STATE_IN_RECONFIG) {
+ ALOGE("%s: Stream %d: Can't get buffers in unconfigured state %d",
+ __FUNCTION__, mId, mState);
+ return INVALID_OPERATION;
+ }
+
+ // Only limit dequeue amount when fully configured
+ if (mState == STATE_CONFIGURED &&
+ mDequeuedBufferCount == camera3_stream::max_buffers) {
+ ALOGE("%s: Stream %d: Already dequeued maximum number of simultaneous"
+ " buffers (%d)", __FUNCTION__, mId,
+ camera3_stream::max_buffers);
+ return INVALID_OPERATION;
+ }
+
+ ANativeWindowBuffer* anb;
+ int fenceFd;
+
+ assert(mProducer != 0);
+
+ sp<PinnedBufferItem> bufferItem;
+ {
+ List<sp<RingBufferConsumer::PinnedBufferItem> >::iterator it, end;
+ it = mInputBufferQueue.begin();
+ end = mInputBufferQueue.end();
+
+ // Need to call enqueueInputBufferByTimestamp as a prerequisite
+ if (it == end) {
+ ALOGE("%s: Stream %d: No input buffer was queued",
+ __FUNCTION__, mId);
+ return INVALID_OPERATION;
+ }
+ bufferItem = *it;
+ mInputBufferQueue.erase(it);
+ }
+
+ anb = bufferItem->getBufferItem().mGraphicBuffer->getNativeBuffer();
+ assert(anb != NULL);
+ fenceFd = bufferItem->getBufferItem().mFence->dup();
+
+ /**
+ * FenceFD now owned by HAL except in case of error,
+ * in which case we reassign it to acquire_fence
+ */
+
+ // Handing out a raw pointer to this object. Increment internal refcount.
+ incStrong(this);
+ buffer->stream = this;
+ buffer->buffer = &(anb->handle);
+ buffer->acquire_fence = fenceFd;
+ buffer->release_fence = -1;
+ buffer->status = CAMERA3_BUFFER_STATUS_OK;
+
+ mDequeuedBufferCount++;
+
+ mBuffersInFlight.push_back(bufferItem);
+
+ return OK;
}
-status_t Camera3ZslStream::returnInputBuffer(const camera3_stream_buffer &buffer) {
- (void) buffer;
- ALOGE("%s: Not implemented", __FUNCTION__);
- return INVALID_OPERATION;
+status_t Camera3ZslStream::returnInputBufferLocked(
+ const camera3_stream_buffer &buffer) {
+ ATRACE_CALL();
+
+ // returnBuffer may be called from a raw pointer, not a sp<>, and we'll be
+ // decrementing the internal refcount next. In case this is the last ref, we
+ // might get destructed on the decStrong(), so keep an sp around until the
+ // end of the call - otherwise have to sprinkle the decStrong on all exit
+ // points.
+ sp<Camera3ZslStream> keepAlive(this);
+ decStrong(this);
+
+ // Allow buffers to be returned in the error state, to allow for disconnect
+ // and in the in-config states for registration
+ if (mState == STATE_CONSTRUCTED) {
+ ALOGE("%s: Stream %d: Can't return buffers in unconfigured state %d",
+ __FUNCTION__, mId, mState);
+ return INVALID_OPERATION;
+ }
+ if (mDequeuedBufferCount == 0) {
+ ALOGE("%s: Stream %d: No buffers outstanding to return", __FUNCTION__,
+ mId);
+ return INVALID_OPERATION;
+ }
+
+ bool bufferFound = false;
+ sp<PinnedBufferItem> bufferItem;
+ {
+ // Find the buffer we are returning
+ Vector<sp<PinnedBufferItem> >::iterator it, end;
+ for (it = mBuffersInFlight.begin(), end = mBuffersInFlight.end();
+ it != end;
+ ++it) {
+
+ const sp<PinnedBufferItem>& tmp = *it;
+ ANativeWindowBuffer *anb =
+ tmp->getBufferItem().mGraphicBuffer->getNativeBuffer();
+ if (anb != NULL && &(anb->handle) == buffer.buffer) {
+ bufferFound = true;
+ bufferItem = tmp;
+ mBuffersInFlight.erase(it);
+ mDequeuedBufferCount--;
+ }
+ }
+ }
+ if (!bufferFound) {
+ ALOGE("%s: Stream %d: Can't return buffer that wasn't sent to HAL",
+ __FUNCTION__, mId);
+ return INVALID_OPERATION;
+ }
+
+ int releaseFenceFd = buffer.release_fence;
+
+ if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) {
+ if (buffer.release_fence != -1) {
+ ALOGE("%s: Stream %d: HAL should not set release_fence(%d) when "
+ "there is an error", __FUNCTION__, mId, buffer.release_fence);
+ close(buffer.release_fence);
+ }
+
+ /**
+ * Reassign the release fence as the acquire fence in case of error
+ */
+ releaseFenceFd = buffer.acquire_fence;
+ }
+
+ /**
+ * Unconditionally return buffer to the buffer queue.
+ * - Fwk takes over the release_fence ownership
+ */
+ sp<Fence> releaseFence = new Fence(releaseFenceFd);
+ bufferItem->getBufferItem().mFence = releaseFence;
+ bufferItem.clear(); // dropping last reference unpins buffer
+
+ mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence);
+
+ mBufferReturnedSignal.signal();
+
+ return OK;
+
}
void Camera3ZslStream::dump(int fd, const Vector<String16> &args) const {
- (void) fd;
(void) args;
- ALOGE("%s: Not implemented", __FUNCTION__);
+
+ String8 lines;
+ lines.appendFormat(" Stream[%d]: ZSL\n", mId);
+ lines.appendFormat(" State: %d\n", mState);
+ lines.appendFormat(" Dims: %d x %d, format 0x%x\n",
+ camera3_stream::width, camera3_stream::height,
+ camera3_stream::format);
+ lines.appendFormat(" Usage: %d, max HAL buffers: %d\n",
+ camera3_stream::usage, camera3_stream::max_buffers);
+ lines.appendFormat(" Frames produced: %d, last timestamp: %lld ns\n",
+ mFrameCount, mLastTimestamp);
+ lines.appendFormat(" Total buffers: %d, currently dequeued: %d\n",
+ mTotalBufferCount, mDequeuedBufferCount);
+ lines.appendFormat(" Input buffers pending: %d, in flight %d\n",
+ mInputBufferQueue.size(), mBuffersInFlight.size());
+ write(fd, lines.string(), lines.size());
+}
+
+status_t Camera3ZslStream::enqueueInputBufferByTimestamp(
+ nsecs_t timestamp,
+ nsecs_t* actualTimestamp) {
+
+ Mutex::Autolock l(mLock);
+
+ TimestampFinder timestampFinder = TimestampFinder(timestamp);
+
+ sp<RingBufferConsumer::PinnedBufferItem> pinnedBuffer =
+ mProducer->pinSelectedBuffer(timestampFinder,
+ /*waitForFence*/false);
+
+ if (pinnedBuffer == 0) {
+ ALOGE("%s: No ZSL buffers were available yet", __FUNCTION__);
+ return NO_BUFFER_AVAILABLE;
+ }
+
+ nsecs_t actual = pinnedBuffer->getBufferItem().mTimestamp;
+
+ if (actual != timestamp) {
+ ALOGW("%s: ZSL buffer candidate search didn't find an exact match --"
+ " requested timestamp = %lld, actual timestamp = %lld",
+ __FUNCTION__, timestamp, actual);
+ }
+
+ mInputBufferQueue.push_back(pinnedBuffer);
+
+ if (actualTimestamp != NULL) {
+ *actualTimestamp = actual;
+ }
+
+ return OK;
+}
+
+status_t Camera3ZslStream::clearInputRingBuffer() {
+ Mutex::Autolock l(mLock);
+
+ mInputBufferQueue.clear();
+
+ return mProducer->clear();
+}
+
+status_t Camera3ZslStream::setTransform(int /*transform*/) {
+ ALOGV("%s: Not implemented", __FUNCTION__);
+ return INVALID_OPERATION;
}
}; // namespace camera3
diff --git a/services/camera/libcameraservice/camera3/Camera3ZslStream.h b/services/camera/libcameraservice/camera3/Camera3ZslStream.h
index 39d5995..b863e7f 100644
--- a/services/camera/libcameraservice/camera3/Camera3ZslStream.h
+++ b/services/camera/libcameraservice/camera3/Camera3ZslStream.h
@@ -19,8 +19,10 @@
#include <utils/RefBase.h>
#include <gui/Surface.h>
+#include <gui/RingBufferConsumer.h>
#include "Camera3Stream.h"
+#include "Camera3OutputStreamInterface.h"
namespace android {
@@ -32,32 +34,62 @@
* most output buffers, and when directed, pushes a buffer back to the HAL for
* processing.
*/
-class Camera3ZslStream: public Camera3Stream {
+class Camera3ZslStream :
+ public Camera3Stream,
+ public Camera3OutputStreamInterface {
public:
/**
* Set up a ZSL stream of a given resolution. Depth is the number of buffers
* cached within the stream that can be retrieved for input.
*/
Camera3ZslStream(int id, uint32_t width, uint32_t height, int depth);
+ ~Camera3ZslStream();
virtual status_t waitUntilIdle(nsecs_t timeout);
virtual void dump(int fd, const Vector<String16> &args) const;
- /**
- * Get an input buffer matching a specific timestamp. If no buffer matching
- * the timestamp is available, NO_MEMORY is returned.
- */
- status_t getInputBuffer(camera3_stream_buffer *buffer, nsecs_t timestamp);
+ enum { NO_BUFFER_AVAILABLE = BufferQueue::NO_BUFFER_AVAILABLE };
/**
- * Return input buffer from HAL. The buffer is then marked as unfilled, and
- * returned to the output-side stream for refilling.
+ * Locate a buffer matching this timestamp in the RingBufferConsumer,
+ * and mark it to be queued at the next getInputBufferLocked invocation.
+ *
+ * Errors: Returns NO_BUFFER_AVAILABLE if we could not find a match.
+ *
*/
- status_t returnInputBuffer(const camera3_stream_buffer &buffer);
+ status_t enqueueInputBufferByTimestamp(nsecs_t timestamp,
+ nsecs_t* actualTimestamp);
+
+ /**
+ * Clears the buffers that can be used by enqueueInputBufferByTimestamp
+ */
+ status_t clearInputRingBuffer();
+
+ /**
+ * Camera3OutputStreamInterface implementation
+ */
+ status_t setTransform(int transform);
private:
int mDepth;
+ // Input buffers pending to be queued into HAL
+ List<sp<RingBufferConsumer::PinnedBufferItem> > mInputBufferQueue;
+ sp<RingBufferConsumer> mProducer;
+ sp<ANativeWindow> mConsumer;
+
+ // Input buffers in flight to HAL
+ Vector<sp<RingBufferConsumer::PinnedBufferItem> > mBuffersInFlight;
+ size_t mTotalBufferCount;
+ // sum of input and output buffers that are currently acquired by HAL
+ size_t mDequeuedBufferCount;
+ Condition mBufferReturnedSignal;
+ uint32_t mFrameCount;
+ // Last received output buffer's timestamp
+ nsecs_t mLastTimestamp;
+
+ // The merged release fence for all returned buffers
+ sp<Fence> mCombinedFence;
/**
* Camera3Stream interface
@@ -67,9 +99,18 @@
virtual status_t getBufferLocked(camera3_stream_buffer *buffer);
virtual status_t returnBufferLocked(const camera3_stream_buffer &buffer,
nsecs_t timestamp);
+ // getInputBuffer/returnInputBuffer operate the input stream side of the
+ // ZslStream.
+ virtual status_t getInputBufferLocked(camera3_stream_buffer *buffer);
+ virtual status_t returnInputBufferLocked(
+ const camera3_stream_buffer &buffer);
+
virtual bool hasOutstandingBuffersLocked() const;
virtual status_t disconnectLocked();
+ virtual status_t configureQueueLocked();
+ virtual size_t getBufferCountLocked();
+
}; // class Camera3ZslStream
}; // namespace camera3
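For illustration (not part of the patch): a hedged sketch of the ZSL reprocess flow built from the calls declared above. The helper name, the capture timestamp, and the elided request submission and completion wait are assumptions for the example.

    #include "Camera3ZslStream.h"

    using namespace android;
    using namespace android::camera3;

    // Push a previously captured ZSL frame back to the HAL for reprocessing.
    static status_t reprocessZslFrame(const sp<Camera3ZslStream>& zslStream,
                                      nsecs_t captureTimestamp) {
        nsecs_t actualTimestamp = 0;
        status_t res = zslStream->enqueueInputBufferByTimestamp(
                captureTimestamp, &actualTimestamp);
        if (res != OK) {
            // Includes NO_BUFFER_AVAILABLE when the ring buffer has no candidate.
            return res;
        }

        // The next getInputBuffer() call hands out the frame pinned above.
        camera3_stream_buffer halBuffer;
        res = zslStream->getInputBuffer(&halBuffer);
        if (res != OK) {
            return res;
        }

        // ... attach halBuffer to a reprocess request, wait for the HAL ...

        return zslStream->returnInputBuffer(halBuffer);
    }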
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
new file mode 100644
index 0000000..1b2a717
--- /dev/null
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
@@ -0,0 +1,346 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#undef NDEBUG
+#include <cassert>
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "RingBufferConsumer"
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+#include <utils/Log.h>
+
+#include <gui/RingBufferConsumer.h>
+
+#define BI_LOGV(x, ...) ALOGV("[%s] "x, mName.string(), ##__VA_ARGS__)
+#define BI_LOGD(x, ...) ALOGD("[%s] "x, mName.string(), ##__VA_ARGS__)
+#define BI_LOGI(x, ...) ALOGI("[%s] "x, mName.string(), ##__VA_ARGS__)
+#define BI_LOGW(x, ...) ALOGW("[%s] "x, mName.string(), ##__VA_ARGS__)
+#define BI_LOGE(x, ...) ALOGE("[%s] "x, mName.string(), ##__VA_ARGS__)
+
+typedef android::RingBufferConsumer::PinnedBufferItem PinnedBufferItem;
+
+namespace android {
+
+RingBufferConsumer::RingBufferConsumer(uint32_t consumerUsage,
+ int bufferCount) :
+ ConsumerBase(new BufferQueue(true)),
+ mBufferCount(bufferCount)
+{
+ mBufferQueue->setConsumerUsageBits(consumerUsage);
+ mBufferQueue->setSynchronousMode(true);
+ mBufferQueue->setMaxAcquiredBufferCount(bufferCount);
+
+ assert(bufferCount > 0);
+}
+
+RingBufferConsumer::~RingBufferConsumer() {
+}
+
+void RingBufferConsumer::setName(const String8& name) {
+ Mutex::Autolock _l(mMutex);
+ mName = name;
+ mBufferQueue->setConsumerName(name);
+}
+
+sp<PinnedBufferItem> RingBufferConsumer::pinSelectedBuffer(
+ const RingBufferComparator& filter,
+ bool waitForFence) {
+
+ sp<PinnedBufferItem> pinnedBuffer;
+
+ {
+ List<RingBufferItem>::iterator it, end, accIt;
+ BufferInfo acc, cur;
+ BufferInfo* accPtr = NULL;
+
+ Mutex::Autolock _l(mMutex);
+
+ for (it = mBufferItemList.begin(), end = mBufferItemList.end();
+ it != end;
+ ++it) {
+
+ const RingBufferItem& item = *it;
+
+ cur.mCrop = item.mCrop;
+ cur.mTransform = item.mTransform;
+ cur.mScalingMode = item.mScalingMode;
+ cur.mTimestamp = item.mTimestamp;
+ cur.mFrameNumber = item.mFrameNumber;
+ cur.mPinned = item.mPinCount > 0;
+
+ int ret = filter.compare(accPtr, &cur);
+
+ if (ret == 0) {
+ accPtr = NULL;
+ } else if (ret > 0) {
+ acc = cur;
+ accPtr = &acc;
+ accIt = it;
+ } // else acc = acc
+ }
+
+ if (!accPtr) {
+ return NULL;
+ }
+
+ pinnedBuffer = new PinnedBufferItem(this, *accIt);
+ pinBufferLocked(pinnedBuffer->getBufferItem());
+
+ } // end scope of mMutex autolock
+
+ if (pinnedBuffer != 0) {
+ BI_LOGV("Pinned buffer frame %lld, timestamp %lld",
+ pinnedBuffer->getBufferItem().mFrameNumber,
+ pinnedBuffer->getBufferItem().mTimestamp);
+ }
+
+ if (waitForFence) {
+ status_t err = pinnedBuffer->getBufferItem().mFence->waitForever(1000,
+ "RingBufferConsumer::pinSelectedBuffer");
+ if (err != OK) {
+ BI_LOGE("Failed to wait for fence of acquired buffer: %s (%d)",
+ strerror(-err), err);
+ }
+ }
+
+ return pinnedBuffer;
+}
+
+status_t RingBufferConsumer::clear() {
+
+ status_t err;
+ Mutex::Autolock _l(mMutex);
+
+ BI_LOGV("%s", __FUNCTION__);
+
+ // Avoid annoying log warnings by returning early
+ if (mBufferItemList.size() == 0) {
+ return OK;
+ }
+
+ do {
+ size_t pinnedFrames = 0;
+ err = releaseOldestBufferLocked(&pinnedFrames);
+
+ if (err == NO_BUFFER_AVAILABLE) {
+ assert(pinnedFrames == mBufferItemList.size());
+ break;
+ }
+
+ if (err == NOT_ENOUGH_DATA) {
+ // Fine. Empty buffer item list.
+ break;
+ }
+
+ if (err != OK) {
+ BI_LOGE("Clear failed, could not release buffer");
+ return err;
+ }
+
+ } while(true);
+
+ return OK;
+}
+
+void RingBufferConsumer::pinBufferLocked(const BufferItem& item) {
+ List<RingBufferItem>::iterator it, end;
+
+ for (it = mBufferItemList.begin(), end = mBufferItemList.end();
+ it != end;
+ ++it) {
+
+ RingBufferItem& find = *it;
+ if (item.mGraphicBuffer == find.mGraphicBuffer) {
+ find.mPinCount++;
+ break;
+ }
+ }
+
+ if (it == end) {
+ BI_LOGE("Failed to pin buffer (timestamp %lld, framenumber %lld)",
+ item.mTimestamp, item.mFrameNumber);
+ }
+}
+
+status_t RingBufferConsumer::releaseOldestBufferLocked(size_t* pinnedFrames) {
+ status_t err = OK;
+
+ List<RingBufferItem>::iterator it, end, accIt;
+
+ it = mBufferItemList.begin();
+ end = mBufferItemList.end();
+ accIt = it;
+
+ if (it == end) {
+ /**
+ * This is fine. We really care about being able to acquire a buffer
+ * successfully after this function completes, not about it releasing
+ * some buffer.
+ */
+ BI_LOGV("%s: No buffers yet acquired, can't release anything",
+ __FUNCTION__);
+ return NOT_ENOUGH_DATA;
+ }
+
+ for (; it != end; ++it) {
+ RingBufferItem& find = *it;
+ if (find.mTimestamp < accIt->mTimestamp && find.mPinCount <= 0) {
+ accIt = it;
+ }
+
+ if (find.mPinCount > 0 && pinnedFrames != NULL) {
+ ++(*pinnedFrames);
+ }
+ }
+
+ if (accIt != end) {
+ RingBufferItem& item = *accIt;
+
+ // In case the object was never pinned, pass the acquire fence
+ // back to the release fence. If the fence was already waited on,
+ // it'll just be a no-op to wait on it again.
+ err = addReleaseFenceLocked(item.mBuf, item.mFence);
+
+ if (err != OK) {
+ BI_LOGE("Failed to add release fence to buffer "
+ "(timestamp %lld, framenumber %lld",
+ item.mTimestamp, item.mFrameNumber);
+ return err;
+ }
+
+ BI_LOGV("Attempting to release buffer timestamp %lld, frame %lld",
+ item.mTimestamp, item.mFrameNumber);
+
+ err = releaseBufferLocked(item.mBuf,
+ EGL_NO_DISPLAY,
+ EGL_NO_SYNC_KHR);
+ if (err != OK) {
+ BI_LOGE("Failed to release buffer: %s (%d)",
+ strerror(-err), err);
+ return err;
+ }
+
+ BI_LOGV("Buffer timestamp %lld, frame %lld evicted",
+ item.mTimestamp, item.mFrameNumber);
+
+ size_t currentSize = mBufferItemList.size();
+ mBufferItemList.erase(accIt);
+ assert(mBufferItemList.size() == currentSize - 1);
+ } else {
+ BI_LOGW("All buffers pinned, could not find any to release");
+ return NO_BUFFER_AVAILABLE;
+
+ }
+
+ return OK;
+}
+
+void RingBufferConsumer::onFrameAvailable() {
+ status_t err;
+
+ {
+ Mutex::Autolock _l(mMutex);
+
+ /**
+ * Release oldest frame
+ */
+ if (mBufferItemList.size() >= (size_t)mBufferCount) {
+ err = releaseOldestBufferLocked(/*pinnedFrames*/NULL);
+ assert(err != NOT_ENOUGH_DATA);
+
+ // TODO: implement the case for NO_BUFFER_AVAILABLE
+ assert(err != NO_BUFFER_AVAILABLE);
+ if (err != OK) {
+ return;
+ }
+ // TODO: in unpinBuffer rerun this routine if we had buffers
+ // we could've locked but didn't because there was no space
+ }
+
+ RingBufferItem& item = *mBufferItemList.insert(mBufferItemList.end(),
+ RingBufferItem());
+
+ /**
+ * Acquire new frame
+ */
+ err = acquireBufferLocked(&item);
+ if (err != OK) {
+ if (err != NO_BUFFER_AVAILABLE) {
+ BI_LOGE("Error acquiring buffer: %s (%d)", strerror(err), err);
+ }
+
+ mBufferItemList.erase(--mBufferItemList.end());
+ return;
+ }
+
+ BI_LOGV("New buffer acquired (timestamp %lld), "
+ "buffer items %u out of %d",
+ item.mTimestamp,
+ mBufferItemList.size(), mBufferCount);
+
+ item.mGraphicBuffer = mSlots[item.mBuf].mGraphicBuffer;
+ } // end of mMutex lock
+
+ ConsumerBase::onFrameAvailable();
+}
+
+void RingBufferConsumer::unpinBuffer(const BufferItem& item) {
+ Mutex::Autolock _l(mMutex);
+
+ List<RingBufferItem>::iterator it, end, accIt;
+
+ for (it = mBufferItemList.begin(), end = mBufferItemList.end();
+ it != end;
+ ++it) {
+
+ RingBufferItem& find = *it;
+ if (item.mGraphicBuffer == find.mGraphicBuffer) {
+ status_t res = addReleaseFenceLocked(item.mBuf, item.mFence);
+
+ if (res != OK) {
+ BI_LOGE("Failed to add release fence to buffer "
+ "(timestamp %lld, framenumber %lld",
+ item.mTimestamp, item.mFrameNumber);
+ return;
+ }
+
+ find.mPinCount--;
+ break;
+ }
+ }
+
+ if (it == end) {
+ BI_LOGE("Failed to unpin buffer (timestamp %lld, framenumber %lld",
+ item.mTimestamp, item.mFrameNumber);
+ }
+}
+
+status_t RingBufferConsumer::setDefaultBufferSize(uint32_t w, uint32_t h) {
+ Mutex::Autolock _l(mMutex);
+ return mBufferQueue->setDefaultBufferSize(w, h);
+}
+
+status_t RingBufferConsumer::setDefaultBufferFormat(uint32_t defaultFormat) {
+ Mutex::Autolock _l(mMutex);
+ return mBufferQueue->setDefaultBufferFormat(defaultFormat);
+}
+
+status_t RingBufferConsumer::setConsumerUsage(uint32_t usage) {
+ Mutex::Autolock _l(mMutex);
+ return mBufferQueue->setConsumerUsageBits(usage);
+}
+
+} // namespace android
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.h b/services/camera/libcameraservice/gui/RingBufferConsumer.h
new file mode 100644
index 0000000..454fbae
--- /dev/null
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.h
@@ -0,0 +1,189 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_GUI_RINGBUFFERCONSUMER_H
+#define ANDROID_GUI_RINGBUFFERCONSUMER_H
+
+#include <gui/ConsumerBase.h>
+
+#include <ui/GraphicBuffer.h>
+
+#include <utils/String8.h>
+#include <utils/Vector.h>
+#include <utils/threads.h>
+#include <utils/List.h>
+
+#define ANDROID_GRAPHICS_RINGBUFFERCONSUMER_JNI_ID "mRingBufferConsumer"
+
+namespace android {
+
+/**
+ * The RingBufferConsumer maintains a ring buffer of BufferItem objects,
+ * (which are 'acquired' as long as they are part of the ring buffer, and
+ * 'released' when they leave the ring buffer).
+ *
+ * When new buffers are produced, the oldest non-pinned buffer item is immediately
+ * dropped from the ring buffer and replaced with the newest buffer.
+ *
+ * Users can only access a buffer item after pinning it (which also guarantees
+ * that, for as long as it remains pinned, it will not be released back into
+ * the BufferQueue).
+ *
+ * Note that the 'oldest' buffer is the one with the smallest timestamp.
+ *
+ * Edge cases:
+ * - If ringbuffer is not full, no drops occur when a buffer is produced.
+ * - If all the buffers get filled or pinned then there will be no empty
+ * buffers left, so the producer will block on dequeue.
+ */
+class RingBufferConsumer : public ConsumerBase,
+ public ConsumerBase::FrameAvailableListener
+{
+ public:
+ typedef ConsumerBase::FrameAvailableListener FrameAvailableListener;
+
+ typedef BufferQueue::BufferItem BufferItem;
+
+ enum { INVALID_BUFFER_SLOT = BufferQueue::INVALID_BUFFER_SLOT };
+ enum { NO_BUFFER_AVAILABLE = BufferQueue::NO_BUFFER_AVAILABLE };
+
+ // Create a new ring buffer consumer. The consumerUsage parameter determines
+ // the consumer usage flags passed to the graphics allocator. The
+ // bufferCount parameter specifies how many buffers can be pinned for user
+ // access at the same time.
+ RingBufferConsumer(uint32_t consumerUsage,
+ int bufferCount = BufferQueue::MIN_UNDEQUEUED_BUFFERS);
+
+ virtual ~RingBufferConsumer();
+
+ // set the name of the RingBufferConsumer that will be used to identify it in
+ // log messages.
+ void setName(const String8& name);
+
+ sp<IGraphicBufferProducer> getProducerInterface() const { return getBufferQueue(); }
+
+ // setDefaultBufferSize is used to set the size of buffers returned by
+ // requestBuffers when a width and height of zero is requested.
+ status_t setDefaultBufferSize(uint32_t w, uint32_t h);
+
+ // setDefaultBufferFormat allows the BufferQueue to create
+ // GraphicBuffers of a defaultFormat if no format is specified
+ // by the producer endpoint.
+ status_t setDefaultBufferFormat(uint32_t defaultFormat);
+
+ // setConsumerUsage allows the BufferQueue consumer usage to be
+ // set at a later time after construction.
+ status_t setConsumerUsage(uint32_t usage);
+
+ // Buffer info, minus the graphics buffer/slot itself.
+ struct BufferInfo {
+ // mCrop is the current crop rectangle for this buffer slot.
+ Rect mCrop;
+
+ // mTransform is the current transform flags for this buffer slot.
+ uint32_t mTransform;
+
+ // mScalingMode is the current scaling mode for this buffer slot.
+ uint32_t mScalingMode;
+
+ // mTimestamp is the current timestamp for this buffer slot. This gets
+ // set by queueBuffer each time this slot is queued.
+ int64_t mTimestamp;
+
+ // mFrameNumber is the number of the queued frame for this slot.
+ uint64_t mFrameNumber;
+
+ // mPinned is whether or not the buffer has been pinned already.
+ bool mPinned;
+ };
+
+ struct RingBufferComparator {
+ // Return < 0 to select i1, > 0 to select i2, 0 for neither
+ // i1 or i2 can be NULL.
+ //
+ // The comparator has to implement a total ordering. Otherwise
+ // a linear scan won't find the most preferred buffer.
+ virtual int compare(const BufferInfo* i1,
+ const BufferInfo* i2) const = 0;
+
+ virtual ~RingBufferComparator() {}
+ };
+
+ struct PinnedBufferItem : public LightRefBase<PinnedBufferItem> {
+ PinnedBufferItem(wp<RingBufferConsumer> consumer,
+ const BufferItem& item) :
+ mConsumer(consumer),
+ mBufferItem(item) {
+ }
+
+ ~PinnedBufferItem() {
+ sp<RingBufferConsumer> consumer = mConsumer.promote();
+ if (consumer != NULL) {
+ consumer->unpinBuffer(mBufferItem);
+ }
+ }
+
+ bool isEmpty() {
+ return mBufferItem.mBuf == BufferQueue::INVALID_BUFFER_SLOT;
+ }
+
+ BufferItem& getBufferItem() { return mBufferItem; }
+ const BufferItem& getBufferItem() const { return mBufferItem; }
+
+ private:
+ wp<RingBufferConsumer> mConsumer;
+ BufferItem mBufferItem;
+ };
+
+ // Find a buffer using the filter, then pin it before returning it.
+ //
+ // The filter will be invoked on each buffer item in the ring buffer,
+ // passing the item that was selected from each previous iteration,
+ // as well as the current iteration's item.
+ //
+ // Pinning will ensure that the buffer will not be dropped when a new
+ // frame is available.
+ sp<PinnedBufferItem> pinSelectedBuffer(const RingBufferComparator& filter,
+ bool waitForFence = true);
+
+ // Release all the non-pinned buffers in the ring buffer
+ status_t clear();
+
+ private:
+
+ // Override ConsumerBase::onFrameAvailable
+ virtual void onFrameAvailable();
+
+ void pinBufferLocked(const BufferItem& item);
+ void unpinBuffer(const BufferItem& item);
+
+ // Releases oldest buffer. Returns NO_BUFFER_AVAILABLE
+ // if all the buffers were pinned.
+ // Returns NOT_ENOUGH_DATA if list was empty.
+ status_t releaseOldestBufferLocked(size_t* pinnedFrames);
+
+ struct RingBufferItem : public BufferItem {
+ RingBufferItem() : BufferItem(), mPinCount(0) {}
+ int mPinCount;
+ };
+
+ // List of acquired buffers in our ring buffer
+ List<RingBufferItem> mBufferItemList;
+ const int mBufferCount;
+};
+
+} // namespace android
+
+#endif // ANDROID_GUI_RINGBUFFERCONSUMER_H
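For illustration (not part of the patch): a hedged sketch of pinning the newest frame in the ring with a custom comparator. The comparator, the helper, and the log tag are made up for the example; the pinned item unpins itself when the last sp<> reference is dropped, as described in PinnedBufferItem above.

    #define LOG_TAG "RingBufferExample"
    #include <utils/Log.h>
    #include <gui/RingBufferConsumer.h>

    using namespace android;

    // Prefer the buffer with the larger timestamp (i.e. the newest frame).
    struct NewestFirst : public RingBufferConsumer::RingBufferComparator {
        virtual int compare(const RingBufferConsumer::BufferInfo* i1,
                            const RingBufferConsumer::BufferInfo* i2) const {
            if (i1 == NULL) return 1;   // select i2
            if (i2 == NULL) return -1;  // select i1
            return (i1->mTimestamp >= i2->mTimestamp) ? -1 : 1;
        }
    };

    static void pinNewest(const sp<RingBufferConsumer>& ringBuffer) {
        NewestFirst newest;
        sp<RingBufferConsumer::PinnedBufferItem> pinned =
                ringBuffer->pinSelectedBuffer(newest, /*waitForFence*/false);
        if (pinned == 0) {
            ALOGW("Ring buffer is empty, nothing to pin");
            return;
        }
        // Use pinned->getBufferItem().mGraphicBuffer here; dropping 'pinned'
        // unpins the frame so it can be recycled by onFrameAvailable().
    }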