Camera2: Compile with -Wall -Wextra, fix warnings

Enable -Wall -Wextra for libcameraservice and clean up the warnings
this turns up: drop unused local variables and the now-unused static
getCallingUid() helpers, comment out unused parameter names, and add
(void) casts for variables that are only referenced from ALOGV
statements (which compile to nothing unless verbose logging is
enabled).
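
For reference, the two idioms used throughout the patch to silence
-Wunused-parameter and -Wunused-variable, sketched here on hypothetical
handlers (handleEvent/handleFrame are illustration only, not camera
service code):

    #define LOG_TAG "WarningIdioms"
    #include <utils/Log.h>     // ALOGV
    #include <utils/Timers.h>  // nsecs_t

    // (1) Parameter required by the interface but unused in the body:
    //     comment out its name so -Wunused-parameter stays quiet.
    void handleEvent(int /*eventId*/) {
    }

    // (2) Variable referenced only by ALOGV, which compiles away when
    //     LOG_NDEBUG != 0: cast it to void so -Wunused-variable stays
    //     quiet in non-verbose builds, while the ALOGV argument remains
    //     valid when verbose logging is switched on.
    void handleFrame(nsecs_t timestamp) {
        (void)timestamp;
        ALOGV("Frame at %lld", timestamp);
    }
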
Change-Id: I557350abb32b0480f5da7dcecadfbe9edbe53361
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index eff47c8..801afe9 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -40,6 +40,9 @@
system/media/camera/include \
external/jpeg
+
+LOCAL_CFLAGS += -Wall -Wextra
+
LOCAL_MODULE:= libcameraservice
include $(BUILD_SHARED_LIBRARY)
diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/Camera2Client.cpp
index e59a240..9627416 100644
--- a/services/camera/libcameraservice/Camera2Client.cpp
+++ b/services/camera/libcameraservice/Camera2Client.cpp
@@ -37,10 +37,6 @@
return IPCThreadState::self()->getCallingPid();
}
-static int getCallingUid() {
- return IPCThreadState::self()->getCallingUid();
-}
-
// Interface used by CameraService
Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
@@ -370,7 +366,6 @@
void Camera2Client::disconnect() {
ATRACE_CALL();
Mutex::Autolock icl(mICameraLock);
- status_t res;
// Allow both client and the media server to disconnect at all times
int callingPid = getCallingPid();
@@ -575,7 +570,7 @@
ATRACE_CALL();
ALOGV("%s: Camera %d: Flag 0x%x", __FUNCTION__, mCameraId, flag);
Mutex::Autolock icl(mICameraLock);
- status_t res;
+
if ( checkPid(__FUNCTION__) != OK) return;
SharedParameters::Lock l(mParameters);
@@ -1062,7 +1057,7 @@
return OK;
}
-status_t Camera2Client::takePicture(int msgType) {
+status_t Camera2Client::takePicture(int /*msgType*/) {
ATRACE_CALL();
Mutex::Autolock icl(mICameraLock);
status_t res;
@@ -1244,7 +1239,7 @@
return OK;
}
-status_t Camera2Client::commandStartFaceDetectionL(int type) {
+status_t Camera2Client::commandStartFaceDetectionL(int /*type*/) {
ALOGV("%s: Camera %d: Starting face detection",
__FUNCTION__, mCameraId);
status_t res;
@@ -1331,6 +1326,8 @@
}
void Camera2Client::notifyShutter(int frameNumber, nsecs_t timestamp) {
+ (void)frameNumber;
+ (void)timestamp;
ALOGV("%s: Shutter notification for frame %d at time %lld", __FUNCTION__,
frameNumber, timestamp);
}
@@ -1452,6 +1449,8 @@
}
void Camera2Client::notifyAutoWhitebalance(uint8_t newState, int triggerId) {
+ (void)newState;
+ (void)triggerId;
ALOGV("%s: Auto-whitebalance state now %d, last trigger %d",
__FUNCTION__, newState, triggerId);
}
diff --git a/services/camera/libcameraservice/Camera2Device.cpp b/services/camera/libcameraservice/Camera2Device.cpp
index d6445c1..5bfa085 100644
--- a/services/camera/libcameraservice/Camera2Device.cpp
+++ b/services/camera/libcameraservice/Camera2Device.cpp
@@ -765,7 +765,6 @@
ATRACE_CALL();
ALOGV("%s: E", __FUNCTION__);
Mutex::Autolock l(mMutex);
- status_t res;
if (mStreamSlotCount > 0) {
freeBuffers(mStreamSlot.begin(), mStreamSlot.end());
@@ -785,7 +784,7 @@
}
status_t Camera2Device::MetadataQueue::dump(int fd,
- const Vector<String16>& args) {
+ const Vector<String16>& /*args*/) {
ATRACE_CALL();
String8 result;
status_t notLocked;
@@ -894,12 +893,13 @@
{
ATRACE_CALL();
MetadataQueue *queue = getInstance(q);
+ (void)queue;
free_camera_metadata(old_buffer);
return OK;
}
int Camera2Device::MetadataQueue::producer_dequeue(
- const camera2_frame_queue_dst_ops_t *q,
+ const camera2_frame_queue_dst_ops_t * /*q*/,
size_t entries, size_t bytes,
camera_metadata_t **buffer)
{
@@ -912,7 +912,7 @@
}
int Camera2Device::MetadataQueue::producer_cancel(
- const camera2_frame_queue_dst_ops_t *q,
+ const camera2_frame_queue_dst_ops_t * /*q*/,
camera_metadata_t *old_buffer)
{
ATRACE_CALL();
@@ -1184,7 +1184,7 @@
}
status_t Camera2Device::StreamAdapter::dump(int fd,
- const Vector<String16>& args) {
+ const Vector<String16>& /*args*/) {
ATRACE_CALL();
String8 result = String8::format(" Stream %d: %d x %d, format 0x%x\n",
mId, mWidth, mHeight, mFormat);
@@ -1423,7 +1423,7 @@
}
status_t Camera2Device::ReprocessStreamAdapter::dump(int fd,
- const Vector<String16>& args) {
+ const Vector<String16>& /*args*/) {
ATRACE_CALL();
String8 result =
String8::format(" Reprocess stream %d: %d x %d, fmt 0x%x\n",
@@ -1444,7 +1444,7 @@
const camera2_stream_in_ops_t *w,
buffer_handle_t** buffer) {
ATRACE_CALL();
- int res;
+
ReprocessStreamAdapter* stream =
const_cast<ReprocessStreamAdapter*>(
static_cast<const ReprocessStreamAdapter*>(w));
diff --git a/services/camera/libcameraservice/CameraClient.cpp b/services/camera/libcameraservice/CameraClient.cpp
index b930c02..006a9c9 100644
--- a/services/camera/libcameraservice/CameraClient.cpp
+++ b/services/camera/libcameraservice/CameraClient.cpp
@@ -34,10 +34,6 @@
return IPCThreadState::self()->getCallingPid();
}
-static int getCallingUid() {
- return IPCThreadState::self()->getCallingUid();
-}
-
CameraClient::CameraClient(const sp<CameraService>& cameraService,
const sp<ICameraClient>& cameraClient,
int cameraId, int cameraFacing, int clientPid, int servicePid):
diff --git a/services/camera/libcameraservice/CameraHardwareInterface.h b/services/camera/libcameraservice/CameraHardwareInterface.h
index 05ac9fa..167b37c 100644
--- a/services/camera/libcameraservice/CameraHardwareInterface.h
+++ b/services/camera/libcameraservice/CameraHardwareInterface.h
@@ -427,7 +427,7 @@
/**
* Dump state of the camera hardware
*/
- status_t dump(int fd, const Vector<String16>& args) const
+ status_t dump(int fd, const Vector<String16>& /*args*/) const
{
ALOGV("%s(%s)", __FUNCTION__, mName.string());
if (mDevice->ops->dump)
@@ -584,9 +584,10 @@
#endif
static int __lock_buffer(struct preview_stream_ops* w,
- buffer_handle_t* buffer)
+ buffer_handle_t* /*buffer*/)
{
ANativeWindow *a = anw(w);
+ (void)a;
return 0;
}
diff --git a/services/camera/libcameraservice/camera2/BurstCapture.cpp b/services/camera/libcameraservice/camera2/BurstCapture.cpp
index f56c50c..192d419 100644
--- a/services/camera/libcameraservice/camera2/BurstCapture.cpp
+++ b/services/camera/libcameraservice/camera2/BurstCapture.cpp
@@ -38,7 +38,8 @@
BurstCapture::~BurstCapture() {
}
-status_t BurstCapture::start(Vector<CameraMetadata> &metadatas, int32_t firstCaptureId) {
+status_t BurstCapture::start(Vector<CameraMetadata> &/*metadatas*/,
+ int32_t /*firstCaptureId*/) {
ALOGE("Not completely implemented");
return INVALID_OPERATION;
}
@@ -75,7 +76,7 @@
CpuConsumer::LockedBuffer* BurstCapture::jpegEncode(
CpuConsumer::LockedBuffer *imgBuffer,
- int quality)
+ int /*quality*/)
{
ALOGV("%s", __FUNCTION__);
@@ -91,7 +92,7 @@
buffers.push_back(imgEncoded);
sp<JpegCompressor> jpeg = new JpegCompressor();
- status_t res = jpeg->start(buffers, 1);
+ jpeg->start(buffers, 1);
bool success = jpeg->waitForDone(10 * 1e9);
if(success) {
@@ -103,7 +104,7 @@
}
}
-status_t BurstCapture::processFrameAvailable(sp<Camera2Client> &client) {
+status_t BurstCapture::processFrameAvailable(sp<Camera2Client> &/*client*/) {
ALOGE("Not implemented");
return INVALID_OPERATION;
}
diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp
index 3e9c255..307cfab 100644
--- a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp
+++ b/services/camera/libcameraservice/camera2/CallbackProcessor.cpp
@@ -119,7 +119,6 @@
status_t CallbackProcessor::deleteStream() {
ATRACE_CALL();
- status_t res;
Mutex::Autolock l(mInputMutex);
@@ -144,7 +143,7 @@
return mCallbackStreamId;
}
-void CallbackProcessor::dump(int fd, const Vector<String16>& args) const {
+void CallbackProcessor::dump(int /*fd*/, const Vector<String16>& /*args*/) const {
}
bool CallbackProcessor::threadLoop() {
@@ -173,7 +172,6 @@
ATRACE_CALL();
status_t res;
- int callbackHeapId;
sp<Camera2Heap> callbackHeap;
size_t heapIdx;
diff --git a/services/camera/libcameraservice/camera2/CaptureSequencer.cpp b/services/camera/libcameraservice/camera2/CaptureSequencer.cpp
index fe4abc0..b228faf 100644
--- a/services/camera/libcameraservice/camera2/CaptureSequencer.cpp
+++ b/services/camera/libcameraservice/camera2/CaptureSequencer.cpp
@@ -128,7 +128,7 @@
}
-void CaptureSequencer::dump(int fd, const Vector<String16>& args) {
+void CaptureSequencer::dump(int fd, const Vector<String16>& /*args*/) {
String8 result;
if (mCaptureRequest.entryCount() != 0) {
result = " Capture request:\n";
@@ -182,7 +182,6 @@
};
bool CaptureSequencer::threadLoop() {
- status_t res;
sp<Camera2Client> client = mClient.promote();
if (client == 0) return false;
@@ -213,7 +212,8 @@
return true;
}
-CaptureSequencer::CaptureState CaptureSequencer::manageIdle(sp<Camera2Client> &client) {
+CaptureSequencer::CaptureState CaptureSequencer::manageIdle(
+ sp<Camera2Client> &/*client*/) {
status_t res;
Mutex::Autolock l(mInputMutex);
while (!mStartCapture) {
@@ -350,13 +350,13 @@
}
CaptureSequencer::CaptureState CaptureSequencer::manageZslWaiting(
- sp<Camera2Client> &client) {
+ sp<Camera2Client> &/*client*/) {
ALOGV("%s", __FUNCTION__);
return DONE;
}
CaptureSequencer::CaptureState CaptureSequencer::manageZslReprocessing(
- sp<Camera2Client> &client) {
+ sp<Camera2Client> &/*client*/) {
ALOGV("%s", __FUNCTION__);
return START;
}
@@ -378,7 +378,7 @@
}
CaptureSequencer::CaptureState CaptureSequencer::manageStandardPrecaptureWait(
- sp<Camera2Client> &client) {
+ sp<Camera2Client> &/*client*/) {
status_t res;
ATRACE_CALL();
Mutex::Autolock l(mInputMutex);
@@ -578,7 +578,7 @@
}
CaptureSequencer::CaptureState CaptureSequencer::manageBurstCaptureWait(
- sp<Camera2Client> &client) {
+ sp<Camera2Client> &/*client*/) {
status_t res;
ATRACE_CALL();
diff --git a/services/camera/libcameraservice/camera2/FrameProcessor.cpp b/services/camera/libcameraservice/camera2/FrameProcessor.cpp
index 064607c..e032522 100644
--- a/services/camera/libcameraservice/camera2/FrameProcessor.cpp
+++ b/services/camera/libcameraservice/camera2/FrameProcessor.cpp
@@ -62,7 +62,7 @@
return OK;
}
-void FrameProcessor::dump(int fd, const Vector<String16>& args) {
+void FrameProcessor::dump(int fd, const Vector<String16>& /*args*/) {
String8 result(" Latest received frame:\n");
write(fd, result.string(), result.size());
mLastFrame.dump(fd, 2, 6);
@@ -128,7 +128,6 @@
status_t FrameProcessor::processListeners(const CameraMetadata &frame,
sp<Camera2Client> &client) {
- status_t res;
ATRACE_CALL();
camera_metadata_ro_entry_t entry;
@@ -173,7 +172,7 @@
ATRACE_CALL();
camera_metadata_ro_entry_t entry;
bool enableFaceDetect;
- int maxFaces;
+
{
SharedParameters::Lock l(client->getParameters());
enableFaceDetect = l.mParameters.enableFaceDetect;
diff --git a/services/camera/libcameraservice/camera2/JpegCompressor.cpp b/services/camera/libcameraservice/camera2/JpegCompressor.cpp
index 702ef58..c9af71e 100644
--- a/services/camera/libcameraservice/camera2/JpegCompressor.cpp
+++ b/services/camera/libcameraservice/camera2/JpegCompressor.cpp
@@ -144,7 +144,7 @@
}
// old function -- TODO: update for new buffer type
-bool JpegCompressor::isStreamInUse(uint32_t id) {
+bool JpegCompressor::isStreamInUse(uint32_t /*id*/) {
ALOGV("%s", __FUNCTION__);
Mutex::Autolock lock(mBusyMutex);
@@ -203,14 +203,15 @@
dest->free_in_buffer = kMaxJpegSize;
}
-boolean JpegCompressor::jpegEmptyOutputBuffer(j_compress_ptr cinfo) {
+boolean JpegCompressor::jpegEmptyOutputBuffer(j_compress_ptr /*cinfo*/) {
ALOGV("%s", __FUNCTION__);
ALOGE("%s: JPEG destination buffer overflow!",
__FUNCTION__);
return true;
}
void JpegCompressor::jpegTermDestination(j_compress_ptr cinfo) {
+    (void)cinfo; // only referenced by the ALOGV below
ALOGV("%s", __FUNCTION__);
ALOGV("%s: Done writing JPEG data. %d bytes left in buffer",
__FUNCTION__, cinfo->dest->free_in_buffer);
diff --git a/services/camera/libcameraservice/camera2/JpegProcessor.cpp b/services/camera/libcameraservice/camera2/JpegProcessor.cpp
index ffc072b..6280f83 100644
--- a/services/camera/libcameraservice/camera2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/camera2/JpegProcessor.cpp
@@ -139,7 +139,6 @@
status_t JpegProcessor::deleteStream() {
ATRACE_CALL();
- status_t res;
Mutex::Autolock l(mInputMutex);
@@ -164,7 +163,7 @@
return mCaptureStreamId;
}
-void JpegProcessor::dump(int fd, const Vector<String16>& args) const {
+void JpegProcessor::dump(int /*fd*/, const Vector<String16>& /*args*/) const {
}
bool JpegProcessor::threadLoop() {
@@ -356,7 +355,7 @@
// Find End of Image
// Scan JPEG buffer until End of Image (EOI)
bool foundEnd = false;
- for (size; size <= maxSize - MARKER_LENGTH; size++) {
+ for ( ; size <= maxSize - MARKER_LENGTH; size++) {
if ( checkJpegEnd(jpegBuffer + size) ) {
foundEnd = true;
size += MARKER_LENGTH;
diff --git a/services/camera/libcameraservice/camera2/Parameters.cpp b/services/camera/libcameraservice/camera2/Parameters.cpp
index e9e5e79..93927e6 100644
--- a/services/camera/libcameraservice/camera2/Parameters.cpp
+++ b/services/camera/libcameraservice/camera2/Parameters.cpp
@@ -951,7 +951,6 @@
camera_metadata_ro_entry_t Parameters::staticInfo(uint32_t tag,
size_t minCount, size_t maxCount) const {
- status_t res;
camera_metadata_ro_entry_t entry = info->find(tag);
if (CC_UNLIKELY( entry.count == 0 )) {
@@ -2430,7 +2429,7 @@
return crop;
}
-int32_t Parameters::fpsFromRange(int32_t min, int32_t max) const {
+int32_t Parameters::fpsFromRange(int32_t /*min*/, int32_t max) const {
return max;
}
diff --git a/services/camera/libcameraservice/camera2/StreamingProcessor.cpp b/services/camera/libcameraservice/camera2/StreamingProcessor.cpp
index 207f780..6ea27b2 100644
--- a/services/camera/libcameraservice/camera2/StreamingProcessor.cpp
+++ b/services/camera/libcameraservice/camera2/StreamingProcessor.cpp
@@ -447,7 +447,6 @@
ATRACE_CALL();
Mutex::Autolock m(mMutex);
- status_t res;
mPreviewRequestId++;
if (mPreviewRequestId >= Camera2Client::kPreviewRequestIdEnd) {
mPreviewRequestId = Camera2Client::kPreviewRequestIdStart;
@@ -628,7 +627,7 @@
}
-status_t StreamingProcessor::dump(int fd, const Vector<String16>& args) {
+status_t StreamingProcessor::dump(int fd, const Vector<String16>& /*args*/) {
String8 result;
result.append(" Current requests:\n");
diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.cpp b/services/camera/libcameraservice/camera2/ZslProcessor.cpp
index 1937955..9584028 100644
--- a/services/camera/libcameraservice/camera2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/camera2/ZslProcessor.cpp
@@ -69,11 +69,12 @@
}
}
-void ZslProcessor::onFrameAvailable(int32_t frameId, const CameraMetadata &frame) {
+void ZslProcessor::onFrameAvailable(int32_t /*frameId*/, const CameraMetadata &frame) {
Mutex::Autolock l(mInputMutex);
camera_metadata_ro_entry_t entry;
entry = frame.find(ANDROID_SENSOR_TIMESTAMP);
nsecs_t timestamp = entry.data.i64[0];
+ (void)timestamp;
ALOGVV("Got preview frame for timestamp %lld", timestamp);
if (mState != RUNNING) return;
@@ -367,7 +368,7 @@
return OK;
}
-void ZslProcessor::dump(int fd, const Vector<String16>& args) const {
+void ZslProcessor::dump(int fd, const Vector<String16>& /*args*/) const {
Mutex::Autolock l(mInputMutex);
if (!mLatestCapturedRequest.isEmpty()) {
String8 result(" Latest ZSL capture request:\n");