merge in ics-mr0-release history after reset to ics-mr0
diff --git a/jni/feature_mos/src/mosaic/Blend.cpp b/jni/feature_mos/src/mosaic/Blend.cpp
index 1f3d93c..18972ee 100644
--- a/jni/feature_mos/src/mosaic/Blend.cpp
+++ b/jni/feature_mos/src/mosaic/Blend.cpp
@@ -416,40 +416,49 @@
// the two component images,
int tw = STRIP_CROSS_FADE_WIDTH * width;
- for(int y = 0; y < imgMos.Y.height; y++)
+ // Proceed with the image index calculation for cross-fading
+ // only if the cross-fading width is larger than 0
+ if (tw > 0)
{
- for(int x = tw; x < imgMos.Y.width - tw + 1; )
+ for(int y = 0; y < imgMos.Y.height; y++)
{
- // Determine where the seam is...
- if (imgMos.Y.ptr[y][x] != imgMos.Y.ptr[y][x+1] &&
- imgMos.Y.ptr[y][x] != 255 &&
- imgMos.Y.ptr[y][x+1] != 255)
+            // Since we compare two adjacent pixels to determine
+            // whether there is a seam, the termination condition of x
+            // is set to imgMos.Y.width - tw, so that x+1 below
+            // won't exceed the imgMos's boundary.
+ for(int x = tw; x < imgMos.Y.width - tw; )
{
- // Find the image indices on both sides of the seam
- unsigned char idx1 = imgMos.Y.ptr[y][x];
- unsigned char idx2 = imgMos.Y.ptr[y][x+1];
-
- for (int o = tw; o >= 0; o--)
+ // Determine where the seam is...
+ if (imgMos.Y.ptr[y][x] != imgMos.Y.ptr[y][x+1] &&
+ imgMos.Y.ptr[y][x] != 255 &&
+ imgMos.Y.ptr[y][x+1] != 255)
{
- // Set the image index to use for cross-fading
- imgMos.V.ptr[y][x - o] = idx2;
- // Set the intensity weights to use for cross-fading
- imgMos.U.ptr[y][x - o] = 50 + (99 - 50) * o / tw;
- }
+ // Find the image indices on both sides of the seam
+ unsigned char idx1 = imgMos.Y.ptr[y][x];
+ unsigned char idx2 = imgMos.Y.ptr[y][x+1];
- for (int o = 1; o <= tw; o++)
+ for (int o = tw; o >= 0; o--)
+ {
+ // Set the image index to use for cross-fading
+ imgMos.V.ptr[y][x - o] = idx2;
+ // Set the intensity weights to use for cross-fading
+ imgMos.U.ptr[y][x - o] = 50 + (99 - 50) * o / tw;
+ }
+
+ for (int o = 1; o <= tw; o++)
+ {
+ // Set the image index to use for cross-fading
+ imgMos.V.ptr[y][x + o] = idx1;
+ // Set the intensity weights to use for cross-fading
+ imgMos.U.ptr[y][x + o] = imgMos.U.ptr[y][x - o];
+ }
+
+ x += (tw + 1);
+ }
+ else
{
- // Set the image index to use for cross-fading
- imgMos.V.ptr[y][x + o] = idx1;
- // Set the intensity weights to use for cross-fading
- imgMos.U.ptr[y][x + o] = imgMos.U.ptr[y][x - o];
+ x++;
}
-
- x += (tw + 1);
- }
- else
- {
- x++;
}
}
}
@@ -460,40 +469,49 @@
// the two component images,
int tw = STRIP_CROSS_FADE_WIDTH * height;
- for(int x = 0; x < imgMos.Y.width; x++)
+ // Proceed with the image index calculation for cross-fading
+ // only if the cross-fading width is larger than 0
+ if (tw > 0)
{
- for(int y = tw; y < imgMos.Y.height - tw + 1; )
+ for(int x = 0; x < imgMos.Y.width; x++)
{
- // Determine where the seam is...
- if (imgMos.Y.ptr[y][x] != imgMos.Y.ptr[y+1][x] &&
- imgMos.Y.ptr[y][x] != 255 &&
- imgMos.Y.ptr[y+1][x] != 255)
+            // Since we compare two adjacent pixels to determine
+            // whether there is a seam, the termination condition of y
+            // is set to imgMos.Y.height - tw, so that y+1 below
+            // won't exceed the imgMos's boundary.
+ for(int y = tw; y < imgMos.Y.height - tw; )
{
- // Find the image indices on both sides of the seam
- unsigned char idx1 = imgMos.Y.ptr[y][x];
- unsigned char idx2 = imgMos.Y.ptr[y+1][x];
-
- for (int o = tw; o >= 0; o--)
+ // Determine where the seam is...
+ if (imgMos.Y.ptr[y][x] != imgMos.Y.ptr[y+1][x] &&
+ imgMos.Y.ptr[y][x] != 255 &&
+ imgMos.Y.ptr[y+1][x] != 255)
{
- // Set the image index to use for cross-fading
- imgMos.V.ptr[y - o][x] = idx2;
- // Set the intensity weights to use for cross-fading
- imgMos.U.ptr[y - o][x] = 50 + (99 - 50) * o / tw;
- }
+ // Find the image indices on both sides of the seam
+ unsigned char idx1 = imgMos.Y.ptr[y][x];
+ unsigned char idx2 = imgMos.Y.ptr[y+1][x];
- for (int o = 1; o <= tw; o++)
+ for (int o = tw; o >= 0; o--)
+ {
+ // Set the image index to use for cross-fading
+ imgMos.V.ptr[y - o][x] = idx2;
+ // Set the intensity weights to use for cross-fading
+ imgMos.U.ptr[y - o][x] = 50 + (99 - 50) * o / tw;
+ }
+
+ for (int o = 1; o <= tw; o++)
+ {
+ // Set the image index to use for cross-fading
+ imgMos.V.ptr[y + o][x] = idx1;
+ // Set the intensity weights to use for cross-fading
+ imgMos.U.ptr[y + o][x] = imgMos.U.ptr[y - o][x];
+ }
+
+ y += (tw + 1);
+ }
+ else
{
- // Set the image index to use for cross-fading
- imgMos.V.ptr[y + o][x] = idx1;
- // Set the intensity weights to use for cross-fading
- imgMos.U.ptr[y + o][x] = imgMos.U.ptr[y - o][x];
+ y++;
}
-
- y += (tw + 1);
- }
- else
- {
- y++;
}
}
}
diff --git a/jni/feature_mos/src/mosaic/Blend.h b/jni/feature_mos/src/mosaic/Blend.h
index a5bc05b..80bb577 100644
--- a/jni/feature_mos/src/mosaic/Blend.h
+++ b/jni/feature_mos/src/mosaic/Blend.h
@@ -42,7 +42,7 @@
// This threshold determines the number of pixels on either side of the strip
// to cross-fade using the images contributing to each seam. This threshold
// is specified as a fraction of the input image frame width.
-const float STRIP_CROSS_FADE_WIDTH = 0.02;
+const float STRIP_CROSS_FADE_WIDTH = 0.002;
/**
* Class for pyramid blending a mosaic.
*/
diff --git a/proguard.flags b/proguard.flags
index 39948cc..5bce096 100644
--- a/proguard.flags
+++ b/proguard.flags
@@ -10,6 +10,8 @@
-keep class com.android.camera.VideoCamera {
public boolean isRecording();
+ public void onCancelBgTraining(...);
+ public void onProtectiveCurtainClick(...);
}
-keep class * extends android.app.Activity {
diff --git a/res/raw/backdropper.graph b/res/raw/backdropper.graph
index 6a3f641..3a48fc4 100644
--- a/res/raw/backdropper.graph
+++ b/res/raw/backdropper.graph
@@ -29,6 +29,7 @@
@external recordingWidth;
@external recordingHeight;
@external recordingProfile;
+@external recordingDoneListener;
@external audioSource;
@external previewSurface;
@@ -74,6 +75,7 @@
@filter MediaEncoderFilter recorder {
audioSource = $audioSource;
recordingProfile = $recordingProfile;
+ recordingDoneListener = $recordingDoneListener;
recording = false;
// outputFile, orientationHint, inputRegion, listeners
// will be set when recording starts
diff --git a/res/raw/goofy_face.graph b/res/raw/goofy_face.graph
index fe2c5d9..7145033 100644
--- a/res/raw/goofy_face.graph
+++ b/res/raw/goofy_face.graph
@@ -31,6 +31,7 @@
@external recordingWidth;
@external recordingHeight;
@external recordingProfile;
+@external recordingDoneListener;
@external audioSource;
@external previewSurface;
@@ -98,6 +99,7 @@
@filter MediaEncoderFilter recorder {
audioSource = $audioSource;
recordingProfile = $recordingProfile;
+ recordingDoneListener = $recordingDoneListener;
recording = false;
// outputFile, orientationHint, inputRegion, listeners
// will be set when recording starts
diff --git a/src/com/android/camera/Camera.java b/src/com/android/camera/Camera.java
index 79ab74f..8cc5816 100644
--- a/src/com/android/camera/Camera.java
+++ b/src/com/android/camera/Camera.java
@@ -1236,9 +1236,13 @@
private void checkStorage() {
mPicturesRemaining = Storage.getAvailableSpace();
- if (mPicturesRemaining > 0) {
- mPicturesRemaining /= 1500000;
+ if (mPicturesRemaining > Storage.LOW_STORAGE_THRESHOLD) {
+ mPicturesRemaining = (mPicturesRemaining - Storage.LOW_STORAGE_THRESHOLD)
+ / Storage.PICTURE_SIZE;
+ } else if (mPicturesRemaining > 0) {
+ mPicturesRemaining = 0;
}
+
updateStorageHint();
}
diff --git a/src/com/android/camera/EffectsRecorder.java b/src/com/android/camera/EffectsRecorder.java
index 9b3e3b1..bd0aff5 100644
--- a/src/com/android/camera/EffectsRecorder.java
+++ b/src/com/android/camera/EffectsRecorder.java
@@ -25,9 +25,10 @@
import android.filterfw.core.GraphRunner.OnRunnerDoneListener;
import android.filterfw.geometry.Point;
import android.filterfw.geometry.Quad;
-import android.filterpacks.videosrc.SurfaceTextureSource.SurfaceTextureSourceListener;
import android.filterpacks.videoproc.BackDropperFilter;
import android.filterpacks.videoproc.BackDropperFilter.LearningDoneListener;
+import android.filterpacks.videosink.MediaEncoderFilter.OnRecordingDoneListener;
+import android.filterpacks.videosrc.SurfaceTextureSource.SurfaceTextureSourceListener;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
@@ -46,6 +47,7 @@
import java.io.FileNotFoundException;
import java.io.File;
import java.lang.Runnable;
+import java.io.FileDescriptor;
/**
@@ -70,6 +72,7 @@
public static final int EFFECT_MSG_DONE_LEARNING = 1;
public static final int EFFECT_MSG_SWITCHING_EFFECT = 2;
public static final int EFFECT_MSG_EFFECTS_STOPPED = 3;
+ public static final int EFFECT_MSG_RECORDING_DONE = 4;
private Context mContext;
private Handler mHandler;
@@ -84,7 +87,9 @@
private MediaRecorder.OnErrorListener mErrorListener;
private String mOutputFile;
+ private FileDescriptor mFd;
private int mOrientationHint = 0;
+ private long mMaxFileSize = 0;
private int mCameraFacing = Camera.CameraInfo.CAMERA_FACING_BACK;
private int mEffect = EFFECT_NONE;
@@ -110,7 +115,7 @@
private static final int STATE_RELEASED = 4;
private int mState = STATE_CONFIGURE;
- private boolean mLogVerbose = true; //Log.isLoggable(TAG, Log.VERBOSE);
+ private boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
private static final String TAG = "effectsrecorder";
/** Determine if a given effect is supported at runtime
@@ -193,6 +198,31 @@
}
mOutputFile = outputFile;
+ mFd = null;
+ }
+
+ public void setOutputFile(FileDescriptor fd) {
+ switch (mState) {
+ case STATE_RECORD:
+ throw new RuntimeException("setOutputFile cannot be called while recording!");
+ case STATE_RELEASED:
+ throw new RuntimeException("setOutputFile called on an already released recorder!");
+ default:
+ break;
+ }
+
+ mOutputFile = null;
+ mFd = fd;
+ }
+
+ /**
+ * Sets the maximum filesize (in bytes) of the recording session.
+ * This will be passed on to the MediaEncoderFilter and then to the
+ * MediaRecorder ultimately. If zero or negative, the MediaRecorder will
+ * disable the limit
+ */
+ public synchronized void setMaxFileSize(long maxFileSize) {
+ mMaxFileSize = maxFileSize;
}
public void setPreviewDisplay(SurfaceHolder previewSurfaceHolder,
@@ -353,7 +383,8 @@
"recordingHeight", mProfile.videoFrameHeight,
"recordingProfile", mProfile,
"audioSource", MediaRecorder.AudioSource.CAMCORDER,
- "learningDoneListener", mLearningDoneListener);
+ "learningDoneListener", mLearningDoneListener,
+ "recordingDoneListener", mRecordingDoneListener);
mRunner = null;
mGraphId = -1;
@@ -525,6 +556,16 @@
}
};
+ // A callback to finalize the media after the recording is done.
+ private OnRecordingDoneListener mRecordingDoneListener =
+ new OnRecordingDoneListener() {
+ // Forward the callback to the VideoCamera object (as an asynchronous event).
+ public void onRecordingDone() {
+ if (mLogVerbose) Log.v(TAG, "Recording done callback triggered");
+ sendMessage(EFFECT_NONE, EFFECT_MSG_RECORDING_DONE);
+ }
+ };
+
public synchronized void startRecording() {
if (mLogVerbose) Log.v(TAG, "Starting recording (" + this + ")");
@@ -537,15 +578,21 @@
break;
}
- if (mOutputFile == null) {
- throw new RuntimeException("No output file name provided!");
+ if ((mOutputFile == null) && (mFd == null)) {
+ throw new RuntimeException("No output file name or descriptor provided!");
}
if (mState == STATE_CONFIGURE) {
startPreview();
}
+
Filter recorder = mRunner.getGraph().getFilter("recorder");
- recorder.setInputValue("outputFile", mOutputFile);
+ if (mFd != null) {
+ recorder.setInputValue("outputFileDescriptor", mFd);
+ } else {
+ recorder.setInputValue("outputFile", mOutputFile);
+ }
+
recorder.setInputValue("orientationHint", mOrientationHint);
if (mInfoListener != null) {
recorder.setInputValue("infoListener", mInfoListener);
@@ -555,6 +602,7 @@
}
recorder.setInputValue("recording", true);
if (mRecordSound != null) mRecordSound.play();
+ recorder.setInputValue("maxFileSize", mMaxFileSize);
mState = STATE_RECORD;
}
@@ -684,6 +732,7 @@
} else if (mState != STATE_RELEASED) {
// Shutting down effects
if (mLogVerbose) Log.v(TAG, "Runner halted, restoring direct preview");
+ tryEnable3ALocks(false);
sendMessage(EFFECT_NONE, EFFECT_MSG_EFFECTS_STOPPED);
} else {
// STATE_RELEASED - camera will be/has been released as well, do nothing.
@@ -722,9 +771,14 @@
if (mEffectsListener != null) {
mHandler.post(new Runnable() {
public void run() {
- mEffectsListener.onEffectsError(exception, mOutputFile);
+ if (mFd != null) {
+ mEffectsListener.onEffectsError(exception, null);
+ } else {
+ mEffectsListener.onEffectsError(exception, mOutputFile);
+ }
}
});
}
}
+
}
diff --git a/src/com/android/camera/Storage.java b/src/com/android/camera/Storage.java
index a9f67c9..564c088 100644
--- a/src/com/android/camera/Storage.java
+++ b/src/com/android/camera/Storage.java
@@ -44,6 +44,8 @@
public static final long UNAVAILABLE = -1L;
public static final long PREPARING = -2L;
public static final long UNKNOWN_SIZE = -3L;
+    public static final long LOW_STORAGE_THRESHOLD = 50000000;
+ public static final long PICTURE_SIZE = 1500000;
private static final int BUFSIZE = 4096;
diff --git a/src/com/android/camera/VideoCamera.java b/src/com/android/camera/VideoCamera.java
index 3bf852d..df8adb9 100755
--- a/src/com/android/camera/VideoCamera.java
+++ b/src/com/android/camera/VideoCamera.java
@@ -101,8 +101,6 @@
// The reason why it is set to 0.7 is just because 1.0 is too bright.
private static final float DEFAULT_CAMERA_BRIGHTNESS = 0.7f;
- private static final long LOW_STORAGE_THRESHOLD = 512L * 1024L;
-
private static final boolean SWITCH_CAMERA = true;
private static final boolean SWITCH_VIDEO = false;
@@ -657,7 +655,7 @@
errorMessage = getString(R.string.preparing_sd);
} else if (mStorageSpace == Storage.UNKNOWN_SIZE) {
errorMessage = getString(R.string.access_sd_fail);
- } else if (mStorageSpace < LOW_STORAGE_THRESHOLD) {
+ } else if (mStorageSpace < Storage.LOW_STORAGE_THRESHOLD) {
errorMessage = getString(R.string.spaceIsLow_content);
}
@@ -1193,10 +1191,7 @@
mMediaRecorder.setPreviewDisplay(mSurfaceHolder.getSurface());
// Set maximum file size.
- // remaining >= LOW_STORAGE_THRESHOLD at this point, reserve a quarter
- // of that to make it more likely that recording can complete
- // successfully.
- long maxFileSize = mStorageSpace - LOW_STORAGE_THRESHOLD / 4;
+ long maxFileSize = mStorageSpace - Storage.LOW_STORAGE_THRESHOLD;
if (requestedSizeLimit > 0 && requestedSizeLimit < maxFileSize) {
maxFileSize = requestedSizeLimit;
}
@@ -1270,7 +1265,7 @@
mSurfaceHeight);
if (mEffectType == EffectsRecorder.EFFECT_BACKDROPPER &&
- ((String) mEffectParameter).equals(EFFECT_BG_FROM_GALLERY)) {
+ ((String) mEffectParameter).equals(EFFECT_BG_FROM_GALLERY)) {
mEffectsRecorder.setEffect(mEffectType, mEffectUriFromGallery);
} else {
mEffectsRecorder.setEffect(mEffectType, mEffectParameter);
@@ -1283,24 +1278,38 @@
Intent intent = getIntent();
Bundle myExtras = intent.getExtras();
+ long requestedSizeLimit = 0;
if (mIsVideoCaptureIntent && myExtras != null) {
Uri saveUri = (Uri) myExtras.getParcelable(MediaStore.EXTRA_OUTPUT);
if (saveUri != null) {
- mVideoFilename = saveUri.toString();
- } else {
- mVideoFilename = null;
+ try {
+ mVideoFileDescriptor =
+ mContentResolver.openFileDescriptor(saveUri, "rw");
+ mCurrentVideoUri = saveUri;
+ } catch (java.io.FileNotFoundException ex) {
+ // invalid uri
+ Log.e(TAG, ex.toString());
+ }
}
- } else {
- mVideoFilename = null;
+ requestedSizeLimit = myExtras.getLong(MediaStore.EXTRA_SIZE_LIMIT);
}
// TODO: Timelapse
// Set output file
- if (mVideoFilename == null) {
+ if (mVideoFileDescriptor != null) {
+ mEffectsRecorder.setOutputFile(mVideoFileDescriptor.getFileDescriptor());
+ } else {
generateVideoFilename(mProfile.fileFormat);
+ mEffectsRecorder.setOutputFile(mVideoFilename);
}
- mEffectsRecorder.setOutputFile(mVideoFilename);
+
+ // Set maximum file size.
+ long maxFileSize = mStorageSpace - Storage.LOW_STORAGE_THRESHOLD;
+ if (requestedSizeLimit > 0 && requestedSizeLimit < maxFileSize) {
+ maxFileSize = requestedSizeLimit;
+ }
+ mEffectsRecorder.setMaxFileSize(maxFileSize);
}
@@ -1462,13 +1471,12 @@
public void onInfo(MediaRecorder mr, int what, int extra) {
if (what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED) {
if (mMediaRecorderRecording) onStopVideoRecording(true);
- } else if (what
- == MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED) {
+ } else if (what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED) {
if (mMediaRecorderRecording) onStopVideoRecording(true);
// Show the toast.
Toast.makeText(this, R.string.video_reach_size_limit,
- Toast.LENGTH_LONG).show();
+ Toast.LENGTH_LONG).show();
}
}
@@ -1494,7 +1502,7 @@
Log.v(TAG, "startVideoRecording");
updateAndShowStorageHint();
- if (mStorageSpace < LOW_STORAGE_THRESHOLD) {
+ if (mStorageSpace < Storage.LOW_STORAGE_THRESHOLD) {
Log.v(TAG, "Storage issue, ignore the start request");
return;
}
@@ -1634,20 +1642,24 @@
private void stopVideoRecording() {
Log.v(TAG, "stopVideoRecording");
if (mMediaRecorderRecording) {
- boolean shouldAddToMediaStore = false;
+ boolean shouldAddToMediaStoreNow = false;
try {
if (effectsActive()) {
+                    // This is asynchronous, so we can't add to media store now because thumbnail
+ // may not be ready. In such case addVideoToMediaStore is called later
+ // through a callback from the MediaEncoderFilter to EffectsRecorder,
+ // and then to the VideoCamera.
mEffectsRecorder.stopRecording();
} else {
mMediaRecorder.setOnErrorListener(null);
mMediaRecorder.setOnInfoListener(null);
mMediaRecorder.stop();
+ shouldAddToMediaStoreNow = true;
}
mCurrentVideoFilename = mVideoFilename;
Log.v(TAG, "Setting current video filename: "
+ mCurrentVideoFilename);
- shouldAddToMediaStore = true;
} catch (RuntimeException e) {
Log.e(TAG, "stop fail", e);
if (mVideoFilename != null) deleteVideoFile(mVideoFilename);
@@ -1659,7 +1671,7 @@
enableCameraControls(true);
}
keepScreenOnAwhile();
- if (shouldAddToMediaStore && mStorageSpace >= LOW_STORAGE_THRESHOLD) {
+ if (shouldAddToMediaStoreNow) {
addVideoToMediaStore();
}
}
@@ -1924,6 +1936,9 @@
// and restart regular preview.
mBgLearningMessageFrame.setVisibility(View.GONE);
checkQualityAndStartPreview();
+ } else if (effectMsg == EffectsRecorder.EFFECT_MSG_RECORDING_DONE) {
+ addVideoToMediaStore();
+ getThumbnail();
} else if (effectId == EffectsRecorder.EFFECT_BACKDROPPER) {
switch (effectMsg) {
case EffectsRecorder.EFFECT_MSG_STARTED_LEARNING: