am 9f61907c: Reconcile with ics-mr1-release

* commit '9f61907cc1991fd56c008cb5042d8d3d2d0db51f':
diff --git a/src/Android.mk b/src/Android.mk
index 6da321a..93f85a0 100644
--- a/src/Android.mk
+++ b/src/Android.mk
@@ -24,6 +24,7 @@
 LOCAL_CFLAGS += -Wno-override-init
 # -Wno-missing-field-initializers
 # optional, see comments in MPH_to.c: -DUSE_DESIGNATED_INITIALIZERS -S
+# and also see ../tools/mphgen/Makefile
 LOCAL_CFLAGS += -DUSE_DESIGNATED_INITIALIZERS -UNDEBUG
 
 LOCAL_SRC_FILES:=                     \
@@ -56,7 +57,7 @@
 LOCAL_CFLAGS += -DUSE_LOG=SLAndroidLogLevel_Info
 # or -DUSE_LOG=SLAndroidLogLevel_Verbose for verbose logging
 
-# log all API entries and exits
+# log all API entries and exits (also requires Debug or Verbose log level)
 # LOCAL_CFLAGS += -DSL_TRACE_DEFAULT=SL_TRACE_ALL
 # (otherwise a warning log on error results only)
 
diff --git a/src/ThreadPool.h b/src/ThreadPool.h
index 0e4ae94..27031a3 100644
--- a/src/ThreadPool.h
+++ b/src/ThreadPool.h
@@ -87,7 +87,7 @@
 extern void ThreadPool_deinit(ThreadPool *tp);
 extern SLresult ThreadPool_add(ThreadPool *tp, ClosureKind kind,
         ClosureHandler_generic,
-        void *cntxt1, void *cntxt2, int param1, int param2);
+        void *cntxt1, void *cntxt2, void *cntxt3, int param1, int param2);
 extern Closure *ThreadPool_remove(ThreadPool *tp);
 extern SLresult ThreadPool_add_ppi(ThreadPool *tp, ClosureHandler_ppi handler,
         void *cntxt1, void *cntxt2, int param1);
diff --git a/src/android/AacBqToPcmCbRenderer.cpp b/src/android/AacBqToPcmCbRenderer.cpp
index 41879e8..194662b 100644
--- a/src/android/AacBqToPcmCbRenderer.cpp
+++ b/src/android/AacBqToPcmCbRenderer.cpp
@@ -108,12 +108,12 @@
 }
 
 //--------------------------------------------------------------------------------------------------
-AacBqToPcmCbRenderer::AacBqToPcmCbRenderer(AudioPlayback_Parameters* params) :
+AacBqToPcmCbRenderer::AacBqToPcmCbRenderer(const AudioPlayback_Parameters* params,
+        IAndroidBufferQueue *androidBufferQueue) :
         AudioToCbRenderer(params),
-        mBqSource(0)
+        mBqSource(new BufferQueueSource(androidBufferQueue))
 {
     SL_LOGD("AacBqToPcmCbRenderer::AacBqToPcmCbRenderer()");
-
 }
 
 
@@ -124,20 +124,6 @@
 
 
 //--------------------------------------------------
-void AacBqToPcmCbRenderer::registerSourceQueueCallback(
-        const void* user, void *context,  const void *caller) {
-    SL_LOGD("AacBqToPcmCbRenderer::registerQueueCallback");
-
-    Mutex::Autolock _l(mBqSourceLock);
-
-    mBqSource = new BufferQueueSource(user, context, caller);
-
-    CHECK(mBqSource != 0);
-    SL_LOGD("AacBqToPcmCbRenderer::registerSourceQueueCallback end");
-}
-
-
-//--------------------------------------------------
 // Event handlers
 void AacBqToPcmCbRenderer::onPrepare() {
     SL_LOGD("AacBqToPcmCbRenderer::onPrepare()");
@@ -157,23 +143,7 @@
         mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] = UNKNOWN_CHANNELMASK;
     }
 
-    sp<DataSource> dataSource;
-    {
-        Mutex::Autolock _l(mBqSourceLock);
-        dataSource = mBqSource;
-    }
-    if (dataSource == 0) {
-        SL_LOGE("AacBqToPcmCbRenderer::onPrepare(): Error no data source");
-        notifyPrepared(MEDIA_ERROR_BASE);
-        return;
-    }
-
-    sp<MediaExtractor> extractor = new AacAdtsExtractor(dataSource);
-    if (extractor == 0) {
-        SL_LOGE("AacBqToPcmCbRenderer::onPrepare: Could not instantiate AAC extractor.");
-        notifyPrepared(ERROR_UNSUPPORTED);
-        return;
-    }
+    sp<MediaExtractor> extractor = new AacAdtsExtractor(mBqSource);
 
     // only decoding a single track of data
     const size_t kTrackToDecode = 0;
@@ -235,7 +205,7 @@
 
     //---------------------------------
     // The data source, and audio source (a decoder) are ready to be used
-    mDataSource = dataSource;
+    mDataSource = mBqSource;
     mAudioSource = source;
     mAudioSourceStarted = true;
 
@@ -243,6 +213,7 @@
     // signal successful completion of prepare
     mStateFlags |= kFlagPrepared;
 
+    // skipping past AudioToCbRenderer and AudioSfDecoder
     GenericPlayer::onPrepare();
 
     SL_LOGD("AacBqToPcmCbRenderer::onPrepare() done, mStateFlags=0x%x", mStateFlags);
diff --git a/src/android/AudioPlayer_to_android.cpp b/src/android/AudioPlayer_to_android.cpp
index 8f60ead..8f85e59 100644
--- a/src/android/AudioPlayer_to_android.cpp
+++ b/src/android/AudioPlayer_to_android.cpp
@@ -713,7 +713,8 @@
         ap->mAndroidObjState = ANDROID_READY;
 
         if (PLAYER_SUCCESS == data1) {
-            // Most of successful prepare completion is handled by a subclass.
+            // Most of the successful prepare completion for ap->mAPlayer
+            // is handled by GenericPlayer and its subclasses.
         } else {
             // SfPlayer prepare() failed prefetching, there is no event in SLPrefetchStatus to
             //  indicate a prefetch error, so we signal it by sending simultaneously two events:
@@ -869,6 +870,9 @@
       }
       break;
 
+    case android::GenericPlayer::kEventHasVideoSize:
+        //SL_LOGW("Unexpected kEventHasVideoSize");
+        break;
 
     default:
         break;
@@ -1232,6 +1236,9 @@
         audioTrack_handleUnderrun_lockPlay(ap);
         break;
 
+    case android::AudioTrack::EVENT_BUFFER_END:
+    case android::AudioTrack::EVENT_LOOP_END:
+        // These are unexpected so fall through
     default:
         // FIXME where does the notification of SL_PLAYEVENT_HEADMOVING fit?
         SL_LOGE("Encountered unknown AudioTrack event %d for CAudioPlayer %p", event,
@@ -1356,7 +1363,12 @@
     SLresult result = SL_RESULT_SUCCESS;
     SL_LOGV("Realize pAudioPlayer=%p", pAudioPlayer);
 
+    AudioPlayback_Parameters app;
+    app.sessionId = pAudioPlayer->mSessionId;
+    app.streamType = pAudioPlayer->mStreamType;
+
     switch (pAudioPlayer->mAndroidObjType) {
+
     //-----------------------------------
     // AudioTrack
     case AUDIOPLAYER_FROM_PCM_BUFFERQUEUE:
@@ -1370,7 +1382,7 @@
 
         uint32_t sampleRate = sles_to_android_sampleRate(df_pcm->samplesPerSec);
 
-        pAudioPlayer->mAudioTrack = new android::AudioTrackProxy(new android::AudioTrack(
+        pAudioPlayer->mAudioTrack = new android::AudioTrack(
                 pAudioPlayer->mStreamType,                           // streamType
                 sampleRate,                                          // sampleRate
                 sles_to_android_sampleFormat(df_pcm->bitsPerSample), // format
@@ -1382,7 +1394,7 @@
                 (void *) pAudioPlayer,                               // user
                 0      // FIXME find appropriate frame count         // notificationFrame
                 , pAudioPlayer->mSessionId
-                ));
+                );
         android::status_t status = pAudioPlayer->mAudioTrack->initCheck();
         if (status != android::NO_ERROR) {
             SL_LOGE("AudioTrack::initCheck status %u", status);
@@ -1400,18 +1412,10 @@
         pAudioPlayer->mAndroidObjState = ANDROID_READY;
         }
         break;
+
     //-----------------------------------
     // MediaPlayer
     case AUDIOPLAYER_FROM_URIFD: {
-        assert(pAudioPlayer->mAndroidObjState == ANDROID_UNINITIALIZED);
-        assert(pAudioPlayer->mNumChannels == UNKNOWN_NUMCHANNELS);
-        assert(pAudioPlayer->mSampleRateMilliHz == UNKNOWN_SAMPLERATE);
-        assert(pAudioPlayer->mAudioTrack == 0);
-
-        AudioPlayback_Parameters app;
-        app.sessionId = pAudioPlayer->mSessionId;
-        app.streamType = pAudioPlayer->mStreamType;
-
         pAudioPlayer->mAPlayer = new android::LocAVPlayer(&app, false /*hasVideo*/);
         pAudioPlayer->mAPlayer->init(sfplayer_handlePrefetchEvent,
                         (void*)pAudioPlayer /*notifUSer*/);
@@ -1469,25 +1473,20 @@
 
         }
         break;
+
     //-----------------------------------
     // StreamPlayer
     case AUDIOPLAYER_FROM_TS_ANDROIDBUFFERQUEUE: {
-        AudioPlayback_Parameters ap_params;
-        ap_params.sessionId = pAudioPlayer->mSessionId;
-        ap_params.streamType = pAudioPlayer->mStreamType;
-        android::StreamPlayer* splr = new android::StreamPlayer(&ap_params, false /*hasVideo*/,
+        android::StreamPlayer* splr = new android::StreamPlayer(&app, false /*hasVideo*/,
                 &pAudioPlayer->mAndroidBufferQueue, pAudioPlayer->mCallbackProtector);
         pAudioPlayer->mAPlayer = splr;
         splr->init(sfplayer_handlePrefetchEvent, (void*)pAudioPlayer);
         }
         break;
+
     //-----------------------------------
     // AudioToCbRenderer
     case AUDIOPLAYER_FROM_URIFD_TO_PCM_BUFFERQUEUE: {
-        AudioPlayback_Parameters app;
-        app.sessionId = pAudioPlayer->mSessionId;
-        app.streamType = pAudioPlayer->mStreamType;
-
         android::AudioToCbRenderer* decoder = new android::AudioToCbRenderer(&app);
         pAudioPlayer->mAPlayer = decoder;
         // configures the callback for the sink buffer queue
@@ -1516,13 +1515,12 @@
 
         }
         break;
+
     //-----------------------------------
     // AacBqToPcmCbRenderer
     case AUDIOPLAYER_FROM_ADTS_ABQ_TO_PCM_BUFFERQUEUE: {
-        AudioPlayback_Parameters app;
-        app.sessionId = pAudioPlayer->mSessionId;
-        app.streamType = pAudioPlayer->mStreamType;
-        android::AacBqToPcmCbRenderer* bqtobq = new android::AacBqToPcmCbRenderer(&app);
+        android::AacBqToPcmCbRenderer* bqtobq = new android::AacBqToPcmCbRenderer(&app,
+                &pAudioPlayer->mAndroidBufferQueue);
         // configures the callback for the sink buffer queue
         bqtobq->setDataPushListener(adecoder_writeToBufferQueue, pAudioPlayer);
         pAudioPlayer->mAPlayer = bqtobq;
@@ -1531,6 +1529,7 @@
         pAudioPlayer->mAPlayer->init(sfplayer_handlePrefetchEvent, (void*)pAudioPlayer);
         }
         break;
+
     //-----------------------------------
     default:
         SL_LOGE(ERROR_PLAYERREALIZE_UNEXPECTED_OBJECT_TYPE_D, pAudioPlayer->mAndroidObjType);
@@ -1538,7 +1537,6 @@
         break;
     }
 
-
     // proceed with effect initialization
     // initialize EQ
     // FIXME use a table of effect descriptors when adding support for more effects
@@ -2147,30 +2145,6 @@
 
 
 //-----------------------------------------------------------------------------
-SLresult android_audioPlayer_androidBufferQueue_registerCallback_l(CAudioPlayer *ap) {
-    SLresult result = SL_RESULT_SUCCESS;
-    assert(ap->mAPlayer != 0);
-    // FIXME investigate why these two cases are not handled symmetrically any more
-    switch (ap->mAndroidObjType) {
-      case AUDIOPLAYER_FROM_TS_ANDROIDBUFFERQUEUE: {
-        } break;
-      case AUDIOPLAYER_FROM_ADTS_ABQ_TO_PCM_BUFFERQUEUE: {
-          android::AacBqToPcmCbRenderer* dec =
-                  static_cast<android::AacBqToPcmCbRenderer*>(ap->mAPlayer.get());
-          dec->registerSourceQueueCallback((const void*)ap /*user*/,
-                  ap->mAndroidBufferQueue.mContext /*context*/,
-                  (const void*)&(ap->mAndroidBufferQueue.mItf) /*caller*/);
-        } break;
-      default:
-        SL_LOGE("Error registering AndroidBufferQueue callback: unexpected object type %d",
-                ap->mAndroidObjType);
-        result = SL_RESULT_INTERNAL_ERROR;
-        break;
-    }
-    return result;
-}
-
-//-----------------------------------------------------------------------------
 void android_audioPlayer_androidBufferQueue_clear_l(CAudioPlayer *ap) {
     switch (ap->mAndroidObjType) {
     case AUDIOPLAYER_FROM_TS_ANDROIDBUFFERQUEUE:
diff --git a/src/android/AudioPlayer_to_android.h b/src/android/AudioPlayer_to_android.h
index 96855e6..430b622 100644
--- a/src/android/AudioPlayer_to_android.h
+++ b/src/android/AudioPlayer_to_android.h
@@ -138,9 +138,6 @@
  * Android Buffer Queue
  ****************************/
 /* must be called with a lock on pAudioPlayer->mThis */
-extern SLresult android_audioPlayer_androidBufferQueue_registerCallback_l(
-        CAudioPlayer *pAudioPlayer);
-/* must be called with a lock on pAudioPlayer->mThis */
 extern void android_audioPlayer_androidBufferQueue_clear_l(CAudioPlayer *pAudioPlayer);
 /* must be called with a lock on pAudioPlayer->mThis */
 extern void android_audioPlayer_androidBufferQueue_onRefilled_l(CAudioPlayer *pAudioPlayer);
diff --git a/src/android/AudioTrackProxy.h b/src/android/AudioTrackProxy.h
deleted file mode 100644
index 6b711b8..0000000
--- a/src/android/AudioTrackProxy.h
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// sp<> capable proxy for AudioTrack
-
-#include <media/AudioTrack.h>
-
-namespace android {
-
-class AudioTrackProxy : public RefBase {
-
-public:
-
-    AudioTrackProxy(AudioTrack *raw) : mRaw(raw) { assert(raw != NULL); }
-
-    // don't define all methods, just the ones needed
-
-    void setVolume(float left, float right)
-            { mRaw->setVolume(left, right); }
-    void stop()
-            { mRaw->stop(); }
-    void start()
-            { mRaw->start(); }
-    status_t initCheck()
-            { return mRaw->initCheck(); }
-    status_t setSampleRate(int sampleRate)
-            { return mRaw->setSampleRate(sampleRate); }
-    void pause()
-            { mRaw->pause(); }
-    void getPosition(uint32_t *p)
-            { mRaw->getPosition(p); }
-    void mute(bool muted)
-            { mRaw->mute(muted); }
-    void flush()
-            { mRaw->flush(); }
-    void setMarkerPosition(uint32_t marker)
-            { mRaw->setMarkerPosition(marker); }
-    void setPositionUpdatePeriod(uint32_t updatePeriod)
-            { mRaw->setPositionUpdatePeriod(updatePeriod); }
-    status_t attachAuxEffect(int effectId)
-            { return mRaw->attachAuxEffect(effectId); }
-    status_t setAuxEffectSendLevel(float level)
-            { return mRaw->setAuxEffectSendLevel(level); }
-
-protected:
-
-    virtual ~AudioTrackProxy()
-            { }
-    virtual void onLastStrongRef(const void* id)
-            {
-                assert(mRaw != NULL);
-                delete mRaw;
-                mRaw = NULL;
-            }
-
-private:
-    android::AudioTrack *mRaw;
-
-    AudioTrackProxy(const AudioTrackProxy &);
-    AudioTrackProxy &operator=(const AudioTrackProxy &);
-};
-
-}
diff --git a/src/android/BufferQueueSource.cpp b/src/android/BufferQueueSource.cpp
index 323a3a0..9257f53 100644
--- a/src/android/BufferQueueSource.cpp
+++ b/src/android/BufferQueueSource.cpp
@@ -36,17 +36,11 @@
 };
 
 
-BufferQueueSource::BufferQueueSource(const void* user, void *context,  const void *caller) :
-          mAndroidBufferQueueSource(NULL),
+BufferQueueSource::BufferQueueSource(IAndroidBufferQueue *androidBufferQueue) :
+          mAndroidBufferQueueSource(androidBufferQueue),
           mStreamToBqOffset(0),
           mEosReached(false)
 {
-    if (NULL != user) {
-        mAndroidBufferQueueSource = &((CAudioPlayer*)user)->mAndroidBufferQueue;
-    } else {
-        SL_LOGE("Can't create BufferQueueSource with NULL user");
-    }
-
 }
 
 
@@ -172,7 +166,7 @@
     SL_LOGD("BufferQueueSource::getSize()");
     // we're streaming, we don't know how much there is
     *size = 0;
-    return OK;
+    return ERROR_UNSUPPORTED;
 }
 
 }  // namespace android
diff --git a/src/android/BufferQueueSource.h b/src/android/BufferQueueSource.h
index c91b7b1..c362df2 100644
--- a/src/android/BufferQueueSource.h
+++ b/src/android/BufferQueueSource.h
@@ -33,7 +33,7 @@
     // store an item structure to indicate a processed buffer
     static const SLuint32 kItemProcessed[NB_BUFFEREVENT_ITEM_FIELDS];
 
-    BufferQueueSource(const void* user, void *context,  const void *caller);
+    BufferQueueSource(IAndroidBufferQueue *androidBufferQueue);
 
     virtual status_t initCheck() const;
 
@@ -45,7 +45,7 @@
 
 private:
     // the Android Buffer Queue from which data is consumed
-    IAndroidBufferQueue* mAndroidBufferQueueSource;
+    IAndroidBufferQueue* const mAndroidBufferQueueSource;
 
     // a monotonically increasing offset used to translate an offset from the beginning
     // of the stream, to an offset in each buffer from the buffer queue source
diff --git a/src/android/MediaPlayer_to_android.cpp b/src/android/MediaPlayer_to_android.cpp
index 083738d..692aa5c 100644
--- a/src/android/MediaPlayer_to_android.cpp
+++ b/src/android/MediaPlayer_to_android.cpp
@@ -52,36 +52,45 @@
     switch(event) {
 
       case android::GenericPlayer::kEventPrepared: {
-
-        SL_LOGV("Received AVPlayer::kEventPrepared for CMediaPlayer %p", mp);
+        SL_LOGV("Received GenericPlayer::kEventPrepared for CMediaPlayer %p", mp);
 
         // assume no callback
         slPrefetchCallback callback = NULL;
-        void* callbackPContext = NULL;
+        void* callbackPContext;
+        XAuint32 events;
 
         object_lock_exclusive(&mp->mObject);
-        // mark object as prepared; same state is used for successfully or unsuccessful prepare
+
+        // mark object as prepared; same state is used for successful or unsuccessful prepare
+        assert(mp->mAndroidObjState == ANDROID_PREPARING);
         mp->mAndroidObjState = ANDROID_READY;
 
-        // AVPlayer prepare() failed prefetching, there is no event in XAPrefetchStatus to
-        //  indicate a prefetch error, so we signal it by sending simulataneously two events:
-        //  - SL_PREFETCHEVENT_FILLLEVELCHANGE with a level of 0
-        //  - SL_PREFETCHEVENT_STATUSCHANGE with a status of SL_PREFETCHSTATUS_UNDERFLOW
-        if (PLAYER_SUCCESS != data1 && IsInterfaceInitialized(&mp->mObject, MPH_XAPREFETCHSTATUS)) {
-            mp->mPrefetchStatus.mLevel = 0;
-            mp->mPrefetchStatus.mStatus = SL_PREFETCHSTATUS_UNDERFLOW;
-            if (!(~mp->mPrefetchStatus.mCallbackEventsMask &
-                    (SL_PREFETCHEVENT_FILLLEVELCHANGE | SL_PREFETCHEVENT_STATUSCHANGE))) {
-                callback = mp->mPrefetchStatus.mCallback;
-                callbackPContext = mp->mPrefetchStatus.mContext;
+        if (PLAYER_SUCCESS == data1) {
+            // Most of the successful prepare completion for mp->mAVPlayer
+            // is handled by GenericPlayer and its subclasses.
+        } else {
+            // AVPlayer prepare() failed prefetching, there is no event in XAPrefetchStatus to
+            //  indicate a prefetch error, so we signal it by sending simultaneously two events:
+            //  - SL_PREFETCHEVENT_FILLLEVELCHANGE with a level of 0
+            //  - SL_PREFETCHEVENT_STATUSCHANGE with a status of SL_PREFETCHSTATUS_UNDERFLOW
+            SL_LOGE(ERROR_PLAYER_PREFETCH_d, data1);
+            if (IsInterfaceInitialized(&mp->mObject, MPH_XAPREFETCHSTATUS)) {
+                mp->mPrefetchStatus.mLevel = 0;
+                mp->mPrefetchStatus.mStatus = SL_PREFETCHSTATUS_UNDERFLOW;
+                if (!(~mp->mPrefetchStatus.mCallbackEventsMask &
+                        (SL_PREFETCHEVENT_FILLLEVELCHANGE | SL_PREFETCHEVENT_STATUSCHANGE))) {
+                    callback = mp->mPrefetchStatus.mCallback;
+                    callbackPContext = mp->mPrefetchStatus.mContext;
+                    events = SL_PREFETCHEVENT_FILLLEVELCHANGE | SL_PREFETCHEVENT_STATUSCHANGE;
+                }
             }
         }
+
         object_unlock_exclusive(&mp->mObject);
 
         // callback with no lock held
         if (NULL != callback) {
-            (*callback)(&mp->mPrefetchStatus.mItf, callbackPContext,
-                    SL_PREFETCHEVENT_FILLLEVELCHANGE | SL_PREFETCHEVENT_STATUSCHANGE);
+            (*callback)(&mp->mPrefetchStatus.mItf, callbackPContext, events);
         }
 
         break;
@@ -113,6 +122,7 @@
         contInfo.containerInfo.numStreams = 1;
         ssize_t index = mp->mStreamInfo.mStreamInfoTable.add(streamInfo);
 
+        // callback is unconditional; there is no bitmask of enabled events
         xaStreamEventChangeCallback callback = mp->mStreamInfo.mCallback;
         void* callbackPContext = mp->mStreamInfo.mContext;
 
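
When prepare fails, the rewritten MEDIA_PREPARED case above signals underflow by pairing SL_PREFETCHEVENT_FILLLEVELCHANGE (level 0) with SL_PREFETCHEVENT_STATUSCHANGE, and it only fires the callback when both event bits are enabled in mCallbackEventsMask. The test !(~mask & (A|B)) reads as "the mask contains every bit of A|B"; a small self-contained illustration with placeholder bit values (not the real SL/XA constants):

    #include <cassert>
    #include <cstdint>

    int main()
    {
        const uint32_t kFillLevelChange = 0x1;  // placeholder bits, not the SL/XA enum values
        const uint32_t kStatusChange    = 0x2;
        const uint32_t wanted = kFillLevelChange | kStatusChange;

        uint32_t mask = kFillLevelChange | kStatusChange;
        assert(!(~mask & wanted));       // both bits enabled -> callback is delivered

        mask = kStatusChange;
        assert((~mask & wanted) != 0);   // one bit missing -> callback suppressed
        return 0;
    }
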
diff --git a/src/android/VideoCodec_to_android.cpp b/src/android/VideoCodec_to_android.cpp
index e2d8295..20aad28 100644
--- a/src/android/VideoCodec_to_android.cpp
+++ b/src/android/VideoCodec_to_android.cpp
@@ -25,8 +25,6 @@
 
 namespace android {
 
-static sp<IOMX> omx;
-
 // listed in same order as VideoCodecIds[] in file "../devices.c" with ANDROID defined
 static const char *kVideoMimeTypes[] = {
         MEDIA_MIMETYPE_VIDEO_MPEG2,
@@ -35,9 +33,11 @@
         MEDIA_MIMETYPE_VIDEO_AVC,
         MEDIA_MIMETYPE_VIDEO_VPX
 };
+// must == kMaxVideoDecoders
 static const size_t kNbVideoMimeTypes = sizeof(kVideoMimeTypes) / sizeof(kVideoMimeTypes[0]);
 
 // codec capabilities in the following arrays maps to the mime types defined in kVideoMimeTypes
+// CodecCapabilities is from OMXCodec.h
 static Vector<CodecCapabilities> VideoDecoderCapabilities[kNbVideoMimeTypes];
 static XAuint32 VideoDecoderNbProfLevel[kNbVideoMimeTypes];
 
@@ -63,7 +63,7 @@
         return false;
     }
 
-    omx = service->getOMX();
+    sp<IOMX> omx(service->getOMX());
     if (omx.get() == NULL) {
         LOGE("android_videoCodec_expose() couldn't access OMX interface");
         return false;
@@ -72,22 +72,37 @@
     // used to check whether no codecs were found, which is a sign of failure
     NbSupportedDecoderTypes = 0;
     for (size_t m = 0 ; m < kNbVideoMimeTypes ; m++) {
+        // QueryCodecs is from OMXCodec.h
         if (OK == QueryCodecs(omx, kVideoMimeTypes[m], true /* queryDecoders */,
                 true /* hwCodecOnly */, &VideoDecoderCapabilities[m])) {
-            if (!VideoDecoderCapabilities[m].empty()) {
-                NbSupportedDecoderTypes++;
-            }
-            // for each decoder of the given decoder ID, verify it is a hardware decoder
-            for (size_t c = 0 ; c < VideoDecoderCapabilities[m].size() ; c++) {
-                VideoDecoderNbProfLevel[c] = 0;
-                const String8& compName =
-                        VideoDecoderCapabilities[m].itemAt(c).mComponentName;
-                // get the number of profiles and levels for this decoder
-                VideoDecoderNbProfLevel[m] =
-                        VideoDecoderCapabilities[m].itemAt(c).mProfileLevels.size();
-                if (VideoDecoderNbProfLevel[m] != 0) {
-                    SL_LOGV("codec %d nb prof/level=%d", m, VideoDecoderNbProfLevel[m]);
-                    break;
+            if (VideoDecoderCapabilities[m].empty()) {
+                VideoDecoderNbProfLevel[m] = 0;
+            } else {
+                // get the number of profiles and levels for the first codec implementation
+                // for a given decoder ID / MIME type
+                Vector<CodecProfileLevel> &profileLevels =
+                        VideoDecoderCapabilities[m].editItemAt(0).mProfileLevels;
+#if 0   // Intentionally disabled example of making modifications to profile / level combinations
+                if (VideoDecoderIds[m] == XA_VIDEOCODEC_AVC) {
+                    // remove non-core profile / level combinations
+                    for (size_t i = 0, size = profileLevels.size(); i < size; ) {
+                        CodecProfileLevel profileLevel = profileLevels.itemAt(i);
+                        if (profileLevel.mProfile == XA_VIDEOPROFILE_AVC_BASELINE) {
+                            // either skip past this item and don't change vector size
+                            ++i;
+                        } else {
+                            // or remove this item, decrement the vector size,
+                            // and next time through the loop check a different item at same index
+                            profileLevels.removeAt(i);
+                            --size;
+                        }
+                    }
+                }
+#endif
+                if ((VideoDecoderNbProfLevel[m] = profileLevels.size()) > 0) {
+                    NbSupportedDecoderTypes++;
+                } else {
+                    VideoDecoderCapabilities[m].clear();
                 }
             }
         }
@@ -102,6 +117,9 @@
     for (size_t m = 0 ; m < kNbVideoMimeTypes ; m++) {
         VideoDecoderCapabilities[m].clear();
     }
+    // not needed
+    // memset(VideoDecoderNbProfLevel, 0, sizeof(VideoDecoderNbProfLevel));
+    // NbSupportedDecoderTypes = 0;
 }
 
 
@@ -131,16 +149,17 @@
 {
     // translate a decoder ID to an index in the codec table
     size_t decoderIndex = 0;
-    *pNb = 0;
     while (decoderIndex < kNbVideoMimeTypes) {
         if (decoderId == VideoDecoderIds[decoderIndex]) {
             *pNb = VideoDecoderNbProfLevel[decoderIndex];
-            break;
+            return XA_RESULT_SUCCESS;
         }
         decoderIndex++;
     }
 
-    return XA_RESULT_SUCCESS;
+    // spec doesn't allow a decoder to report zero profile/level combinations
+    *pNb = 0;
+    return XA_RESULT_PARAMETER_INVALID;
 }
 
 
@@ -151,13 +170,13 @@
     size_t decoderIndex = 0;
     while (decoderIndex < kNbVideoMimeTypes) {
         if (decoderId == VideoDecoderIds[decoderIndex]) {
+            // We only look at the first codec implementation for a given decoder ID / MIME type.
+            // OpenMAX AL doesn't let you expose the capabilities of multiple codec implementations.
             if (!(plIndex < VideoDecoderCapabilities[decoderIndex].itemAt(0).mProfileLevels.size()))
             {
                 // asking for invalid profile/level
                 return XA_RESULT_PARAMETER_INVALID;
             }
-            // we only look at the first codec, OpenMAX AL doesn't let you expose the capabilities
-            //  of multiple codecs
             //     set the fields we know about
             pDescr->codecId = decoderId;
             pDescr->profileSetting = convertOpenMaxIlToAl(VideoDecoderCapabilities[decoderIndex].
diff --git a/src/android/android_AudioSfDecoder.cpp b/src/android/android_AudioSfDecoder.cpp
index de07265..a29f09f 100644
--- a/src/android/android_AudioSfDecoder.cpp
+++ b/src/android/android_AudioSfDecoder.cpp
@@ -216,12 +216,14 @@
         break;
     }
 
+    // AndroidBufferQueue data source is handled by a subclass,
+    // which does not call up to this method.  Hence, the missing case.
     default:
         TRESPASS();
     }
 
     //---------------------------------
-    // Instanciate and initialize the decoder attached to the data source
+    // Instantiate and initialize the decoder attached to the data source
     sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
     if (extractor == NULL) {
         SL_LOGE("AudioSfDecoder::onPrepare: Could not instantiate extractor.");
@@ -497,6 +499,8 @@
             } else {
                 CHECK(mDecodeBuffer->meta_data()->findInt64(kKeyTime, &timeUsec));
             }
+        } else {
+            // errors are handled below
         }
     }
 
@@ -510,7 +514,7 @@
             // Note that though we've decoded this position, we haven't rendered it yet.
             // So a GetPosition called after this point will observe the advanced position,
             // even though the PCM may not have been supplied to the sink.  That's OK as
-            // we don't claim to provide frame-accurate (let alone sample-accurate) GetPosition.
+            // we don't claim to provide AAC frame-accurate (let alone sample-accurate) GetPosition.
             mLastDecodedPositionUs = timeUsec;
         }
     }
@@ -526,7 +530,7 @@
                 }
                 // handle notification and looping at end of stream
                 if (mStateFlags & kFlagPlaying) {
-                    notify(PLAYEREVENT_ENDOFSTREAM, 1, true);
+                    notify(PLAYEREVENT_ENDOFSTREAM, 1, true /*async*/);
                 }
                 if (mStateFlags & kFlagLooping) {
                     seek(0);
@@ -602,7 +606,7 @@
     // Do not call the superclass onPrepare to notify, because it uses a default error
     // status code but we can provide a more specific one.
     // GenericPlayer::onPrepare();
-    notify(PLAYEREVENT_PREPARED, (int32_t)prepareRes, true);
+    notify(PLAYEREVENT_PREPARED, (int32_t)prepareRes, true /*async*/);
     SL_LOGD("AudioSfDecoder::onPrepare() done, mStateFlags=0x%x", mStateFlags);
 }
 
@@ -708,12 +712,12 @@
             //   cache status is evaluated against duration thresholds
             if (dataRemainingUs > DURATION_CACHED_HIGH_MS*1000) {
                 mCacheStatus = kStatusHigh;
-                //LOGV("high");
+                //ALOGV("high");
             } else if (dataRemainingUs > DURATION_CACHED_MED_MS*1000) {
-                //LOGV("enough");
+                //ALOGV("enough");
                 mCacheStatus = kStatusEnough;
             } else if (dataRemainingUs < DURATION_CACHED_LOW_MS*1000) {
-                //LOGV("low");
+                //ALOGV("low");
                 mCacheStatus = kStatusLow;
             } else {
                 mCacheStatus = kStatusIntermediate;
@@ -771,6 +775,8 @@
             mPcmFormatValues[ANDROID_KEY_INDEX_PCMFORMAT_CHANNELMASK] =
                     channelCountToMask(channelCount);
         }
+        // there's no need to do a notify of PLAYEREVENT_CHANNEL_COUNT,
+        // because the only listener is for volume updates, and decoders don't support that
     }
 
     // alert users of those params
diff --git a/src/android/android_AudioToCbRenderer.cpp b/src/android/android_AudioToCbRenderer.cpp
index 1cc894b..779d19f 100644
--- a/src/android/android_AudioToCbRenderer.cpp
+++ b/src/android/android_AudioToCbRenderer.cpp
@@ -24,7 +24,8 @@
 namespace android {
 
 //--------------------------------------------------------------------------------------------------
-AudioToCbRenderer::AudioToCbRenderer(AudioPlayback_Parameters* params) : AudioSfDecoder(params),
+AudioToCbRenderer::AudioToCbRenderer(const AudioPlayback_Parameters* params) :
+        AudioSfDecoder(params),
         mDecodeCbf(NULL),
         mDecodeUser(NULL)
 {
diff --git a/src/android/android_AudioToCbRenderer.h b/src/android/android_AudioToCbRenderer.h
index 3d5658d..782e76f 100644
--- a/src/android/android_AudioToCbRenderer.h
+++ b/src/android/android_AudioToCbRenderer.h
@@ -26,7 +26,7 @@
 {
 public:
 
-    AudioToCbRenderer(AudioPlayback_Parameters* params);
+    AudioToCbRenderer(const AudioPlayback_Parameters* params);
     virtual ~AudioToCbRenderer();
 
     void setDataPushListener(const data_push_cbf_t pushCbf, CAudioPlayer* pushUser);
diff --git a/src/android/android_Effect.cpp b/src/android/android_Effect.cpp
index f277f10..a8e69ad 100644
--- a/src/android/android_Effect.cpp
+++ b/src/android/android_Effect.cpp
@@ -367,7 +367,7 @@
     uint16_t preset;
     if (android::NO_ERROR == android_prev_getPreset(ipr->mPresetReverbEffect, &preset)) {
         ipr->mPreset = preset;
-        // enable the effect is it has a effective preset loaded
+        // enable the effect if it has a preset loaded
         ipr->mPresetReverbEffect->setEnabled(SL_REVERBPRESET_NONE != preset);
     }
 }
@@ -442,6 +442,16 @@
 
 
 //-----------------------------------------------------------------------------
+/**
+ * pre-condition:
+ *    ap != NULL
+ *    for media players:
+ *      ap->mAPlayer != 0
+ *      ap->mAudioTrack == 0
+ *    for buffer queue players:
+ *      ap->mAPlayer == 0
+ *      ap->mAudioTrack != 0 is optional; if no track yet then the setting is deferred
+ */
 android::status_t android_fxSend_attach(CAudioPlayer* ap, bool attach,
         android::sp<android::AudioEffect> pFx, SLmillibel sendLevel) {
 
@@ -520,6 +530,16 @@
 }
 
 //-----------------------------------------------------------------------------
+/**
+ * pre-condition:
+ *    ap != NULL
+ *    for media players:
+ *      ap->mAPlayer != 0
+ *      ap->mAudioTrack == 0
+ *    for buffer queue players:
+ *      ap->mAPlayer == 0
+ *      ap->mAudioTrack != 0 is optional; if no track yet then the setting is deferred
+ */
 android::status_t android_fxSend_setSendLevel(CAudioPlayer* ap, SLmillibel sendLevel) {
     // we keep track of the send level, independently of the current audio player level
     ap->mAuxSendLevel = sendLevel - ap->mVolume.mLevel;
diff --git a/src/android/android_Effect.h b/src/android/android_Effect.h
index d58c72e..6ca4b5a 100644
--- a/src/android/android_Effect.h
+++ b/src/android/android_Effect.h
@@ -107,6 +107,7 @@
 /**
  * sendLevel is the total energy going to the send bus. This implies that the volume attenuation
  *   should be combined with the send level for the aux level to follow volume changes.
+ *   This one is used by Android-specific APIs, not portable Khronos APIs.
  */
 extern SLresult android_fxSend_attachToAux(CAudioPlayer* ap, SLInterfaceID pUuid,
         SLboolean attach, SLmillibel sendLevel);
diff --git a/src/android/android_GenericMediaPlayer.cpp b/src/android/android_GenericMediaPlayer.cpp
index 4abea4c..f3cebf5 100644
--- a/src/android/android_GenericMediaPlayer.cpp
+++ b/src/android/android_GenericMediaPlayer.cpp
@@ -23,6 +23,7 @@
 #include <surfaceflinger/ISurfaceComposer.h>
 #include <surfaceflinger/SurfaceComposerClient.h>
 #include <media/stagefright/foundation/ADebug.h>
+#include <media/mediaplayer.h>  // media_event_type media_error_type media_info_type
 
 // default delay in Us used when reposting an event when the player is not ready to accept
 // the command yet. This is for instance used when seeking on a MediaPlayer that's still preparing
@@ -57,20 +58,53 @@
     SL_LOGV("MediaPlayerNotificationClient::~MediaPlayerNotificationClient()");
 }
 
-// Map a MEDIA_* enum to a string
-static const char *media_to_string(int msg)
+// Map a media_event_type enum (the msg of an IMediaPlayerClient::notify) to a string or NULL
+static const char *media_event_type_to_string(media_event_type msg)
 {
     switch (msg) {
-#define _(x) case MEDIA_##x: return "MEDIA_" #x;
-      _(PREPARED)
-      _(SET_VIDEO_SIZE)
-      _(SEEK_COMPLETE)
-      _(PLAYBACK_COMPLETE)
-      _(BUFFERING_UPDATE)
-      _(ERROR)
-      _(NOP)
-      _(TIMED_TEXT)
-      _(INFO)
+#define _(code) case code: return #code;
+    _(MEDIA_NOP)
+    _(MEDIA_PREPARED)
+    _(MEDIA_PLAYBACK_COMPLETE)
+    _(MEDIA_BUFFERING_UPDATE)
+    _(MEDIA_SEEK_COMPLETE)
+    _(MEDIA_SET_VIDEO_SIZE)
+    _(MEDIA_TIMED_TEXT)
+    _(MEDIA_ERROR)
+    _(MEDIA_INFO)
+#undef _
+    default:
+        return NULL;
+    }
+}
+
+// Map a media_error_type enum (the ext1 of a MEDIA_ERROR event) to a string or NULL
+static const char *media_error_type_to_string(media_error_type err)
+{
+    switch (err) {
+#define _(code, msg) case code: return msg;
+    _(MEDIA_ERROR_UNKNOWN,                              "Unknown media error")
+    _(MEDIA_ERROR_SERVER_DIED,                          "Server died")
+    _(MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK,   "Not valid for progressive playback")
+#undef _
+    default:
+        return NULL;
+    }
+}
+
+// Map a media_info_type enum (the ext1 of a MEDIA_INFO event) to a string or NULL
+static const char *media_info_type_to_string(media_info_type info)
+{
+    switch (info) {
+#define _(code, msg) case code: return msg;
+    _(MEDIA_INFO_UNKNOWN,             "Unknown info")
+    _(MEDIA_INFO_VIDEO_TRACK_LAGGING, "Video track lagging")
+    _(MEDIA_INFO_BUFFERING_START,     "Buffering start")
+    _(MEDIA_INFO_BUFFERING_END,       "Buffering end")
+    _(MEDIA_INFO_NETWORK_BANDWIDTH,   "Network bandwidth")
+    _(MEDIA_INFO_BAD_INTERLEAVING,    "Bad interleaving")
+    _(MEDIA_INFO_NOT_SEEKABLE,        "Not seekable")
+    _(MEDIA_INFO_METADATA_UPDATE,     "Metadata update")
 #undef _
     default:
         return NULL;
@@ -81,7 +115,7 @@
 // IMediaPlayerClient implementation
 void MediaPlayerNotificationClient::notify(int msg, int ext1, int ext2, const Parcel *obj) {
     SL_LOGV("MediaPlayerNotificationClient::notify(msg=%s (%d), ext1=%d, ext2=%d)",
-            media_to_string(msg), msg, ext1, ext2);
+            media_event_type_to_string((enum media_event_type) msg), msg, ext1, ext2);
 
     sp<GenericMediaPlayer> genericMediaPlayer(mGenericMediaPlayer.promote());
     if (genericMediaPlayer == NULL) {
@@ -89,7 +123,7 @@
         return;
     }
 
-    switch (msg) {
+    switch ((media_event_type) msg) {
       case MEDIA_PREPARED:
         {
         Mutex::Autolock _l(mLock);
@@ -109,9 +143,9 @@
         // so it would normally be racy to access fields within genericMediaPlayer.
         // But in this case mHasVideo is const, so it is safe to access.
         // Or alternatively, we could notify unconditionally and let it decide whether to handle.
-        if (genericMediaPlayer->mHasVideo) {
+        if (genericMediaPlayer->mHasVideo && (ext1 != 0 || ext2 != 0)) {
             genericMediaPlayer->notify(PLAYEREVENT_VIDEO_SIZE_UPDATE,
-                    (int32_t)ext1, (int32_t)ext2, true /*async*/);
+                    (int32_t)ext1 /*width*/, (int32_t)ext2 /*height*/, true /*async*/);
         }
         break;
 
@@ -124,6 +158,14 @@
         break;
 
       case MEDIA_BUFFERING_UPDATE:
+        // if we receive any out-of-range data, then clamp it to reduce further harm
+        if (ext1 < 0) {
+            SL_LOGE("MEDIA_BUFFERING_UPDATE %d%% < 0", ext1);
+            ext1 = 0;
+        } else if (ext1 > 100) {
+            SL_LOGE("MEDIA_BUFFERING_UPDATE %d%% > 100", ext1);
+            ext1 = 100;
+        }
         // values received from Android framework for buffer fill level use percent,
         //   while SL/XA use permille, so does GenericPlayer
         genericMediaPlayer->bufferingUpdate(ext1 * 10 /*fillLevelPerMille*/);
@@ -131,6 +173,8 @@
 
       case MEDIA_ERROR:
         {
+        SL_LOGV("MediaPlayerNotificationClient::notify(msg=MEDIA_ERROR, ext1=%s (%d), ext2=%d)",
+                media_error_type_to_string((media_error_type) ext1), ext1, ext2);
         Mutex::Autolock _l(mLock);
         if (PREPARE_IN_PROGRESS == mPlayerPrepared) {
             mPlayerPrepared = PREPARE_COMPLETED_UNSUCCESSFULLY;
@@ -144,10 +188,31 @@
 
       case MEDIA_NOP:
       case MEDIA_TIMED_TEXT:
-      case MEDIA_INFO:
         break;
 
-      default: { }
+      case MEDIA_INFO:
+        SL_LOGV("MediaPlayerNotificationClient::notify(msg=MEDIA_INFO, ext1=%s (%d), ext2=%d)",
+                media_info_type_to_string((media_info_type) ext1), ext1, ext2);
+        switch (ext1) {
+        case MEDIA_INFO_VIDEO_TRACK_LAGGING:
+            SL_LOGV("MEDIA_INFO_VIDEO_TRACK_LAGGING by %d ms", ext1);
+            break;
+        case MEDIA_INFO_NETWORK_BANDWIDTH:
+            SL_LOGV("MEDIA_INFO_NETWORK_BANDWIDTH %d kbps", ext2);
+            break;
+        case MEDIA_INFO_UNKNOWN:
+        case MEDIA_INFO_BUFFERING_START:
+        case MEDIA_INFO_BUFFERING_END:
+        case MEDIA_INFO_BAD_INTERLEAVING:
+        case MEDIA_INFO_NOT_SEEKABLE:
+        case MEDIA_INFO_METADATA_UPDATE:
+        default:
+            break;
+        }
+        break;
+
+      default:
+        break;
     }
 
 }
@@ -338,6 +403,7 @@
             } else if (OK != mPlayer->seekTo(timeMsec)) {
                 mStateFlags &= ~kFlagSeeking;
                 mSeekTimeMsec = ANDROID_UNKNOWN_TIME;
+                // don't call updateOneShot because seek not yet done
             }
         }
     }
@@ -363,9 +429,9 @@
 
 void GenericMediaPlayer::onVolumeUpdate() {
     SL_LOGD("GenericMediaPlayer::onVolumeUpdate()");
-    // use settings lock to read the volume settings
-    Mutex::Autolock _l(mSettingsLock);
     if (mPlayer != 0) {
+        // use settings lock to read the volume settings
+        Mutex::Autolock _l(mSettingsLock);
         mPlayer->setVolume(mAndroidAudioLevels.mFinalVolume[0],
                 mAndroidAudioLevels.mFinalVolume[1]);
     }
diff --git a/src/android/android_GenericPlayer.cpp b/src/android/android_GenericPlayer.cpp
index 85a8b1a..219c856 100644
--- a/src/android/android_GenericPlayer.cpp
+++ b/src/android/android_GenericPlayer.cpp
@@ -46,6 +46,7 @@
 
     mLooper = new android::ALooper();
 
+    // Post-construction accesses need to be protected by mSettingsLock
     mAndroidAudioLevels.mFinalVolume[0] = 1.0f;
     mAndroidAudioLevels.mFinalVolume[1] = 1.0f;
 }
@@ -411,6 +412,7 @@
     if (msg->findInt32(PLAYEREVENT_PREFETCHSTATUSCHANGE, &val1)) {
         SL_LOGV("GenericPlayer notifying %s = %d", PLAYEREVENT_PREFETCHSTATUSCHANGE, val1);
         notifClient(kEventPrefetchStatusChange, val1, 0, notifUser);
+    // There is exactly one notification per message, hence "else if" instead of "if"
     } else if (msg->findInt32(PLAYEREVENT_PREFETCHFILLLEVELUPDATE, &val1)) {
         SL_LOGV("GenericPlayer notifying %s = %d", PLAYEREVENT_PREFETCHFILLLEVELUPDATE, val1);
         notifClient(kEventPrefetchFillLevelUpdate, val1, 0, notifUser);
@@ -595,7 +597,7 @@
         }
         if (ANDROID_UNKNOWN_TIME == positionMs) {
             // getPositionMsec is not working for some reason, give up
-            //LOGV("Does anyone really know what time it is?");
+            //ALOGV("Does anyone really know what time it is?");
             return;
         }
     }
diff --git a/src/android/android_GenericPlayer.h b/src/android/android_GenericPlayer.h
index cbea27c..d3206a1 100644
--- a/src/android/android_GenericPlayer.h
+++ b/src/android/android_GenericPlayer.h
@@ -40,6 +40,7 @@
 
 namespace android {
 
+// abstract base class
 class GenericPlayer : public AHandler
 {
 public:
@@ -97,14 +98,14 @@
     // Constants used to identify the messages in this player's AHandler message loop
     //   in onMessageReceived()
     enum {
-        kWhatPrepare         = 'prep',
-        kWhatNotif           = 'noti',
-        kWhatPlay            = 'play',
-        kWhatPause           = 'paus',
-        kWhatSeek            = 'seek',
-        kWhatSeekComplete    = 'skcp',
-        kWhatLoop            = 'loop',
-        kWhatVolumeUpdate    = 'volu',
+        kWhatPrepare         = 'prep',  // start preparation
+        kWhatNotif           = 'noti',  // send a notification to client
+        kWhatPlay            = 'play',  // start player
+        kWhatPause           = 'paus',  // pause or stop player
+        kWhatSeek            = 'seek',  // request a seek to specified position
+        kWhatSeekComplete    = 'skcp',  // seek request has completed
+        kWhatLoop            = 'loop',  // set the player's looping status
+        kWhatVolumeUpdate    = 'volu',  // set the channel gains to specified values
         kWhatBufferingUpdate = 'bufu',
         kWhatBuffUpdateThres = 'buut',
         kWhatAttachAuxEffect = 'aaux',
@@ -168,12 +169,14 @@
         kFlagPreparedUnsuccessfully = 1 << 6,
     };
 
+    // Only accessed from event loop, does not need a mutex
     uint32_t mStateFlags;
 
     sp<ALooper> mLooper;
 
-    AudioPlayback_Parameters mPlaybackParams;
+    const AudioPlayback_Parameters mPlaybackParams;
 
+    // protected by mSettingsLock after construction
     AndroidAudioLevels mAndroidAudioLevels;
 
     // protected by mSettingsLock
diff --git a/src/android/android_LocAVPlayer.cpp b/src/android/android_LocAVPlayer.cpp
index 3b23ae1..90e7b5f 100644
--- a/src/android/android_LocAVPlayer.cpp
+++ b/src/android/android_LocAVPlayer.cpp
@@ -24,7 +24,7 @@
 namespace android {
 
 //--------------------------------------------------------------------------------------------------
-LocAVPlayer::LocAVPlayer(AudioPlayback_Parameters* params, bool hasVideo) :
+LocAVPlayer::LocAVPlayer(const AudioPlayback_Parameters* params, bool hasVideo) :
         GenericMediaPlayer(params, hasVideo)
 {
     SL_LOGD("LocAVPlayer::LocAVPlayer()");
diff --git a/src/android/android_LocAVPlayer.h b/src/android/android_LocAVPlayer.h
index 64115e0..da961fb 100644
--- a/src/android/android_LocAVPlayer.h
+++ b/src/android/android_LocAVPlayer.h
@@ -22,7 +22,7 @@
 class LocAVPlayer : public GenericMediaPlayer
 {
 public:
-    LocAVPlayer(AudioPlayback_Parameters* params, bool hasVideo);
+    LocAVPlayer(const AudioPlayback_Parameters* params, bool hasVideo);
     virtual ~LocAVPlayer();
 
 protected:
diff --git a/src/android/android_StreamPlayer.cpp b/src/android/android_StreamPlayer.cpp
index b87733c..ade5280 100644
--- a/src/android/android_StreamPlayer.cpp
+++ b/src/android/android_StreamPlayer.cpp
@@ -85,7 +85,8 @@
         SLAint64 length = (SLAint64) mem->size();
 #endif
         mAvailableBuffers.push_back(index);
-        //SL_LOGD("onBufferAvailable() now %d buffers available in queue", mAvailableBuffers.size());
+        //SL_LOGD("onBufferAvailable() now %d buffers available in queue",
+        //         mAvailableBuffers.size());
     }
 
     // a new shared mem buffer is available: let's try to fill immediately
@@ -292,16 +293,13 @@
 
 
 //--------------------------------------------------------------------------------------------------
-StreamPlayer::StreamPlayer(AudioPlayback_Parameters* params, bool hasVideo,
+StreamPlayer::StreamPlayer(const AudioPlayback_Parameters* params, bool hasVideo,
         IAndroidBufferQueue *androidBufferQueue, const sp<CallbackProtector> &callbackProtector) :
         GenericMediaPlayer(params, hasVideo),
         mAppProxy(new StreamSourceAppProxy(androidBufferQueue, callbackProtector, this)),
         mStopForDestroyCompleted(false)
 {
     SL_LOGD("StreamPlayer::StreamPlayer()");
-
-    mPlaybackParams = *params;
-
 }
 
 StreamPlayer::~StreamPlayer() {
diff --git a/src/android/android_StreamPlayer.h b/src/android/android_StreamPlayer.h
index 1f520ca..cb47dbd 100644
--- a/src/android/android_StreamPlayer.h
+++ b/src/android/android_StreamPlayer.h
@@ -81,7 +81,7 @@
 class StreamPlayer : public GenericMediaPlayer
 {
 public:
-    StreamPlayer(AudioPlayback_Parameters* params, bool hasVideo,
+    StreamPlayer(const AudioPlayback_Parameters* params, bool hasVideo,
            IAndroidBufferQueue *androidBufferQueue, const sp<CallbackProtector> &callbackProtector);
     virtual ~StreamPlayer();
 
@@ -90,6 +90,8 @@
     virtual void preDestroy();
 
     void queueRefilled();
+    // Called after AndroidBufferQueue::Clear.
+    // The "_l" indicates the caller still has it's (now empty) AndroidBufferQueue locked.
     void appClear_l();
 
 protected:
diff --git a/src/android/android_defs.h b/src/android/android_defs.h
index 3106b41..5ac11e1 100644
--- a/src/android/android_defs.h
+++ b/src/android/android_defs.h
@@ -62,17 +62,18 @@
 
 #define PLAYER_FD_FIND_FILE_SIZE ((int64_t)0xFFFFFFFFFFFFFFFFll)
 
-#define MPEG2_TS_BLOCK_SIZE 188
+#define MPEG2_TS_PACKET_SIZE 188
+#define MPEG2_TS_PACKET_SYNC 0x47
 
-typedef struct AudioPlayback_Parameters_struct {
+struct AudioPlayback_Parameters {
     int streamType;
     int sessionId;
-} AudioPlayback_Parameters;
+};
 
 /**
  * Structure to maintain the set of audio levels about a player
  */
-typedef struct AndroidAudioLevels_t {
+struct AndroidAudioLevels {
     /**
      * Send level to aux effect, there's a single aux bus, so there's a single level
      */
@@ -89,7 +90,7 @@
      * (or attenuation) represented as a float from 0.0f to 1.0f
      */
     float mFinalVolume[STEREO_CHANNELS];
-} AndroidAudioLevels;
+};
 
 
 /**
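
The MPEG2_TS_BLOCK_SIZE -> MPEG2_TS_PACKET_SIZE rename and the new MPEG2_TS_PACKET_SYNC 0x47 above reflect standard MPEG-2 transport stream framing: fixed 188-byte packets, each beginning with the 0x47 sync byte. A hedged sketch of the kind of check these constants make possible (looksLikeTransportStream is illustrative, not a function in this patch):

    #include <cstddef>
    #include <cstdint>

    static const size_t  kTsPacketSize = 188;    // same value as MPEG2_TS_PACKET_SIZE
    static const uint8_t kTsPacketSync = 0x47;   // same value as MPEG2_TS_PACKET_SYNC

    // Returns true if the buffer holds a whole number of TS packets and every
    // packet starts with the sync byte.
    static bool looksLikeTransportStream(const uint8_t *data, size_t length)
    {
        if (length == 0 || (length % kTsPacketSize) != 0) {
            return false;
        }
        for (size_t offset = 0; offset < length; offset += kTsPacketSize) {
            if (data[offset] != kTsPacketSync) {
                return false;
            }
        }
        return true;
    }

    int main()
    {
        uint8_t packet[188] = { 0x47 };  // one packet: sync byte, rest zero-filled
        return looksLikeTransportStream(packet, sizeof(packet)) ? 0 : 1;
    }
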
diff --git a/src/android/include/AacBqToPcmCbRenderer.h b/src/android/include/AacBqToPcmCbRenderer.h
index cd1020c..62cbd6e 100644
--- a/src/android/include/AacBqToPcmCbRenderer.h
+++ b/src/android/include/AacBqToPcmCbRenderer.h
@@ -33,11 +33,10 @@
 {
 public:
 
-    AacBqToPcmCbRenderer(AudioPlayback_Parameters* params);
+    AacBqToPcmCbRenderer(const AudioPlayback_Parameters* params,
+            IAndroidBufferQueue *androidBufferQueue);
     virtual ~AacBqToPcmCbRenderer();
 
-    void registerSourceQueueCallback(const void* user, void *context,  const void *caller);
-
     // verifies the given memory starts and ends on ADTS frame boundaries.
     // This is for instance used whenever ADTS data is being enqueued through an
     // SL / XA AndroidBufferQueue interface so only parseable ADTS data goes in
@@ -51,9 +50,7 @@
 
 
 private:
-    // mutex used to protect mBqSource
-    Mutex                 mBqSourceLock;
-    sp<BufferQueueSource> mBqSource;
+    const sp<BufferQueueSource> mBqSource;
 
 private:
     DISALLOW_EVIL_CONSTRUCTORS(AacBqToPcmCbRenderer);
diff --git a/src/android/util/AacAdtsExtractor.cpp b/src/android/util/AacAdtsExtractor.cpp
index d405cf6..4a175bf 100644
--- a/src/android/util/AacAdtsExtractor.cpp
+++ b/src/android/util/AacAdtsExtractor.cpp
@@ -125,13 +125,14 @@
     // Never fails
     mMeta = MakeAACCodecSpecificData(profile, sf_index, channel);
 
-    off64_t offset = 0;
-    off64_t streamSize, numFrames = 0;
-    size_t frameSize = 0;
-    int64_t duration = 0;
+    // Round up and get the duration of each frame
+    mFrameDurationUs = (1024 * 1000000ll + (sr - 1)) / sr;
 
+    off64_t streamSize;
     if (mDataSource->getSize(&streamSize) == OK) {
+        off64_t offset = 0, numFrames = 0;
         while (offset < streamSize) {
+            size_t frameSize;
             if ((frameSize = getFrameSize(mDataSource, offset)) == 0) {
                 // Usually frameSize == 0 due to EOS is benign (and getFrameSize() doesn't SL_LOGE),
                 // but in this case we were told the total size of the data source and so an EOS
@@ -149,9 +150,8 @@
             numFrames ++;
         }
 
-        // Round up and get the duration
-        mFrameDurationUs = (1024 * 1000000ll + (sr - 1)) / sr;
-        duration = numFrames * mFrameDurationUs;
+        // Compute total duration
+        int64_t duration = numFrames * mFrameDurationUs;
         mMeta->setInt64(kKeyDuration, duration);
     }
 
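
The hoisted line mFrameDurationUs = (1024 * 1000000ll + (sr - 1)) / sr; is integer ceiling division: an AAC frame carries 1024 PCM samples, so each frame lasts 1024 * 1e6 / sampleRate microseconds, rounded up. A small check of that arithmetic (a sketch, not extractor code):

    #include <cassert>
    #include <cstdint>

    // ceil(1024 * 1,000,000 / sr) without floating point, as in the hunk above
    static int64_t aacFrameDurationUs(int64_t sr)
    {
        return (1024 * 1000000ll + (sr - 1)) / sr;
    }

    int main()
    {
        assert(aacFrameDurationUs(48000) == 21334);   // 21333.33... us, rounded up
        assert(aacFrameDurationUs(44100) == 23220);   // 23219.95... us, rounded up
        return 0;
    }
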
diff --git a/src/autogen/IID_to_MPH.c b/src/autogen/IID_to_MPH.c
index 35046ce..c37d07e 100644
--- a/src/autogen/IID_to_MPH.c
+++ b/src/autogen/IID_to_MPH.c
@@ -245,7 +245,6 @@
     if (&SL_IID_array[0] <= iid && &SL_IID_array[MPH_MAX] > iid)
         return iid - &SL_IID_array[0];
     if (NULL != iid) {
-        static const unsigned len = sizeof(struct SLInterfaceID_);
         unsigned key = asso_values[((unsigned char *)iid)[8]] +
             asso_values[((unsigned char *)iid)[0]];
         if (key <= MAX_HASH_VALUE) {
diff --git a/src/classes.h b/src/classes.h
index 6bbadd1..3c481ee 100644
--- a/src/classes.h
+++ b/src/classes.h
@@ -15,7 +15,6 @@
  */
 
 #ifdef ANDROID
-#include "android/AudioTrackProxy.h"
 #include "android/CallbackProtector.h"
 #include "android/android_Effect.h"
 #include "android/android_GenericPlayer.h"
@@ -99,7 +98,7 @@
     int mStreamType;
     // FIXME consolidate the next several variables into one class to avoid placement new
     /** plays the PCM data for this player */
-    android::sp<android::AudioTrackProxy> mAudioTrack;
+    android::sp<android::AudioTrack> mAudioTrack;
     android::sp<android::CallbackProtector> mCallbackProtector;
     android::sp<android::GenericPlayer> mAPlayer;
     /** aux effect the AudioTrack will be attached to if aux send enabled */
diff --git a/src/devices.c b/src/devices.c
index bb110f1..920dd5e 100644
--- a/src/devices.c
+++ b/src/devices.c
@@ -123,9 +123,10 @@
     SL_AUDIOCODEC_VORBIS
 };
 
-const SLuint32 *Decoder_IDs = Codec_IDs;
-const SLuint32 *Encoder_IDs = Codec_IDs;
+const SLuint32 * const Decoder_IDs = Codec_IDs;
+const SLuint32 * const Encoder_IDs = Codec_IDs;
 
+// for ANDROID, must match size and order of kVideoMimeTypes
 static const SLuint32 VideoCodecIds[] = {
         XA_VIDEOCODEC_MPEG2,
         XA_VIDEOCODEC_H263,
@@ -139,8 +140,9 @@
 #endif
 };
 
-const SLuint32 *VideoDecoderIds = VideoCodecIds;
+const SLuint32 * const VideoDecoderIds = VideoCodecIds;
 
+// for ANDROID, must == kNbVideoMimeTypes
 const XAuint32 kMaxVideoDecoders = sizeof(VideoCodecIds) / sizeof(VideoCodecIds[0]);
 
 static const SLmilliHertz SamplingRates_A[] = {
diff --git a/src/devices.h b/src/devices.h
index 5dd99bd..bf2814e 100644
--- a/src/devices.h
+++ b/src/devices.h
@@ -51,11 +51,13 @@
 #define MAX_DECODERS 9 ///< (sizeof(Decoder_IDs) / sizeof(Decoder_IDs[0]))
 #define MAX_ENCODERS 9 ///< (sizeof(Encoder_IDs) / sizeof(Encoder_IDs[0]))
 
-extern const XAuint32 *VideoDecoderIds;
+extern const XAuint32 * const VideoDecoderIds;
+#ifndef ANDROID
 extern const XAuint32 kMaxVideoDecoders;
+#endif
 
 // For now, but encoders might be different than decoders later
-extern const SLuint32 *Decoder_IDs, *Encoder_IDs;
+extern const SLuint32 * const Decoder_IDs, * const Encoder_IDs;
 
 extern const CodecDescriptor DecoderDescriptors[], EncoderDescriptors[];
 
diff --git a/src/handlers.c b/src/handlers.c
index 8e21734..d08c31a 100644
--- a/src/handlers.c
+++ b/src/handlers.c
@@ -19,6 +19,8 @@
 #include "attr.h"
 #include "handlers.h"
 
+#define NULL 0
+
 /* The entries in this table are sorted first by object ID, and second
  * by attribute mask. The holes in the table are deliberate to permit
  * direct lookup. Don't cross streams!
@@ -39,7 +41,9 @@
 #define _(id) ((id) - SL_OBJECTID_ENGINE + XA_OBJECTID_CAMERADEVICE + 1)
 
     [_(SL_OBJECTID_AUDIOPLAYER)] = {
+#ifdef ANDROID
         [ATTR_INDEX_GAIN]        = handler_AudioPlayer_gain,
+#endif
         [ATTR_INDEX_TRANSPORT]   = handler_AudioPlayer_transport,
         [ATTR_INDEX_POSITION]    = handler_AudioPlayer_position,
         [ATTR_INDEX_BQ_ENQUEUE]  = handler_AudioPlayer_bq_enqueue,
diff --git a/src/itf/IAndroidBufferQueue.c b/src/itf/IAndroidBufferQueue.c
index 884f637..289fbc4 100644
--- a/src/itf/IAndroidBufferQueue.c
+++ b/src/itf/IAndroidBufferQueue.c
@@ -23,7 +23,7 @@
 #include "android/include/AacBqToPcmCbRenderer.h"
 
 /**
- * Determine the state of the audio player or audio recorder associated with a buffer queue.
+ * Determine the state of the audio player or media player associated with a buffer queue.
  *  Note that PLAYSTATE and RECORDSTATE values are equivalent (where PLAYING == RECORDING).
  */
 
@@ -51,8 +51,9 @@
  * parse and set the items associated with the given buffer, based on the buffer type,
  * which determines the set of authorized items and format
  */
-static void setItems(const SLAndroidBufferItem *pItems, SLuint32 itemsLength,
-        SLuint16 bufferType, AdvancedBufferHeader *pBuff)
+static SLresult setItems(SLuint32 dataLength,
+        const SLAndroidBufferItem *pItems, SLuint32 itemsLength,
+        SLuint16 bufferType, AdvancedBufferHeader *pBuff, bool *pEOS)
 {
     // reset item structure based on type
     switch (bufferType) {
@@ -67,7 +68,7 @@
       default:
         // shouldn't happen, but just in case clear out the item structure
         memset(&pBuff->mItems, 0, sizeof(AdvancedBufferItems));
-        return;
+        return SL_RESULT_INTERNAL_ERROR;
     }
 
     // process all items in the array; if no items then we break out of loop immediately
@@ -75,16 +76,16 @@
 
         // remaining length must be large enough for one full item without any associated data
         if (itemsLength < sizeof(SLAndroidBufferItem)) {
-            SL_LOGE("Partial item at end of array ignored");
-            break;
+            SL_LOGE("Partial item at end of array");
+            return SL_RESULT_PARAMETER_INVALID;
         }
         itemsLength -= sizeof(SLAndroidBufferItem);
 
         // remaining length must be large enough for data with current item and alignment padding
         SLuint32 itemDataSizeWithAlignmentPadding = (pItems->itemSize + 3) & ~3;
         if (itemsLength < itemDataSizeWithAlignmentPadding) {
-            SL_LOGE("Partial item data at end of array ignored");
-            break;
+            SL_LOGE("Partial item data at end of array");
+            return SL_RESULT_PARAMETER_INVALID;
         }
         itemsLength -= itemDataSizeWithAlignmentPadding;
 
@@ -98,8 +99,8 @@
                 pBuff->mItems.mTsCmdData.mTsCmdCode |= ANDROID_MP2TSEVENT_EOS;
                 //SL_LOGD("Found EOS event=%d", pBuff->mItems.mTsCmdData.mTsCmdCode);
                 if (pItems->itemSize != 0) {
-                    SL_LOGE("Invalid item parameter size %u for EOS, ignoring value",
-                            pItems->itemSize);
+                    SL_LOGE("Invalid item parameter size %u for EOS", pItems->itemSize);
+                    return SL_RESULT_PARAMETER_INVALID;
                 }
                 break;
 
@@ -112,9 +113,8 @@
                     pBuff->mItems.mTsCmdData.mPts = *((SLAuint64*)pItems->itemData);
                     //SL_LOGD("Found PTS=%lld", pBuff->mItems.mTsCmdData.mPts);
                 } else {
-                    SL_LOGE("Invalid item parameter size %u for MPEG-2 PTS, ignoring value",
-                            pItems->itemSize);
-                    pBuff->mItems.mTsCmdData.mTsCmdCode |= ANDROID_MP2TSEVENT_DISCONTINUITY;
+                    SL_LOGE("Invalid item parameter size %u for MPEG-2 PTS", pItems->itemSize);
+                    return SL_RESULT_PARAMETER_INVALID;
                 }
                 break;
 
@@ -147,9 +147,8 @@
 
               default:
                 // unknown item key
-                SL_LOGE("Unknown item key %u with size %u ignored", pItems->itemKey,
-                        pItems->itemSize);
-                break;
+                SL_LOGE("Unknown item key %u with size %u", pItems->itemKey, pItems->itemSize);
+                return SL_RESULT_PARAMETER_INVALID;
 
             }// switch (pItems->itemKey)
           } break;
@@ -160,16 +159,15 @@
               case SL_ANDROID_ITEMKEY_EOS:
                 pBuff->mItems.mAdtsCmdData.mAdtsCmdCode |= ANDROID_ADTSEVENT_EOS;
                 if (pItems->itemSize != 0) {
-                    SL_LOGE("Invalid item parameter size %u for EOS, ignoring value",
-                            pItems->itemSize);
+                    SL_LOGE("Invalid item parameter size %u for EOS", pItems->itemSize);
+                    return SL_RESULT_PARAMETER_INVALID;
                 }
                 break;
 
               default:
                 // unknown item key
-                SL_LOGE("Unknown item key %u with size %u ignored", pItems->itemKey,
-                        pItems->itemSize);
-                break;
+                SL_LOGE("Unknown item key %u with size %u", pItems->itemKey, pItems->itemSize);
+                return SL_RESULT_PARAMETER_INVALID;
 
             }// switch (pItems->itemKey)
           } break;
@@ -177,7 +175,7 @@
           case kAndroidBufferTypeInvalid:
           default:
             // not reachable as we checked this earlier
-            return;
+            return SL_RESULT_INTERNAL_ERROR;
 
         }// switch (bufferType)
 
@@ -193,8 +191,14 @@
         // supported Mpeg2Ts commands are mutually exclusive
         switch (pBuff->mItems.mTsCmdData.mTsCmdCode) {
           // single items are allowed
-          case ANDROID_MP2TSEVENT_NONE:
           case ANDROID_MP2TSEVENT_EOS:
+            if (dataLength > 0) {
+                SL_LOGE("Can't enqueue non-zero data with EOS");
+                return SL_RESULT_PRECONDITIONS_VIOLATED;
+            }
+            *pEOS = true;
+            break;
+          case ANDROID_MP2TSEVENT_NONE:
           case ANDROID_MP2TSEVENT_DISCONTINUITY:
           case ANDROID_MP2TSEVENT_DISCON_NEWPTS:
           case ANDROID_MP2TSEVENT_FORMAT_CHANGE_FULL:
@@ -202,22 +206,29 @@
             break;
           // no combinations are allowed
           default:
-            SL_LOGE("Invalid combination of items; all ignored");
-            pBuff->mItems.mTsCmdData.mTsCmdCode = ANDROID_MP2TSEVENT_NONE;
-            break;
+            SL_LOGE("Invalid combination of items");
+            return SL_RESULT_PARAMETER_INVALID;
         }
       } break;
 
       case kAndroidBufferTypeAacadts: {
         // only one item supported, and thus no combination check needed
+        if (pBuff->mItems.mAdtsCmdData.mAdtsCmdCode == ANDROID_ADTSEVENT_EOS) {
+            if (dataLength > 0) {
+                SL_LOGE("Can't enqueue non-zero data with EOS");
+                return SL_RESULT_PRECONDITIONS_VIOLATED;
+            }
+            *pEOS = true;
+        }
       } break;
 
       case kAndroidBufferTypeInvalid:
       default:
         // not reachable as we checked this earlier
-        return;
+        return SL_RESULT_INTERNAL_ERROR;
     }
 
+    return SL_RESULT_SUCCESS;
 }
 
 
@@ -234,19 +245,7 @@
     if (SL_PLAYSTATE_STOPPED == getAssociatedState(thiz)) {
         thiz->mCallback = callback;
         thiz->mContext = pContext;
-
-        // FIXME investigate why these two cases are not handled symmetrically any more
-        switch (InterfaceToObjectID(thiz)) {
-          case SL_OBJECTID_AUDIOPLAYER:
-            result = android_audioPlayer_androidBufferQueue_registerCallback_l(
-                    (CAudioPlayer*) thiz->mThis);
-            break;
-          case XA_OBJECTID_MEDIAPLAYER:
-            result = SL_RESULT_SUCCESS;
-            break;
-          default:
-            result = SL_RESULT_PARAMETER_INVALID;
-        }
+        result = SL_RESULT_SUCCESS;
 
     } else {
         result = SL_RESULT_PRECONDITIONS_VIOLATED;
@@ -273,40 +272,17 @@
     // reset the queue state
     thiz->mState.count = 0;
     thiz->mState.index = 0;
-    // reset the individual buffers
-    for (XAuint16 i=0 ; i<(thiz->mNumBuffers + 1) ; i++) {
-        thiz->mBufferArray[i].mDataBuffer = NULL;
-        thiz->mBufferArray[i].mDataSize = 0;
-        thiz->mBufferArray[i].mDataSizeConsumed = 0;
-        thiz->mBufferArray[i].mBufferContext = NULL;
-        thiz->mBufferArray[i].mBufferState = SL_ANDROIDBUFFERQUEUEEVENT_NONE;
-        switch (thiz->mBufferType) {
-          case kAndroidBufferTypeMpeg2Ts:
-            thiz->mBufferArray[i].mItems.mTsCmdData.mTsCmdCode = ANDROID_MP2TSEVENT_NONE;
-            thiz->mBufferArray[i].mItems.mTsCmdData.mPts = 0;
-            break;
-          case kAndroidBufferTypeAacadts:
-            thiz->mBufferArray[i].mItems.mAdtsCmdData.mAdtsCmdCode = ANDROID_ADTSEVENT_NONE;
-            break;
-          default:
-            result = SL_RESULT_CONTENT_UNSUPPORTED;
-        }
-    }
 
-    if (SL_RESULT_SUCCESS == result) {
-        // object-specific behavior for a clear
-        switch (InterfaceToObjectID(thiz)) {
-        case SL_OBJECTID_AUDIOPLAYER:
-            result = SL_RESULT_SUCCESS;
-            android_audioPlayer_androidBufferQueue_clear_l((CAudioPlayer*) thiz->mThis);
-            break;
-        case XA_OBJECTID_MEDIAPLAYER:
-            result = SL_RESULT_SUCCESS;
-            android_Player_androidBufferQueue_clear_l((CMediaPlayer*) thiz->mThis);
-            break;
-        default:
-            result = SL_RESULT_PARAMETER_INVALID;
-        }
+    // object-specific behavior for a clear
+    switch (InterfaceToObjectID(thiz)) {
+    case SL_OBJECTID_AUDIOPLAYER:
+        android_audioPlayer_androidBufferQueue_clear_l((CAudioPlayer*) thiz->mThis);
+        break;
+    case XA_OBJECTID_MEDIAPLAYER:
+        android_Player_androidBufferQueue_clear_l((CMediaPlayer*) thiz->mThis);
+        break;
+    default:
+        result = SL_RESULT_PARAMETER_INVALID;
     }
 
     interface_unlock_exclusive(thiz);
@@ -345,16 +321,24 @@
         // buffer size check, can be done outside of lock because buffer type can't change
         switch (thiz->mBufferType) {
           case kAndroidBufferTypeMpeg2Ts:
-            if (dataLength % MPEG2_TS_BLOCK_SIZE == 0) {
+            if (dataLength % MPEG2_TS_PACKET_SIZE == 0) {
+                // The downstream Stagefright MPEG-2 TS parser is sensitive to format errors,
+                // so do a quick sanity check beforehand on the first packet of the buffer.
+                // We don't check all the packets to avoid thrashing the data cache.
+                if ((dataLength > 0) && (*(SLuint8 *)pData != MPEG2_TS_PACKET_SYNC)) {
+                    SL_LOGE("Error enqueueing MPEG-2 TS data: incorrect packet sync");
+                    result = SL_RESULT_CONTENT_CORRUPTED;
+                    SL_LEAVE_INTERFACE
+                }
                 break;
             }
-            SL_LOGE("Error enqueueing MPEG-2 TS data: size must be a multiple of %d (block size)",
-                    MPEG2_TS_BLOCK_SIZE);
+            SL_LOGE("Error enqueueing MPEG-2 TS data: size must be a multiple of %d (packet size)",
+                    MPEG2_TS_PACKET_SIZE);
             result = SL_RESULT_PARAMETER_INVALID;
             SL_LEAVE_INTERFACE
             break;
           case kAndroidBufferTypeAacadts:
-            // non-zero dataLength is permitted in case of EOS command only
+            // zero dataLength is permitted in case of EOS command only
             if (dataLength > 0) {
                 result = android::AacBqToPcmCbRenderer::validateBufferStartEndOnFrameBoundaries(
                     pData, dataLength);
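
The new check above only inspects the first packet of each enqueued buffer, to avoid touching the whole buffer on the enqueue path. An application that wants to validate an entire buffer before handing it over can do so itself; a minimal caller-side sketch, assuming only the standard MPEG-2 TS framing of 188-byte packets that each begin with the 0x47 sync byte (the values that MPEG2_TS_PACKET_SIZE and MPEG2_TS_PACKET_SYNC are expected to carry):

    // Sketch: caller-side check that a buffer holds only well-formed MPEG-2 TS packets.
    #include <stdbool.h>
    #include <stddef.h>

    static bool isWholeTsBuffer(const unsigned char *data, size_t length)
    {
        if (length % 188 != 0) {            // must be a whole number of 188-byte packets
            return false;
        }
        for (size_t offset = 0; offset < length; offset += 188) {
            if (data[offset] != 0x47) {     // every packet starts with the sync byte
                return false;
            }
        }
        return true;
    }
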
@@ -377,19 +361,24 @@
         if ((newRear = oldRear + 1) == &thiz->mBufferArray[thiz->mNumBuffers + 1]) {
             newRear = thiz->mBufferArray;
         }
-        if (newRear == thiz->mFront) {
+        if (thiz->mEOS) {
+            SL_LOGE("Can't enqueue after EOS");
+            result = SL_RESULT_PRECONDITIONS_VIOLATED;
+        } else if (newRear == thiz->mFront) {
             result = SL_RESULT_BUFFER_INSUFFICIENT;
         } else {
-            oldRear->mDataBuffer = pData;
-            oldRear->mDataSize = dataLength;
-            oldRear->mDataSizeConsumed = 0;
-            oldRear->mBufferContext = pBufferContext;
-            oldRear->mBufferState = SL_ANDROIDBUFFERQUEUEEVENT_NONE;
-            thiz->mRear = newRear;
-            ++thiz->mState.count;
             // set oldRear->mItems based on items
-            setItems(pItems, itemsLength, thiz->mBufferType, oldRear);
-            result = SL_RESULT_SUCCESS;
+            result = setItems(dataLength, pItems, itemsLength, thiz->mBufferType, oldRear,
+                    &thiz->mEOS);
+            if (SL_RESULT_SUCCESS == result) {
+                oldRear->mDataBuffer = pData;
+                oldRear->mDataSize = dataLength;
+                oldRear->mDataSizeConsumed = 0;
+                oldRear->mBufferContext = pBufferContext;
+                //oldRear->mBufferState = TBD;
+                thiz->mRear = newRear;
+                ++thiz->mState.count;
+            }
         }
         // set enqueue attribute if state is PLAYING and the first buffer is enqueued
         interface_unlock_exclusive_attributes(thiz, ((SL_RESULT_SUCCESS == result) &&
@@ -435,8 +424,7 @@
     IAndroidBufferQueue *thiz = (IAndroidBufferQueue *) self;
     interface_lock_exclusive(thiz);
     // FIXME only supporting SL_ANDROIDBUFFERQUEUEEVENT_PROCESSED in this implementation
-    if ((SL_ANDROIDBUFFERQUEUEEVENT_PROCESSED == eventFlags) ||
-            (SL_ANDROIDBUFFERQUEUEEVENT_NONE == eventFlags)) {
+    if (!(~(SL_ANDROIDBUFFERQUEUEEVENT_PROCESSED /* | others TBD */ ) & eventFlags)) {
         thiz->mCallbackEventsMask = eventFlags;
         result = SL_RESULT_SUCCESS;
     } else {
@@ -494,6 +482,7 @@
     thiz->mBufferArray = NULL;
     thiz->mFront = NULL;
     thiz->mRear = NULL;
+    thiz->mEOS = false;
 }
 
 
@@ -505,3 +494,98 @@
         thiz->mBufferArray = NULL;
     }
 }
+
+
+#if 0
+// Dump the contents of an IAndroidBufferQueue to the log.  This is for debugging only,
+// and is not a documented API.  The associated object is locked throughout for atomicity,
+// but the log entries may be interspersed with unrelated logs.
+
+void IAndroidBufferQueue_log(IAndroidBufferQueue *thiz)
+{
+    interface_lock_shared(thiz);
+    SL_LOGI("IAndroidBufferQueue %p:", thiz);
+    SL_LOGI("  mState.count=%u mState.index=%u mCallback=%p mContext=%p",
+            thiz->mState.count, thiz->mState.index, thiz->mCallback, thiz->mContext);
+    const char *bufferTypeString;
+    switch (thiz->mBufferType) {
+    case kAndroidBufferTypeInvalid:
+        bufferTypeString = "kAndroidBufferTypeInvalid";
+        break;
+    case kAndroidBufferTypeMpeg2Ts:
+        bufferTypeString = "kAndroidBufferTypeMpeg2Ts";
+        break;
+    case kAndroidBufferTypeAacadts:
+        bufferTypeString = "kAndroidBufferTypeAacadts";
+        break;
+    default:
+        bufferTypeString = "unknown";
+        break;
+    }
+    SL_LOGI("  mCallbackEventsMask=0x%x, mBufferType=0x%x (%s), mEOS=%s",
+            thiz->mCallbackEventsMask,
+            thiz->mBufferType, bufferTypeString,
+            thiz->mEOS ? "true" : "false");
+    SL_LOGI("  mBufferArray=%p, mFront=%p (%u), mRear=%p (%u)",
+            thiz->mBufferArray,
+            thiz->mFront, thiz->mFront - thiz->mBufferArray,
+            thiz->mRear, thiz->mRear - thiz->mBufferArray);
+    SL_LOGI("  index mDataBuffer mDataSize mDataSizeConsumed mBufferContext mItems");
+    const AdvancedBufferHeader *hdr;
+    for (hdr = thiz->mFront; hdr != thiz->mRear; ) {
+        SLuint32 i = hdr - thiz->mBufferArray;
+        char itemString[32];
+        switch (thiz->mBufferType) {
+        case kAndroidBufferTypeMpeg2Ts:
+            switch (hdr->mItems.mTsCmdData.mTsCmdCode) {
+            case ANDROID_MP2TSEVENT_NONE:
+                strcpy(itemString, "NONE");
+                break;
+            case ANDROID_MP2TSEVENT_EOS:
+                strcpy(itemString, "EOS");
+                break;
+            case ANDROID_MP2TSEVENT_DISCONTINUITY:
+                strcpy(itemString, "DISCONTINUITY");
+                break;
+            case ANDROID_MP2TSEVENT_DISCON_NEWPTS:
+                snprintf(itemString, sizeof(itemString), "NEWPTS %llu",
+                        hdr->mItems.mTsCmdData.mPts);
+                break;
+            case ANDROID_MP2TSEVENT_FORMAT_CHANGE:
+                strcpy(itemString, "FORMAT_CHANGE");
+                break;
+            default:
+                snprintf(itemString, sizeof(itemString), "0x%x", hdr->mItems.mTsCmdData.mTsCmdCode);
+                break;
+            }
+            break;
+        case kAndroidBufferTypeAacadts:
+            switch (hdr->mItems.mAdtsCmdData.mAdtsCmdCode) {
+            case ANDROID_ADTSEVENT_NONE:
+                strcpy(itemString, "NONE");
+                break;
+            case ANDROID_ADTSEVENT_EOS:
+                strcpy(itemString, "EOS");
+                break;
+            default:
+                snprintf(itemString, sizeof(itemString), "0x%x",
+                        hdr->mItems.mAdtsCmdData.mAdtsCmdCode);
+                break;
+            }
+            break;
+        default:
+            strcpy(itemString, "");
+            break;
+        }
+        SL_LOGI("  %5u %11p %9u %17u %14p %s",
+                i, hdr->mDataBuffer, hdr->mDataSize, hdr->mDataSizeConsumed,
+                hdr->mBufferContext, itemString);
+                // mBufferState
+        if (++hdr == &thiz->mBufferArray[thiz->mNumBuffers + 1]) {
+            hdr = thiz->mBufferArray;
+        }
+    }
+    interface_unlock_shared(thiz);
+}
+
+#endif
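
With the stricter setItems() above, end of stream is signalled by enqueueing a zero-length buffer that carries a single SL_ANDROID_ITEMKEY_EOS item, and nothing may be enqueued afterwards because mEOS is never reset. A minimal caller-side sketch, assuming a valid SLAndroidBufferQueueItf; the eosContext marker is purely an application-side convention, not part of the API:

    // Sketch: enqueue an end-of-stream marker on an Android buffer queue.
    // Requires <assert.h>, <SLES/OpenSLES.h> and <SLES/OpenSLES_Android.h>.
    static const int eosContext = 1980;  // arbitrary marker the app can recognize in its callback

    static void enqueueEos(SLAndroidBufferQueueItf abqItf)
    {
        SLAndroidBufferItem eosItem;
        eosItem.itemKey = SL_ANDROID_ITEMKEY_EOS;
        eosItem.itemSize = 0;            // setItems() rejects a non-zero item size for EOS
        // NULL data and zero dataLength: non-zero data with EOS now fails with
        // SL_RESULT_PRECONDITIONS_VIOLATED
        SLresult res = (*abqItf)->Enqueue(abqItf, (void *) &eosContext,
                NULL /*pData*/, 0 /*dataLength*/,
                &eosItem, sizeof(eosItem.itemKey) + sizeof(eosItem.itemSize));
        assert(SL_RESULT_SUCCESS == res);
        // any later Enqueue() on this queue fails, because mEOS stays set
    }
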
diff --git a/src/itf/IEngine.c b/src/itf/IEngine.c
index 41de0a1..b024fca 100644
--- a/src/itf/IEngine.c
+++ b/src/itf/IEngine.c
@@ -82,26 +82,6 @@
             return SL_RESULT_CONTENT_UNSUPPORTED;
         }
 
-        // initialize ABQ memory
-        for (SLuint16 i=0 ; i<(ap->mAndroidBufferQueue.mNumBuffers + 1) ; i++) {
-            AdvancedBufferHeader *pBuf = &ap->mAndroidBufferQueue.mBufferArray[i];
-            pBuf->mDataBuffer = NULL;
-            pBuf->mDataSize = 0;
-            pBuf->mDataSizeConsumed = 0;
-            pBuf->mBufferContext = NULL;
-            pBuf->mBufferState = SL_ANDROIDBUFFERQUEUEEVENT_NONE;
-            switch (ap->mAndroidBufferQueue.mBufferType) {
-              case kAndroidBufferTypeMpeg2Ts:
-                pBuf->mItems.mTsCmdData.mTsCmdCode = ANDROID_MP2TSEVENT_NONE;
-                pBuf->mItems.mTsCmdData.mPts = 0;
-                break;
-              case kAndroidBufferTypeAacadts:
-                pBuf->mItems.mAdtsCmdData.mAdtsCmdCode = ANDROID_ADTSEVENT_NONE;
-                break;
-              default:
-                return SL_RESULT_CONTENT_UNSUPPORTED;
-            }
-        }
         ap->mAndroidBufferQueue.mFront = ap->mAndroidBufferQueue.mBufferArray;
         ap->mAndroidBufferQueue.mRear  = ap->mAndroidBufferQueue.mBufferArray;
     }
@@ -252,7 +232,7 @@
                     // placement new (explicit constructor)
                     // FIXME unnecessary once those fields are encapsulated in one class, rather
                     //   than a structure
-                    (void) new (&thiz->mAudioTrack) android::sp<android::AudioTrackProxy>();
+                    (void) new (&thiz->mAudioTrack) android::sp<android::AudioTrack>();
                     (void) new (&thiz->mCallbackProtector)
                             android::sp<android::CallbackProtector>();
                     (void) new (&thiz->mAuxEffect) android::sp<android::AudioEffect>();
@@ -1229,25 +1209,6 @@
                             result = SL_RESULT_MEMORY_FAILURE;
                             break;
                         } else {
-                            for (XAuint16 i=0 ; i<(nbBuffers + 1) ; i++) {
-                                thiz->mAndroidBufferQueue.mBufferArray[i].mDataBuffer = NULL;
-                                thiz->mAndroidBufferQueue.mBufferArray[i].mDataSize = 0;
-                                thiz->mAndroidBufferQueue.mBufferArray[i].mDataSizeConsumed = 0;
-                                thiz->mAndroidBufferQueue.mBufferArray[i].mBufferContext = NULL;
-                                thiz->mAndroidBufferQueue.mBufferArray[i].mBufferState =
-                                        XA_ANDROIDBUFFERQUEUEEVENT_NONE;
-                                switch (thiz->mAndroidBufferQueue.mBufferType) {
-                                  case kAndroidBufferTypeMpeg2Ts:
-                                    thiz->mAndroidBufferQueue.mBufferArray[i].mItems.mTsCmdData.
-                                            mTsCmdCode = ANDROID_MP2TSEVENT_NONE;
-                                    thiz->mAndroidBufferQueue.mBufferArray[i].mItems.mTsCmdData.
-                                            mPts = 0;
-                                    break;
-                                  default:
-                                    result = SL_RESULT_CONTENT_UNSUPPORTED;
-                                    break;
-                                }
-                            }
                             thiz->mAndroidBufferQueue.mFront =
                                     thiz->mAndroidBufferQueue.mBufferArray;
                             thiz->mAndroidBufferQueue.mRear =
diff --git a/src/itf/IEnvironmentalReverb.c b/src/itf/IEnvironmentalReverb.c
index adf1a8d..79b0290 100644
--- a/src/itf/IEnvironmentalReverb.c
+++ b/src/itf/IEnvironmentalReverb.c
@@ -77,7 +77,7 @@
     } else {
         IEnvironmentalReverb *thiz = (IEnvironmentalReverb *) self;
         interface_lock_shared(thiz);
-#if !defined(ANDROID)
+#if 1 // !defined(ANDROID)
         result = SL_RESULT_SUCCESS;
 #else
         if (NO_ENVREVERB(thiz)) {
@@ -139,7 +139,7 @@
     } else {
         IEnvironmentalReverb *thiz = (IEnvironmentalReverb *) self;
         interface_lock_shared(thiz);
-#if !defined(ANDROID)
+#if 1 // !defined(ANDROID)
         result = SL_RESULT_SUCCESS;
 #else
         if (NO_ENVREVERB(thiz)) {
@@ -170,7 +170,7 @@
         IEnvironmentalReverb *thiz = (IEnvironmentalReverb *) self;
         interface_lock_exclusive(thiz);
         thiz->mProperties.decayTime = decayTime;
-#if !defined(ANDROID)
+#if 1 // !defined(ANDROID)
         result = SL_RESULT_SUCCESS;
 #else
         if (NO_ENVREVERB(thiz)) {
@@ -198,7 +198,7 @@
     } else {
         IEnvironmentalReverb *thiz = (IEnvironmentalReverb *) self;
         interface_lock_shared(thiz);
-#if !defined(ANDROID)
+#if 1 // !defined(ANDROID)
         result = SL_RESULT_SUCCESS;
 #else
         if (NO_ENVREVERB(thiz)) {
@@ -257,7 +257,7 @@
     } else {
         IEnvironmentalReverb *thiz = (IEnvironmentalReverb *) self;
         interface_lock_shared(thiz);
-#if !defined(ANDROID)
+#if 1 // !defined(ANDROID)
         result = SL_RESULT_SUCCESS;
 #else
         if (NO_ENVREVERB(thiz)) {
@@ -318,7 +318,7 @@
     } else {
         IEnvironmentalReverb *thiz = (IEnvironmentalReverb *) self;
         interface_lock_shared(thiz);
-#if !defined(ANDROID)
+#if 1 // !defined(ANDROID)
         result = SL_RESULT_SUCCESS;
 #else
         if (NO_ENVREVERB(thiz)) {
@@ -377,7 +377,7 @@
     } else {
         IEnvironmentalReverb *thiz = (IEnvironmentalReverb *) self;
         interface_lock_shared(thiz);
-#if !defined(ANDROID)
+#if 1 // !defined(ANDROID)
         result = SL_RESULT_SUCCESS;
 #else
         if (NO_ENVREVERB(thiz)) {
@@ -438,7 +438,7 @@
     } else {
         IEnvironmentalReverb *thiz = (IEnvironmentalReverb *) self;
         interface_lock_shared(thiz);
-#if !defined(ANDROID)
+#if 1 // !defined(ANDROID)
         result = SL_RESULT_SUCCESS;
 #else
         if (NO_ENVREVERB(thiz)) {
@@ -497,7 +497,7 @@
     } else {
         IEnvironmentalReverb *thiz = (IEnvironmentalReverb *) self;
         interface_lock_shared(thiz);
-#if !defined(ANDROID)
+#if 1 // !defined(ANDROID)
         result = SL_RESULT_SUCCESS;
 #else
         if (NO_ENVREVERB(thiz)) {
@@ -556,7 +556,7 @@
     } else {
         IEnvironmentalReverb *thiz = (IEnvironmentalReverb *) self;
         interface_lock_shared(thiz);
-#if !defined(ANDROID)
+#if 1 // !defined(ANDROID)
         result = SL_RESULT_SUCCESS;
 #else
         if (NO_ENVREVERB(thiz)) {
@@ -615,7 +615,7 @@
     } else {
         IEnvironmentalReverb *thiz = (IEnvironmentalReverb *) self;
         interface_lock_shared(thiz);
-#if !defined(ANDROID)
+#if 1 // !defined(ANDROID)
         result = SL_RESULT_SUCCESS;
 #else
         if (NO_ENVREVERB(thiz)) {
@@ -699,7 +699,7 @@
     } else {
         IEnvironmentalReverb *thiz = (IEnvironmentalReverb *) self;
         interface_lock_shared(thiz);
-#if !defined(ANDROID)
+#if 1 // !defined(ANDROID)
         result = SL_RESULT_SUCCESS;
 #else
         if (NO_ENVREVERB(thiz)) {
diff --git a/src/itf/IMetadataExtraction.c b/src/itf/IMetadataExtraction.c
index 6692c20..1a6c24e 100644
--- a/src/itf/IMetadataExtraction.c
+++ b/src/itf/IMetadataExtraction.c
@@ -28,8 +28,12 @@
     } else {
         IMetadataExtraction *thiz = (IMetadataExtraction *) self;
         if (SL_OBJECTID_AUDIOPLAYER == InterfaceToObjectID(thiz)) {
+#ifdef ANDROID
             result = android_audioPlayer_metadata_getItemCount((CAudioPlayer *)thiz->mThis,
                     pItemCount);
+#else
+            result = SL_RESULT_FEATURE_UNSUPPORTED;
+#endif
         } else {
             result = SL_RESULT_PARAMETER_INVALID;
         }
@@ -49,8 +53,12 @@
     } else {
         IMetadataExtraction *thiz = (IMetadataExtraction *) self;
         if (SL_OBJECTID_AUDIOPLAYER == InterfaceToObjectID(thiz)) {
+#ifdef ANDROID
             result = android_audioPlayer_metadata_getKeySize((CAudioPlayer *)thiz->mThis,
                     index, pKeySize);
+#else
+            result = SL_RESULT_FEATURE_UNSUPPORTED;
+#endif
         } else {
             result = SL_RESULT_PARAMETER_INVALID;
         }
@@ -70,8 +78,12 @@
     } else {
         IMetadataExtraction *thiz = (IMetadataExtraction *) self;
         if (SL_OBJECTID_AUDIOPLAYER == InterfaceToObjectID(thiz)) {
+#ifdef ANDROID
             result = android_audioPlayer_metadata_getKey((CAudioPlayer *)thiz->mThis,
                     index, keySize, pKey);
+#else
+            result = SL_RESULT_FEATURE_UNSUPPORTED;
+#endif
         } else {
             result = SL_RESULT_PARAMETER_INVALID;
         }
@@ -91,8 +103,12 @@
     } else {
         IMetadataExtraction *thiz = (IMetadataExtraction *) self;
         if (SL_OBJECTID_AUDIOPLAYER == InterfaceToObjectID(thiz)) {
+#ifdef ANDROID
             result = android_audioPlayer_metadata_getValueSize((CAudioPlayer *)thiz->mThis,
                     index, pValueSize);
+#else
+            result = SL_RESULT_FEATURE_UNSUPPORTED;
+#endif
         } else {
             result = SL_RESULT_PARAMETER_INVALID;
         }
@@ -112,8 +128,12 @@
     } else {
         IMetadataExtraction *thiz = (IMetadataExtraction *) self;
         if (SL_OBJECTID_AUDIOPLAYER == InterfaceToObjectID(thiz)) {
+#ifdef ANDROID
             result = android_audioPlayer_metadata_getValue((CAudioPlayer *)thiz->mThis,
                     index, valueSize, pValue);
+#else
+            result = SL_RESULT_FEATURE_UNSUPPORTED;
+#endif
         } else {
             result = SL_RESULT_PARAMETER_INVALID;
         }
diff --git a/src/itf/IMuteSolo.c b/src/itf/IMuteSolo.c
index dcf5879..46608e8 100644
--- a/src/itf/IMuteSolo.c
+++ b/src/itf/IMuteSolo.c
@@ -180,7 +180,8 @@
             SLuint8 numChannels = ap->mNumChannels;
             object_unlock_shared(thisObject);
             *pNumChannels = numChannels;
-            result = 0 < numChannels ? SL_RESULT_SUCCESS : SL_RESULT_PRECONDITIONS_VIOLATED;
+            // spec errata says to return 0 (== UNKNOWN_NUMCHANNELS) if channel count is unknown
+            result = SL_RESULT_SUCCESS;
         }
     }
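
With the change above, GetNumChannels() now succeeds and reports 0 when the channel count is not yet known, for example before prefetch has completed. A caller-side sketch, assuming a valid SLMuteSoloItf:

    // Sketch: tolerate an unknown channel count early in the player's life cycle.
    SLuint8 numChannels = 0;
    SLresult res = (*muteSoloItf)->GetNumChannels(muteSoloItf, &numChannels);
    if (SL_RESULT_SUCCESS == res && 0 == numChannels) {
        // count not known yet; query again once prefetch reports sufficient data
    }
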
 
diff --git a/src/itf/IPresetReverb.c b/src/itf/IPresetReverb.c
index e013cef..07214e5 100644
--- a/src/itf/IPresetReverb.c
+++ b/src/itf/IPresetReverb.c
@@ -72,7 +72,7 @@
         IPresetReverb *thiz = (IPresetReverb *) self;
         interface_lock_shared(thiz);
         SLuint16 preset = SL_REVERBPRESET_NONE;
-#if !defined(ANDROID)
+#if 1 // !defined(ANDROID)
         preset = thiz->mPreset;
         result = SL_RESULT_SUCCESS;
 #else
diff --git a/src/itf/IStreamInformation.c b/src/itf/IStreamInformation.c
index 57da843..bfd9233 100644
--- a/src/itf/IStreamInformation.c
+++ b/src/itf/IStreamInformation.c
@@ -30,10 +30,10 @@
 
 #ifdef ANDROID
     IStreamInformation *thiz = (IStreamInformation *) self;
-    interface_lock_exclusive(thiz);
+    interface_lock_shared(thiz);
     // always storing container info at index 0, as per spec
     *info = thiz->mStreamInfoTable.itemAt(0).containerInfo;
-    interface_unlock_exclusive(thiz);
+    interface_unlock_shared(thiz);
     // even though the pointer to the media container info is returned, the values aren't set
     //  for the actual container in this version, they are simply initialized to defaults
     //  (see IStreamInformation_init)
@@ -41,7 +41,7 @@
 #else
     SL_LOGE("QueryMediaContainerInformation is unsupported");
     memset(info, 0, sizeof(XAMediaContainerInformation));
-    result = XA_RESULT_CONTENT_UNSUPPORTED;
+    result = XA_RESULT_FEATURE_UNSUPPORTED;
 #endif
 
     XA_LEAVE_INTERFACE
@@ -69,7 +69,7 @@
     } else {
         IStreamInformation *thiz = (IStreamInformation *) self;
 
-        interface_lock_exclusive(thiz);
+        interface_lock_shared(thiz);
 
         XAuint32 nbStreams = thiz->mStreamInfoTable.itemAt(0).containerInfo.numStreams;
         // streams in the container are numbered 1..nbStreams
@@ -82,7 +82,7 @@
             result = XA_RESULT_PARAMETER_INVALID;
         }
 
-        interface_unlock_exclusive(thiz);
+        interface_unlock_shared(thiz);
     }
 #endif
 
@@ -106,7 +106,7 @@
 
         IStreamInformation *thiz = (IStreamInformation *) self;
 
-        interface_lock_exclusive(thiz);
+        interface_lock_shared(thiz);
 
         XAuint32 nbStreams = thiz->mStreamInfoTable.itemAt(0).containerInfo.numStreams;
         // stream 0 is the container, and other streams in the container are numbered 1..nbStreams
@@ -149,7 +149,7 @@
             result = XA_RESULT_PARAMETER_INVALID;
         }
 
-        interface_unlock_exclusive(thiz);
+        interface_unlock_shared(thiz);
 #endif
 
     }
@@ -165,8 +165,46 @@
 {
     XA_ENTER_INTERFACE
 
-    SL_LOGE("unsupported XAStreamInformationItf function");
-    result = XA_RESULT_FEATURE_UNSUPPORTED;
+    if (NULL == pNameSize || streamIndex == 0) {
+        result = XA_RESULT_PARAMETER_INVALID;
+    } else {
+#ifdef ANDROID
+        IStreamInformation *thiz = (IStreamInformation *) self;
+        interface_lock_shared(thiz);
+
+        XAuint32 nbStreams = thiz->mStreamInfoTable.itemAt(0).containerInfo.numStreams;
+        // streams in the container are numbered 1..nbStreams
+        if (streamIndex <= nbStreams) {
+            char streamName[16];        // large enough for the fixed format in next line
+            snprintf(streamName, sizeof(streamName), "stream%u", streamIndex);
+            size_t actualNameLength = strlen(streamName);
+            if (NULL == pName) {
+                // application is querying the name length in order to allocate a buffer
+                result = XA_RESULT_SUCCESS;
+            } else {
+                SLuint16 availableNameLength = *pNameSize;
+                if (actualNameLength > availableNameLength) {
+                    memcpy(pName, streamName, availableNameLength);
+                    result = XA_RESULT_BUFFER_INSUFFICIENT;
+                } else if (actualNameLength == availableNameLength) {
+                    memcpy(pName, streamName, availableNameLength);
+                    result = XA_RESULT_SUCCESS;
+                } else { // actualNameLength < availableNameLength
+                    memcpy(pName, streamName, actualNameLength + 1);
+                    result = XA_RESULT_SUCCESS;
+                }
+            }
+            *pNameSize = actualNameLength;
+        } else {
+            result = XA_RESULT_PARAMETER_INVALID;
+        }
+
+        interface_unlock_shared(thiz);
+#else
+        SL_LOGE("unsupported XAStreamInformationItf function");
+        result = XA_RESULT_FEATURE_UNSUPPORTED;
+#endif
+    }
 
     XA_LEAVE_INTERFACE
 }
@@ -203,15 +241,18 @@
         XA_LEAVE_INTERFACE;
     }
 
+#ifdef ANDROID
     IStreamInformation *thiz = (IStreamInformation *) self;
-
-    interface_lock_exclusive(thiz);
+    interface_lock_shared(thiz);
 
     result = XA_RESULT_SUCCESS;
     *numStreams = thiz->mStreamInfoTable.itemAt(0).containerInfo.numStreams;
     activeStreams = thiz->mActiveStreams;
 
-    interface_unlock_exclusive(thiz);
+    interface_unlock_shared(thiz);
+#else
+    result = SL_RESULT_FEATURE_UNSUPPORTED;
+#endif
 
     XA_LEAVE_INTERFACE
 }
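
QueryStreamName() above follows the usual two-call pattern: pass a NULL name to learn the length, then call again with a buffer. A minimal sketch, assuming the OpenMAX AL 1.0.1 prototype and a container with at least one stream:

    // Sketch: query the name of stream 1 in two steps (length first, then the name).
    XAuint16 nameSize = 0;
    XAresult res = (*streamInfoItf)->QueryStreamName(streamInfoItf, 1, &nameSize, NULL);
    if (XA_RESULT_SUCCESS == res) {
        XAchar name[nameSize + 1];   // room for the NUL terminator
        nameSize += 1;               // report the full buffer size to the implementation
        res = (*streamInfoItf)->QueryStreamName(streamInfoItf, 1, &nameSize, name);
        // with the implementation above, name now holds "stream1"
    }
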
diff --git a/src/itf/IVideoDecoderCapabilities.cpp b/src/itf/IVideoDecoderCapabilities.cpp
index 8f12cb2..c9649a9 100644
--- a/src/itf/IVideoDecoderCapabilities.cpp
+++ b/src/itf/IVideoDecoderCapabilities.cpp
@@ -42,16 +42,15 @@
             // If pDecodersIds is non-NULL, as an input pNumDecoders specifies the size of the
             // pDecoderIds array and as an output it specifies the number of decoder IDs available
             // within the pDecoderIds array.
-#ifdef ANDROID
             XAuint32 numDecoders = *pNumDecoders;
+#ifdef ANDROID
             const XAuint32 androidNbDecoders = android::android_videoCodec_getNbDecoders();
-            if (androidNbDecoders <= numDecoders) {
+            if (androidNbDecoders < numDecoders) {
                 *pNumDecoders = numDecoders = androidNbDecoders;
             }
             android::android_videoCodec_getDecoderIds(numDecoders, pDecoderIds);
 #else
-            XAuint32 numDecoders = *pNumDecoders;
-            if (kMaxVideoDecoders <= numDecoders) {
+            if (kMaxVideoDecoders < numDecoders) {
                 *pNumDecoders = numDecoders = kMaxVideoDecoders;
             }
             memcpy(pDecoderIds, VideoDecoderIds, numDecoders * sizeof(XAuint32));
@@ -78,6 +77,11 @@
 #ifdef ANDROID
             result = android::android_videoCodec_getProfileLevelCombinationNb(decoderId, pIndex);
 #else
+            // Generic implementation has zero profile/level combinations for all codecs,
+            // but this is not allowed per spec:
+            //    "Each decoder must support at least one profile/mode pair
+            //    and therefore have at least one Codec Descriptor."
+            *pIndex = 0;
             SL_LOGE("Generic implementation has no video decoder capabilities");
             result = XA_RESULT_PARAMETER_INVALID;
 #endif
@@ -87,7 +91,10 @@
             result = android::android_videoCodec_getProfileLevelCombination(decoderId, *pIndex,
                     pDescriptor);
 #else
+            // For the generic implementation, any index >= 0 is out of range
+#if 1   // not sure if this is needed, it's not being done for the Android case
             pDescriptor->codecId = decoderId;
+#endif
             SL_LOGE("Generic implementation has no video decoder capabilities");
             result = XA_RESULT_PARAMETER_INVALID;
 #endif
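
The comparison fix above means the decoder count is clamped only when fewer decoders are available than the caller asked for. A typical caller does a query, allocate, fetch sequence; a minimal sketch, assuming a valid XAVideoDecoderCapabilitiesItf:

    // Sketch: discover how many video decoders exist, then fetch their IDs.
    XAuint32 numDecoders = 0;
    XAresult res = (*decItf)->GetVideoDecoders(decItf, &numDecoders, NULL);   // count only
    if (XA_RESULT_SUCCESS == res && numDecoders > 0) {
        XAuint32 decoderIds[numDecoders];
        res = (*decItf)->GetVideoDecoders(decItf, &numDecoders, decoderIds);  // fetch the IDs
    }
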
diff --git a/src/itfstruct.h b/src/itfstruct.h
index 8391c96..36d7565 100644
--- a/src/itfstruct.h
+++ b/src/itfstruct.h
@@ -672,6 +672,7 @@
     AndroidBufferType_type mBufferType;
     AdvancedBufferHeader *mBufferArray;
     AdvancedBufferHeader *mFront, *mRear;
+    bool mEOS;  // whether EOS has been enqueued; never reset
 } IAndroidBufferQueue;
 
 #endif
diff --git a/src/locks.c b/src/locks.c
index 743d4a9..d95c23a 100644
--- a/src/locks.c
+++ b/src/locks.c
@@ -203,7 +203,7 @@
     slPrefetchCallback prefetchCallback = NULL;
     void *prefetchContext = NULL;
     SLuint32 prefetchEvents = SL_PREFETCHEVENT_NONE;
-    android::sp<android::AudioTrackProxy> audioTrack;
+    android::sp<android::AudioTrack> audioTrack;
     if (SL_OBJECTID_AUDIOPLAYER == objectID) {
         CAudioPlayer *ap = (CAudioPlayer *) thiz;
         prefetchCallback = ap->mPrefetchStatus.mDeferredPrefetchCallback;
diff --git a/src/objects/CMediaPlayer.c b/src/objects/CMediaPlayer.c
index 63285d8..7b995c5 100644
--- a/src/objects/CMediaPlayer.c
+++ b/src/objects/CMediaPlayer.c
@@ -64,8 +64,8 @@
 
 predestroy_t CMediaPlayer_PreDestroy(void *self)
 {
-    CMediaPlayer *thiz = (CMediaPlayer *) self;
 #ifdef ANDROID
+    CMediaPlayer *thiz = (CMediaPlayer *) self;
     android_Player_preDestroy(thiz);
 #endif
     return predestroy_ok;
diff --git a/src/sles_allinclusive.h b/src/sles_allinclusive.h
index 513f774..0b33dcb 100644
--- a/src/sles_allinclusive.h
+++ b/src/sles_allinclusive.h
@@ -255,7 +255,8 @@
     SLuint32 mDataSizeConsumed;
     AdvancedBufferItems mItems;
     const void *mBufferContext;
-    SLuint32 mBufferState;
+    // mBufferState will be used for the other ABQ events we'll support in the future
+    // SLuint32 mBufferState;
 } AdvancedBufferHeader;
 #endif
 
diff --git a/tests/automated/BufferQueue_test.cpp b/tests/automated/BufferQueue_test.cpp
index 24b6d72..123e481 100644
--- a/tests/automated/BufferQueue_test.cpp
+++ b/tests/automated/BufferQueue_test.cpp
@@ -22,7 +22,7 @@
 #ifdef ANDROID
 #include <utils/Log.h>
 #else
-#define LOGV printf
+#define ALOGV printf
 #endif
 
 #include <assert.h>
@@ -284,13 +284,13 @@
         CheckErr(res);
         ASSERT_EQ((SLuint32) 1, bufferqueueState.count);
         ASSERT_EQ((SLuint32) 0, bufferqueueState.playIndex);
-        //LOGV("Before 1.5 sec");
+        //ALOGV("Before 1.5 sec");
         // wait 1.5 seconds
         usleep(1500000);
-        //LOGV("After 1.5 sec");
+        //ALOGV("After 1.5 sec");
         // state should still be playing
         res = (*playerPlay)->GetPlayState(playerPlay, &playerState);
-        //LOGV("GetPlayState");
+        //ALOGV("GetPlayState");
         CheckErr(res);
         ASSERT_EQ(SL_PLAYSTATE_PLAYING, playerState);
         // buffer should be removed from the queue
@@ -298,12 +298,12 @@
         CheckErr(res);
         ASSERT_EQ((SLuint32) 0, bufferqueueState.count);
         ASSERT_EQ((SLuint32) 1, bufferqueueState.playIndex);
-        //LOGV("TestEnd");
+        //ALOGV("TestEnd");
     }
 };
 
 TEST_F(TestBufferQueue, testInvalidBuffer){
-    //LOGV("Test Fixture: InvalidBuffer");
+    //ALOGV("Test Fixture: InvalidBuffer");
     InvalidBuffer();
 }
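
The LOGV to ALOGV changes here and below track the Android logging macro rename. On a tree whose <utils/Log.h> still provides only the old names, a small shim keeps the test building; a sketch, on the assumption that only the verbose macro is needed:

    // Sketch: compatibility shim for headers that predate the ALOGV rename.
    #ifndef ALOGV
    #define ALOGV LOGV
    #endif
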
 
diff --git a/tests/examples/Android.mk b/tests/examples/Android.mk
index ac06ee7..f9e912e 100644
--- a/tests/examples/Android.mk
+++ b/tests/examples/Android.mk
@@ -293,7 +293,7 @@
 
 LOCAL_CFLAGS += -UNDEBUG
 
-LOCAL_MODULE:= slesTestDecodeAac
+LOCAL_MODULE:= slesTest_decodeAac
 
 include $(BUILD_EXECUTABLE)
 
diff --git a/tests/examples/slesTestDecodeAac.cpp b/tests/examples/slesTestDecodeAac.cpp
index 707ab37..1a51bb0 100644
--- a/tests/examples/slesTestDecodeAac.cpp
+++ b/tests/examples/slesTestDecodeAac.cpp
@@ -45,6 +45,7 @@
 
 #define QUERY_METADATA
 
+#include <assert.h>
 #include <stdlib.h>
 #include <stdio.h>
 #include <string.h>
@@ -64,7 +65,7 @@
  * on the AudioPlayer object for decoding, and
  * SL_IID_METADATAEXTRACTION for retrieving the format of the decoded audio.
  */
-#define NUM_EXPLICIT_INTERFACES_FOR_PLAYER 3
+#define NUM_EXPLICIT_INTERFACES_FOR_PLAYER 4
 
 /* Number of decoded samples produced by one AAC frame; defined by the standard */
 #define SAMPLES_PER_AAC_FRAME 1024
@@ -115,7 +116,7 @@
 size_t totalEncodeCompletions = 0;     // number of Enqueue completions received
 CentralTendencyStatistics frameStats;
 size_t pauseFrame = 0;              // pause after this many decoded frames, zero means don't pause
-SLboolean createRaw = SL_BOOLEAN_FALSE; // whether to create a .raw file containing PCM data
+SLboolean createRaw = SL_BOOLEAN_TRUE; // whether to create a .raw file containing PCM data
 
 /* constant to identify a buffer context which is the end of the stream to decode */
 static const int kEosBufferCntxt = 1980; // a magic value we can compare against
@@ -124,6 +125,23 @@
 pthread_mutex_t eosLock = PTHREAD_MUTEX_INITIALIZER;
 pthread_cond_t eosCondition = PTHREAD_COND_INITIALIZER;
 
+// These are extensions to OpenMAX AL 1.0.1 values
+
+#define PREFETCHSTATUS_UNKNOWN ((SLuint32) 0)
+#define PREFETCHSTATUS_ERROR   ((SLuint32) (-1))
+
+// Mutex and condition shared with main program to protect prefetch_status
+
+static pthread_mutex_t mutex = PTHREAD_MUTEX_INITIALIZER;
+static pthread_cond_t cond = PTHREAD_COND_INITIALIZER;
+SLuint32 prefetch_status = PREFETCHSTATUS_UNKNOWN;
+
+/* used to detect errors likely to have occurred when the OpenSL ES framework fails to open
+ * a resource, for instance because a file URI is invalid, or an HTTP server doesn't respond.
+ */
+#define PREFETCHEVENT_ERROR_CANDIDATE \
+        (SL_PREFETCHEVENT_STATUSCHANGE | SL_PREFETCHEVENT_FILLLEVELCHANGE)
+
 //-----------------------------------------------------------------
 /* Exits the application if an error is encountered */
 #define ExitOnError(x) ExitOnErrorFunc(x,__LINE__)
@@ -137,6 +155,40 @@
 }
 
 //-----------------------------------------------------------------
+/* Callback for "prefetch" events, here used to detect audio resource opening errors */
+void PrefetchEventCallback(SLPrefetchStatusItf caller, void *pContext, SLuint32 event)
+{
+    // pContext is unused here, so we pass NULL
+    assert(pContext == NULL);
+    SLpermille level = 0;
+    SLresult result;
+    result = (*caller)->GetFillLevel(caller, &level);
+    ExitOnError(result);
+    SLuint32 status;
+    result = (*caller)->GetPrefetchStatus(caller, &status);
+    ExitOnError(result);
+    printf("prefetch level=%d status=0x%x event=%d\n", level, status, event);
+    SLuint32 new_prefetch_status;
+    if ((PREFETCHEVENT_ERROR_CANDIDATE == (event & PREFETCHEVENT_ERROR_CANDIDATE))
+            && (level == 0) && (status == SL_PREFETCHSTATUS_UNDERFLOW)) {
+        printf("PrefetchEventCallback: Error while prefetching data, exiting\n");
+        new_prefetch_status = PREFETCHSTATUS_ERROR;
+    } else if (event == SL_PREFETCHEVENT_STATUSCHANGE) {
+        new_prefetch_status = status;
+    } else {
+        return;
+    }
+    int ok;
+    ok = pthread_mutex_lock(&mutex);
+    assert(ok == 0);
+    prefetch_status = new_prefetch_status;
+    ok = pthread_cond_signal(&cond);
+    assert(ok == 0);
+    ok = pthread_mutex_unlock(&mutex);
+    assert(ok == 0);
+}
+
+//-----------------------------------------------------------------
 /* Structure for passing information to callback function */
 typedef struct CallbackCntxt_ {
 #ifdef QUERY_METADATA
@@ -147,6 +199,11 @@
     SLint8*   pData;        // Current address of local audio data storage
 } CallbackCntxt;
 
+// used to notify when SL_PLAYEVENT_HEADATEND event is received
+static pthread_mutex_t head_mutex = PTHREAD_MUTEX_INITIALIZER;
+static pthread_cond_t head_cond = PTHREAD_COND_INITIALIZER;
+static SLboolean head_atend = SL_BOOLEAN_FALSE;
+
 //-----------------------------------------------------------------
 /* Callback for SLPlayItf through which we receive the SL_PLAYEVENT_HEADATEND event */
 void PlayCallback(SLPlayItf caller, void *pContext, SLuint32 event) {
@@ -162,6 +219,10 @@
     if (event & SL_PLAYEVENT_HEADATEND) {
         printf("SL_PLAYEVENT_HEADATEND position=%u ms, all decoded data has been received\n",
                 position);
+        pthread_mutex_lock(&head_mutex);
+        head_atend = SL_BOOLEAN_TRUE;
+        pthread_cond_signal(&head_cond);
+        pthread_mutex_unlock(&head_mutex);
     }
 }
 
@@ -184,8 +245,7 @@
     // for demonstration purposes:
     // verify what type of information was enclosed in the processed buffer
     if (NULL != pBufferContext) {
-        const int processedCommand = *(int *)pBufferContext;
-        if (kEosBufferCntxt == processedCommand) {
+        if (&kEosBufferCntxt == pBufferContext) {
             fprintf(stdout, "EOS was processed\n");
         }
     }
@@ -193,7 +253,7 @@
     ++totalEncodeCompletions;
     if (endOfEncodedStream) {
         // we continue to receive acknowledgement after each buffer was processed
-        if (pBufferContext == (void *) kEosBufferCntxt) {
+        if (pBufferContext == (void *) &kEosBufferCntxt) {
             printf("Received EOS completion after EOS\n");
         } else if (pBufferContext == NULL) {
             printf("Received ADTS completion after EOS\n");
@@ -411,6 +471,8 @@
     SLAndroidSimpleBufferQueueItf decBuffQueueItf;
     /*   to queue the AAC data to decode */
     SLAndroidBufferQueueItf       aacBuffQueueItf;
+    /*   for prefetch status */
+    SLPrefetchStatusItf           prefetchItf;
 
     SLboolean required[NUM_EXPLICIT_INTERFACES_FOR_PLAYER];
     SLInterfaceID iidArray[NUM_EXPLICIT_INTERFACES_FOR_PLAYER];
@@ -435,10 +497,13 @@
     /* Request the AndroidBufferQueue interface */
     required[1] = SL_BOOLEAN_TRUE;
     iidArray[1] = SL_IID_ANDROIDBUFFERQUEUESOURCE;
+    /* Request the PrefetchStatus interface */
+    required[2] = SL_BOOLEAN_TRUE;
+    iidArray[2] = SL_IID_PREFETCHSTATUS;
 #ifdef QUERY_METADATA
     /* Request the MetadataExtraction interface */
-    required[2] = SL_BOOLEAN_TRUE;
-    iidArray[2] = SL_IID_METADATAEXTRACTION;
+    required[3] = SL_BOOLEAN_TRUE;
+    iidArray[3] = SL_IID_METADATAEXTRACTION;
 #endif
 
     /* Setup the data source for queueing AAC buffers of ADTS data */
@@ -529,6 +594,10 @@
     res = (*player)->GetInterface(player, SL_IID_ANDROIDBUFFERQUEUESOURCE, (void*)&aacBuffQueueItf);
     ExitOnError(res);
 
+    /* Get the prefetch status interface which was explicitly requested */
+    res = (*player)->GetInterface(player, SL_IID_PREFETCHSTATUS, (void*)&prefetchItf);
+    ExitOnError(res);
+
 #ifdef QUERY_METADATA
     /* Get the metadata extraction interface which was explicitly requested */
     res = (*player)->GetInterface(player, SL_IID_METADATAEXTRACTION, (void*)&mdExtrItf);
@@ -561,6 +630,13 @@
     }
     printf("\n");
 
+    /* ------------------------------------------------------ */
+    /* Initialize the callback for prefetch errors, if we can't open the resource to decode */
+    res = (*prefetchItf)->RegisterCallback(prefetchItf, PrefetchEventCallback, NULL);
+    ExitOnError(res);
+    res = (*prefetchItf)->SetCallbackEventsMask(prefetchItf, PREFETCHEVENT_ERROR_CANDIDATE);
+    ExitOnError(res);
+
     /* Initialize the callback for the Android buffer queue of the encoded data */
     res = (*aacBuffQueueItf)->RegisterCallback(aacBuffQueueItf, AndroidBufferQueueCallback, NULL);
     ExitOnError(res);
@@ -569,10 +645,14 @@
        we don't want to starve the player initially */
     printf("Enqueueing initial full buffers of encoded ADTS data");
     for (i=0 ; i < NB_BUFFERS_IN_ADTS_QUEUE ; i++) {
-        if (filelen < 7 || frame[0] != 0xFF || (frame[1] & 0xF0) != 0xF0)
+        if (filelen < 7 || frame[0] != 0xFF || (frame[1] & 0xF0) != 0xF0) {
+            printf("\ncorrupt ADTS frame encountered; offset %zu bytes\n",
+                    frame - (unsigned char *) ptr);
+            // Note that prefetch will detect this error soon when it gets a premature EOF
             break;
+        }
         unsigned framelen = ((frame[3] & 3) << 11) | (frame[4] << 3) | (frame[5] >> 5);
-        printf(" %d", i);
+        printf(" %d (%u bytes)", i, framelen);
         res = (*aacBuffQueueItf)->Enqueue(aacBuffQueueItf, NULL /*pBufferContext*/,
                 frame, framelen, NULL, 0);
         ExitOnError(res);
@@ -586,10 +666,12 @@
 
 #ifdef QUERY_METADATA
     /* ------------------------------------------------------ */
-    /* Display the metadata obtained from the decoder */
+    /* Get and display the metadata key names for the decoder */
     //   This is for test / demonstration purposes only where we discover the key and value sizes
     //   of a PCM decoder. An application that would want to directly get access to those values
-    //   can make assumptions about the size of the keys and their matching values (all SLuint32)
+    //   can make assumptions about the size of the keys and their matching values (all SLuint32),
+    //   but it should not make assumptions about the key indices as these are subject to change.
+    //   Note that we don't get the metadata values yet; that happens in the first decode callback.
     SLuint32 itemCount;
     res = (*mdExtrItf)->GetItemCount(mdExtrItf, &itemCount);
     ExitOnError(res);
@@ -667,6 +749,24 @@
     }
 #endif
 
+    // set the player's state to paused, to start prefetching
+    printf("Setting play state to PAUSED\n");
+    res = (*playItf)->SetPlayState(playItf, SL_PLAYSTATE_PAUSED);
+    ExitOnError(res);
+
+    // wait for prefetch status callback to indicate either sufficient data or error
+    printf("Awaiting prefetch complete\n");
+    pthread_mutex_lock(&mutex);
+    while (prefetch_status == PREFETCHSTATUS_UNKNOWN) {
+        pthread_cond_wait(&cond, &mutex);
+    }
+    pthread_mutex_unlock(&mutex);
+    if (prefetch_status == PREFETCHSTATUS_ERROR) {
+        fprintf(stderr, "Error during prefetch, exiting\n");
+        goto destroyRes;
+    }
+    printf("Prefetch is complete\n");
+
     /* ------------------------------------------------------ */
     /* Start decoding */
     printf("Starting to decode\n");
@@ -674,6 +774,7 @@
     ExitOnError(res);
 
     /* Decode until the end of the stream is reached */
+    printf("Awaiting notification that all encoded buffers have been enqueued\n");
     pthread_mutex_lock(&eosLock);
     while (!eos) {
         if (pauseFrame > 0) {
@@ -698,10 +799,15 @@
         }
     }
     pthread_mutex_unlock(&eosLock);
+    printf("All encoded buffers have now been enqueued, but there's still more to do\n");
 
     /* This just means done enqueueing; there may still be more data in the decode queue! */
-    // FIXME here is where we should wait for HEADATEND
-    usleep(100 * 1000);
+    pthread_mutex_lock(&head_mutex);
+    while (!head_atend) {
+        pthread_cond_wait(&head_cond, &head_mutex);
+    }
+    pthread_mutex_unlock(&head_mutex);
+    printf("Decode is now finished\n");
 
     pthread_mutex_lock(&eosLock);
     printf("Frame counters: encoded=%u decoded=%u\n", encodedFrames, decodedFrames);
diff --git a/tests/examples/slesTestEqFdPath.cpp b/tests/examples/slesTestEqFdPath.cpp
index ed1fb32..4d5a80f 100644
--- a/tests/examples/slesTestEqFdPath.cpp
+++ b/tests/examples/slesTestEqFdPath.cpp
@@ -302,7 +302,8 @@
     if (argc < 1)
 #endif
     {
-        fprintf(stdout, "Usage: \t%s [--always-on] path offsetInBytes [sizeInBytes]\n", programName);
+        fprintf(stdout, "Usage: \t%s [--always-on] path offsetInBytes [sizeInBytes]\n",
+                programName);
         fprintf(stdout, "Example: \"%s /sdcard/my.mp3 0 344460\" \n", programName);
         exit(EXIT_FAILURE);
     }
diff --git a/tests/examples/slesTestRecBuffQueue.cpp b/tests/examples/slesTestRecBuffQueue.cpp
index 1f5d794..16ba1b4 100644
--- a/tests/examples/slesTestRecBuffQueue.cpp
+++ b/tests/examples/slesTestRecBuffQueue.cpp
@@ -240,7 +240,8 @@
     if (presetValue == SL_ANDROID_RECORDING_PRESET_NONE) {
         printf("The default record preset appears to be %u\n", presetRetrieved);
     } else if (presetValue != presetRetrieved) {
-        fprintf(stderr, "Error retrieving recording preset as %u instead of %u\n", presetRetrieved, presetValue);
+        fprintf(stderr, "Error retrieving recording preset as %u instead of %u\n", presetRetrieved,
+                presetValue);
         ExitOnError(SL_RESULT_INTERNAL_ERROR);
     }
 
@@ -344,8 +345,20 @@
     }
 
     if (argc-i < 2) {
-        fprintf(stdout, "Usage: \t%s [-p#] destination_file duration_in_seconds\n", prog);
-        fprintf(stdout, "Example: \"%s /sdcard/myrec.raw 4\" \n", prog);
+        printf("Usage: \t%s [-p#] destination_file duration_in_seconds\n", prog);
+        printf("  -p# is the preset value which defaults to SL_ANDROID_RECORDING_PRESET_NONE\n");
+        printf("  possible values are:\n");
+        printf("    -p%d SL_ANDROID_RECORDING_PRESET_NONE\n",
+                SL_ANDROID_RECORDING_PRESET_NONE);
+        printf("    -p%d SL_ANDROID_RECORDING_PRESET_GENERIC\n",
+                SL_ANDROID_RECORDING_PRESET_GENERIC);
+        printf("    -p%d SL_ANDROID_RECORDING_PRESET_CAMCORDER\n",
+                SL_ANDROID_RECORDING_PRESET_CAMCORDER);
+        printf("    -p%d SL_ANDROID_RECORDING_PRESET_VOICE_RECOGNITION\n",
+                SL_ANDROID_RECORDING_PRESET_VOICE_RECOGNITION);
+        printf("    -p%d SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION\n",
+                SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION);
+        printf("Example: \"%s /sdcard/myrec.raw 4\" \n", prog);
         exit(EXIT_FAILURE);
     }
 
diff --git a/tests/examples/xaVideoDecoderCapabilities.cpp b/tests/examples/xaVideoDecoderCapabilities.cpp
index 46d1a9b..67d7373 100644
--- a/tests/examples/xaVideoDecoderCapabilities.cpp
+++ b/tests/examples/xaVideoDecoderCapabilities.cpp
@@ -54,6 +54,171 @@
     }
 }
 
+// Use a table of [integer, string] entries to map an integer to a string
+
+typedef struct {
+    XAuint32 id;
+    const char *string;
+} id_to_string_t;
+
+const char *id_to_string(XAuint32 id, const id_to_string_t *table, size_t numEntries)
+{
+    size_t i;
+    for (i = 0; i < numEntries; ++i) {
+        if (id == table[i].id) {
+            return table[i].string;
+        }
+    }
+    return "Unknown";
+}
+
+// Use a table of [integer, table] entries to map a pair of integers to a string
+
+typedef struct {
+    XAuint32 id1;
+    const id_to_string_t *id2_table;
+    size_t id2_numEntries;
+} id_pair_to_string_t;
+
+const char *id_pair_to_string(XAuint32 id1, XAuint32 id2, const id_pair_to_string_t *table,
+        size_t numEntries)
+{
+    size_t i;
+    for (i = 0; i < numEntries; ++i) {
+        if (id1 == table[i].id1) {
+            return id_to_string(id2, table[i].id2_table, table[i].id2_numEntries);
+        }
+    }
+    return "Unknown";
+}
+
+// Map a video codec and profile to string
+
+const char *videoProfileToString(XAuint32 codec, XAuint32 profile) {
+    // http://en.wikipedia.org/wiki/H.262/MPEG-2_Part_2
+    static const id_to_string_t MPEG2[] = {
+        {XA_VIDEOPROFILE_MPEG2_SIMPLE,  "Simple"},
+        {XA_VIDEOPROFILE_MPEG2_MAIN,    "Main"},
+        {XA_VIDEOPROFILE_MPEG2_422,     "4:2:2"},
+        {XA_VIDEOPROFILE_MPEG2_SNR,     "SNR Scalable"},
+        {XA_VIDEOPROFILE_MPEG2_SPATIAL, "Spatially Scalable"},
+        {XA_VIDEOPROFILE_MPEG2_HIGH,    "High"},
+    }, H263[] = {
+        {XA_VIDEOPROFILE_H263_BASELINE,           "baseline"},
+        {XA_VIDEOPROFILE_H263_H320CODING,         "H320 coding"},
+        {XA_VIDEOPROFILE_H263_BACKWARDCOMPATIBLE, "backwards compatible"},
+        {XA_VIDEOPROFILE_H263_ISWV2,              "isw v2"},
+        {XA_VIDEOPROFILE_H263_ISWV3,              "isw v3"},
+        {XA_VIDEOPROFILE_H263_HIGHCOMPRESSION,    "high compression"},
+        {XA_VIDEOPROFILE_H263_INTERNET,           "internet"},
+        {XA_VIDEOPROFILE_H263_INTERLACE,          "interlace"},
+        {XA_VIDEOPROFILE_H263_HIGHLATENCY,        "high latency"},
+    }, MPEG4[] = {
+        {XA_VIDEOPROFILE_MPEG4_SIMPLE,           "simple"},
+        {XA_VIDEOPROFILE_MPEG4_SIMPLESCALABLE,   "simple scalable"},
+        {XA_VIDEOPROFILE_MPEG4_CORE,             "core"},
+        {XA_VIDEOPROFILE_MPEG4_MAIN,             "main"},
+        {XA_VIDEOPROFILE_MPEG4_NBIT,             "nbit"},
+        {XA_VIDEOPROFILE_MPEG4_SCALABLETEXTURE,  "scalable texture"},
+        {XA_VIDEOPROFILE_MPEG4_SIMPLEFACE,       "simple face"},
+        {XA_VIDEOPROFILE_MPEG4_SIMPLEFBA,        "simple fba"},
+        {XA_VIDEOPROFILE_MPEG4_BASICANIMATED,    "basic animated"},
+        {XA_VIDEOPROFILE_MPEG4_HYBRID,           "hybrid"},
+        {XA_VIDEOPROFILE_MPEG4_ADVANCEDREALTIME, "advanced realtime"},
+        {XA_VIDEOPROFILE_MPEG4_CORESCALABLE,     "core scalable"},
+        {XA_VIDEOPROFILE_MPEG4_ADVANCEDCODING,   "advanced coding"},
+        {XA_VIDEOPROFILE_MPEG4_ADVANCEDCORE,     "advanced core"},
+        {XA_VIDEOPROFILE_MPEG4_ADVANCEDSCALABLE, "advanced scalable"},
+        // FIXME OpenMAX AL is out-of-date with respect to OpenMAX IL
+        {16,                                     "advanced simple"},
+    }, AVC[] = {
+        {XA_VIDEOPROFILE_AVC_BASELINE, "Baseline"},
+        {XA_VIDEOPROFILE_AVC_MAIN,     "Main"},
+        {XA_VIDEOPROFILE_AVC_EXTENDED, "Extended"},
+        {XA_VIDEOPROFILE_AVC_HIGH,     "High"},
+        {XA_VIDEOPROFILE_AVC_HIGH10,   "High 10"},
+        {XA_VIDEOPROFILE_AVC_HIGH422,  "High 4:2:2"},
+        {XA_VIDEOPROFILE_AVC_HIGH444,  "High 4:4:4"},
+    }, VC1[] = {
+        // FIXME sic should be XA_VIDEOPROFILE_*
+        {XA_VIDEOLEVEL_VC1_SIMPLE,   "simple"},
+        {XA_VIDEOLEVEL_VC1_MAIN,     "main"},
+        {XA_VIDEOLEVEL_VC1_ADVANCED, "advanced"},
+    };
+    static const id_pair_to_string_t table[] = {
+        {XA_VIDEOCODEC_MPEG2, MPEG2, sizeof(MPEG2) / sizeof(MPEG2[0])},
+        {XA_VIDEOCODEC_H263,  H263,  sizeof(H263)  / sizeof(H263[0])},
+        {XA_VIDEOCODEC_MPEG4, MPEG4, sizeof(MPEG4) / sizeof(MPEG4[0])},
+        {XA_VIDEOCODEC_AVC,   AVC,   sizeof(AVC)   / sizeof(AVC[0])},
+        {XA_VIDEOCODEC_VC1,   VC1,   sizeof(VC1)   / sizeof(VC1[0])},
+    };
+    return id_pair_to_string(codec, profile, table, sizeof(table) / sizeof(table[0]));
+}
+
+// Map a video codec and level to string
+
+const char* videoLevelToString(XAuint32 codec, XAuint32 level) {
+    static const id_to_string_t MPEG2[] = {
+        {XA_VIDEOLEVEL_MPEG2_LL,  "Low"},
+        {XA_VIDEOLEVEL_MPEG2_ML,  "Main"},
+        {XA_VIDEOLEVEL_MPEG2_H14, "H-14"},
+        {XA_VIDEOLEVEL_MPEG2_HL,  "High"},
+    }, H263[]= {
+        {XA_VIDEOLEVEL_H263_10, "10"},
+        {XA_VIDEOLEVEL_H263_20, "20"},
+        {XA_VIDEOLEVEL_H263_30, "30"},
+        {XA_VIDEOLEVEL_H263_40, "40"},
+        {XA_VIDEOLEVEL_H263_45, "45"},
+        {XA_VIDEOLEVEL_H263_50, "50"},
+        {XA_VIDEOLEVEL_H263_60, "60"},
+        {XA_VIDEOLEVEL_H263_70, "70"},
+    }, MPEG4[] = {
+        {XA_VIDEOLEVEL_MPEG4_0,  "0"},
+        {XA_VIDEOLEVEL_MPEG4_0b, "0b"},
+        {XA_VIDEOLEVEL_MPEG4_1,  "1"},
+        {XA_VIDEOLEVEL_MPEG4_2,  "2"},
+        {XA_VIDEOLEVEL_MPEG4_3,  "3"},
+        {XA_VIDEOLEVEL_MPEG4_4,  "4"},
+        {XA_VIDEOLEVEL_MPEG4_4a, "4a"},
+        // FIXME OpenMAX AL is out-of-date with respect to OpenMAX IL
+        {8,                      "5"},
+    }, AVC[] = {
+        {XA_VIDEOLEVEL_AVC_1,  "1"},
+        {XA_VIDEOLEVEL_AVC_1B, "1B"},
+        {XA_VIDEOLEVEL_AVC_11, "1.1"},
+        {XA_VIDEOLEVEL_AVC_12, "1.2"},
+        {XA_VIDEOLEVEL_AVC_13, "1.3"},
+        {XA_VIDEOLEVEL_AVC_2,  "2"},
+        {XA_VIDEOLEVEL_AVC_21, "2.1"},
+        {XA_VIDEOLEVEL_AVC_22, "2.2"},
+        {XA_VIDEOLEVEL_AVC_3,  "3"},
+        {XA_VIDEOLEVEL_AVC_31, "3.1"},
+        {XA_VIDEOLEVEL_AVC_32, "3.2"},
+        {XA_VIDEOLEVEL_AVC_4,  "4"},
+        {XA_VIDEOLEVEL_AVC_41, "4.1"},
+        {XA_VIDEOLEVEL_AVC_42, "4.2"},
+        {XA_VIDEOLEVEL_AVC_5,  "5"},
+        {XA_VIDEOLEVEL_AVC_51, "5.1"},
+    }, VC1[] = {
+        {XA_VIDEOLEVEL_VC1_LOW,    "Low"},
+        {XA_VIDEOLEVEL_VC1_MEDIUM, "Medium"},
+        {XA_VIDEOLEVEL_VC1_HIGH,   "High"},
+        {XA_VIDEOLEVEL_VC1_L0,     "L0"},
+        {XA_VIDEOLEVEL_VC1_L1,     "L1"},
+        {XA_VIDEOLEVEL_VC1_L2,     "L2"},
+        {XA_VIDEOLEVEL_VC1_L3,     "L3"},
+        {XA_VIDEOLEVEL_VC1_L4,     "L4"},
+    };
+    static const id_pair_to_string_t table[] = {
+        {XA_VIDEOCODEC_MPEG2, MPEG2, sizeof(MPEG2) / sizeof(MPEG2[0])},
+        {XA_VIDEOCODEC_H263,  H263,  sizeof(H263)  / sizeof(H263[0])},
+        {XA_VIDEOCODEC_MPEG4, MPEG4, sizeof(MPEG4) / sizeof(MPEG4[0])},
+        {XA_VIDEOCODEC_AVC,   AVC,   sizeof(AVC)   / sizeof(AVC[0])},
+        {XA_VIDEOCODEC_VC1,   VC1,   sizeof(VC1)   / sizeof(VC1[0])},
+    };
+    return id_pair_to_string(codec, level, table, sizeof(table) / sizeof(table[0]));
+}
+
 //-----------------------------------------------------------------
 void TestVideoDecoderCapabilities() {
 
@@ -111,10 +276,15 @@
         /* display the profile / level combinations */
         for(XAuint32 pl = 0 ; pl < nbCombinations ; pl++) {
             XAVideoCodecDescriptor decDescriptor;
-            res = (*decItf)->GetVideoDecoderCapabilities(decItf, decoderIds[i], &pl, &decDescriptor);
+            XAuint32 decoder = decoderIds[i];
+            res = (*decItf)->GetVideoDecoderCapabilities(decItf, decoder, &pl, &decDescriptor);
             ExitOnError(res);
-            fprintf(stdout, "%u/%u ", decDescriptor.profileSetting, decDescriptor.levelSetting);
+            XAuint32 profile = decDescriptor.profileSetting;
+            XAuint32 level = decDescriptor.levelSetting;
+            fprintf(stdout, "%u/%u ", profile, level);
             ExitOnError(res);
+            printf("(%s/%s) ", videoProfileToString(decoder, profile),
+                    videoLevelToString(decoder, level));
         }
         fprintf(stdout, "\n");
     }
diff --git a/tests/listening/slesTest_playMuteSolo.cpp b/tests/listening/slesTest_playMuteSolo.cpp
index 0159cda..a342b22 100644
--- a/tests/listening/slesTest_playMuteSolo.cpp
+++ b/tests/listening/slesTest_playMuteSolo.cpp
@@ -274,13 +274,16 @@
 
     // Attempt to get the channel count before it is necessarily known.
     // This should either return successfully with a specific value (e.g. 1 or 2),
-    // or fail with SL_RESULT_PRECONDITIONS_VIOLATED, depending on the platform.
+    // or return zero, depending on whether the channel count is known yet.
     SLuint8 numChannels = 123;
     result = (*muteSoloItf)->GetNumChannels(muteSoloItf, &numChannels);
     printf("GetNumChannels after Realize but before pre-fetch: result=%u, numChannels=%u\n",
         result, numChannels);
     if (result != SL_RESULT_PRECONDITIONS_VIOLATED) {
         ExitOnError(result);
+    } else {
+        printf("Warning: returning SL_RESULT_PRECONDITIONS_VIOLATED for unknown channel count is "
+                "obsolete; now it should return SL_RESULT_SUCCESS and zero count if unknown\n");
     }
 
     /* Initialize a context for use by the play event callback */
diff --git a/tests/native-media/jni/native-media-jni.c b/tests/native-media/jni/native-media-jni.c
index 943f47a..6c5a283 100644
--- a/tests/native-media/jni/native-media-jni.c
+++ b/tests/native-media/jni/native-media-jni.c
@@ -84,7 +84,7 @@
 // Callback for XAPlayItf through which we receive the XA_PLAYEVENT_HEADATEND event */
 void PlayCallback(XAPlayItf caller, void *pContext, XAuint32 event) {
     if (event & XA_PLAYEVENT_HEADATEND) {
-        LOGV("XA_PLAYEVENT_HEADATEND received, all MP2TS data has been decoded\n");
+        ALOGV("XA_PLAYEVENT_HEADATEND received, all MP2TS data has been decoded\n");
     }
 }
 
@@ -131,7 +131,7 @@
     if ((pBufferData == NULL) && (pBufferContext != NULL)) {
         const int processedCommand = *(int *)pBufferContext;
         if (kEosBufferCntxt == processedCommand) {
-            LOGV("EOS was processed\n");
+            ALOGV("EOS was processed\n");
             // our buffer with the EOS message has been consumed
             assert(0 == dataSize);
             goto exit;
@@ -151,32 +151,32 @@
     switch (state.index) {
     case 300:
         (*playerVolItf)->SetVolumeLevel(playerVolItf, -600); // -6dB
-        LOGV("setting volume to -6dB");
+        ALOGV("setting volume to -6dB");
         break;
     case 400:
         (*playerVolItf)->SetVolumeLevel(playerVolItf, -1200); // -12dB
-        LOGV("setting volume to -12dB");
+        ALOGV("setting volume to -12dB");
         break;
     case 500:
         (*playerVolItf)->SetVolumeLevel(playerVolItf, 0); // full volume
-        LOGV("setting volume to 0dB (full volume)");
+        ALOGV("setting volume to 0dB (full volume)");
         break;
     case 600:
         (*playerVolItf)->SetMute(playerVolItf, XA_BOOLEAN_TRUE); // mute
-        LOGV("muting player");
+        ALOGV("muting player");
         break;
     case 700:
         (*playerVolItf)->SetMute(playerVolItf, XA_BOOLEAN_FALSE); // unmute
-        LOGV("unmuting player");
+        ALOGV("unmuting player");
         break;
     case 800:
         (*playerVolItf)->SetStereoPosition(playerVolItf, -1000);
         (*playerVolItf)->EnableStereoPosition(playerVolItf, XA_BOOLEAN_TRUE);
-        LOGV("pan sound to the left (hard-left)");
+        ALOGV("pan sound to the left (hard-left)");
         break;
     case 900:
         (*playerVolItf)->EnableStereoPosition(playerVolItf, XA_BOOLEAN_FALSE);
-        LOGV("disabling stereo position");
+        ALOGV("disabling stereo position");
         break;
     default:
         break;
@@ -228,7 +228,7 @@
         void * pEventData,
         void * pContext )
 {
-    LOGV("StreamChangeCallback called for stream %u", streamIndex);
+    ALOGV("StreamChangeCallback called for stream %u", streamIndex);
     // pContext was specified as NULL at RegisterStreamChangeCallback and is unused here
     assert(NULL == pContext);
     switch (eventId) {
@@ -305,7 +305,7 @@
         return JNI_FALSE;
     }
     assert(1 <= nbRead && nbRead <= NB_BUFFERS);
-    LOGV("Initially queueing %u buffers of %u bytes each", nbRead, BUFFER_SIZE);
+    ALOGV("Initially queueing %u buffers of %u bytes each", nbRead, BUFFER_SIZE);
 
     /* Enqueue the content of our cache before starting to play,
        we don't want to starve the player */
@@ -518,15 +518,6 @@
 }
 
 
-// set the surface texture
-void Java_com_example_nativemedia_NativeMedia_setSurfaceTexture(JNIEnv *env, jclass clazz,
-        jobject surfaceTexture)
-{
-    // obtain a native window from a Java surface texture
-    theNativeWindow = ANativeWindow_fromSurfaceTexture(env, surfaceTexture);
-}
-
-
 // rewind the streaming media player
 void Java_com_example_nativemedia_NativeMedia_rewindStreamingMediaPlayer(JNIEnv *env, jclass clazz)
 {
diff --git a/tests/native-media/src/com/example/nativemedia/NativeMedia.java b/tests/native-media/src/com/example/nativemedia/NativeMedia.java
index 01636d2..816d01f 100644
--- a/tests/native-media/src/com/example/nativemedia/NativeMedia.java
+++ b/tests/native-media/src/com/example/nativemedia/NativeMedia.java
@@ -84,7 +84,8 @@
         mSurfaceHolder1.addCallback(new SurfaceHolder.Callback() {
 
             public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
-                Log.v(TAG, "surfaceChanged format=" + format + ", width=" + width + ", height=" + height);
+                Log.v(TAG, "surfaceChanged format=" + format + ", width=" + width + ", height=" +
+                        height);
             }
 
             public void surfaceCreated(SurfaceHolder holder) {
@@ -105,7 +106,8 @@
         mSurfaceHolder2.addCallback(new SurfaceHolder.Callback() {
 
             public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
-                Log.v(TAG, "surfaceChanged format=" + format + ", width=" + width + ", height=" + height);
+                Log.v(TAG, "surfaceChanged format=" + format + ", width=" + width + ", height=" +
+                        height);
             }
 
             public void surfaceCreated(SurfaceHolder holder) {
@@ -329,7 +331,6 @@
     public static native void setPlayingStreamingMediaPlayer(boolean isPlaying);
     public static native void shutdown();
     public static native void setSurface(Surface surface);
-    public static native void setSurfaceTexture(SurfaceTexture surfaceTexture);
     public static native void rewindStreamingMediaPlayer();
 
     /** Load jni .so on initialization */
@@ -388,7 +389,9 @@
         }
 
         void useAsSinkForNative() {
-            setSurfaceTexture(mMyGLSurfaceView.getSurfaceTexture());
+            Surface surface = new Surface(mMyGLSurfaceView.getSurfaceTexture());
+            setSurface(surface);
+            surface.release();
         }
 
     }
diff --git a/tests/sandbox/nativewindow.cpp b/tests/sandbox/nativewindow.cpp
index 21eb0f3..8fe7fb0 100644
--- a/tests/sandbox/nativewindow.cpp
+++ b/tests/sandbox/nativewindow.cpp
@@ -76,9 +76,19 @@
     return surface.get();
 }
 
+void disposeNativeWindow_()
+{
+    gComposerClient->dispose();
+}
+
 } // namespace android
 
 ANativeWindow *getNativeWindow()
 {
     return android::getNativeWindow_();
 }
+
+void disposeNativeWindow()
+{
+    android::disposeNativeWindow_();
+}
diff --git a/tests/sandbox/nativewindow.h b/tests/sandbox/nativewindow.h
index 8903d8e..24c9399 100644
--- a/tests/sandbox/nativewindow.h
+++ b/tests/sandbox/nativewindow.h
@@ -5,6 +5,7 @@
 #endif
 
 extern ANativeWindow *getNativeWindow();
+extern void disposeNativeWindow();
 
 #ifdef __cplusplus
 }
diff --git a/tests/sandbox/streamSource/slesTestPlayStream.cpp b/tests/sandbox/streamSource/slesTestPlayStream.cpp
index 976c116..7ce17c5 100644
--- a/tests/sandbox/streamSource/slesTestPlayStream.cpp
+++ b/tests/sandbox/streamSource/slesTestPlayStream.cpp
@@ -159,8 +159,8 @@
                 SLAndroidBufferItem msgEos;
                 msgEos.itemKey = SL_ANDROID_ITEMKEY_EOS;
                 msgEos.itemSize = 0;
-                // EOS message has no parameters, so the total size of the message is the size of the key
-                //   plus the size if itemSize, both SLuint32
+                // EOS message has no parameters, so the total size of the message is the size of
+                //   the key plus the size of itemSize, both SLuint32
                 (*caller)->Enqueue(caller,  NULL /*pBufferContext*/,
                         NULL /*pData*/, 0 /*dataLength*/,
                         &msgEos /*pMsg*/,
@@ -383,7 +383,8 @@
 
     fprintf(stdout, "OpenSL ES test %s: exercises SLPlayItf, SLVolumeItf, SLAndroidBufferQueue \n",
             argv[0]);
-    fprintf(stdout, "and AudioPlayer with SL_DATALOCATOR_ANDROIDBUFFERQUEUE source / OutputMix sink\n");
+    fprintf(stdout, "and AudioPlayer with SL_DATALOCATOR_ANDROIDBUFFERQUEUE source / OutputMix "
+            "sink\n");
     fprintf(stdout, "Plays a sound and stops after its reported duration\n\n");
 
     if (argc == 1) {
diff --git a/tests/sandbox/xaplay.c b/tests/sandbox/xaplay.c
index 82a6e04..a93f982 100644
--- a/tests/sandbox/xaplay.c
+++ b/tests/sandbox/xaplay.c
@@ -38,18 +38,20 @@
     char data[MPEG2TS_PACKET_SIZE];
 } MPEG2TS_Packet;
 
-#if 0
-// Each buffer in Android buffer queue
-typedef struct {
-    MPEG2TS_Packet packets[PACKETS_PER_BUFFER];
-} Buffer;
-#endif
-
 // Globals shared between main thread and buffer queue callback
 MPEG2TS_Packet *packets;
-size_t numPackets;
-size_t curPacket;
-size_t discPacket;
+size_t totalPackets;    // total number of packets in input file
+size_t numPackets;      // number of packets to play, defaults to totalPackets - firstPacket
+size_t curPacket;       // current packet index
+size_t discPacket;      // discontinuity packet index, defaults to no discontinuity requested
+size_t afterDiscPacket; // packet index to switch to after the discontinuity
+size_t firstPacket;     // first packet index to be played, defaults to zero
+size_t lastPacket;      // last packet index to be played
+size_t formatPacket;    // format change packet index, defaults to no format change requested
+XAmillisecond seekPos = XA_TIME_UNKNOWN;    // seek to this position initially
+int pauseMs = -1;       // pause after this many ms into playback
+XAboolean forceCallbackFailure = XA_BOOLEAN_FALSE;  // force callback failures occasionally
+XAboolean sentEOS = XA_BOOLEAN_FALSE;   // whether we have enqueued EOS yet
 
 // These are extensions to OpenMAX AL 1.0.1 values
 
@@ -129,24 +131,21 @@
     // pContext is unused here, so we pass NULL
     assert(NULL == pContext);
 
+    XAresult result;
     XAmillisecond position;
+    result = (*caller)->GetPosition(caller, &position);
+    assert(XA_RESULT_SUCCESS == result);
 
     if (XA_PLAYEVENT_HEADATEND & event) {
-        printf("XA_PLAYEVENT_HEADATEND reached\n");
-        //SignalEos();
+        printf("XA_PLAYEVENT_HEADATEND current position=%u ms\n", position);
     }
 
-    XAresult result;
     if (XA_PLAYEVENT_HEADATNEWPOS & event) {
-        result = (*caller)->GetPosition(caller, &position);
-        assert(XA_RESULT_SUCCESS == result);
-        printf("XA_PLAYEVENT_HEADATNEWPOS current position=%ums\n", position);
+        printf("XA_PLAYEVENT_HEADATNEWPOS current position=%u ms\n", position);
     }
 
     if (XA_PLAYEVENT_HEADATMARKER & event) {
-        result = (*caller)->GetPosition(caller, &position);
-        assert(XA_RESULT_SUCCESS == result);
-        printf("XA_PLAYEVENT_HEADATMARKER current position=%ums\n", position);
+        printf("XA_PLAYEVENT_HEADATMARKER current position=%u ms\n", position);
     }
 }
 
@@ -161,28 +160,41 @@
         const XAAndroidBufferItem *pItems,
         XAuint32 itemsLength)
 {
+    XAPlayItf playerPlay = (XAPlayItf) pCallbackContext;
     // enqueue the .ts data directly from mapped memory, so ignore the empty buffer pBufferData
-    if (curPacket <= numPackets) {
+    if (curPacket <= lastPacket) {
         static const XAAndroidBufferItem discontinuity = {XA_ANDROID_ITEMKEY_DISCONTINUITY, 0};
         static const XAAndroidBufferItem eos = {XA_ANDROID_ITEMKEY_EOS, 0};
+        static const XAAndroidBufferItem formatChange = {XA_ANDROID_ITEMKEY_FORMAT_CHANGE, 0};
         const XAAndroidBufferItem *items;
         XAuint32 itemSize;
         // compute number of packets to be enqueued in this buffer
-        XAuint32 packetsThisBuffer = numPackets - curPacket;
+        XAuint32 packetsThisBuffer = lastPacket - curPacket;
         if (packetsThisBuffer > PACKETS_PER_BUFFER) {
             packetsThisBuffer = PACKETS_PER_BUFFER;
         }
         // last packet? this should only happen once
-        if (curPacket == numPackets) {
-            (void) write(1, "e", 1);
+        if (curPacket == lastPacket) {
+            if (sentEOS) {
+                printf("buffer completion callback after EOS\n");
+                return XA_RESULT_SUCCESS;
+            }
+            printf("sending EOS\n");
             items = &eos;
             itemSize = sizeof(eos);
+            sentEOS = XA_BOOLEAN_TRUE;
         // discontinuity requested?
         } else if (curPacket == discPacket) {
-            printf("sending discontinuity, rewinding from beginning of stream\n");
+            printf("sending discontinuity at packet %zu, then resuming at packet %zu\n", discPacket,
+                    afterDiscPacket);
             items = &discontinuity;
             itemSize = sizeof(discontinuity);
-            curPacket = 0;
+            curPacket = afterDiscPacket;
+        // format change requested?
+        } else if (curPacket == formatPacket) {
+            printf("sending format change");
+            items = &formatChange;
+            itemSize = sizeof(formatChange);
         // pure data with no items
         } else {
             items = NULL;
@@ -195,8 +207,40 @@
                 sizeof(MPEG2TS_Packet) * packetsThisBuffer, items, itemSize);
         assert(XA_RESULT_SUCCESS == result);
         curPacket += packetsThisBuffer;
+        // display position periodically
+        if (curPacket % 1000 == 0) {
+            XAmillisecond position;
+            result = (*playerPlay)->GetPosition(playerPlay, &position);
+            assert(XA_RESULT_SUCCESS == result);
+            printf("Position after enqueueing packet %u: %u ms\n", curPacket, position);
+        }
     }
-    return XA_RESULT_SUCCESS;
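+    // With the -c option, occasionally return a bogus (non-success) result code to exercise the error path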
+    if (forceCallbackFailure && (curPacket % 1230 == 0)) {
+        return (XAresult) curPacket;
+    } else {
+        return XA_RESULT_SUCCESS;
+    }
+}
+
+// convert a domain type to string
+static const char *domainToString(XAuint32 domain)
+{
+    switch (domain) {
+    case 0: // FIXME There's a private declaration '#define XA_DOMAINTYPE_CONTAINER 0' in src/data.h
+            // but we don't have access to it. Plan to file a bug with Khronos about this symbol.
+        return "media container";
+#define _(x) case x: return #x;
+    _(XA_DOMAINTYPE_AUDIO)
+    _(XA_DOMAINTYPE_VIDEO)
+    _(XA_DOMAINTYPE_IMAGE)
+    _(XA_DOMAINTYPE_TIMEDTEXT)
+    _(XA_DOMAINTYPE_MIDI)
+    _(XA_DOMAINTYPE_VENDOR)
+    _(XA_DOMAINTYPE_UNKNOWN)
+#undef _
+    default:
+        return "unknown";
+    }
 }
 
 // main program
@@ -207,9 +251,6 @@
 
     XAboolean abq = XA_BOOLEAN_FALSE;   // use AndroidBufferQueue, default is URI
     XAboolean looping = XA_BOOLEAN_FALSE;
-#ifdef REINITIALIZE
-    int reinit_counter = 0;
-#endif
     for (i = 1; i < argc; ++i) {
         const char *arg = argv[i];
         if (arg[0] != '-')
@@ -218,17 +259,33 @@
         case 'a':
             abq = XA_BOOLEAN_TRUE;
             break;
+        case 'c':
+            forceCallbackFailure = XA_BOOLEAN_TRUE;
+            break;
         case 'd':
             discPacket = atoi(&arg[2]);
             break;
+        case 'D':
+            afterDiscPacket = atoi(&arg[2]);
+            break;
+        case 'f':
+            firstPacket = atoi(&arg[2]);
+            break;
+        case 'F':
+            formatPacket = atoi(&arg[2]);
+            break;
         case 'l':
             looping = XA_BOOLEAN_TRUE;
             break;
-#ifdef REINITIALIZE
-        case 'r':
-            reinit_counter = atoi(&arg[2]);
+        case 'n':
+            numPackets = atoi(&arg[2]);
             break;
-#endif
+        case 'p':
+            pauseMs = atoi(&arg[2]);
+            break;
+        case 's':
+            seekPos = atoi(&arg[2]);
+            break;
         default:
             fprintf(stderr, "%s: unknown option %s\n", prog, arg);
             break;
@@ -237,7 +294,18 @@
 
     // check that exactly one URI was specified
     if (argc - i != 1) {
-        fprintf(stderr, "usage: %s [-a] [-d#] [-l] uri\n", prog);
+        fprintf(stderr, "usage: %s [-a] [-c] [-d#] [-D#] [-f#] [-F#] [-l] [-n#] [-p#] [-s#] uri\n",
+                prog);
+        fprintf(stderr, "    -a  Use Android buffer queue to supply data, default is URI\n");
+        fprintf(stderr, "    -c  Force callback to return an error randomly, for debugging only\n");
+        fprintf(stderr, "    -d# Packet index to insert a discontinuity, default is none\n");
+        fprintf(stderr, "    -D# Packet index to switch to after the discontinuity\n");
+        fprintf(stderr, "    -f# First packet index, defaults to 0\n");
+        fprintf(stderr, "    -F# Packet index to insert a format change, default is none\n");
+        fprintf(stderr, "    -l  Enable looping, for URI only\n");
+        fprintf(stderr, "    -n# Number of packets to enqueue\n");
+        fprintf(stderr, "    -p# Pause playback for 5 seconds after this many milliseconds\n");
+        fprintf(stderr, "    -s# Seek position in milliseconds, for URI only\n");
         return EXIT_FAILURE;
     }
     const char *uri = argv[i];
@@ -273,16 +341,35 @@
                     MPEG2TS_PACKET_SIZE);
         }
         packets = (MPEG2TS_Packet *) ptr;
-        numPackets = filelen / MPEG2TS_PACKET_SIZE;
-        printf("%s has %zu packets\n", uri, numPackets);
+        totalPackets = filelen / MPEG2TS_PACKET_SIZE;
+        printf("%s has %zu total packets\n", uri, totalPackets);
+        if (firstPacket >= totalPackets) {
+            fprintf(stderr, "-f%zu ignored\n", firstPacket);
+            firstPacket = 0;
+        }
+        if (numPackets == 0) {
+            numPackets = totalPackets - firstPacket;
+        } else if (firstPacket + numPackets > totalPackets) {
+            fprintf(stderr, "-n%zu ignored\n", numPackets);
+            numPackets = totalPackets - firstPacket;
+        }
+        lastPacket = firstPacket + numPackets;
+        if (discPacket != 0 && (discPacket < firstPacket || discPacket >= lastPacket)) {
+            fprintf(stderr, "-d%zu ignored\n", discPacket);
+            discPacket = 0;
+        }
+        if (afterDiscPacket < firstPacket || afterDiscPacket >= lastPacket) {
+            fprintf(stderr, "-D%zu ignored\n", afterDiscPacket);
+            afterDiscPacket = 0;
+        }
+        if (formatPacket != 0 && (formatPacket < firstPacket || formatPacket >= lastPacket)) {
+            fprintf(stderr, "-F%zu ignored\n", formatPacket);
+            formatPacket = 0;
+        }
     }
 
     ANativeWindow *nativeWindow;
 
-#ifdef REINITIALIZE
-reinitialize:    ;
-#endif
-
     XAresult result;
     XAObjectItf engineObject;
 
@@ -348,7 +435,7 @@
     XAObjectItf playerObject;
     XAInterfaceID ids[4] = {XA_IID_STREAMINFORMATION, XA_IID_PREFETCHSTATUS, XA_IID_SEEK,
             XA_IID_ANDROIDBUFFERQUEUESOURCE};
-    XAboolean req[4] = {XA_BOOLEAN_TRUE, XA_BOOLEAN_TRUE, XA_BOOLEAN_TRUE, XA_BOOLEAN_TRUE};
+    XAboolean req[4] = {XA_BOOLEAN_TRUE, XA_BOOLEAN_TRUE, XA_BOOLEAN_FALSE, XA_BOOLEAN_TRUE};
     result = (*engineEngine)->CreateMediaPlayer(engineEngine, &playerObject, &dataSrc, NULL,
             &audioSnk, nativeWindow != NULL ? &imageVideoSink : NULL, NULL, NULL, abq ? 4 : 3, ids,
             req);
@@ -358,6 +445,11 @@
     result = (*playerObject)->Realize(playerObject, XA_BOOLEAN_FALSE);
     assert(XA_RESULT_SUCCESS == result);
 
+    // get the play interface
+    XAPlayItf playerPlay;
+    result = (*playerObject)->GetInterface(playerObject, XA_IID_PLAY, &playerPlay);
+    assert(XA_RESULT_SUCCESS == result);
+
     if (abq) {
 
         // get the Android buffer queue interface
@@ -368,24 +460,30 @@
 
         // register the buffer queue callback
         result = (*playerAndroidBufferQueue)->RegisterCallback(playerAndroidBufferQueue,
-                bufferQueueCallback, NULL);
+                bufferQueueCallback, (void *) playerPlay);
         assert(XA_RESULT_SUCCESS == result);
         result = (*playerAndroidBufferQueue)->SetCallbackEventsMask(playerAndroidBufferQueue,
                 XA_ANDROIDBUFFERQUEUEEVENT_PROCESSED);
         assert(XA_RESULT_SUCCESS == result);
 
+        // set the player's state to paused, to start prefetching
+        printf("start early prefetch\n");
+        result = (*playerPlay)->SetPlayState(playerPlay, XA_PLAYSTATE_PAUSED);
+        assert(XA_RESULT_SUCCESS == result);
+
         // enqueue the initial buffers until buffer queue is full
         XAuint32 packetsThisBuffer;
-        for (curPacket = 0; curPacket < numPackets; curPacket += packetsThisBuffer) {
+        for (curPacket = firstPacket; curPacket < lastPacket; curPacket += packetsThisBuffer) {
             // handle the unlikely case of a very short .ts
-            packetsThisBuffer = numPackets - curPacket;
+            packetsThisBuffer = lastPacket - curPacket;
             if (packetsThisBuffer > PACKETS_PER_BUFFER) {
                 packetsThisBuffer = PACKETS_PER_BUFFER;
             }
             result = (*playerAndroidBufferQueue)->Enqueue(playerAndroidBufferQueue, NULL,
                     &packets[curPacket], MPEG2TS_PACKET_SIZE * packetsThisBuffer, NULL, 0);
             if (XA_RESULT_BUFFER_INSUFFICIENT == result) {
-                printf("Enqueued initial %u packets in %u buffers\n", curPacket, curPacket / PACKETS_PER_BUFFER);
+                printf("Enqueued initial %u packets in %u buffers\n", curPacket - firstPacket,
+                        (curPacket - firstPacket + PACKETS_PER_BUFFER - 1) / PACKETS_PER_BUFFER);
                 break;
             }
             assert(XA_RESULT_SUCCESS == result);
@@ -418,46 +516,59 @@
             XA_PREFETCHEVENT_FILLLEVELCHANGE | XA_PREFETCHEVENT_STATUSCHANGE);
     assert(XA_RESULT_SUCCESS == result);
 
-    // get the seek interface
-    if (looping) {
+    // get the seek interface for seeking and/or looping
+    if (looping || seekPos != XA_TIME_UNKNOWN) {
         XASeekItf playerSeek;
         result = (*playerObject)->GetInterface(playerObject, XA_IID_SEEK, &playerSeek);
         assert(XA_RESULT_SUCCESS == result);
-        result = (*playerSeek)->SetLoop(playerSeek, XA_BOOLEAN_TRUE, (XAmillisecond) 0,
-                XA_TIME_UNKNOWN);
-        assert(XA_RESULT_SUCCESS == result);
+        if (seekPos != XA_TIME_UNKNOWN) {
+            result = (*playerSeek)->SetPosition(playerSeek, seekPos, XA_SEEKMODE_ACCURATE);
+            if (XA_RESULT_FEATURE_UNSUPPORTED == result) {
+                fprintf(stderr, "-s%u (seek to initial position) is unsupported\n", seekPos);
+            } else {
+                assert(XA_RESULT_SUCCESS == result);
+            }
+        }
+        if (looping) {
+            result = (*playerSeek)->SetLoop(playerSeek, XA_BOOLEAN_TRUE, (XAmillisecond) 0,
+                    XA_TIME_UNKNOWN);
+            if (XA_RESULT_FEATURE_UNSUPPORTED == result) {
+                fprintf(stderr, "-l (looping) is unsupported\n");
+            } else {
+                assert(XA_RESULT_SUCCESS == result);
+            }
+        }
     }
 
-    // get the play interface
-    XAPlayItf playerPlay;
-    result = (*playerObject)->GetInterface(playerObject, XA_IID_PLAY, &playerPlay);
-    assert(XA_RESULT_SUCCESS == result);
-
     // register play event callback
     result = (*playerPlay)->RegisterCallback(playerPlay, playEventCallback, NULL);
     assert(XA_RESULT_SUCCESS == result);
-#if 0 // FIXME broken
     result = (*playerPlay)->SetCallbackEventsMask(playerPlay,
             XA_PLAYEVENT_HEADATEND | XA_PLAYEVENT_HEADATMARKER | XA_PLAYEVENT_HEADATNEWPOS);
     assert(XA_RESULT_SUCCESS == result);
-#endif
 
     // set a marker
-    result = (*playerPlay)->SetMarkerPosition(playerPlay, 10000);
+    result = (*playerPlay)->SetMarkerPosition(playerPlay, 5000);
     assert(XA_RESULT_SUCCESS == result);
 
     // set position update period
-    result = (*playerPlay)->SetPositionUpdatePeriod(playerPlay, 1000);
+    result = (*playerPlay)->SetPositionUpdatePeriod(playerPlay, 2000);
     assert(XA_RESULT_SUCCESS == result);
 
-    // get the duration
+    // get the position before prefetch
+    XAmillisecond position;
+    result = (*playerPlay)->GetPosition(playerPlay, &position);
+    assert(XA_RESULT_SUCCESS == result);
+    printf("Position before prefetch: %u ms\n", position);
+
+    // get the duration before prefetch
     XAmillisecond duration;
     result = (*playerPlay)->GetDuration(playerPlay, &duration);
     assert(XA_RESULT_SUCCESS == result);
     if (XA_TIME_UNKNOWN == duration)
-        printf("Duration: unknown\n");
+        printf("Duration before prefetch: unknown as expected\n");
     else
-        printf("Duration: %.1f\n", duration / 1000.0f);
+        printf("Duration before prefetch: %.1f (surprise!)\n", duration / 1000.0f);
 
     // set the player's state to paused, to start prefetching
     printf("start prefetch\n");
@@ -475,15 +586,209 @@
         goto destroyRes;
     }
 
-    // get duration again, now it should be known
+    // get the position after prefetch
+    result = (*playerPlay)->GetPosition(playerPlay, &position);
+    assert(XA_RESULT_SUCCESS == result);
+    printf("Position after prefetch: %u ms\n", position);
+
+    // get duration again, now it should be known for the file source or unknown for TS
     result = (*playerPlay)->GetDuration(playerPlay, &duration);
     assert(XA_RESULT_SUCCESS == result);
     if (duration == XA_TIME_UNKNOWN) {
-        fprintf(stdout, "Content duration is unknown (after prefetch completed)\n");
+        printf("Duration after prefetch: unknown (expected for TS, unexpected for file)\n");
     } else {
-        fprintf(stdout, "Content duration is %u ms (after prefetch completed)\n", duration);
+        printf("Duration after prefetch: %u ms (expected for file, unexpected for TS)\n", duration);
     }
 
+    // query for media container information
+    result = (*playerStreamInformation)->QueryMediaContainerInformation(playerStreamInformation,
+            NULL);
+    assert(XA_RESULT_PARAMETER_INVALID == result);
+    XAMediaContainerInformation mediaContainerInformation;
+    // this verifies it is filling in all the fields
+    memset(&mediaContainerInformation, 0x55, sizeof(XAMediaContainerInformation));
+    result = (*playerStreamInformation)->QueryMediaContainerInformation(playerStreamInformation,
+            &mediaContainerInformation);
+    assert(XA_RESULT_SUCCESS == result);
+    printf("Media container information:\n");
+    printf("  containerType = %u\n", mediaContainerInformation.containerType);
+    printf("  mediaDuration = %u\n", mediaContainerInformation.mediaDuration);
+    printf("  numStreams = %u\n", mediaContainerInformation.numStreams);
+
+    // Now query each of the streams.  Note that stream indices go up to and including
+    // mediaContainerInformation.numStreams, because stream 0 is the container itself,
+    // while stream 1 to mediaContainerInformation.numStreams are the contained streams.
+    XAuint32 numStreams = mediaContainerInformation.numStreams;
+    XAuint32 streamIndex;
+    for (streamIndex = 0; streamIndex <= mediaContainerInformation.numStreams; ++streamIndex) {
+        XAuint32 domain;
+        XAuint16 nameSize;
+        XAchar name[64];
+        printf("stream[%u]:\n", streamIndex);
+        if (streamIndex == 0) {
+            result = (*playerStreamInformation)->QueryStreamType(playerStreamInformation,
+                    streamIndex, &domain);
+            assert(XA_RESULT_PARAMETER_INVALID == result);
+            result = (*playerStreamInformation)->QueryStreamInformation(playerStreamInformation,
+                    streamIndex, &mediaContainerInformation);
+            //assert(XA_RESULT_PARAMETER_INVALID == result);
+            nameSize = sizeof(name);
+            result = (*playerStreamInformation)->QueryStreamName(playerStreamInformation,
+streamIndex, &nameSize, name);
+            //assert(XA_RESULT_PARAMETER_INVALID == result);
+            continue;
+        }
+        result = (*playerStreamInformation)->QueryStreamType(playerStreamInformation, streamIndex,
+                NULL);
+        assert(XA_RESULT_PARAMETER_INVALID == result);
+        domain = 12345;
+        result = (*playerStreamInformation)->QueryStreamType(playerStreamInformation, streamIndex,
+                &domain);
+        assert(XA_RESULT_SUCCESS == result);
+        printf(" QueryStreamType: domain = 0x%X (%s)\n", domain, domainToString(domain));
+        nameSize = sizeof(name);
+        result = (*playerStreamInformation)->QueryStreamName(playerStreamInformation, streamIndex,
+                &nameSize, name);
+#if 0
+        assert(XA_RESULT_SUCCESS == result);
+        assert(sizeof(name) >= nameSize);
+        if (sizeof(name) != nameSize) {
+            assert('\0' == name[nameSize]);
+        }
+        printf(" QueryStreamName: nameSize=%u, name=\"%.*s\"\n", nameSize, nameSize, name);
+        result = (*playerStreamInformation)->QueryStreamInformation(playerStreamInformation,
+                streamIndex, NULL);
+        assert(XA_RESULT_PARAMETER_INVALID == result);
+#endif
+
+        printf(" QueryStreamInformation:\n");
+        switch (domain) {
+#if 0
+        case 0: // FIXME container
+            result = (*playerStreamInformation)->QueryStreamInformation(playerStreamInformation,
+streamIndex, &mediaContainerInformation);
+            assert(XA_RESULT_SUCCESS == result);
+            printf("  containerType = %u (1=unspecified)\n",
+                    mediaContainerInformation.containerType);
+            printf("  mediaDuration = %u\n", mediaContainerInformation.mediaDuration);
+            printf("  numStreams = %u\n", mediaContainerInformation.numStreams);
+            break;
+#endif
+        case XA_DOMAINTYPE_AUDIO: {
+            XAAudioStreamInformation audioStreamInformation;
+            memset(&audioStreamInformation, 0x55, sizeof(XAAudioStreamInformation));
+            result = (*playerStreamInformation)->QueryStreamInformation(playerStreamInformation,
+                    streamIndex, &audioStreamInformation);
+            assert(XA_RESULT_PARAMETER_INVALID == result);
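+            // the query is expected to fail above, so these printouts should still show the 0x55 fill pattern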
+            printf("  codecId = %u\n", audioStreamInformation.codecId);
+            printf("  channels = %u\n", audioStreamInformation.channels);
+            printf("  sampleRate = %u\n", audioStreamInformation.sampleRate);
+            printf("  bitRate = %u\n", audioStreamInformation.bitRate);
+            printf("  langCountry = \"%s\"\n", audioStreamInformation.langCountry);
+            printf("  duration = %u\n", audioStreamInformation.duration);
+            } break;
+        case XA_DOMAINTYPE_VIDEO: {
+            XAVideoStreamInformation videoStreamInformation;
+            result = (*playerStreamInformation)->QueryStreamInformation(playerStreamInformation,
+                    streamIndex, &videoStreamInformation);
+            assert(XA_RESULT_SUCCESS == result);
+            printf("  codecId = %u\n", videoStreamInformation.codecId);
+            printf("  width = %u\n", videoStreamInformation.width);
+            printf("  height = %u\n", videoStreamInformation.height);
+            printf("  frameRate = %u\n", videoStreamInformation.frameRate);
+            printf("  bitRate = %u\n", videoStreamInformation.bitRate);
+            printf("  duration = %u\n", videoStreamInformation.duration);
+            } break;
+        case XA_DOMAINTYPE_IMAGE: {
+            XAImageStreamInformation imageStreamInformation;
+            result = (*playerStreamInformation)->QueryStreamInformation(playerStreamInformation,
+                    streamIndex, &imageStreamInformation);
+            assert(XA_RESULT_SUCCESS == result);
+            printf("  codecId = %u\n", imageStreamInformation.codecId);
+            printf("  width = %u\n", imageStreamInformation.width);
+            printf("  height = %u\n", imageStreamInformation.height);
+            printf("  presentationDuration = %u\n", imageStreamInformation.presentationDuration);
+            } break;
+        case XA_DOMAINTYPE_TIMEDTEXT: {
+            XATimedTextStreamInformation timedTextStreamInformation;
+            result = (*playerStreamInformation)->QueryStreamInformation(playerStreamInformation,
+                    streamIndex, &timedTextStreamInformation);
+            assert(XA_RESULT_SUCCESS == result);
+            printf("  layer = %u\n", timedTextStreamInformation.layer);
+            printf("  width = %u\n", timedTextStreamInformation.width);
+            printf("  height = %u\n", timedTextStreamInformation.height);
+            printf("  tx = %u\n", timedTextStreamInformation.tx);
+            printf("  ty = %u\n", timedTextStreamInformation.ty);
+            printf("  bitrate = %u\n", timedTextStreamInformation.bitrate);
+            printf("  langCountry = \"%s\"\n", timedTextStreamInformation.langCountry);
+            printf("  duration = %u\n", timedTextStreamInformation.duration);
+            } break;
+        case XA_DOMAINTYPE_MIDI: {
+            XAMIDIStreamInformation midiStreamInformation;
+            result = (*playerStreamInformation)->QueryStreamInformation(playerStreamInformation,
+                    streamIndex, &midiStreamInformation);
+            assert(XA_RESULT_SUCCESS == result);
+            printf("  channels = %u\n", midiStreamInformation.channels);
+            printf("  tracks = %u\n", midiStreamInformation.tracks);
+            printf("  bankType = %u\n", midiStreamInformation.bankType);
+            printf("  langCountry = \"%s\"\n", midiStreamInformation.langCountry);
+            printf("  duration = %u\n", midiStreamInformation.duration);
+            } break;
+        case XA_DOMAINTYPE_VENDOR: {
+            XAVendorStreamInformation vendorStreamInformation;
+            result = (*playerStreamInformation)->QueryStreamInformation(playerStreamInformation,
+                    streamIndex, &vendorStreamInformation);
+            assert(XA_RESULT_SUCCESS == result);
+            printf("  VendorStreamInfo = %p\n", vendorStreamInformation.VendorStreamInfo);
+            } break;
+        case XA_DOMAINTYPE_UNKNOWN: {
+            // "It is not possible to query Information for streams identified as
+            // XA_DOMAINTYPE_UNKNOWN, any attempt to do so shall return a result of
+            // XA_RESULT_CONTENT_UNSUPPORTED."
+            char big[256];
+            result = (*playerStreamInformation)->QueryStreamInformation(playerStreamInformation,
+                    streamIndex, &big);
+            assert(XA_RESULT_CONTENT_UNSUPPORTED == result);
+            } break;
+        default:
+            break;
+        }
+
+    }
+    // Try one more stream index beyond the valid range
+    XAuint32 domain;
+    result = (*playerStreamInformation)->QueryStreamType(playerStreamInformation, streamIndex,
+            &domain);
+    assert(XA_RESULT_PARAMETER_INVALID == result);
+    XATimedTextStreamInformation big;
+    result = (*playerStreamInformation)->QueryStreamInformation(playerStreamInformation,
+            streamIndex, &big);
+    assert(XA_RESULT_PARAMETER_INVALID == result);
+
+    printf("QueryActiveStreams:\n");
+    result = (*playerStreamInformation)->QueryActiveStreams(playerStreamInformation, NULL, NULL);
+    assert(XA_RESULT_PARAMETER_INVALID == result);
+    XAuint32 numStreams1 = 0x12345678;
+    result = (*playerStreamInformation)->QueryActiveStreams(playerStreamInformation, &numStreams1,
+            NULL);
+    assert(XA_RESULT_SUCCESS == result);
+    printf("  numStreams = %u\n", numStreams1);
+    XAboolean *activeStreams = calloc(numStreams1 + 1, sizeof(XAboolean));
+    assert(NULL != activeStreams);
+    printf("  active stream(s) =");
+    XAuint32 numStreams2 = numStreams1;
+    result = (*playerStreamInformation)->QueryActiveStreams(playerStreamInformation, &numStreams2,
+            activeStreams);
+    assert(XA_RESULT_SUCCESS == result);
+    assert(numStreams2 == numStreams1);
+    for (streamIndex = 0; streamIndex <= numStreams1; ++streamIndex) {
+        if (activeStreams[streamIndex])
+            printf(" %u", streamIndex);
+    }
+    printf("\n");
+
+    // SetActiveStream is untested
+
     // start playing
     printf("starting to play\n");
     result = (*playerPlay)->SetPlayState(playerPlay, XA_PLAYSTATE_PLAYING);
@@ -497,13 +802,41 @@
         if (status == XA_PLAYSTATE_PAUSED)
             break;
         assert(status == XA_PLAYSTATE_PLAYING);
-        sleep(1);
+        usleep(100000);
+        if (pauseMs >= 0) {
+            result = (*playerPlay)->GetPosition(playerPlay, &position);
+            assert(XA_RESULT_SUCCESS == result);
+            if (position >= pauseMs) {
+                printf("Pausing for 5 seconds at position %u\n", position);
+                result = (*playerPlay)->SetPlayState(playerPlay, XA_PLAYSTATE_PAUSED);
+                assert(XA_RESULT_SUCCESS == result);
+                sleep(5);
+                // FIXME clear ABQ queue here
+                result = (*playerPlay)->SetPlayState(playerPlay, XA_PLAYSTATE_PLAYING);
+                assert(XA_RESULT_SUCCESS == result);
+                pauseMs = -1;
+            }
+        }
     }
 
     // wait a bit more in case of additional callbacks
     printf("end of media\n");
     sleep(3);
 
+    // get final position
+    result = (*playerPlay)->GetPosition(playerPlay, &position);
+    assert(XA_RESULT_SUCCESS == result);
+    printf("Position at end: %u ms\n", position);
+
+    // get duration again, now it should be known
+    result = (*playerPlay)->GetDuration(playerPlay, &duration);
+    assert(XA_RESULT_SUCCESS == result);
+    if (duration == XA_TIME_UNKNOWN) {
+        printf("Duration at end: unknown\n");
+    } else {
+        printf("Duration at end: %u ms\n", duration);
+    }
+
 destroyRes:
 
     // destroy the player
@@ -515,13 +848,6 @@
     // destroy the engine
     (*engineObject)->Destroy(engineObject);
 
-#ifdef REINITIALIZE
-    if (--reinit_count > 0) {
-        prefetch_status = PREFETCHSTATUS_UNKNOWN;
-        goto reinitialize;
-    }
-#endif
-
 #if 0
     if (nativeWindow != NULL) {
         ANativeWindow_release(nativeWindow);
@@ -533,5 +859,7 @@
         (void) close(fd);
     }
 
+    disposeNativeWindow();
+
     return EXIT_SUCCESS;
 }
diff --git a/tools/hashgen/part6.c b/tools/hashgen/part6.c
index be40d9f..e639143 100644
--- a/tools/hashgen/part6.c
+++ b/tools/hashgen/part6.c
@@ -2,4 +2,3 @@
     if (&SL_IID_array[0] <= iid && &SL_IID_array[MPH_MAX] > iid)
         return iid - &SL_IID_array[0];
     if (NULL != iid) {
-        static const unsigned len = sizeof(struct SLInterfaceID_);
diff --git a/tools/mphtogen/Makefile b/tools/mphtogen/Makefile
index 329292a..c6a6fb2 100644
--- a/tools/mphtogen/Makefile
+++ b/tools/mphtogen/Makefile
@@ -8,6 +8,8 @@
 	./mphtogen
 
 mphtogen : mphtogen.c MPH_to.c MPH.h MPH_to.h
+# Add -DANDROID if both (a) building for Android, and (b) not
+# using -DUSE_DESIGNATED_INITIALIZERS in ../../src/Android.mk
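+# e.g. (sketch): gcc -o $@ -DANDROID -DUSE_DESIGNATED_INITIALIZERS mphtogen.c MPH_to.c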
 	gcc -o $@ -DUSE_DESIGNATED_INITIALIZERS mphtogen.c MPH_to.c
 
 clean :