Prepare MediaCodecVideoEncoder for surface textures.
This makes small refactorings to MediaCodecVideoEncoder to prepare for adding support for encoding from textures. The C++ layer has no functional changes.
- Moves ResetCodec to always run on the codec thread.
- Adds use of ThreadChecker.
- Changes Java MediaCodecVideoEncoder.initEncode to return true or false and introduces the method getInputBuffers (see the usage sketch below).
- Adds a simple unit test for the Java MediaCodecVideoEncoder.
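
For reference, a rough sketch of the new Java call sequence, as exercised by the
new unit test (package-private API; frameSize and presentationTimestampUs are
illustrative placeholders, not part of this change):

  MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
  // initEncode() now returns a boolean instead of the input buffer array.
  if (!encoder.initEncode(
      MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30)) {
    return;  // Hardware encoder could not be configured.
  }
  // Input buffers are now fetched separately via getInputBuffers().
  ByteBuffer[] inputBuffers = encoder.getInputBuffers();
  int bufferIndex = encoder.dequeueInputBuffer();  // -1 if none available yet.
  // ... fill inputBuffers[bufferIndex] with an I420 frame ...
  encoder.encodeBuffer(true /* key frame */, bufferIndex, frameSize, presentationTimestampUs);
  MediaCodecVideoEncoder.OutputBufferInfo info = encoder.dequeueOutputBuffer();
  if (info != null) {
    encoder.releaseOutputBuffer(info.index);
  }
  encoder.release();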

BUG=webrtc:4993

Review URL: https://codereview.webrtc.org/1396073003

Cr-Commit-Position: refs/heads/master@{#10250}
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java
new file mode 100644
index 0000000..29f3022
--- /dev/null
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java
@@ -0,0 +1,95 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+
+import android.test.ActivityTestCase;
+import android.test.suitebuilder.annotation.SmallTest;
+import android.util.Log;
+
+import org.webrtc.MediaCodecVideoEncoder.OutputBufferInfo;
+
+public final class MediaCodecVideoEncoderTest extends ActivityTestCase {
+  final static String TAG = "MediaCodecVideoEncoderTest";
+
+  @SmallTest
+  public static void testInitReleaseUsingByteBuffer() {
+    if (!MediaCodecVideoEncoder.isVp8HwSupported()) {
+      Log.i(TAG,
+            "Hardware does not support VP8 encoding, skipping testInitReleaseUsingByteBuffer");
+      return;
+    }
+    MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+    assertTrue(encoder.initEncode(
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30));
+    encoder.release();
+  }
+
+  @SmallTest
+  public static void testEncoderUsingByteBuffer() throws InterruptedException {
+    if (!MediaCodecVideoEncoder.isVp8HwSupported()) {
+      Log.i(TAG, "Hardware does not support VP8 encoding, skipping testEncoderUsingByteBuffer");
+      return;
+    }
+
+    final int width = 640;
+    final int height = 480;
+    final int min_size = width * height * 3 / 2;
+    final long presentationTimestampUs = 2;
+
+    MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+
+    assertTrue(encoder.initEncode(
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, width, height, 300, 30));
+    ByteBuffer[] inputBuffers = encoder.getInputBuffers();
+    assertNotNull(inputBuffers);
+    assertTrue(min_size <= inputBuffers[0].capacity());
+
+    int bufferIndex;
+    do {
+      Thread.sleep(10);
+      bufferIndex = encoder.dequeueInputBuffer();
+    } while (bufferIndex == -1); // |-1| is returned when there is no buffer available yet.
+
+    assertTrue(bufferIndex >= 0);
+    assertTrue(bufferIndex < inputBuffers.length);
+    assertTrue(encoder.encodeBuffer(true, bufferIndex, min_size, presentationTimestampUs));
+
+    OutputBufferInfo info;
+    do {
+      info = encoder.dequeueOutputBuffer();
+      Thread.sleep(10);
+    } while (info == null);
+    assertTrue(info.index >= 0);
+    assertEquals(presentationTimestampUs, info.presentationTimestampUs);
+    assertTrue(info.buffer.capacity() > 0);
+    encoder.releaseOutputBuffer(info.index);
+
+    encoder.release();
+  }
+}
diff --git a/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc b/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
index 76a675d..8fcc20b 100644
--- a/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
+++ b/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
@@ -33,6 +33,7 @@
 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
 #include "webrtc/base/thread.h"
+#include "webrtc/base/thread_checker.h"
 #include "webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.h"
 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
 #include "webrtc/modules/video_coding/utility/include/quality_scaler.h"
@@ -79,7 +80,8 @@
                                public rtc::MessageHandler {
  public:
   virtual ~MediaCodecVideoEncoder();
-  explicit MediaCodecVideoEncoder(JNIEnv* jni, VideoCodecType codecType);
+  MediaCodecVideoEncoder(JNIEnv* jni,
+                         VideoCodecType codecType);
 
   // webrtc::VideoEncoder implementation.  Everything trampolines to
   // |codec_thread_| for execution.
@@ -105,12 +107,10 @@
   int GetTargetFramerate() override;
 
  private:
-  // CHECK-fail if not running on |codec_thread_|.
-  void CheckOnCodecThread();
-
-  // Release() and InitEncode() in an attempt to restore the codec to an
+  // ResetCodecOnCodecThread() calls ReleaseOnCodecThread() and
+  // InitEncodeOnCodecThread() in an attempt to restore the codec to an
   // operable state.  Necessary after all manner of OMX-layer errors.
-  void ResetCodec();
+  void ResetCodecOnCodecThread();
 
   // Implementation of webrtc::VideoEncoder methods above, all running on the
   // codec thread exclusively.
@@ -119,9 +119,15 @@
   // previously-current values are reused instead of the passed parameters
   // (makes it easier to reason about thread-safety).
   int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps);
+  // Reconfigure to match |frame| in width, height. Returns false if
+  // reconfiguring fails.
+  bool MaybeReconfigureEncoderOnCodecThread(const webrtc::VideoFrame& frame);
   int32_t EncodeOnCodecThread(
       const webrtc::VideoFrame& input_image,
       const std::vector<webrtc::VideoFrameType>* frame_types);
+  bool EncodeByteBufferOnCodecThread(JNIEnv* jni,
+      bool key_frame, const webrtc::VideoFrame& frame);
+
   int32_t RegisterEncodeCompleteCallbackOnCodecThread(
       webrtc::EncodedImageCallback* callback);
   int32_t ReleaseOnCodecThread();
@@ -151,11 +157,13 @@
   // State that is constant for the lifetime of this object once the ctor
   // returns.
   scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec.
+  rtc::ThreadChecker codec_thread_checker_;
   ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_;
   ScopedGlobalRef<jobject> j_media_codec_video_encoder_;
   jmethodID j_init_encode_method_;
+  jmethodID j_get_input_buffers_method_;
   jmethodID j_dequeue_input_buffer_method_;
-  jmethodID j_encode_method_;
+  jmethodID j_encode_buffer_method_;
   jmethodID j_release_method_;
   jmethodID j_set_rates_method_;
   jmethodID j_dequeue_output_buffer_method_;
@@ -240,19 +248,23 @@
   // thread.
   codec_thread_->SetName("MediaCodecVideoEncoder", NULL);
   RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder";
-
+  codec_thread_checker_.DetachFromThread();
   jclass j_output_buffer_info_class =
       FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
   j_init_encode_method_ = GetMethodID(
       jni,
       *j_media_codec_video_encoder_class_,
       "initEncode",
-      "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;IIII)"
-      "[Ljava/nio/ByteBuffer;");
+      "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;IIII)Z");
+  j_get_input_buffers_method_ = GetMethodID(
+      jni,
+      *j_media_codec_video_encoder_class_,
+      "getInputBuffers",
+      "()[Ljava/nio/ByteBuffer;");
   j_dequeue_input_buffer_method_ = GetMethodID(
       jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I");
-  j_encode_method_ = GetMethodID(
-      jni, *j_media_codec_video_encoder_class_, "encode", "(ZIIJ)Z");
+  j_encode_buffer_method_ = GetMethodID(
+      jni, *j_media_codec_video_encoder_class_, "encodeBuffer", "(ZIIJ)Z");
   j_release_method_ =
       GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V");
   j_set_rates_method_ = GetMethodID(
@@ -375,6 +387,7 @@
 }
 
 void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
 
@@ -382,7 +395,6 @@
   // functor), so expect no ID/data.
   RTC_CHECK(!msg->message_id) << "Unexpected message!";
   RTC_CHECK(!msg->pdata) << "Unexpected message!";
-  CheckOnCodecThread();
   if (!inited_) {
     return;
   }
@@ -394,17 +406,12 @@
   codec_thread_->PostDelayed(kMediaCodecPollMs, this);
 }
 
-void MediaCodecVideoEncoder::CheckOnCodecThread() {
-  RTC_CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
-      << "Running on wrong thread!";
-}
-
-void MediaCodecVideoEncoder::ResetCodec() {
-  ALOGE << "ResetCodec";
-  if (Release() != WEBRTC_VIDEO_CODEC_OK ||
-      codec_thread_->Invoke<int32_t>(Bind(
-          &MediaCodecVideoEncoder::InitEncodeOnCodecThread, this,
-          width_, height_, 0, 0)) != WEBRTC_VIDEO_CODEC_OK) {
+void MediaCodecVideoEncoder::ResetCodecOnCodecThread() {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  ALOGE << "ResetOnCodecThread";
+  if (ReleaseOnCodecThread() != WEBRTC_VIDEO_CODEC_OK ||
+      InitEncodeOnCodecThread(width_, height_, 0, 0)
+          != WEBRTC_VIDEO_CODEC_OK) {
     // TODO(fischman): wouldn't it be nice if there was a way to gracefully
     // degrade to a SW encoder at this point?  There isn't one AFAICT :(
     // https://code.google.com/p/webrtc/issues/detail?id=2920
@@ -413,7 +420,7 @@
 
 int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
     int width, int height, int kbps, int fps) {
-  CheckOnCodecThread();
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
 
@@ -450,23 +457,27 @@
   frame_rtc_times_ms_.clear();
   drop_next_input_frame_ = false;
   picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;
+
   // We enforce no extra stride/padding in the format creation step.
   jobject j_video_codec_enum = JavaEnumFromIndex(
       jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_);
+  const bool init_status = jni->CallBooleanMethod(
+      *j_media_codec_video_encoder_, j_init_encode_method_,
+      j_video_codec_enum, width, height, kbps, fps);
+  CHECK_EXCEPTION(jni);
+  if (!init_status) {
+    ALOGE << "Failed to configure encoder.";
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
   jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
       jni->CallObjectMethod(*j_media_codec_video_encoder_,
-                            j_init_encode_method_,
-                            j_video_codec_enum,
-                            width_,
-                            height_,
-                            kbps,
-                            fps));
+          j_get_input_buffers_method_));
   CHECK_EXCEPTION(jni);
   if (IsNull(jni, input_buffers)) {
     return WEBRTC_VIDEO_CODEC_ERROR;
   }
 
-  inited_ = true;
   switch (GetIntField(jni, *j_media_codec_video_encoder_,
       j_color_format_field_)) {
     case COLOR_FormatYUV420Planar:
@@ -495,6 +506,8 @@
   }
   CHECK_EXCEPTION(jni);
 
+
+  inited_ = true;
   codec_thread_->PostDelayed(kMediaCodecPollMs, this);
   return WEBRTC_VIDEO_CODEC_OK;
 }
@@ -502,21 +515,22 @@
 int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
     const webrtc::VideoFrame& frame,
     const std::vector<webrtc::VideoFrameType>* frame_types) {
-  CheckOnCodecThread();
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
 
   if (!inited_) {
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
+
   frames_received_++;
   if (!DeliverPendingOutputs(jni)) {
-    ResetCodec();
+    ResetCodecOnCodecThread();
     // Continue as if everything's fine.
   }
 
   if (drop_next_input_frame_) {
-    ALOGV("Encoder drop frame - failed callback.");
+    ALOGD << "Encoder drop frame - failed callback.";
     drop_next_input_frame_ = false;
     return WEBRTC_VIDEO_CODEC_OK;
   }
@@ -529,13 +543,9 @@
   const VideoFrame& input_frame =
       scale_ ? quality_scaler_.GetScaledFrame(frame) : frame;
 
-  if (input_frame.width() != width_ || input_frame.height() != height_) {
-    ALOGD << "Frame resolution change from " << width_ << " x " << height_ <<
-        " to " << input_frame.width() << " x " << input_frame.height();
-    width_ = input_frame.width();
-    height_ = input_frame.height();
-    ResetCodec();
-    return WEBRTC_VIDEO_CODEC_OK;
+  if (!MaybeReconfigureEncoderOnCodecThread(frame)) {
+    ALOGE << "Failed to reconfigure encoder.";
+    return WEBRTC_VIDEO_CODEC_ERROR;
   }
 
   // Check if we accumulated too many frames in encoder input buffers
@@ -553,20 +563,69 @@
     }
   }
 
+  last_input_timestamp_ms_ =
+      current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec;
+  frames_in_queue_++;
+
+  // Save input image timestamps for later output
+  timestamps_.push_back(input_frame.timestamp());
+  render_times_ms_.push_back(input_frame.render_time_ms());
+  frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
+
+  const bool key_frame = frame_types->front() != webrtc::kDeltaFrame;
+  const bool encode_status =
+      EncodeByteBufferOnCodecThread(jni, key_frame, input_frame);
+
+  current_timestamp_us_ += 1000000 / last_set_fps_;
+
+  if (!encode_status || !DeliverPendingOutputs(jni)) {
+    ALOGE << "Failed deliver pending outputs.";
+    ResetCodecOnCodecThread();
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread(
+    const webrtc::VideoFrame& frame) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+
+  const bool reconfigure_due_to_size =
+      frame.width() != width_ || frame.height() != height_;
+
+  if (reconfigure_due_to_size) {
+    ALOGD << "Reconfigure encoder due to frame resolution change from "
+        << width_ << " x " << height_ << " to " << frame.width() << " x "
+        << frame.height();
+    width_ = frame.width();
+    height_ = frame.height();
+  }
+
+  if (!reconfigure_due_to_size)
+    return true;
+
+  ReleaseOnCodecThread();
+
+  return InitEncodeOnCodecThread(width_, height_, 0, 0) ==
+      WEBRTC_VIDEO_CODEC_OK;
+}
+
+bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni,
+    bool key_frame, const webrtc::VideoFrame& frame) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
   int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
                                                 j_dequeue_input_buffer_method_);
   CHECK_EXCEPTION(jni);
   if (j_input_buffer_index == -1) {
     // Video codec falls behind - no input buffer available.
-    ALOGV("Encoder drop frame - no input buffers available");
+    ALOGD <<"Encoder drop frame - no input buffers available";
     frames_dropped_++;
     // Report dropped frame to quality_scaler_.
     OnDroppedFrame();
-    return WEBRTC_VIDEO_CODEC_OK;  // TODO(fischman): see webrtc bug 2887.
+    return true;  // TODO(fischman): see webrtc bug 2887.
   }
   if (j_input_buffer_index == -2) {
-    ResetCodec();
-    return WEBRTC_VIDEO_CODEC_ERROR;
+    return false;
   }
 
   ALOGV("Encoder frame in # %d. TS: %lld. Q: %d",
@@ -578,40 +637,26 @@
   CHECK_EXCEPTION(jni);
   RTC_CHECK(yuv_buffer) << "Indirect buffer??";
   RTC_CHECK(!libyuv::ConvertFromI420(
-      input_frame.buffer(webrtc::kYPlane), input_frame.stride(webrtc::kYPlane),
-      input_frame.buffer(webrtc::kUPlane), input_frame.stride(webrtc::kUPlane),
-      input_frame.buffer(webrtc::kVPlane), input_frame.stride(webrtc::kVPlane),
+      frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane),
+      frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane),
+      frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane),
       yuv_buffer, width_, width_, height_, encoder_fourcc_))
       << "ConvertFromI420 failed";
-  last_input_timestamp_ms_ = current_timestamp_us_ / 1000;
-  frames_in_queue_++;
 
-  // Save input image timestamps for later output
-  timestamps_.push_back(input_frame.timestamp());
-  render_times_ms_.push_back(input_frame.render_time_ms());
-  frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
 
-  bool key_frame = frame_types->front() != webrtc::kDeltaFrame;
   bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
-                                              j_encode_method_,
+                                              j_encode_buffer_method_,
                                               key_frame,
                                               j_input_buffer_index,
                                               yuv_size_,
                                               current_timestamp_us_);
   CHECK_EXCEPTION(jni);
-  current_timestamp_us_ += 1000000 / last_set_fps_;
-
-  if (!encode_status || !DeliverPendingOutputs(jni)) {
-    ResetCodec();
-    return WEBRTC_VIDEO_CODEC_ERROR;
-  }
-
-  return WEBRTC_VIDEO_CODEC_OK;
+  return encode_status;
 }
 
 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
     webrtc::EncodedImageCallback* callback) {
-  CheckOnCodecThread();
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
   callback_ = callback;
@@ -619,10 +664,10 @@
 }
 
 int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
   if (!inited_) {
     return WEBRTC_VIDEO_CODEC_OK;
   }
-  CheckOnCodecThread();
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ALOGD << "EncoderReleaseOnCodecThread: Frames received: " <<
       frames_received_ << ". Encoded: " << frames_encoded_ <<
@@ -641,7 +686,7 @@
 
 int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
                                                       uint32_t frame_rate) {
-  CheckOnCodecThread();
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
   if (last_set_bitrate_kbps_ == new_bit_rate &&
       last_set_fps_ == frame_rate) {
     return WEBRTC_VIDEO_CODEC_OK;
@@ -660,7 +705,7 @@
                                        last_set_fps_);
   CHECK_EXCEPTION(jni);
   if (!ret) {
-    ResetCodec();
+    ResetCodecOnCodecThread();
     return WEBRTC_VIDEO_CODEC_ERROR;
   }
   return WEBRTC_VIDEO_CODEC_OK;
@@ -692,6 +737,7 @@
 }
 
 bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
   while (true) {
     jobject j_output_buffer_info = jni->CallObjectMethod(
         *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_);
@@ -703,7 +749,7 @@
     int output_buffer_index =
         GetOutputBufferInfoIndex(jni, j_output_buffer_info);
     if (output_buffer_index == -1) {
-      ResetCodec();
+      ResetCodecOnCodecThread();
       return false;
     }
 
@@ -824,7 +870,7 @@
           ALOGE << "Data:" <<  image->_buffer[0] << " " << image->_buffer[1]
               << " " << image->_buffer[2] << " " << image->_buffer[3]
               << " " << image->_buffer[4] << " " << image->_buffer[5];
-          ResetCodec();
+          ResetCodecOnCodecThread();
           return false;
         }
         scPositions[scPositionsLength] = payload_size;
@@ -847,7 +893,7 @@
                                           output_buffer_index);
     CHECK_EXCEPTION(jni);
     if (!success) {
-      ResetCodec();
+      ResetCodecOnCodecThread();
       return false;
     }
 
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
index fa3d472..6a218fd 100644
--- a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
@@ -96,13 +96,13 @@
     CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
     COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m
   };
-  private int colorFormat;
-  // Video encoder type.
   private VideoCodecType type;
+  private int colorFormat;  // Used by native code.
+
   // SPS and PPS NALs (Config frame) for H.264.
   private ByteBuffer configData = null;
 
-  private MediaCodecVideoEncoder() {
+  MediaCodecVideoEncoder() {
     mediaCodecThread = null;
   }
 
@@ -209,16 +209,14 @@
     }
   }
 
-  // Return the array of input buffers, or null on failure.
-  private ByteBuffer[] initEncode(
-      VideoCodecType type, int width, int height, int kbps, int fps) {
+  // Returns false if the hardware encoder currently can't be used.
+  boolean initEncode(VideoCodecType type, int width, int height, int kbps, int fps) {
     Logging.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height +
-        ". @ " + kbps + " kbps. Fps: " + fps +
-        ". Color: 0x" + Integer.toHexString(colorFormat));
+        ". @ " + kbps + " kbps. Fps: " + fps + ".");
+
     if (mediaCodecThread != null) {
       throw new RuntimeException("Forgot to release()?");
     }
-    this.type = type;
     EncoderProperties properties = null;
     String mime = null;
     int keyFrameIntervalSec = 0;
@@ -234,6 +232,7 @@
     if (properties == null) {
       throw new RuntimeException("Can not find HW encoder for " + type);
     }
+    colorFormat = properties.colorFormat;
     mediaCodecThread = Thread.currentThread();
     try {
       MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
@@ -244,25 +243,30 @@
       format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec);
       Logging.d(TAG, "  Format: " + format);
       mediaCodec = createByCodecName(properties.codecName);
+      this.type = type;
       if (mediaCodec == null) {
-        return null;
+        return false;
       }
       mediaCodec.configure(
           format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+
       mediaCodec.start();
-      colorFormat = properties.colorFormat;
       outputBuffers = mediaCodec.getOutputBuffers();
-      ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
-      Logging.d(TAG, "Input buffers: " + inputBuffers.length +
-          ". Output buffers: " + outputBuffers.length);
-      return inputBuffers;
+
     } catch (IllegalStateException e) {
       Logging.e(TAG, "initEncode failed", e);
-      return null;
+      return false;
     }
+    return true;
   }
 
-  private boolean encode(
+  ByteBuffer[] getInputBuffers() {
+    ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
+    Logging.d(TAG, "Input buffers: " + inputBuffers.length);
+    return inputBuffers;
+  }
+
+  boolean encodeBuffer(
       boolean isKeyframe, int inputBuffer, int size,
       long presentationTimestampUs) {
     checkOnMediaCodecThread();
@@ -282,12 +286,12 @@
       return true;
     }
     catch (IllegalStateException e) {
-      Logging.e(TAG, "encode failed", e);
+      Logging.e(TAG, "encodeBuffer failed", e);
       return false;
     }
   }
 
-  private void release() {
+  void release() {
     Logging.d(TAG, "Java releaseEncoder");
     checkOnMediaCodecThread();
     try {
@@ -318,7 +322,7 @@
 
   // Dequeue an input buffer and return its index, -1 if no input buffer is
   // available, or -2 if the codec is no longer operative.
-  private int dequeueInputBuffer() {
+  int dequeueInputBuffer() {
     checkOnMediaCodecThread();
     try {
       return mediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT);
@@ -329,7 +333,7 @@
   }
 
   // Helper struct for dequeueOutputBuffer() below.
-  private static class OutputBufferInfo {
+  static class OutputBufferInfo {
     public OutputBufferInfo(
         int index, ByteBuffer buffer,
         boolean isKeyFrame, long presentationTimestampUs) {
@@ -339,15 +343,15 @@
       this.presentationTimestampUs = presentationTimestampUs;
     }
 
-    private final int index;
-    private final ByteBuffer buffer;
-    private final boolean isKeyFrame;
-    private final long presentationTimestampUs;
+    public final int index;
+    public final ByteBuffer buffer;
+    public final boolean isKeyFrame;
+    public final long presentationTimestampUs;
   }
 
   // Dequeue and return an output buffer, or null if no output is ready.  Return
   // a fake OutputBufferInfo with index -1 if the codec is no longer operable.
-  private OutputBufferInfo dequeueOutputBuffer() {
+  OutputBufferInfo dequeueOutputBuffer() {
     checkOnMediaCodecThread();
     try {
       MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
@@ -416,7 +420,7 @@
 
   // Release a dequeued output buffer back to the codec for re-use.  Return
   // false if the codec is no longer operable.
-  private boolean releaseOutputBuffer(int index) {
+  boolean releaseOutputBuffer(int index) {
     checkOnMediaCodecThread();
     try {
       mediaCodec.releaseOutputBuffer(index, false);