Move talk/app/webrtc to webrtc/api

The previously disabled warnings that were inherited from
talk/build/common.gypi are now replaced by target-specific disabling
of only the failing warnings. Additional disabling was needed because
of the stricter compilation warnings that apply to code in webrtc/.
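
As an illustration only (not code from this CL), the kind of pattern
below compiles quietly under looser settings but triggers
-Wsign-compare once stricter warnings (e.g. -Wall -Wextra) apply,
which is the sort of thing the per-target suppressions cover:

  #include <vector>

  int CountEntries(const std::vector<int>& values) {
    int count = 0;
    for (int i = 0; i < values.size(); ++i)  // int vs. size_t comparison.
      ++count;
    return count;
  }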

License headers will be updated in a follow-up CL.

Other modifications:
* Updated the header guards (see the sketch after this list).
* Sorted the includes using chromium/src/tools/sort-headers.py
  except for these files:
  talk/app/webrtc/peerconnectionendtoend_unittest.cc
  talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
  talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
  webrtc/media/devices/win32devicemanager.cc
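
For the header guards, the guard name follows the usual
Chromium/WebRTC convention of being derived from the file path. As a
sketch (using peerconnectioninterface.h as the example), a moved
header changes like this:

  // Before, in talk/app/webrtc/peerconnectioninterface.h:
  #ifndef TALK_APP_WEBRTC_PEERCONNECTIONINTERFACE_H_
  #define TALK_APP_WEBRTC_PEERCONNECTIONINTERFACE_H_
  // ... declarations ...
  #endif  // TALK_APP_WEBRTC_PEERCONNECTIONINTERFACE_H_

  // After, in webrtc/api/peerconnectioninterface.h:
  #ifndef WEBRTC_API_PEERCONNECTIONINTERFACE_H_
  #define WEBRTC_API_PEERCONNECTIONINTERFACE_H_
  // ... declarations ...
  #endif  // WEBRTC_API_PEERCONNECTIONINTERFACE_H_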

The HAVE_SCTP define was added for the peerconnection_unittests target
in api_tests.gyp.
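
As a minimal sketch (again not code from this CL), such a define only
gates what gets compiled into the test binary:

  #include <cstdio>

  int main() {
  #ifdef HAVE_SCTP
    std::printf("Built with SCTP data channel support.\n");
  #else
    std::printf("Built without SCTP data channel support.\n");
  #endif
    return 0;
  }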

I also checked that none of
SRTP_RELATIVE_PATH
HAVE_SRTP
HAVE_WEBRTC_VIDEO
HAVE_WEBRTC_VOICE
were used by the talk/app/webrtc code.

For Chromium, the following changes will need to be applied to the roll CL that updates the
DEPS for WebRTC and libjingle:
https://codereview.chromium.org/1615433002

BUG=webrtc:5418
NOPRESUBMIT=True
R=deadbeef@webrtc.org, pthatcher@webrtc.org, tommi@webrtc.org

Review URL: https://codereview.webrtc.org/1610243002 .

Cr-Commit-Position: refs/heads/master@{#11545}
diff --git a/webrtc/api/OWNERS b/webrtc/api/OWNERS
index cd06158..2a0a6ae 100644
--- a/webrtc/api/OWNERS
+++ b/webrtc/api/OWNERS
@@ -1 +1,6 @@
+pthatcher@webrtc.org
+glaznev@webrtc.org
+juberti@webrtc.org
+perkj@webrtc.org
 tkchin@webrtc.org
+tommi@webrtc.org
diff --git a/webrtc/api/androidtests/AndroidManifest.xml b/webrtc/api/androidtests/AndroidManifest.xml
new file mode 100644
index 0000000..0b3b0b12
--- /dev/null
+++ b/webrtc/api/androidtests/AndroidManifest.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="org.webrtc.test"
+    android:versionCode="1"
+    android:versionName="1.0" >
+    <uses-feature android:name="android.hardware.camera" />
+    <uses-feature android:name="android.hardware.camera.autofocus" />
+    <uses-feature android:glEsVersion="0x00020000" android:required="true" />
+
+    <uses-sdk android:minSdkVersion="14" android:targetSdkVersion="21" />
+
+    <uses-permission android:name="android.permission.CAMERA" />
+    <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
+    <uses-permission android:name="android.permission.RECORD_AUDIO" />
+    <uses-permission android:name="android.permission.INTERNET" />
+    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
+    <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
+    <uses-permission android:name="android.permission.CHANGE_NETWORK_STATE" />
+
+    <instrumentation
+        android:name="android.test.InstrumentationTestRunner"
+        android:targetPackage="org.webrtc.test" />
+
+    <application
+        android:icon="@drawable/ic_launcher"
+        android:label="@string/app_name" >
+        <uses-library android:name="android.test.runner" />
+    </application>
+
+</manifest>
diff --git a/webrtc/api/androidtests/OWNERS b/webrtc/api/androidtests/OWNERS
new file mode 100644
index 0000000..4d31ffb
--- /dev/null
+++ b/webrtc/api/androidtests/OWNERS
@@ -0,0 +1 @@
+magjed@webrtc.org
diff --git a/webrtc/api/androidtests/ant.properties b/webrtc/api/androidtests/ant.properties
new file mode 100644
index 0000000..bc05353
--- /dev/null
+++ b/webrtc/api/androidtests/ant.properties
@@ -0,0 +1,18 @@
+# This file is used to override default values used by the Ant build system.
+#
+# This file must be checked into Version Control Systems, as it is
+# integral to the build system of your project.
+
+# This file is only used by the Ant script.
+
+# You can use this to override default values such as
+#  'source.dir' for the location of your java source folder and
+#  'out.dir' for the location of your output folder.
+
+# You can also use it define how the release builds are signed by declaring
+# the following properties:
+#  'key.store' for the location of your keystore and
+#  'key.alias' for the name of the key to use.
+# The password will be asked during the build when you use the 'release' target.
+
+source.dir=../java/testcommon/src;src
\ No newline at end of file
diff --git a/webrtc/api/androidtests/build.xml b/webrtc/api/androidtests/build.xml
new file mode 100644
index 0000000..cb4cb7a
--- /dev/null
+++ b/webrtc/api/androidtests/build.xml
@@ -0,0 +1,92 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project name="libjingle_peerconnection_android_unittest" default="help">
+
+    <!-- The local.properties file is created and updated by the 'android' tool.
+         It contains the path to the SDK. It should *NOT* be checked into
+         Version Control Systems. -->
+    <property file="local.properties" />
+
+    <!-- The ant.properties file can be created by you. It is only edited by the
+         'android' tool to add properties to it.
+         This is the place to change some Ant specific build properties.
+         Here are some properties you may want to change/update:
+
+         source.dir
+             The name of the source directory. Default is 'src'.
+         out.dir
+             The name of the output directory. Default is 'bin'.
+
+         For other overridable properties, look at the beginning of the rules
+         files in the SDK, at tools/ant/build.xml
+
+         Properties related to the SDK location or the project target should
+         be updated using the 'android' tool with the 'update' action.
+
+         This file is an integral part of the build system for your
+         application and should be checked into Version Control Systems.
+
+         -->
+    <property file="ant.properties" />
+
+    <!-- if sdk.dir was not set from one of the property file, then
+         get it from the ANDROID_HOME env var.
+         This must be done before we load project.properties since
+         the proguard config can use sdk.dir -->
+    <property environment="env" />
+    <condition property="sdk.dir" value="${env.ANDROID_SDK_ROOT}">
+        <isset property="env.ANDROID_SDK_ROOT" />
+    </condition>
+
+    <!-- The project.properties file is created and updated by the 'android'
+         tool, as well as ADT.
+
+         This contains project specific properties such as project target, and library
+         dependencies. Lower level build properties are stored in ant.properties
+         (or in .classpath for Eclipse projects).
+
+         This file is an integral part of the build system for your
+         application and should be checked into Version Control Systems. -->
+    <loadproperties srcFile="project.properties" />
+
+    <!-- quick check on sdk.dir -->
+    <fail
+            message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
+            unless="sdk.dir"
+    />
+
+    <!--
+        Import per project custom build rules if present at the root of the project.
+        This is the place to put custom intermediary targets such as:
+            -pre-build
+            -pre-compile
+            -post-compile (This is typically used for code obfuscation.
+                           Compiled code location: ${out.classes.absolute.dir}
+                           If this is not done in place, override ${out.dex.input.absolute.dir})
+            -post-package
+            -post-build
+            -pre-clean
+    -->
+    <import file="custom_rules.xml" optional="true" />
+
+    <!-- Import the actual build file.
+
+         To customize existing targets, there are two options:
+         - Customize only one target:
+             - copy/paste the target into this file, *before* the
+               <import> task.
+             - customize it to your needs.
+         - Customize the whole content of build.xml
+             - copy/paste the content of the rules files (minus the top node)
+               into this file, replacing the <import> task.
+             - customize to your needs.
+
+         ***********************
+         ****** IMPORTANT ******
+         ***********************
+         In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
+         in order to avoid having your file be overridden by tools such as "android update project"
+    -->
+    <!-- version-tag: 1 -->
+    <import file="${sdk.dir}/tools/ant/build.xml" />
+
+</project>
diff --git a/webrtc/api/androidtests/project.properties b/webrtc/api/androidtests/project.properties
new file mode 100644
index 0000000..a6ca533
--- /dev/null
+++ b/webrtc/api/androidtests/project.properties
@@ -0,0 +1,16 @@
+# This file is automatically generated by Android Tools.
+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
+#
+# This file must be checked in Version Control Systems.
+#
+# To customize properties used by the Ant build system edit
+# "ant.properties", and override values to adapt the script to your
+# project structure.
+#
+# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
+#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
+
+# Project target.
+target=android-22
+
+java.compilerargs=-Xlint:all -Werror
diff --git a/webrtc/api/androidtests/res/drawable-hdpi/ic_launcher.png b/webrtc/api/androidtests/res/drawable-hdpi/ic_launcher.png
new file mode 100644
index 0000000..96a442e
--- /dev/null
+++ b/webrtc/api/androidtests/res/drawable-hdpi/ic_launcher.png
Binary files differ
diff --git a/webrtc/api/androidtests/res/drawable-ldpi/ic_launcher.png b/webrtc/api/androidtests/res/drawable-ldpi/ic_launcher.png
new file mode 100644
index 0000000..9923872
--- /dev/null
+++ b/webrtc/api/androidtests/res/drawable-ldpi/ic_launcher.png
Binary files differ
diff --git a/webrtc/api/androidtests/res/drawable-mdpi/ic_launcher.png b/webrtc/api/androidtests/res/drawable-mdpi/ic_launcher.png
new file mode 100644
index 0000000..359047d
--- /dev/null
+++ b/webrtc/api/androidtests/res/drawable-mdpi/ic_launcher.png
Binary files differ
diff --git a/webrtc/api/androidtests/res/drawable-xhdpi/ic_launcher.png b/webrtc/api/androidtests/res/drawable-xhdpi/ic_launcher.png
new file mode 100644
index 0000000..71c6d76
--- /dev/null
+++ b/webrtc/api/androidtests/res/drawable-xhdpi/ic_launcher.png
Binary files differ
diff --git a/webrtc/api/androidtests/res/values/strings.xml b/webrtc/api/androidtests/res/values/strings.xml
new file mode 100644
index 0000000..dfe63f8
--- /dev/null
+++ b/webrtc/api/androidtests/res/values/strings.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+
+    <string name="app_name">AndroidPeerConnectionTests</string>
+
+</resources>
diff --git a/webrtc/api/androidtests/src/org/webrtc/GlRectDrawerTest.java b/webrtc/api/androidtests/src/org/webrtc/GlRectDrawerTest.java
new file mode 100644
index 0000000..63c05fb
--- /dev/null
+++ b/webrtc/api/androidtests/src/org/webrtc/GlRectDrawerTest.java
@@ -0,0 +1,307 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.webrtc;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES20;
+import android.test.ActivityTestCase;
+import android.test.suitebuilder.annotation.MediumTest;
+import android.test.suitebuilder.annotation.SmallTest;
+
+import java.nio.ByteBuffer;
+import java.util.Random;
+
+public final class GlRectDrawerTest extends ActivityTestCase {
+  // Resolution of the test image.
+  private static final int WIDTH = 16;
+  private static final int HEIGHT = 16;
+  // Seed for random pixel creation.
+  private static final int SEED = 42;
+  // When comparing pixels, allow some slack for float arithmetic and integer rounding.
+  private static final float MAX_DIFF = 1.5f;
+
+  private static float normalizedByte(byte b) {
+    return (b & 0xFF) / 255.0f;
+  }
+
+  private static float saturatedConvert(float c) {
+    return 255.0f * Math.max(0, Math.min(c, 1));
+  }
+
+  // Assert RGB ByteBuffers are pixel perfect identical.
+  private static void assertEquals(int width, int height, ByteBuffer actual, ByteBuffer expected) {
+    actual.rewind();
+    expected.rewind();
+    assertEquals(actual.remaining(), width * height * 3);
+    assertEquals(expected.remaining(), width * height * 3);
+    for (int y = 0; y < height; ++y) {
+      for (int x = 0; x < width; ++x) {
+        final int actualR = actual.get() & 0xFF;
+        final int actualG = actual.get() & 0xFF;
+        final int actualB = actual.get() & 0xFF;
+        final int expectedR = expected.get() & 0xFF;
+        final int expectedG = expected.get() & 0xFF;
+        final int expectedB = expected.get() & 0xFF;
+        if (actualR != expectedR || actualG != expectedG || actualB != expectedB) {
+          fail("ByteBuffers of size " + width + "x" + height + " not equal at position "
+              + "(" +  x + ", " + y + "). Expected color (R,G,B): "
+              + "(" + expectedR + ", " + expectedG + ", " + expectedB + ")"
+              + " but was: " + "(" + actualR + ", " + actualG + ", " + actualB + ").");
+        }
+      }
+    }
+  }
+
+  // Convert RGBA ByteBuffer to RGB ByteBuffer.
+  private static ByteBuffer stripAlphaChannel(ByteBuffer rgbaBuffer) {
+    rgbaBuffer.rewind();
+    assertEquals(rgbaBuffer.remaining() % 4, 0);
+    final int numberOfPixels = rgbaBuffer.remaining() / 4;
+    final ByteBuffer rgbBuffer = ByteBuffer.allocateDirect(numberOfPixels * 3);
+    while (rgbaBuffer.hasRemaining()) {
+      // Copy RGB.
+      for (int channel = 0; channel < 3; ++channel) {
+        rgbBuffer.put(rgbaBuffer.get());
+      }
+      // Drop alpha.
+      rgbaBuffer.get();
+    }
+    return rgbBuffer;
+  }
+
+  @SmallTest
+  public void testRgbRendering() {
+    // Create EGL base with a pixel buffer as display output.
+    final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
+    eglBase.createPbufferSurface(WIDTH, HEIGHT);
+    eglBase.makeCurrent();
+
+    // Create RGB byte buffer plane with random content.
+    final ByteBuffer rgbPlane = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 3);
+    final Random random = new Random(SEED);
+    random.nextBytes(rgbPlane.array());
+
+    // Upload the RGB byte buffer data as a texture.
+    final int rgbTexture = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, rgbTexture);
+    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH,
+        HEIGHT, 0, GLES20.GL_RGB, GLES20.GL_UNSIGNED_BYTE, rgbPlane);
+    GlUtil.checkNoGLES2Error("glTexImage2D");
+
+    // Draw the RGB frame onto the pixel buffer.
+    final GlRectDrawer drawer = new GlRectDrawer();
+    drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix(), 0, 0, WIDTH, HEIGHT);
+
+    // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
+    final ByteBuffer rgbaData = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
+    GLES20.glReadPixels(0, 0, WIDTH, HEIGHT, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
+    GlUtil.checkNoGLES2Error("glReadPixels");
+
+    // Assert rendered image is pixel perfect to source RGB.
+    assertEquals(WIDTH, HEIGHT, stripAlphaChannel(rgbaData), rgbPlane);
+
+    drawer.release();
+    GLES20.glDeleteTextures(1, new int[] {rgbTexture}, 0);
+    eglBase.release();
+  }
+
+  @SmallTest
+  public void testYuvRendering() {
+    // Create EGL base with a pixel buffer as display output.
+    EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
+    eglBase.createPbufferSurface(WIDTH, HEIGHT);
+    eglBase.makeCurrent();
+
+    // Create YUV byte buffer planes with random content.
+    final ByteBuffer[] yuvPlanes = new ByteBuffer[3];
+    final Random random = new Random(SEED);
+    for (int i = 0; i < 3; ++i) {
+      yuvPlanes[i] = ByteBuffer.allocateDirect(WIDTH * HEIGHT);
+      random.nextBytes(yuvPlanes[i].array());
+    }
+
+    // Generate 3 texture ids for Y/U/V.
+    final int[] yuvTextures = new int[3];
+    for (int i = 0; i < 3; i++) {
+      yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+    }
+
+    // Upload the YUV byte buffer data as textures.
+    for (int i = 0; i < 3; ++i) {
+      GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+      GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
+      GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, WIDTH,
+          HEIGHT, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, yuvPlanes[i]);
+      GlUtil.checkNoGLES2Error("glTexImage2D");
+    }
+
+    // Draw the YUV frame onto the pixel buffer.
+    final GlRectDrawer drawer = new GlRectDrawer();
+    drawer.drawYuv(yuvTextures, RendererCommon.identityMatrix(), 0, 0, WIDTH, HEIGHT);
+
+    // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
+    final ByteBuffer data = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
+    GLES20.glReadPixels(0, 0, WIDTH, HEIGHT, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, data);
+    GlUtil.checkNoGLES2Error("glReadPixels");
+
+    // Compare the YUV data with the RGBA result.
+    for (int y = 0; y < HEIGHT; ++y) {
+      for (int x = 0; x < WIDTH; ++x) {
+        // YUV color space. Y in [0, 1], UV in [-0.5, 0.5]. The constants are taken from the YUV
+        // fragment shader code in GlRectDrawer.
+        final float y_luma = normalizedByte(yuvPlanes[0].get());
+        final float u_chroma = normalizedByte(yuvPlanes[1].get()) - 0.5f;
+        final float v_chroma = normalizedByte(yuvPlanes[2].get()) - 0.5f;
+        // Expected color in unrounded RGB [0.0f, 255.0f].
+        final float expectedRed = saturatedConvert(y_luma + 1.403f * v_chroma);
+        final float expectedGreen =
+            saturatedConvert(y_luma - 0.344f * u_chroma - 0.714f * v_chroma);
+        final float expectedBlue = saturatedConvert(y_luma + 1.77f * u_chroma);
+
+        // Actual color in RGB8888.
+        final int actualRed = data.get() & 0xFF;
+        final int actualGreen = data.get() & 0xFF;
+        final int actualBlue = data.get() & 0xFF;
+        final int actualAlpha = data.get() & 0xFF;
+
+        // Assert rendered image is close to pixel perfect from source YUV.
+        assertTrue(Math.abs(actualRed - expectedRed) < MAX_DIFF);
+        assertTrue(Math.abs(actualGreen - expectedGreen) < MAX_DIFF);
+        assertTrue(Math.abs(actualBlue - expectedBlue) < MAX_DIFF);
+        assertEquals(actualAlpha, 255);
+      }
+    }
+
+    drawer.release();
+    GLES20.glDeleteTextures(3, yuvTextures, 0);
+    eglBase.release();
+  }
+
+  /**
+   * The purpose here is to test GlRectDrawer.oesDraw(). Unfortunately, there is no easy way to
+   * create an OES texture, which is needed for input to oesDraw(). Most of the test is concerned
+   * with creating OES textures in the following way:
+   *  - Create SurfaceTexture with help from SurfaceTextureHelper.
+   *  - Create an EglBase with the SurfaceTexture as EGLSurface.
+   *  - Upload RGB texture with known content.
+   *  - Draw the RGB texture onto the EglBase with the SurfaceTexture as target.
+   *  - Wait for an OES texture to be produced.
+   * The actual oesDraw() test is this:
+   *  - Create an EglBase with a pixel buffer as target.
+   *  - Render the OES texture onto the pixel buffer.
+   *  - Read back the pixel buffer and compare it with the known RGB data.
+   */
+  @MediumTest
+  public void testOesRendering() throws InterruptedException {
+    /**
+     * Stub class to convert RGB ByteBuffers to OES textures by drawing onto a SurfaceTexture.
+     */
+    class StubOesTextureProducer {
+      private final EglBase eglBase;
+      private final GlRectDrawer drawer;
+      private final int rgbTexture;
+
+      public StubOesTextureProducer(
+          EglBase.Context sharedContext, SurfaceTexture surfaceTexture, int width,
+          int height) {
+        eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PLAIN);
+        surfaceTexture.setDefaultBufferSize(width, height);
+        eglBase.createSurface(surfaceTexture);
+        assertEquals(eglBase.surfaceWidth(), width);
+        assertEquals(eglBase.surfaceHeight(), height);
+
+        drawer = new GlRectDrawer();
+
+        eglBase.makeCurrent();
+        rgbTexture = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+      }
+
+      public void draw(ByteBuffer rgbPlane) {
+        eglBase.makeCurrent();
+
+        // Upload RGB data to texture.
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, rgbTexture);
+        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH,
+            HEIGHT, 0, GLES20.GL_RGB, GLES20.GL_UNSIGNED_BYTE, rgbPlane);
+        // Draw the RGB data onto the SurfaceTexture.
+        drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix(), 0, 0, WIDTH, HEIGHT);
+        eglBase.swapBuffers();
+      }
+
+      public void release() {
+        eglBase.makeCurrent();
+        drawer.release();
+        GLES20.glDeleteTextures(1, new int[] {rgbTexture}, 0);
+        eglBase.release();
+      }
+    }
+
+    // Create EGL base with a pixel buffer as display output.
+    final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
+    eglBase.createPbufferSurface(WIDTH, HEIGHT);
+
+    // Create resources for generating OES textures.
+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create(eglBase.getEglBaseContext());
+    final StubOesTextureProducer oesProducer = new StubOesTextureProducer(
+        eglBase.getEglBaseContext(), surfaceTextureHelper.getSurfaceTexture(), WIDTH, HEIGHT);
+    final SurfaceTextureHelperTest.MockTextureListener listener =
+        new SurfaceTextureHelperTest.MockTextureListener();
+    surfaceTextureHelper.setListener(listener);
+
+    // Create RGB byte buffer plane with random content.
+    final ByteBuffer rgbPlane = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 3);
+    final Random random = new Random(SEED);
+    random.nextBytes(rgbPlane.array());
+
+    // Draw the frame and block until an OES texture is delivered.
+    oesProducer.draw(rgbPlane);
+    listener.waitForNewFrame();
+
+    // Real test starts here.
+    // Draw the OES texture on the pixel buffer.
+    eglBase.makeCurrent();
+    final GlRectDrawer drawer = new GlRectDrawer();
+    drawer.drawOes(listener.oesTextureId, listener.transformMatrix, 0, 0, WIDTH, HEIGHT);
+
+    // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
+    final ByteBuffer rgbaData = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
+    GLES20.glReadPixels(0, 0, WIDTH, HEIGHT, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
+    GlUtil.checkNoGLES2Error("glReadPixels");
+
+    // Assert rendered image is pixel perfect to source RGB.
+    assertEquals(WIDTH, HEIGHT, stripAlphaChannel(rgbaData), rgbPlane);
+
+    drawer.release();
+    surfaceTextureHelper.returnTextureFrame();
+    oesProducer.release();
+    surfaceTextureHelper.disconnect();
+    eglBase.release();
+  }
+}
diff --git a/webrtc/api/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java b/webrtc/api/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java
new file mode 100644
index 0000000..b1ec5dd
--- /dev/null
+++ b/webrtc/api/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java
@@ -0,0 +1,180 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.webrtc;
+
+import android.annotation.TargetApi;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.os.Build;
+import android.test.ActivityTestCase;
+import android.test.suitebuilder.annotation.SmallTest;
+import android.util.Log;
+
+import org.webrtc.MediaCodecVideoEncoder.OutputBufferInfo;
+
+import java.nio.ByteBuffer;
+
+@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
+public final class MediaCodecVideoEncoderTest extends ActivityTestCase {
+  static final String TAG = "MediaCodecVideoEncoderTest";
+
+  @SmallTest
+  public static void testInitializeUsingByteBuffer() {
+    if (!MediaCodecVideoEncoder.isVp8HwSupported()) {
+      Log.i(TAG,
+            "Hardware does not support VP8 encoding, skipping testInitReleaseUsingByteBuffer");
+      return;
+    }
+    MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+    assertTrue(encoder.initEncode(
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30, null));
+    encoder.release();
+  }
+
+  @SmallTest
+  public static void testInitializeUsingTextures() {
+    if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
+      Log.i(TAG, "Hardware does not support VP8 encoding, skipping testInitializeUsingTextures");
+      return;
+    }
+    EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
+    MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+    assertTrue(encoder.initEncode(
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
+        eglBase.getEglBaseContext()));
+    encoder.release();
+    eglBase.release();
+  }
+
+  @SmallTest
+  public static void testInitializeUsingByteBufferReInitializeUsingTextures() {
+    if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
+      Log.i(TAG, "Hardware does not support VP8 encoding, skipping "
+          + "testInitializeUsingByteBufferReInitializeUsingTextures");
+      return;
+    }
+    MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+    assertTrue(encoder.initEncode(
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
+        null));
+    encoder.release();
+    EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
+    assertTrue(encoder.initEncode(
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
+        eglBase.getEglBaseContext()));
+    encoder.release();
+    eglBase.release();
+  }
+
+  @SmallTest
+  public static void testEncoderUsingByteBuffer() throws InterruptedException {
+    if (!MediaCodecVideoEncoder.isVp8HwSupported()) {
+      Log.i(TAG, "Hardware does not support VP8 encoding, skipping testEncoderUsingByteBuffer");
+      return;
+    }
+
+    final int width = 640;
+    final int height = 480;
+    final int min_size = width * height * 3 / 2;
+    final long presentationTimestampUs = 2;
+
+    MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+
+    assertTrue(encoder.initEncode(
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, width, height, 300, 30, null));
+    ByteBuffer[] inputBuffers = encoder.getInputBuffers();
+    assertNotNull(inputBuffers);
+    assertTrue(min_size <= inputBuffers[0].capacity());
+
+    int bufferIndex;
+    do {
+      Thread.sleep(10);
+      bufferIndex = encoder.dequeueInputBuffer();
+    } while (bufferIndex == -1); // |-1| is returned when there is no buffer available yet.
+
+    assertTrue(bufferIndex >= 0);
+    assertTrue(bufferIndex < inputBuffers.length);
+    assertTrue(encoder.encodeBuffer(true, bufferIndex, min_size, presentationTimestampUs));
+
+    OutputBufferInfo info;
+    do {
+      info = encoder.dequeueOutputBuffer();
+      Thread.sleep(10);
+    } while (info == null);
+    assertTrue(info.index >= 0);
+    assertEquals(presentationTimestampUs, info.presentationTimestampUs);
+    assertTrue(info.buffer.capacity() > 0);
+    encoder.releaseOutputBuffer(info.index);
+
+    encoder.release();
+  }
+
+  @SmallTest
+  public static void testEncoderUsingTextures() throws InterruptedException {
+    if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
+      Log.i(TAG, "Hardware does not support VP8 encoding, skipping testEncoderUsingTextures");
+      return;
+    }
+
+    final int width = 640;
+    final int height = 480;
+    final long presentationTs = 2;
+
+    final EglBase14 eglOesBase = new EglBase14(null, EglBase.CONFIG_PIXEL_BUFFER);
+    eglOesBase.createDummyPbufferSurface();
+    eglOesBase.makeCurrent();
+    int oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+
+    // TODO(perkj): This test is weak since we don't fill the texture with valid data of the
+    // correct width and height and don't verify the encoded data. Fill the OES texture and
+    // figure out a way to verify that the output makes sense.
+
+    MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+
+    assertTrue(encoder.initEncode(
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, width, height, 300, 30,
+        eglOesBase.getEglBaseContext()));
+    assertTrue(encoder.encodeTexture(true, oesTextureId, RendererCommon.identityMatrix(),
+        presentationTs));
+    GlUtil.checkNoGLES2Error("encodeTexture");
+
+    // It should be Ok to delete the texture after calling encodeTexture.
+    GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
+
+    OutputBufferInfo info = encoder.dequeueOutputBuffer();
+    while (info == null) {
+      info = encoder.dequeueOutputBuffer();
+      Thread.sleep(20);
+    }
+    assertTrue(info.index != -1);
+    assertTrue(info.buffer.capacity() > 0);
+    assertEquals(presentationTs, info.presentationTimestampUs);
+    encoder.releaseOutputBuffer(info.index);
+
+    encoder.release();
+    eglOesBase.release();
+  }
+}
diff --git a/webrtc/api/androidtests/src/org/webrtc/NetworkMonitorTest.java b/webrtc/api/androidtests/src/org/webrtc/NetworkMonitorTest.java
new file mode 100644
index 0000000..3bdbbdd
--- /dev/null
+++ b/webrtc/api/androidtests/src/org/webrtc/NetworkMonitorTest.java
@@ -0,0 +1,295 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.webrtc;
+
+import static org.webrtc.NetworkMonitorAutoDetect.ConnectionType;
+import static org.webrtc.NetworkMonitorAutoDetect.ConnectivityManagerDelegate;
+import static org.webrtc.NetworkMonitorAutoDetect.INVALID_NET_ID;
+import static org.webrtc.NetworkMonitorAutoDetect.NetworkInformation;
+import static org.webrtc.NetworkMonitorAutoDetect.NetworkState;
+
+import android.annotation.SuppressLint;
+import android.content.Context;
+import android.content.Intent;
+import android.net.ConnectivityManager;
+import android.net.Network;
+import android.net.wifi.WifiManager;
+import android.os.Build;
+import android.os.Handler;
+import android.os.Looper;
+import android.telephony.TelephonyManager;
+import android.test.ActivityTestCase;
+import android.test.UiThreadTest;
+import android.test.suitebuilder.annotation.MediumTest;
+import android.test.suitebuilder.annotation.SmallTest;
+
+/**
+ * Tests for org.webrtc.NetworkMonitor.
+ */
+@SuppressLint("NewApi")
+public class NetworkMonitorTest extends ActivityTestCase {
+  /**
+   * Listens for alerts fired by the NetworkMonitor when network status changes.
+   */
+  private static class NetworkMonitorTestObserver
+      implements NetworkMonitor.NetworkObserver {
+    private boolean receivedNotification = false;
+
+    @Override
+    public void onConnectionTypeChanged(ConnectionType connectionType) {
+      receivedNotification = true;
+    }
+
+    public boolean hasReceivedNotification() {
+      return receivedNotification;
+    }
+
+    public void resetHasReceivedNotification() {
+      receivedNotification = false;
+    }
+  }
+
+  /**
+   * Mocks out calls to the ConnectivityManager.
+   */
+  private static class MockConnectivityManagerDelegate extends ConnectivityManagerDelegate {
+    private boolean activeNetworkExists;
+    private int networkType;
+    private int networkSubtype;
+
+    @Override
+    public NetworkState getNetworkState() {
+      return new NetworkState(activeNetworkExists, networkType, networkSubtype);
+    }
+
+    // Dummy implementations to avoid NullPointerExceptions in default implementations:
+
+    @Override
+    public int getDefaultNetId() {
+      return INVALID_NET_ID;
+    }
+
+    @Override
+    public Network[] getAllNetworks() {
+      return new Network[0];
+    }
+
+    @Override
+    public NetworkState getNetworkState(Network network) {
+      return new NetworkState(false, -1, -1);
+    }
+
+    public void setActiveNetworkExists(boolean networkExists) {
+      activeNetworkExists = networkExists;
+    }
+
+    public void setNetworkType(int networkType) {
+      this.networkType = networkType;
+    }
+
+    public void setNetworkSubtype(int networkSubtype) {
+      this.networkSubtype = networkSubtype;
+    }
+  }
+
+  /**
+   * Mocks out calls to the WifiManager.
+   */
+  private static class MockWifiManagerDelegate
+      extends NetworkMonitorAutoDetect.WifiManagerDelegate {
+    private String wifiSSID;
+
+    @Override
+    public String getWifiSSID() {
+      return wifiSSID;
+    }
+
+    public void setWifiSSID(String wifiSSID) {
+      this.wifiSSID = wifiSSID;
+    }
+  }
+
+  // A dummy NetworkMonitorAutoDetect.Observer.
+  private static class TestNetworkMonitorAutoDetectObserver
+      implements NetworkMonitorAutoDetect.Observer {
+
+    @Override
+    public void onConnectionTypeChanged(ConnectionType newConnectionType) {}
+
+    @Override
+    public void onNetworkConnect(NetworkInformation networkInfo) {}
+
+    @Override
+    public void onNetworkDisconnect(int networkHandle) {}
+  }
+
+  private static final Object lock = new Object();
+  private static Handler uiThreadHandler = null;
+
+  private NetworkMonitorAutoDetect receiver;
+  private MockConnectivityManagerDelegate connectivityDelegate;
+  private MockWifiManagerDelegate wifiDelegate;
+
+  private static Handler getUiThreadHandler() {
+    synchronized (lock) {
+      if (uiThreadHandler == null) {
+        uiThreadHandler = new Handler(Looper.getMainLooper());
+      }
+      return uiThreadHandler;
+    }
+  }
+
+  /**
+   * Helper method to create a network monitor and delegates for testing.
+   */
+  private void createTestMonitor() {
+    Context context = getInstrumentation().getTargetContext();
+    NetworkMonitor.resetInstanceForTests(context);
+    NetworkMonitor.setAutoDetectConnectivityState(true);
+    receiver = NetworkMonitor.getAutoDetectorForTest();
+    assertNotNull(receiver);
+
+    connectivityDelegate = new MockConnectivityManagerDelegate();
+    connectivityDelegate.setActiveNetworkExists(true);
+    receiver.setConnectivityManagerDelegateForTests(connectivityDelegate);
+
+    wifiDelegate = new MockWifiManagerDelegate();
+    receiver.setWifiManagerDelegateForTests(wifiDelegate);
+    wifiDelegate.setWifiSSID("foo");
+  }
+
+  private NetworkMonitorAutoDetect.ConnectionType getCurrentConnectionType() {
+    final NetworkMonitorAutoDetect.NetworkState networkState =
+        receiver.getCurrentNetworkState();
+    return receiver.getConnectionType(networkState);
+  }
+
+  @Override
+  protected void setUp() throws Exception {
+    super.setUp();
+    getUiThreadHandler().post(new Runnable() {
+      public void run() {
+        createTestMonitor();
+      }
+    });
+  }
+
+  /**
+   * Tests that the receiver registers for connectivity intents during construction.
+   */
+  @UiThreadTest
+  @SmallTest
+  public void testNetworkMonitorRegistersInConstructor() throws InterruptedException {
+    Context context = getInstrumentation().getTargetContext();
+
+    NetworkMonitorAutoDetect.Observer observer = new TestNetworkMonitorAutoDetectObserver();
+
+    NetworkMonitorAutoDetect receiver = new NetworkMonitorAutoDetect(observer, context);
+
+    assertTrue(receiver.isReceiverRegisteredForTesting());
+  }
+
+  /**
+   * Tests that when there is an intent indicating a change in network connectivity, it sends a
+   * notification to Java observers.
+   */
+  @UiThreadTest
+  @MediumTest
+  public void testNetworkMonitorJavaObservers() throws InterruptedException {
+    // Initialize the NetworkMonitor with a connection.
+    Intent connectivityIntent = new Intent(ConnectivityManager.CONNECTIVITY_ACTION);
+    receiver.onReceive(getInstrumentation().getTargetContext(), connectivityIntent);
+
+    // We shouldn't be re-notified if the connection hasn't actually changed.
+    NetworkMonitorTestObserver observer = new NetworkMonitorTestObserver();
+    NetworkMonitor.addNetworkObserver(observer);
+    receiver.onReceive(getInstrumentation().getTargetContext(), connectivityIntent);
+    assertFalse(observer.hasReceivedNotification());
+
+    // We shouldn't be notified if we're connected to non-Wifi and the Wifi SSID changes.
+    wifiDelegate.setWifiSSID("bar");
+    receiver.onReceive(getInstrumentation().getTargetContext(), connectivityIntent);
+    assertFalse(observer.hasReceivedNotification());
+
+    // We should be notified when we change to Wifi.
+    connectivityDelegate.setNetworkType(ConnectivityManager.TYPE_WIFI);
+    receiver.onReceive(getInstrumentation().getTargetContext(), connectivityIntent);
+    assertTrue(observer.hasReceivedNotification());
+    observer.resetHasReceivedNotification();
+
+    // We should be notified when the Wifi SSID changes.
+    wifiDelegate.setWifiSSID("foo");
+    receiver.onReceive(getInstrumentation().getTargetContext(), connectivityIntent);
+    assertTrue(observer.hasReceivedNotification());
+    observer.resetHasReceivedNotification();
+
+    // We shouldn't be re-notified if the Wifi SSID hasn't actually changed.
+    receiver.onReceive(getInstrumentation().getTargetContext(), connectivityIntent);
+    assertFalse(observer.hasReceivedNotification());
+
+    // Mimic that connectivity has been lost and ensure that the observer gets the notification.
+    connectivityDelegate.setActiveNetworkExists(false);
+    Intent noConnectivityIntent = new Intent(ConnectivityManager.CONNECTIVITY_ACTION);
+    receiver.onReceive(getInstrumentation().getTargetContext(), noConnectivityIntent);
+    assertTrue(observer.hasReceivedNotification());
+  }
+
+  /**
+   * Tests that ConnectivityManagerDelegate doesn't crash. This test cannot rely on having any
+   * active network connections so it cannot usefully check results, but it can at least check
+   * that the functions don't crash.
+   */
+  @UiThreadTest
+  @SmallTest
+  public void testConnectivityManagerDelegateDoesNotCrash() {
+    ConnectivityManagerDelegate delegate =
+        new ConnectivityManagerDelegate(getInstrumentation().getTargetContext());
+    delegate.getNetworkState();
+    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
+      Network[] networks = delegate.getAllNetworks();
+      if (networks.length >= 1) {
+        delegate.getNetworkState(networks[0]);
+        delegate.hasInternetCapability(networks[0]);
+      }
+      delegate.getDefaultNetId();
+    }
+  }
+
+  /**
+   * Tests that NetworkMonitorAutoDetect queryable APIs don't crash. This test cannot rely
+   * on having any active network connections so it cannot usefully check results, but it can at
+   * least check that the functions don't crash.
+   */
+  @UiThreadTest
+  @SmallTest
+  public void testQueryableAPIsDoNotCrash() {
+    NetworkMonitorAutoDetect.Observer observer = new TestNetworkMonitorAutoDetectObserver();
+    NetworkMonitorAutoDetect ncn =
+        new NetworkMonitorAutoDetect(observer, getInstrumentation().getTargetContext());
+    ncn.getDefaultNetId();
+  }
+}
diff --git a/webrtc/api/androidtests/src/org/webrtc/PeerConnectionTest.java b/webrtc/api/androidtests/src/org/webrtc/PeerConnectionTest.java
new file mode 100644
index 0000000..28347e9
--- /dev/null
+++ b/webrtc/api/androidtests/src/org/webrtc/PeerConnectionTest.java
@@ -0,0 +1,771 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import org.webrtc.PeerConnection.IceConnectionState;
+import org.webrtc.PeerConnection.IceGatheringState;
+import org.webrtc.PeerConnection.SignalingState;
+
+import android.test.ActivityTestCase;
+import android.test.suitebuilder.annotation.MediumTest;
+
+import java.io.File;
+import java.lang.ref.WeakReference;
+import java.nio.ByteBuffer;
+import java.nio.charset.Charset;
+import java.util.Arrays;
+import java.util.IdentityHashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeSet;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+/** End-to-end tests for PeerConnection.java. */
+public class PeerConnectionTest extends ActivityTestCase {
+  private static final int TIMEOUT_SECONDS = 20;
+  private TreeSet<String> threadsBeforeTest = null;
+
+  @Override
+  protected void setUp() {
+    assertTrue(PeerConnectionFactory.initializeAndroidGlobals(
+        getInstrumentation().getContext(), true, true, true));
+  }
+
+  private static class ObserverExpectations implements PeerConnection.Observer,
+                                                       VideoRenderer.Callbacks,
+                                                       DataChannel.Observer,
+                                                       StatsObserver {
+    private final String name;
+    private int expectedIceCandidates = 0;
+    private int expectedErrors = 0;
+    private int expectedRenegotiations = 0;
+    private int previouslySeenWidth = 0;
+    private int previouslySeenHeight = 0;
+    private int expectedFramesDelivered = 0;
+    private LinkedList<SignalingState> expectedSignalingChanges =
+        new LinkedList<SignalingState>();
+    private LinkedList<IceConnectionState> expectedIceConnectionChanges =
+        new LinkedList<IceConnectionState>();
+    private LinkedList<IceGatheringState> expectedIceGatheringChanges =
+        new LinkedList<IceGatheringState>();
+    private LinkedList<String> expectedAddStreamLabels =
+        new LinkedList<String>();
+    private LinkedList<String> expectedRemoveStreamLabels =
+        new LinkedList<String>();
+    private final LinkedList<IceCandidate> gotIceCandidates =
+        new LinkedList<IceCandidate>();
+    private Map<MediaStream, WeakReference<VideoRenderer>> renderers =
+        new IdentityHashMap<MediaStream, WeakReference<VideoRenderer>>();
+    private DataChannel dataChannel;
+    private LinkedList<DataChannel.Buffer> expectedBuffers =
+        new LinkedList<DataChannel.Buffer>();
+    private LinkedList<DataChannel.State> expectedStateChanges =
+        new LinkedList<DataChannel.State>();
+    private LinkedList<String> expectedRemoteDataChannelLabels =
+        new LinkedList<String>();
+    private int expectedStatsCallbacks = 0;
+    private LinkedList<StatsReport[]> gotStatsReports =
+        new LinkedList<StatsReport[]>();
+
+    public ObserverExpectations(String name) {
+      this.name = name;
+    }
+
+    public synchronized void setDataChannel(DataChannel dataChannel) {
+      assertNull(this.dataChannel);
+      this.dataChannel = dataChannel;
+      this.dataChannel.registerObserver(this);
+      assertNotNull(this.dataChannel);
+    }
+
+    public synchronized void expectIceCandidates(int count) {
+      expectedIceCandidates += count;
+    }
+
+    @Override
+    public synchronized void onIceCandidate(IceCandidate candidate) {
+      --expectedIceCandidates;
+
+      // We don't assert expectedIceCandidates >= 0 because it's hard to know
+      // how many to expect, in general.  We only use expectIceCandidates to
+      // assert a minimal count.
+      synchronized (gotIceCandidates) {
+        gotIceCandidates.add(candidate);
+        gotIceCandidates.notifyAll();
+      }
+    }
+
+    private synchronized void setSize(int width, int height) {
+      // Because different camera devices (fake & physical) produce different
+      // resolutions, we only sanity-check the set sizes.
+      assertTrue(width > 0);
+      assertTrue(height > 0);
+      if (previouslySeenWidth > 0) {
+        assertEquals(previouslySeenWidth, width);
+        assertEquals(previouslySeenHeight, height);
+      } else {
+        previouslySeenWidth = width;
+        previouslySeenHeight = height;
+      }
+    }
+
+    public synchronized void expectFramesDelivered(int count) {
+      expectedFramesDelivered += count;
+    }
+
+    @Override
+    public synchronized void renderFrame(VideoRenderer.I420Frame frame) {
+      setSize(frame.rotatedWidth(), frame.rotatedHeight());
+      --expectedFramesDelivered;
+      VideoRenderer.renderFrameDone(frame);
+    }
+
+    public synchronized void expectSignalingChange(SignalingState newState) {
+      expectedSignalingChanges.add(newState);
+    }
+
+    @Override
+    public synchronized void onSignalingChange(SignalingState newState) {
+      assertEquals(expectedSignalingChanges.removeFirst(), newState);
+    }
+
+    public synchronized void expectIceConnectionChange(
+        IceConnectionState newState) {
+      expectedIceConnectionChanges.add(newState);
+    }
+
+    @Override
+    public synchronized void onIceConnectionChange(
+        IceConnectionState newState) {
+      // TODO(bemasc): remove once delivery of ICECompleted is reliable
+      // (https://code.google.com/p/webrtc/issues/detail?id=3021).
+      if (newState.equals(IceConnectionState.COMPLETED)) {
+        return;
+      }
+
+      if (expectedIceConnectionChanges.isEmpty()) {
+        System.out.println(name + ": Got an unexpected ice connection change " + newState);
+        return;
+      }
+
+      assertEquals(expectedIceConnectionChanges.removeFirst(), newState);
+    }
+
+    @Override
+    public synchronized void onIceConnectionReceivingChange(boolean receiving) {
+      System.out.println(name + ": Got an ice connection receiving change " + receiving);
+    }
+
+    public synchronized void expectIceGatheringChange(
+        IceGatheringState newState) {
+      expectedIceGatheringChanges.add(newState);
+    }
+
+    @Override
+    public synchronized void onIceGatheringChange(IceGatheringState newState) {
+      // It's fine to get a variable number of GATHERING messages before
+      // COMPLETE fires (depending on how long the test runs) so we don't assert
+      // any particular count.
+      if (newState == IceGatheringState.GATHERING) {
+        return;
+      }
+      assertEquals(expectedIceGatheringChanges.removeFirst(), newState);
+    }
+
+    public synchronized void expectAddStream(String label) {
+      expectedAddStreamLabels.add(label);
+    }
+
+    @Override
+    public synchronized void onAddStream(MediaStream stream) {
+      assertEquals(expectedAddStreamLabels.removeFirst(), stream.label());
+      assertEquals(1, stream.videoTracks.size());
+      assertEquals(1, stream.audioTracks.size());
+      assertTrue(stream.videoTracks.get(0).id().endsWith("VideoTrack"));
+      assertTrue(stream.audioTracks.get(0).id().endsWith("AudioTrack"));
+      assertEquals("video", stream.videoTracks.get(0).kind());
+      assertEquals("audio", stream.audioTracks.get(0).kind());
+      VideoRenderer renderer = createVideoRenderer(this);
+      stream.videoTracks.get(0).addRenderer(renderer);
+      assertNull(renderers.put(
+          stream, new WeakReference<VideoRenderer>(renderer)));
+    }
+
+    public synchronized void expectRemoveStream(String label) {
+      expectedRemoveStreamLabels.add(label);
+    }
+
+    @Override
+    public synchronized void onRemoveStream(MediaStream stream) {
+      assertEquals(expectedRemoveStreamLabels.removeFirst(), stream.label());
+      WeakReference<VideoRenderer> renderer = renderers.remove(stream);
+      assertNotNull(renderer);
+      assertNotNull(renderer.get());
+      assertEquals(1, stream.videoTracks.size());
+      stream.videoTracks.get(0).removeRenderer(renderer.get());
+    }
+
+    public synchronized void expectDataChannel(String label) {
+      expectedRemoteDataChannelLabels.add(label);
+    }
+
+    @Override
+    public synchronized void onDataChannel(DataChannel remoteDataChannel) {
+      assertEquals(expectedRemoteDataChannelLabels.removeFirst(),
+                   remoteDataChannel.label());
+      setDataChannel(remoteDataChannel);
+      assertEquals(DataChannel.State.CONNECTING, dataChannel.state());
+    }
+
+    public synchronized void expectRenegotiationNeeded() {
+      ++expectedRenegotiations;
+    }
+
+    @Override
+    public synchronized void onRenegotiationNeeded() {
+      assertTrue(--expectedRenegotiations >= 0);
+    }
+
+    public synchronized void expectMessage(ByteBuffer expectedBuffer,
+                                           boolean expectedBinary) {
+      expectedBuffers.add(
+          new DataChannel.Buffer(expectedBuffer, expectedBinary));
+    }
+
+    @Override
+    public synchronized void onMessage(DataChannel.Buffer buffer) {
+      DataChannel.Buffer expected = expectedBuffers.removeFirst();
+      assertEquals(expected.binary, buffer.binary);
+      assertTrue(expected.data.equals(buffer.data));
+    }
+
+    @Override
+    public synchronized void onBufferedAmountChange(long previousAmount) {
+      assertFalse(previousAmount == dataChannel.bufferedAmount());
+    }
+
+    @Override
+    public synchronized void onStateChange() {
+      assertEquals(expectedStateChanges.removeFirst(), dataChannel.state());
+    }
+
+    public synchronized void expectStateChange(DataChannel.State state) {
+      expectedStateChanges.add(state);
+    }
+
+    @Override
+    public synchronized void onComplete(StatsReport[] reports) {
+      if (--expectedStatsCallbacks < 0) {
+        throw new RuntimeException("Unexpected stats report: " + reports);
+      }
+      gotStatsReports.add(reports);
+    }
+
+    public synchronized void expectStatsCallback() {
+      ++expectedStatsCallbacks;
+    }
+
+    public synchronized LinkedList<StatsReport[]> takeStatsReports() {
+      LinkedList<StatsReport[]> got = gotStatsReports;
+      gotStatsReports = new LinkedList<StatsReport[]>();
+      return got;
+    }
+
+    // Return a set of expectations that haven't been satisfied yet, possibly
+    // empty if no such expectations exist.
+    public synchronized TreeSet<String> unsatisfiedExpectations() {
+      TreeSet<String> stillWaitingForExpectations = new TreeSet<String>();
+      if (expectedIceCandidates > 0) {  // See comment in onIceCandidate.
+        stillWaitingForExpectations.add("expectedIceCandidates");
+      }
+      if (expectedErrors != 0) {
+        stillWaitingForExpectations.add("expectedErrors: " + expectedErrors);
+      }
+      if (expectedSignalingChanges.size() != 0) {
+        stillWaitingForExpectations.add(
+            "expectedSignalingChanges: " + expectedSignalingChanges.size());
+      }
+      if (expectedIceConnectionChanges.size() != 0) {
+        stillWaitingForExpectations.add("expectedIceConnectionChanges: " +
+                                        expectedIceConnectionChanges.size());
+      }
+      if (expectedIceGatheringChanges.size() != 0) {
+        stillWaitingForExpectations.add("expectedIceGatheringChanges: " +
+                                        expectedIceGatheringChanges.size());
+      }
+      if (expectedAddStreamLabels.size() != 0) {
+        stillWaitingForExpectations.add(
+            "expectedAddStreamLabels: " + expectedAddStreamLabels.size());
+      }
+      if (expectedRemoveStreamLabels.size() != 0) {
+        stillWaitingForExpectations.add(
+            "expectedRemoveStreamLabels: " + expectedRemoveStreamLabels.size());
+      }
+      if (expectedFramesDelivered > 0) {
+        stillWaitingForExpectations.add(
+            "expectedFramesDelivered: " + expectedFramesDelivered);
+      }
+      if (!expectedBuffers.isEmpty()) {
+        stillWaitingForExpectations.add(
+            "expectedBuffers: " + expectedBuffers.size());
+      }
+      if (!expectedStateChanges.isEmpty()) {
+        stillWaitingForExpectations.add(
+            "expectedStateChanges: " + expectedStateChanges.size());
+      }
+      if (!expectedRemoteDataChannelLabels.isEmpty()) {
+        stillWaitingForExpectations.add("expectedRemoteDataChannelLabels: " +
+                                        expectedRemoteDataChannelLabels.size());
+      }
+      if (expectedStatsCallbacks != 0) {
+        stillWaitingForExpectations.add(
+            "expectedStatsCallbacks: " + expectedStatsCallbacks);
+      }
+      return stillWaitingForExpectations;
+    }
+
+    public boolean waitForAllExpectationsToBeSatisfied(int timeoutSeconds) {
+      // TODO(fischman): problems with this approach:
+      // - come up with something better than a poll loop
+      // - avoid serializing expectations explicitly; the test is not as robust
+      //   as it could be because it must place expectations between wait
+      //   statements very precisely (e.g. frame must not arrive before its
+      //   expectation, and expectation must not be registered so early as to
+      //   stall a wait).  Use callbacks to fire off dependent steps instead of
+      //   explicitly waiting, so there can be just a single wait at the end of
+      //   the test.
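+      //
+      // A callback-driven version could look roughly like this (hypothetical sketch,
+      // not what this test currently does): back each expectation with a
+      // java.util.concurrent.CountDownLatch that the corresponding observer callback
+      // counts down, then block exactly once at the end of the test:
+      //
+      //   CountDownLatch allSatisfied = new CountDownLatch(totalExpectations);
+      //   // ...each observer callback invokes allSatisfied.countDown()...
+      //   assertTrue(allSatisfied.await(timeoutSeconds, TimeUnit.SECONDS));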
+      long endTime = System.currentTimeMillis() + 1000 * timeoutSeconds;
+      TreeSet<String> prev = null;
+      TreeSet<String> stillWaitingForExpectations = unsatisfiedExpectations();
+      while (!stillWaitingForExpectations.isEmpty()) {
+        if (!stillWaitingForExpectations.equals(prev)) {
+          System.out.println(
+              name + " still waiting at\n    " +
+              (new Throwable()).getStackTrace()[1] +
+              "\n    for: " +
+              Arrays.toString(stillWaitingForExpectations.toArray()));
+        }
+        if (endTime < System.currentTimeMillis()) {
+          System.out.println(name + " timed out waiting for: "
+              + Arrays.toString(stillWaitingForExpectations.toArray()));
+          return false;
+        }
+        try {
+          Thread.sleep(10);
+        } catch (InterruptedException e) {
+          throw new RuntimeException(e);
+        }
+        prev = stillWaitingForExpectations;
+        stillWaitingForExpectations = unsatisfiedExpectations();
+      }
+      if (prev == null) {
+        System.out.println(name + " didn't need to wait at\n    " +
+                           (new Throwable()).getStackTrace()[1]);
+      }
+      return true;
+    }
+
+    // Returns a list of all currently gathered ICE candidates, waiting until at least
+    // one candidate has been gathered.
+    public List<IceCandidate> getAtLeastOneIceCandidate() throws InterruptedException {
+      synchronized (gotIceCandidates) {
+        while (gotIceCandidates.isEmpty()) {
+          gotIceCandidates.wait();
+        }
+        return new LinkedList<IceCandidate>(gotIceCandidates);
+      }
+    }
+  }
+
+  private static class SdpObserverLatch implements SdpObserver {
+    private boolean success = false;
+    private SessionDescription sdp = null;
+    private String error = null;
+    private CountDownLatch latch = new CountDownLatch(1);
+
+    public SdpObserverLatch() {}
+
+    @Override
+    public void onCreateSuccess(SessionDescription sdp) {
+      this.sdp = sdp;
+      onSetSuccess();
+    }
+
+    @Override
+    public void onSetSuccess() {
+      success = true;
+      latch.countDown();
+    }
+
+    @Override
+    public void onCreateFailure(String error) {
+      onSetFailure(error);
+    }
+
+    @Override
+    public void onSetFailure(String error) {
+      this.error = error;
+      latch.countDown();
+    }
+
+    public boolean await() {
+      try {
+        assertTrue(latch.await(1000, TimeUnit.MILLISECONDS));
+        return getSuccess();
+      } catch (Exception e) {
+        throw new RuntimeException(e);
+      }
+    }
+
+    public boolean getSuccess() {
+      return success;
+    }
+
+    public SessionDescription getSdp() {
+      return sdp;
+    }
+
+    public String getError() {
+      return error;
+    }
+  }
+
+  static int videoWindowsMapped = -1;
+
+  private static VideoRenderer createVideoRenderer(
+      VideoRenderer.Callbacks videoCallbacks) {
+    return new VideoRenderer(videoCallbacks);
+  }
+
+  // Return a weak reference to test that ownership is correctly held by
+  // PeerConnection, not by test code.
+  private static WeakReference<MediaStream> addTracksToPC(
+      PeerConnectionFactory factory, PeerConnection pc,
+      VideoSource videoSource,
+      String streamLabel, String videoTrackId, String audioTrackId,
+      VideoRenderer.Callbacks videoCallbacks) {
+    MediaStream lMS = factory.createLocalMediaStream(streamLabel);
+    VideoTrack videoTrack =
+        factory.createVideoTrack(videoTrackId, videoSource);
+    assertNotNull(videoTrack);
+    VideoRenderer videoRenderer = createVideoRenderer(videoCallbacks);
+    assertNotNull(videoRenderer);
+    videoTrack.addRenderer(videoRenderer);
+    lMS.addTrack(videoTrack);
+    // Just for fun, let's remove and re-add the track.
+    lMS.removeTrack(videoTrack);
+    lMS.addTrack(videoTrack);
+    lMS.addTrack(factory.createAudioTrack(
+        audioTrackId, factory.createAudioSource(new MediaConstraints())));
+    pc.addStream(lMS);
+    return new WeakReference<MediaStream>(lMS);
+  }
+
+  // Used for making sure thread handles are not leaked.
+  // Call initializeThreadCheck before a test and finalizeThreadCheck after
+  // a test.
+  void initializeThreadCheck() {
+    System.gc();  // Encourage any GC-related threads to start up.
+    threadsBeforeTest = allThreads();
+  }
+
+  void finalizeThreadCheck() throws Exception {
+    // TreeSet<String> threadsAfterTest = allThreads();
+
+    // TODO(tommi): Figure out a more reliable way to do this test.  As is
+    // we're seeing three possible 'normal' situations:
+    // 1.  before and after sets are equal.
+    // 2.  before contains 3 threads that do not exist in after.
+    // 3.  after contains 3 threads that do not exist in before.
+    //
+    // Maybe it would be better to do the thread enumeration from C++ and get
+    // the thread names as well, in order to determine what these 3 threads are.
+
+    // assertEquals(threadsBeforeTest, threadsAfterTest);
+    // Thread.sleep(100);
+  }
+
+  @MediumTest
+  public void testCompleteSession() throws Exception {
+    // Allow loopback interfaces too, since our Android test devices often don't have
+    // any other network interfaces available.
+    PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
+    options.networkIgnoreMask = 0;
+    PeerConnectionFactory factory = new PeerConnectionFactory(options);
+    // Uncomment to get ALL WebRTC tracing and SENSITIVE libjingle logging.
+    // NOTE: this _must_ happen while |factory| is alive!
+    // Logging.enableTracing(
+    //     "/tmp/PeerConnectionTest-log.txt",
+    //     EnumSet.of(Logging.TraceLevel.TRACE_ALL),
+    //     Logging.Severity.LS_SENSITIVE);
+
+    MediaConstraints pcConstraints = new MediaConstraints();
+    pcConstraints.mandatory.add(
+        new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
+
+    LinkedList<PeerConnection.IceServer> iceServers =
+        new LinkedList<PeerConnection.IceServer>();
+    iceServers.add(new PeerConnection.IceServer(
+        "stun:stun.l.google.com:19302"));
+    iceServers.add(new PeerConnection.IceServer(
+        "turn:fake.example.com", "fakeUsername", "fakePassword"));
+    ObserverExpectations offeringExpectations =
+        new ObserverExpectations("PCTest:offerer");
+    PeerConnection offeringPC = factory.createPeerConnection(
+        iceServers, pcConstraints, offeringExpectations);
+    assertNotNull(offeringPC);
+
+    ObserverExpectations answeringExpectations =
+        new ObserverExpectations("PCTest:answerer");
+    PeerConnection answeringPC = factory.createPeerConnection(
+        iceServers, pcConstraints, answeringExpectations);
+    assertNotNull(answeringPC);
+
+    // We want to use the same camera for offerer & answerer, so create it here
+    // instead of in addTracksToPC.
+    VideoSource videoSource = factory.createVideoSource(
+        VideoCapturerAndroid.create("", null), new MediaConstraints());
+
+    offeringExpectations.expectRenegotiationNeeded();
+    WeakReference<MediaStream> oLMS = addTracksToPC(
+        factory, offeringPC, videoSource, "offeredMediaStream",
+        "offeredVideoTrack", "offeredAudioTrack", offeringExpectations);
+
+    offeringExpectations.expectRenegotiationNeeded();
+    DataChannel offeringDC = offeringPC.createDataChannel(
+        "offeringDC", new DataChannel.Init());
+    assertEquals("offeringDC", offeringDC.label());
+
+    offeringExpectations.setDataChannel(offeringDC);
+    SdpObserverLatch sdpLatch = new SdpObserverLatch();
+    offeringPC.createOffer(sdpLatch, new MediaConstraints());
+    assertTrue(sdpLatch.await());
+    SessionDescription offerSdp = sdpLatch.getSdp();
+    assertEquals(offerSdp.type, SessionDescription.Type.OFFER);
+    assertFalse(offerSdp.description.isEmpty());
+
+    sdpLatch = new SdpObserverLatch();
+    answeringExpectations.expectSignalingChange(
+        SignalingState.HAVE_REMOTE_OFFER);
+    answeringExpectations.expectAddStream("offeredMediaStream");
+    // SCTP DataChannels are announced via OPEN messages over the established
+    // connection (not via SDP), so answeringExpectations can only register
+    // expecting the channel during ICE, below.
+    answeringPC.setRemoteDescription(sdpLatch, offerSdp);
+    assertEquals(
+        PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
+    assertTrue(sdpLatch.await());
+    assertNull(sdpLatch.getSdp());
+
+    answeringExpectations.expectRenegotiationNeeded();
+    WeakReference<MediaStream> aLMS = addTracksToPC(
+        factory, answeringPC, videoSource, "answeredMediaStream",
+        "answeredVideoTrack", "answeredAudioTrack", answeringExpectations);
+
+    sdpLatch = new SdpObserverLatch();
+    answeringPC.createAnswer(sdpLatch, new MediaConstraints());
+    assertTrue(sdpLatch.await());
+    SessionDescription answerSdp = sdpLatch.getSdp();
+    assertEquals(answerSdp.type, SessionDescription.Type.ANSWER);
+    assertFalse(answerSdp.description.isEmpty());
+
+    offeringExpectations.expectIceCandidates(2);
+    answeringExpectations.expectIceCandidates(2);
+
+    offeringExpectations.expectIceGatheringChange(IceGatheringState.COMPLETE);
+    answeringExpectations.expectIceGatheringChange(IceGatheringState.COMPLETE);
+
+    sdpLatch = new SdpObserverLatch();
+    answeringExpectations.expectSignalingChange(SignalingState.STABLE);
+    answeringPC.setLocalDescription(sdpLatch, answerSdp);
+    assertTrue(sdpLatch.await());
+    assertNull(sdpLatch.getSdp());
+
+    sdpLatch = new SdpObserverLatch();
+    offeringExpectations.expectSignalingChange(SignalingState.HAVE_LOCAL_OFFER);
+    offeringPC.setLocalDescription(sdpLatch, offerSdp);
+    assertTrue(sdpLatch.await());
+    assertNull(sdpLatch.getSdp());
+    sdpLatch = new SdpObserverLatch();
+    offeringExpectations.expectSignalingChange(SignalingState.STABLE);
+    offeringExpectations.expectAddStream("answeredMediaStream");
+
+    offeringExpectations.expectIceConnectionChange(
+        IceConnectionState.CHECKING);
+    offeringExpectations.expectIceConnectionChange(
+        IceConnectionState.CONNECTED);
+    // TODO(bemasc): uncomment once delivery of ICECompleted is reliable
+    // (https://code.google.com/p/webrtc/issues/detail?id=3021).
+    //
+    // offeringExpectations.expectIceConnectionChange(
+    //     IceConnectionState.COMPLETED);
+    answeringExpectations.expectIceConnectionChange(
+        IceConnectionState.CHECKING);
+    answeringExpectations.expectIceConnectionChange(
+        IceConnectionState.CONNECTED);
+
+    offeringPC.setRemoteDescription(sdpLatch, answerSdp);
+    assertTrue(sdpLatch.await());
+    assertNull(sdpLatch.getSdp());
+
+    assertEquals(offeringPC.getLocalDescription().type, offerSdp.type);
+    assertEquals(offeringPC.getRemoteDescription().type, answerSdp.type);
+    assertEquals(answeringPC.getLocalDescription().type, answerSdp.type);
+    assertEquals(answeringPC.getRemoteDescription().type, offerSdp.type);
+
+    assertEquals(offeringPC.getSenders().size(), 2);
+    assertEquals(offeringPC.getReceivers().size(), 2);
+    assertEquals(answeringPC.getSenders().size(), 2);
+    assertEquals(answeringPC.getReceivers().size(), 2);
+
+    // Wait for at least some frames to be delivered at each end (number
+    // chosen arbitrarily).
+    offeringExpectations.expectFramesDelivered(10);
+    answeringExpectations.expectFramesDelivered(10);
+
+    offeringExpectations.expectStateChange(DataChannel.State.OPEN);
+    // See commentary about SCTP DataChannels above for why this is here.
+    answeringExpectations.expectDataChannel("offeringDC");
+    answeringExpectations.expectStateChange(DataChannel.State.OPEN);
+
+    // Wait for at least one ICE candidate from the offering PC and forward all
+    // candidates gathered so far to the answering PC.
+    for (IceCandidate candidate : offeringExpectations.getAtLeastOneIceCandidate()) {
+      answeringPC.addIceCandidate(candidate);
+    }
+
+    // Wait for at least one ICE candidate from the answering PC and forward all
+    // candidates gathered so far to the offering PC.
+    for (IceCandidate candidate : answeringExpectations.getAtLeastOneIceCandidate()) {
+      offeringPC.addIceCandidate(candidate);
+    }
+
+    assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));
+    assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));
+
+    assertEquals(
+        PeerConnection.SignalingState.STABLE, offeringPC.signalingState());
+    assertEquals(
+        PeerConnection.SignalingState.STABLE, answeringPC.signalingState());
+
+    // Test send & receive UTF-8 text.
+    answeringExpectations.expectMessage(
+        ByteBuffer.wrap("hello!".getBytes(Charset.forName("UTF-8"))), false);
+    DataChannel.Buffer buffer = new DataChannel.Buffer(
+        ByteBuffer.wrap("hello!".getBytes(Charset.forName("UTF-8"))), false);
+    assertTrue(offeringExpectations.dataChannel.send(buffer));
+    assertTrue(answeringExpectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));
+
+    // Construct this binary message two different ways to ensure no
+    // shortcuts are taken.
+    ByteBuffer expectedBinaryMessage = ByteBuffer.allocateDirect(5);
+    for (byte i = 1; i < 6; ++i) {
+      expectedBinaryMessage.put(i);
+    }
+    expectedBinaryMessage.flip();
+    offeringExpectations.expectMessage(expectedBinaryMessage, true);
+    assertTrue(answeringExpectations.dataChannel.send(
+        new DataChannel.Buffer(
+            ByteBuffer.wrap(new byte[] { 1, 2, 3, 4, 5 }), true)));
+    assertTrue(offeringExpectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));
+
+    offeringExpectations.expectStateChange(DataChannel.State.CLOSING);
+    answeringExpectations.expectStateChange(DataChannel.State.CLOSING);
+    offeringExpectations.expectStateChange(DataChannel.State.CLOSED);
+    answeringExpectations.expectStateChange(DataChannel.State.CLOSED);
+    answeringExpectations.dataChannel.close();
+    offeringExpectations.dataChannel.close();
+
+    // TODO(fischman): MOAR test ideas:
+    // - Test that PC.removeStream() works; requires a second
+    //   createOffer/createAnswer dance.
+    // - audit each place that uses |constraints| for specifying non-trivial
+    //   constraints (and ensure they're honored).
+    // - test error cases
+    // - ensure reasonable coverage of _jni.cc is achieved.  Coverage is
+    //   extra-important because of all the free-text (class/method names, etc)
+    //   in JNI-style programming; make sure no typos!
+    // - Test that shutdown mid-interaction is crash-free.
+
+    // Free the Java-land objects, collect them, and sleep a bit to make sure we
+    // don't get late-arrival crashes after the Java-land objects have been
+    // freed.
+    shutdownPC(offeringPC, offeringExpectations);
+    offeringPC = null;
+    shutdownPC(answeringPC, answeringExpectations);
+    answeringPC = null;
+    videoSource.dispose();
+    factory.dispose();
+    System.gc();
+  }
+
+  private static void shutdownPC(
+      PeerConnection pc, ObserverExpectations expectations) {
+    expectations.dataChannel.unregisterObserver();
+    expectations.dataChannel.dispose();
+    expectations.expectStatsCallback();
+    assertTrue(pc.getStats(expectations, null));
+    assertTrue(expectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));
+    expectations.expectIceConnectionChange(IceConnectionState.CLOSED);
+    expectations.expectSignalingChange(SignalingState.CLOSED);
+    pc.close();
+    assertTrue(expectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));
+    expectations.expectStatsCallback();
+    assertTrue(pc.getStats(expectations, null));
+    assertTrue(expectations.waitForAllExpectationsToBeSatisfied(TIMEOUT_SECONDS));
+
+    System.out.println("FYI stats: ");
+    int reportIndex = -1;
+    for (StatsReport[] reports : expectations.takeStatsReports()) {
+      System.out.println(" Report #" + (++reportIndex));
+      for (int i = 0; i < reports.length; ++i) {
+        System.out.println("  " + reports[i].toString());
+      }
+    }
+    assertEquals(1, reportIndex);
+    System.out.println("End stats.");
+
+    pc.dispose();
+  }
+
+  // Returns a set of thread IDs belonging to this process, as Strings.
+  private static TreeSet<String> allThreads() {
+    TreeSet<String> threads = new TreeSet<String>();
+    // This pokes at /proc instead of using the Java APIs because we're also
+    // looking for libjingle/webrtc native threads, most of which won't have
+    // attached to the JVM.
+    for (String threadId : (new File("/proc/self/task")).list()) {
+      threads.add(threadId);
+    }
+    return threads;
+  }
+}
diff --git a/webrtc/api/androidtests/src/org/webrtc/RendererCommonTest.java b/webrtc/api/androidtests/src/org/webrtc/RendererCommonTest.java
new file mode 100644
index 0000000..cc73fa5
--- /dev/null
+++ b/webrtc/api/androidtests/src/org/webrtc/RendererCommonTest.java
@@ -0,0 +1,189 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.test.ActivityTestCase;
+import android.test.MoreAsserts;
+import android.test.suitebuilder.annotation.SmallTest;
+
+import android.graphics.Point;
+
+import static org.webrtc.RendererCommon.ScalingType.*;
+import static org.webrtc.RendererCommon.getDisplaySize;
+import static org.webrtc.RendererCommon.getLayoutMatrix;
+import static org.webrtc.RendererCommon.rotateTextureMatrix;
+
+public final class RendererCommonTest extends ActivityTestCase {
+  @SmallTest
+  public static void testDisplaySizeNoFrame() {
+    assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FIT, 0.0f, 0, 0));
+    assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FILL, 0.0f, 0, 0));
+    assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_BALANCED, 0.0f, 0, 0));
+  }
+
+  @SmallTest
+  public static void testDisplaySizeDegenerateAspectRatio() {
+    assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FIT, 0.0f, 1280, 720));
+    assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 0.0f, 1280, 720));
+    assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 0.0f, 1280, 720));
+  }
+
+  @SmallTest
+  public static void testZeroDisplaySize() {
+    assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FIT, 16.0f / 9, 0, 0));
+    assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FILL, 16.0f / 9, 0, 0));
+    assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_BALANCED, 16.0f / 9, 0, 0));
+  }
+
+  @SmallTest
+  public static void testDisplaySizePerfectFit() {
+    assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FIT, 16.0f / 9, 1280, 720));
+    assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 16.0f / 9, 1280, 720));
+    assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 16.0f / 9, 1280, 720));
+    assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_FIT, 9.0f / 16, 720, 1280));
+    assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_FILL, 9.0f / 16, 720, 1280));
+    assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_BALANCED, 9.0f / 16, 720, 1280));
+  }
+
+  @SmallTest
+  public static void testLandscapeVideoInPortraitDisplay() {
+    assertEquals(new Point(720, 405), getDisplaySize(SCALE_ASPECT_FIT, 16.0f / 9, 720, 1280));
+    assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_FILL, 16.0f / 9, 720, 1280));
+    assertEquals(new Point(720, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 16.0f / 9, 720, 1280));
+  }
+
+  @SmallTest
+  public static void testPortraitVideoInLandscapeDisplay() {
+    assertEquals(new Point(405, 720), getDisplaySize(SCALE_ASPECT_FIT, 9.0f / 16, 1280, 720));
+    assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 9.0f / 16, 1280, 720));
+    assertEquals(new Point(720, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 9.0f / 16, 1280, 720));
+  }
+
+  @SmallTest
+  public static void testFourToThreeVideoInSixteenToNineDisplay() {
+    assertEquals(new Point(960, 720), getDisplaySize(SCALE_ASPECT_FIT, 4.0f / 3, 1280, 720));
+    assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 4.0f / 3, 1280, 720));
+    assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 4.0f / 3, 1280, 720));
+  }
+
+  // Only keep 2 rounded decimals to make float comparison robust.
+  private static double[] round(float[] array) {
+    assertEquals(16, array.length);
+    final double[] doubleArray = new double[16];
+    for (int i = 0; i < 16; ++i) {
+      doubleArray[i] = Math.round(100 * array[i]) / 100.0;
+    }
+    return doubleArray;
+  }
+
+  // Brief summary about matrix transformations:
+  // A coordinate p = [u, v, 0, 1] is transformed by matrix m like this p' = [u', v', 0, 1] = m * p.
+  // OpenGL uses column-major order, so:
+  // u' = u * m[0] + v * m[4] + m[12].
+  // v' = u * m[1] + v * m[5] + m[13].
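+  //
+  // Worked example (illustrative only, not part of the original tests): applying the
+  // mirror matrix asserted in testLayoutMatrixMirror below to the point (u, v) = (0.25, 0.5):
+  // u' = 0.25 * (-1) + 0.5 * 0 + 1 = 0.75 = 1 - u.
+  // v' = 0.25 * 0 + 0.5 * 1 + 0 = 0.5 = v.
+  // That is, the texture is mirrored horizontally and untouched vertically.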
+
+  @SmallTest
+  public static void testLayoutMatrixDefault() {
+    final float[] layoutMatrix = getLayoutMatrix(false, 1.0f, 1.0f);
+    // Assert:
+    // u' = u.
+    // v' = v.
+    MoreAsserts.assertEquals(new double[] {
+        1, 0, 0, 0,
+        0, 1, 0, 0,
+        0, 0, 1, 0,
+        0, 0, 0, 1}, round(layoutMatrix));
+  }
+
+  @SmallTest
+  public static void testLayoutMatrixMirror() {
+    final float[] layoutMatrix = getLayoutMatrix(true, 1.0f, 1.0f);
+    // Assert:
+    // u' = 1 - u.
+    // v' = v.
+    MoreAsserts.assertEquals(new double[] {
+        -1, 0, 0, 0,
+         0, 1, 0, 0,
+         0, 0, 1, 0,
+         1, 0, 0, 1}, round(layoutMatrix));
+  }
+
+  @SmallTest
+  public static void testLayoutMatrixScale() {
+    // Video has aspect ratio 2, but layout is square. This will cause only the center part of the
+    // video to be visible, i.e. the u coordinate will go from 0.25 to 0.75 instead of from 0 to 1.
+    final float[] layoutMatrix = getLayoutMatrix(false, 2.0f, 1.0f);
+    // Assert:
+    // u' = 0.25 + 0.5 u.
+    // v' = v.
+    MoreAsserts.assertEquals(new double[] {
+         0.5, 0, 0, 0,
+           0, 1, 0, 0,
+           0, 0, 1, 0,
+        0.25, 0, 0, 1}, round(layoutMatrix));
+  }
+
+  @SmallTest
+  public static void testRotateTextureMatrixDefault() {
+    // Test that rotation with 0 degrees returns an identical matrix.
+    final float[] matrix = new float[] {
+        1, 2, 3, 4,
+        5, 6, 7, 8,
+        9, 0, 1, 2,
+        3, 4, 5, 6
+    };
+    final float[] rotatedMatrix = rotateTextureMatrix(matrix, 0);
+    MoreAsserts.assertEquals(round(matrix), round(rotatedMatrix));
+  }
+
+  @SmallTest
+  public static void testRotateTextureMatrix90Deg() {
+    final float[] samplingMatrix = rotateTextureMatrix(RendererCommon.identityMatrix(), 90);
+    // Assert:
+    // u' = 1 - v.
+    // v' = u.
+    MoreAsserts.assertEquals(new double[] {
+         0, 1, 0, 0,
+        -1, 0, 0, 0,
+         0, 0, 1, 0,
+         1, 0, 0, 1}, round(samplingMatrix));
+  }
+
+  @SmallTest
+  public static void testRotateTextureMatrix180Deg() {
+    final float[] samplingMatrix = rotateTextureMatrix(RendererCommon.identityMatrix(), 180);
+    // Assert:
+    // u' = 1 - u.
+    // v' = 1 - v.
+    MoreAsserts.assertEquals(new double[] {
+        -1,  0, 0, 0,
+         0, -1, 0, 0,
+         0,  0, 1, 0,
+         1,  1, 0, 1}, round(samplingMatrix));
+  }
+}
diff --git a/webrtc/api/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java b/webrtc/api/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java
new file mode 100644
index 0000000..9e0164d
--- /dev/null
+++ b/webrtc/api/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java
@@ -0,0 +1,439 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.webrtc;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES20;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.SystemClock;
+import android.test.ActivityTestCase;
+import android.test.suitebuilder.annotation.MediumTest;
+import android.test.suitebuilder.annotation.SmallTest;
+
+import java.nio.ByteBuffer;
+
+public final class SurfaceTextureHelperTest extends ActivityTestCase {
+  /**
+   * Mock texture listener with blocking wait functionality.
+   */
+  public static final class MockTextureListener
+      implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
+    public int oesTextureId;
+    public float[] transformMatrix;
+    private boolean hasNewFrame = false;
+    // Thread on which frames are expected to be received.
+    private final Thread expectedThread;
+
+    MockTextureListener() {
+      this.expectedThread = null;
+    }
+
+    MockTextureListener(Thread expectedThread) {
+      this.expectedThread = expectedThread;
+    }
+
+    @Override
+    public synchronized void onTextureFrameAvailable(
+        int oesTextureId, float[] transformMatrix, long timestampNs) {
+      if (expectedThread != null && Thread.currentThread() != expectedThread) {
+        throw new IllegalStateException("onTextureFrameAvailable called on wrong thread.");
+      }
+      this.oesTextureId = oesTextureId;
+      this.transformMatrix = transformMatrix;
+      hasNewFrame = true;
+      notifyAll();
+    }
+
+    /**
+     * Wait indefinitely for a new frame.
+     */
+    public synchronized void waitForNewFrame() throws InterruptedException {
+      while (!hasNewFrame) {
+        wait();
+      }
+      hasNewFrame = false;
+    }
+
+    /**
+     * Wait for a new frame, or until the specified timeout elapses. Returns true if a new frame was
+     * received before the timeout.
+     */
+    public synchronized boolean waitForNewFrame(final long timeoutMs) throws InterruptedException {
+      final long startTimeMs = SystemClock.elapsedRealtime();
+      long timeRemainingMs = timeoutMs;
+      while (!hasNewFrame && timeRemainingMs > 0) {
+        wait(timeRemainingMs);
+        final long elapsedTimeMs = SystemClock.elapsedRealtime() - startTimeMs;
+        timeRemainingMs = timeoutMs - elapsedTimeMs;
+      }
+      final boolean didReceiveFrame = hasNewFrame;
+      hasNewFrame = false;
+      return didReceiveFrame;
+    }
+  }
+
+  /** Assert that two integers are close, with difference at most
+   * {@code threshold}. */
+  public static void assertClose(int threshold, int expected, int actual) {
+    if (Math.abs(expected - actual) <= threshold) {
+      return;
+    }
+    failNotEquals("Not close enough, threshold " + threshold, expected, actual);
+  }
+
+  /**
+   * Test normal use by receiving three uniform texture frames. Texture frames are returned as early
+   * as possible. The texture pixel values are inspected by drawing the texture frame to a pixel
+   * buffer and reading it back with glReadPixels().
+   */
+  @MediumTest
+  public static void testThreeConstantColorFrames() throws InterruptedException {
+    final int width = 16;
+    final int height = 16;
+    // Create EGL base with a pixel buffer as display output.
+    final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
+    eglBase.createPbufferSurface(width, height);
+    final GlRectDrawer drawer = new GlRectDrawer();
+
+    // Create SurfaceTextureHelper and listener.
+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create(eglBase.getEglBaseContext());
+    final MockTextureListener listener = new MockTextureListener();
+    surfaceTextureHelper.setListener(listener);
+    surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
+
+    // Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
+    // |surfaceTextureHelper| as the target EGLSurface.
+    final EglBase eglOesBase =
+        EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
+    eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+    assertEquals(eglOesBase.surfaceWidth(), width);
+    assertEquals(eglOesBase.surfaceHeight(), height);
+
+    final int[] red = new int[] {79, 144, 185};
+    final int[] green = new int[] {66, 210, 162};
+    final int[] blue = new int[] {161, 117, 158};
+    // Draw three frames.
+    for (int i = 0; i < 3; ++i) {
+      // Draw a constant color frame onto the SurfaceTexture.
+      eglOesBase.makeCurrent();
+      GLES20.glClearColor(red[i] / 255.0f, green[i] / 255.0f, blue[i] / 255.0f, 1.0f);
+      GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+      // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+      eglOesBase.swapBuffers();
+
+      // Wait for an OES texture to arrive and draw it onto the pixel buffer.
+      listener.waitForNewFrame();
+      eglBase.makeCurrent();
+      drawer.drawOes(listener.oesTextureId, listener.transformMatrix, 0, 0, width, height);
+
+      surfaceTextureHelper.returnTextureFrame();
+
+      // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g.
+      // Nexus 9.
+      final ByteBuffer rgbaData = ByteBuffer.allocateDirect(width * height * 4);
+      GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
+      GlUtil.checkNoGLES2Error("glReadPixels");
+
+      // Assert rendered image is expected constant color.
+      while (rgbaData.hasRemaining()) {
+        assertEquals(rgbaData.get() & 0xFF, red[i]);
+        assertEquals(rgbaData.get() & 0xFF, green[i]);
+        assertEquals(rgbaData.get() & 0xFF, blue[i]);
+        assertEquals(rgbaData.get() & 0xFF, 255);
+      }
+    }
+
+    drawer.release();
+    surfaceTextureHelper.disconnect();
+    eglBase.release();
+  }
+
+  /**
+   * Test disconnecting the SurfaceTextureHelper while holding a pending texture frame. The pending
+   * texture frame should still be valid, and this is tested by drawing the texture frame to a pixel
+   * buffer and reading it back with glReadPixels().
+   */
+  @MediumTest
+  public static void testLateReturnFrame() throws InterruptedException {
+    final int width = 16;
+    final int height = 16;
+    // Create EGL base with a pixel buffer as display output.
+    final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
+    eglBase.createPbufferSurface(width, height);
+
+    // Create SurfaceTextureHelper and listener.
+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create(eglBase.getEglBaseContext());
+    final MockTextureListener listener = new MockTextureListener();
+    surfaceTextureHelper.setListener(listener);
+    surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
+
+    // Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
+    // |surfaceTextureHelper| as the target EGLSurface.
+    final EglBase eglOesBase =
+        EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
+    eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+    assertEquals(eglOesBase.surfaceWidth(), width);
+    assertEquals(eglOesBase.surfaceHeight(), height);
+
+    final int red = 79;
+    final int green = 66;
+    final int blue = 161;
+    // Draw a constant color frame onto the SurfaceTexture.
+    eglOesBase.makeCurrent();
+    GLES20.glClearColor(red / 255.0f, green / 255.0f, blue / 255.0f, 1.0f);
+    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+    // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+    eglOesBase.swapBuffers();
+    eglOesBase.release();
+
+    // Wait for OES texture frame.
+    listener.waitForNewFrame();
+    // Disconnect while holding the frame.
+    surfaceTextureHelper.disconnect();
+
+    // Draw the pending texture frame onto the pixel buffer.
+    eglBase.makeCurrent();
+    final GlRectDrawer drawer = new GlRectDrawer();
+    drawer.drawOes(listener.oesTextureId, listener.transformMatrix, 0, 0, width, height);
+    drawer.release();
+
+    // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
+    final ByteBuffer rgbaData = ByteBuffer.allocateDirect(width * height * 4);
+    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
+    GlUtil.checkNoGLES2Error("glReadPixels");
+    eglBase.release();
+
+    // Assert rendered image is expected constant color.
+    while (rgbaData.hasRemaining()) {
+      assertEquals(rgbaData.get() & 0xFF, red);
+      assertEquals(rgbaData.get() & 0xFF, green);
+      assertEquals(rgbaData.get() & 0xFF, blue);
+      assertEquals(rgbaData.get() & 0xFF, 255);
+    }
+    // Late frame return after everything has been disconnected and released.
+    surfaceTextureHelper.returnTextureFrame();
+  }
+
+  /**
+   * Test disconnecting the SurfaceTextureHelper, but keep trying to produce more texture frames. No
+   * frames should be delivered to the listener.
+   */
+  @MediumTest
+  public static void testDisconnect() throws InterruptedException {
+    // Create SurfaceTextureHelper and listener.
+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create(null);
+    final MockTextureListener listener = new MockTextureListener();
+    surfaceTextureHelper.setListener(listener);
+    // Create EglBase with the SurfaceTexture as target EGLSurface.
+    final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
+    eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+    eglBase.makeCurrent();
+    // Assert no frame has been received yet.
+    assertFalse(listener.waitForNewFrame(1));
+    // Draw and wait for one frame.
+    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+    // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+    eglBase.swapBuffers();
+    listener.waitForNewFrame();
+    surfaceTextureHelper.returnTextureFrame();
+
+    // Disconnect - we should not receive any textures after this.
+    surfaceTextureHelper.disconnect();
+
+    // Draw one frame.
+    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+    eglBase.swapBuffers();
+    // swapBuffers() should not trigger onTextureFrameAvailable() because we are disconnected.
+    // Assert that no OES texture was delivered.
+    assertFalse(listener.waitForNewFrame(500));
+
+    eglBase.release();
+  }
+
+  /**
+   * Test disconnecting the SurfaceTextureHelper immediately after it has been set up to use a
+   * shared context. No frames should be delivered to the listener.
+   */
+  @SmallTest
+  public static void testDisconnectImmediately() {
+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create(null);
+    surfaceTextureHelper.disconnect();
+  }
+
+  /**
+   * Test using SurfaceTextureHelper on a separate thread. A uniform texture frame is created and
+   * received on a thread separate from the test thread.
+   */
+  @MediumTest
+  public static void testFrameOnSeparateThread() throws InterruptedException {
+    final HandlerThread thread = new HandlerThread("SurfaceTextureHelperTestThread");
+    thread.start();
+    final Handler handler = new Handler(thread.getLooper());
+
+    // Create SurfaceTextureHelper and listener.
+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create(null, handler);
+    // Create a mock listener and expect frames to be delivered on |thread|.
+    final MockTextureListener listener = new MockTextureListener(thread);
+    surfaceTextureHelper.setListener(listener);
+
+    // Create resources for stubbing an OES texture producer. |eglOesBase| has the
+    // SurfaceTexture in |surfaceTextureHelper| as the target EGLSurface.
+    final EglBase eglOesBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
+    eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+    eglOesBase.makeCurrent();
+    // Draw a frame onto the SurfaceTexture.
+    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+    // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+    eglOesBase.swapBuffers();
+    eglOesBase.release();
+
+    // Wait for an OES texture to arrive.
+    listener.waitForNewFrame();
+
+    // Return the frame from this thread.
+    surfaceTextureHelper.returnTextureFrame();
+    surfaceTextureHelper.disconnect(handler);
+  }
+
+  /**
+   * Test using SurfaceTextureHelper on a separate thread. A uniform texture frame is created and
+   * received on a thread separate from the test thread and returned after disconnect.
+   */
+  @MediumTest
+  public static void testLateReturnFrameOnSeparateThread() throws InterruptedException {
+    final HandlerThread thread = new HandlerThread("SurfaceTextureHelperTestThread");
+    thread.start();
+    final Handler handler = new Handler(thread.getLooper());
+
+    // Create SurfaceTextureHelper and listener.
+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create(null, handler);
+    // Create a mock listener and expect frames to be delivered on |thread|.
+    final MockTextureListener listener = new MockTextureListener(thread);
+    surfaceTextureHelper.setListener(listener);
+
+    // Create resources for stubbing an OES texture producer. |eglOesBase| has the
+    // SurfaceTexture in |surfaceTextureHelper| as the target EGLSurface.
+    final EglBase eglOesBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
+    eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+    eglOesBase.makeCurrent();
+    // Draw a frame onto the SurfaceTexture.
+    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+    // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+    eglOesBase.swapBuffers();
+    eglOesBase.release();
+
+    // Wait for an OES texture to arrive.
+    listener.waitForNewFrame();
+
+    surfaceTextureHelper.disconnect(handler);
+
+    surfaceTextureHelper.returnTextureFrame();
+  }
+
+  @MediumTest
+  public static void testTexturetoYUV() throws InterruptedException {
+    final int width = 16;
+    final int height = 16;
+
+    final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
+
+    // Create SurfaceTextureHelper and listener.
+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create(eglBase.getEglBaseContext());
+    final MockTextureListener listener = new MockTextureListener();
+    surfaceTextureHelper.setListener(listener);
+    surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
+
+    // Create resources for stubbing an OES texture producer. |eglBase| has the SurfaceTexture in
+    // |surfaceTextureHelper| as the target EGLSurface.
+    eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
+    assertEquals(eglBase.surfaceWidth(), width);
+    assertEquals(eglBase.surfaceHeight(), height);
+
+    final int[] red = new int[] {79, 144, 185};
+    final int[] green = new int[] {66, 210, 162};
+    final int[] blue = new int[] {161, 117, 158};
+
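+    // The reference values below are consistent with a full-range BT.601 RGB-to-YUV
+    // conversion (an assumption for illustration; the authoritative conversion is whatever
+    // the native textureToYUV implementation does). For frame 0 (r = 79, g = 66, b = 161):
+    //   Y = 0.299 * 79 + 0.587 * 66 + 0.114 * 161        ~= 81
+    //   U = 128 - 0.1687 * 79 - 0.3313 * 66 + 0.5 * 161  ~= 173
+    //   V = 128 + 0.5 * 79 - 0.4187 * 66 - 0.0813 * 161  ~= 127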
+    final int[] ref_y = new int[] {81, 180, 168};
+    final int[] ref_u = new int[] {173, 93, 122};
+    final int[] ref_v = new int[] {127, 103, 140};
+
+    // Draw three frames.
+    for (int i = 0; i < 3; ++i) {
+      // Draw a constant color frame onto the SurfaceTexture.
+      eglBase.makeCurrent();
+      GLES20.glClearColor(red[i] / 255.0f, green[i] / 255.0f, blue[i] / 255.0f, 1.0f);
+      GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+      // swapBuffers() will ultimately trigger onTextureFrameAvailable().
+      eglBase.swapBuffers();
+
+      // Wait for an OES texture to arrive.
+      listener.waitForNewFrame();
+
+      // Memory layout: Lines are 16 bytes. First 16 lines are
+      // the Y data. These are followed by 8 lines with 8 bytes of U
+      // data on the left and 8 bytes of V data on the right.
+      //
+      // Offset
+      //      0 YYYYYYYY YYYYYYYY
+      //     16 YYYYYYYY YYYYYYYY
+      //    ...
+      //    240 YYYYYYYY YYYYYYYY
+      //    256 UUUUUUUU VVVVVVVV
+      //    272 UUUUUUUU VVVVVVVV
+      //    ...
+      //    368 UUUUUUUU VVVVVVVV
+      //    384 buffer end
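+      //
+      // Sanity check: width * height * 3 / 2 = 16 * 16 * 3 / 2 = 384 bytes, matching the
+      // "384 buffer end" offset above.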
+      ByteBuffer buffer = ByteBuffer.allocateDirect(width * height * 3 / 2);
+      surfaceTextureHelper.textureToYUV(buffer, width, height, width,
+          listener.oesTextureId, listener.transformMatrix);
+
+      surfaceTextureHelper.returnTextureFrame();
+
+      // Allow off-by-one differences due to different rounding.
+      while (buffer.position() < width * height) {
+        assertClose(1, buffer.get() & 0xff, ref_y[i]);
+      }
+      while (buffer.hasRemaining()) {
+        if (buffer.position() % width < width / 2) {
+          assertClose(1, buffer.get() & 0xff, ref_u[i]);
+        } else {
+          assertClose(1, buffer.get() & 0xff, ref_v[i]);
+        }
+      }
+    }
+
+    surfaceTextureHelper.disconnect();
+    eglBase.release();
+  }
+}
diff --git a/webrtc/api/androidtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java b/webrtc/api/androidtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java
new file mode 100644
index 0000000..341c632
--- /dev/null
+++ b/webrtc/api/androidtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java
@@ -0,0 +1,215 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.graphics.Point;
+import android.test.ActivityTestCase;
+import android.test.suitebuilder.annotation.MediumTest;
+import android.view.View.MeasureSpec;
+
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.List;
+
+public final class SurfaceViewRendererOnMeasureTest extends ActivityTestCase {
+  /**
+   * List of all possible scaling types.
+   */
+  private static final List<RendererCommon.ScalingType> scalingTypes = Arrays.asList(
+      RendererCommon.ScalingType.SCALE_ASPECT_FIT,
+      RendererCommon.ScalingType.SCALE_ASPECT_FILL,
+      RendererCommon.ScalingType.SCALE_ASPECT_BALANCED);
+
+  /**
+   * List of MeasureSpec modes.
+   */
+  private static final List<Integer> measureSpecModes =
+      Arrays.asList(MeasureSpec.EXACTLY, MeasureSpec.AT_MOST);
+
+  /**
+   * Returns a dummy YUV frame.
+   */
+  static VideoRenderer.I420Frame createFrame(int width, int height, int rotationDegree) {
+    final int[] yuvStrides = new int[] {width, (width + 1) / 2, (width + 1) / 2};
+    final int[] yuvHeights = new int[] {height, (height + 1) / 2, (height + 1) / 2};
+    final ByteBuffer[] yuvPlanes = new ByteBuffer[3];
+    for (int i = 0; i < 3; ++i) {
+      yuvPlanes[i] = ByteBuffer.allocateDirect(yuvStrides[i] * yuvHeights[i]);
+    }
+    return new VideoRenderer.I420Frame(width, height, rotationDegree, yuvStrides, yuvPlanes, 0);
+  }
+
+  /**
+   * Assert onMeasure() with given parameters will result in expected measured size.
+   */
+  private static void assertMeasuredSize(
+      SurfaceViewRenderer surfaceViewRenderer, RendererCommon.ScalingType scalingType,
+      String frameDimensions,
+      int expectedWidth, int expectedHeight,
+      int widthSpec, int heightSpec) {
+    surfaceViewRenderer.setScalingType(scalingType);
+    surfaceViewRenderer.onMeasure(widthSpec, heightSpec);
+    final int measuredWidth = surfaceViewRenderer.getMeasuredWidth();
+    final int measuredHeight = surfaceViewRenderer.getMeasuredHeight();
+    if (measuredWidth != expectedWidth || measuredHeight != expectedHeight) {
+      fail("onMeasure("
+          + MeasureSpec.toString(widthSpec) + ", " + MeasureSpec.toString(heightSpec) + ")"
+          + " with scaling type " + scalingType
+          + " and frame: " + frameDimensions
+          + " expected measured size " + expectedWidth + "x" + expectedHeight
+          + ", but was " + measuredWidth + "x" + measuredHeight);
+    }
+  }
+
+  /**
+   * Test how SurfaceViewRenderer.onMeasure() behaves when no frame has been delivered.
+   */
+  @MediumTest
+  public void testNoFrame() {
+    final SurfaceViewRenderer surfaceViewRenderer =
+        new SurfaceViewRenderer(getInstrumentation().getContext());
+    final String frameDimensions = "null";
+
+    // Test behaviour before SurfaceViewRenderer.init() is called.
+    for (RendererCommon.ScalingType scalingType : scalingTypes) {
+      for (int measureSpecMode : measureSpecModes) {
+        final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
+        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
+            0, 0, zeroMeasureSize, zeroMeasureSize);
+        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
+            1280, 720,
+            MeasureSpec.makeMeasureSpec(1280, measureSpecMode),
+            MeasureSpec.makeMeasureSpec(720, measureSpecMode));
+      }
+    }
+
+    // Test behaviour after SurfaceViewRenderer.init() is called, but still no frame.
+    surfaceViewRenderer.init((EglBase.Context) null, null);
+    for (RendererCommon.ScalingType scalingType : scalingTypes) {
+      for (int measureSpecMode : measureSpecModes) {
+        final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
+        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
+            0, 0, zeroMeasureSize, zeroMeasureSize);
+        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
+            1280, 720,
+            MeasureSpec.makeMeasureSpec(1280, measureSpecMode),
+            MeasureSpec.makeMeasureSpec(720, measureSpecMode));
+      }
+    }
+
+    surfaceViewRenderer.release();
+  }
+
+  /**
+   * Test how SurfaceViewRenderer.onMeasure() behaves with a 1280x720 frame.
+   */
+  @MediumTest
+  public void testFrame1280x720() {
+    final SurfaceViewRenderer surfaceViewRenderer =
+        new SurfaceViewRenderer(getInstrumentation().getContext());
+    surfaceViewRenderer.init((EglBase.Context) null, null);
+
+    // Test different rotation degrees, but same rotated size.
+    for (int rotationDegree : new int[] {0, 90, 180, 270}) {
+      final int rotatedWidth = 1280;
+      final int rotatedHeight = 720;
+      final int unrotatedWidth = (rotationDegree % 180 == 0 ? rotatedWidth : rotatedHeight);
+      final int unrotatedHeight = (rotationDegree % 180 == 0 ? rotatedHeight : rotatedWidth);
+      final VideoRenderer.I420Frame frame =
+          createFrame(unrotatedWidth, unrotatedHeight, rotationDegree);
+      assertEquals(rotatedWidth, frame.rotatedWidth());
+      assertEquals(rotatedHeight, frame.rotatedHeight());
+      final String frameDimensions =
+          unrotatedWidth + "x" + unrotatedHeight + " with rotation " + rotationDegree;
+      surfaceViewRenderer.renderFrame(frame);
+
+      // Test forcing to zero size.
+      for (RendererCommon.ScalingType scalingType : scalingTypes) {
+        for (int measureSpecMode : measureSpecModes) {
+          final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
+          assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
+              0, 0, zeroMeasureSize, zeroMeasureSize);
+        }
+      }
+
+      // Test perfect fit.
+      for (RendererCommon.ScalingType scalingType : scalingTypes) {
+        for (int measureSpecMode : measureSpecModes) {
+          assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
+              rotatedWidth, rotatedHeight,
+              MeasureSpec.makeMeasureSpec(rotatedWidth, measureSpecMode),
+              MeasureSpec.makeMeasureSpec(rotatedHeight, measureSpecMode));
+        }
+      }
+
+      // Force spec size with different aspect ratio than frame aspect ratio.
+      for (RendererCommon.ScalingType scalingType : scalingTypes) {
+        assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
+            720, 1280,
+            MeasureSpec.makeMeasureSpec(720, MeasureSpec.EXACTLY),
+            MeasureSpec.makeMeasureSpec(1280, MeasureSpec.EXACTLY));
+      }
+
+      final float videoAspectRatio = (float) rotatedWidth / rotatedHeight;
+      {
+        // Relax both width and height constraints.
+        final int widthSpec = MeasureSpec.makeMeasureSpec(720, MeasureSpec.AT_MOST);
+        final int heightSpec = MeasureSpec.makeMeasureSpec(1280, MeasureSpec.AT_MOST);
+        for (RendererCommon.ScalingType scalingType : scalingTypes) {
+          final Point expectedSize =
+              RendererCommon.getDisplaySize(scalingType, videoAspectRatio, 720, 1280);
+          assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
+                  expectedSize.x, expectedSize.y, widthSpec, heightSpec);
+        }
+      }
+      {
+        // Force width to 720, but relax height constraint. This will give the same result as
+        // above, because width is already the limiting factor and will be maxed out.
+        final int widthSpec = MeasureSpec.makeMeasureSpec(720, MeasureSpec.EXACTLY);
+        final int heightSpec = MeasureSpec.makeMeasureSpec(1280, MeasureSpec.AT_MOST);
+        for (RendererCommon.ScalingType scalingType : scalingTypes) {
+          final Point expectedSize =
+              RendererCommon.getDisplaySize(scalingType, videoAspectRatio, 720, 1280);
+          assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
+                  expectedSize.x, expectedSize.y, widthSpec, heightSpec);
+        }
+      }
+      {
+        // Force height, but relax width constraint. This will force a bad layout size.
+        final int widthSpec = MeasureSpec.makeMeasureSpec(720, MeasureSpec.AT_MOST);
+        final int heightSpec = MeasureSpec.makeMeasureSpec(1280, MeasureSpec.EXACTLY);
+        for (RendererCommon.ScalingType scalingType : scalingTypes) {
+          assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions,
+                  720, 1280, widthSpec, heightSpec);
+        }
+      }
+    }
+
+    surfaceViewRenderer.release();
+  }
+}
diff --git a/webrtc/api/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java b/webrtc/api/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java
new file mode 100644
index 0000000..1b97201
--- /dev/null
+++ b/webrtc/api/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java
@@ -0,0 +1,340 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.webrtc;
+
+import android.test.ActivityTestCase;
+import android.test.suitebuilder.annotation.MediumTest;
+import android.test.suitebuilder.annotation.SmallTest;
+import android.util.Size;
+
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+import java.util.HashSet;
+import java.util.Set;
+
+@SuppressWarnings("deprecation")
+public class VideoCapturerAndroidTest extends ActivityTestCase {
+  static final String TAG = "VideoCapturerAndroidTest";
+
+  @Override
+  protected void setUp() {
+    assertTrue(PeerConnectionFactory.initializeAndroidGlobals(
+        getInstrumentation().getContext(), true, true, true));
+  }
+
+  @SmallTest
+  // Test that enumerating formats using android.hardware.camera2 gives the same formats as
+  // android.hardware.camera in the range 320x240 to 1280x720. The camera2 API often contains
+  // high resolutions that are not supported by camera1, but it may also be the other way
+  // around in some cases. Supported framerates may also differ, so don't compare those.
+  public void testCamera2Enumerator() {
+    if (!Camera2Enumerator.isSupported()) {
+      return;
+    }
+    final CameraEnumerationAndroid.Enumerator camera1Enumerator = new CameraEnumerator();
+    final CameraEnumerationAndroid.Enumerator camera2Enumerator =
+        new Camera2Enumerator(getInstrumentation().getContext());
+
+    for (int i = 0; i < CameraEnumerationAndroid.getDeviceCount(); ++i) {
+      final Set<Size> resolutions1 = new HashSet<Size>();
+      for (CaptureFormat format : camera1Enumerator.getSupportedFormats(i)) {
+        resolutions1.add(new Size(format.width, format.height));
+      }
+      final Set<Size> resolutions2 = new HashSet<Size>();
+      for (CaptureFormat format : camera2Enumerator.getSupportedFormats(i)) {
+        resolutions2.add(new Size(format.width, format.height));
+      }
+      for (Size size : resolutions1) {
+        if (size.getWidth() >= 320 && size.getHeight() >= 240
+            && size.getWidth() <= 1280 && size.getHeight() <= 720) {
+          assertTrue(resolutions2.contains(size));
+        }
+      }
+    }
+  }
+
+  @SmallTest
+  public void testCreateAndRelease() {
+    VideoCapturerAndroidTestFixtures.release(VideoCapturerAndroid.create("", null));
+  }
+
+  @SmallTest
+  public void testCreateAndReleaseUsingTextures() {
+    EglBase eglBase = EglBase.create();
+    VideoCapturerAndroidTestFixtures.release(
+        VideoCapturerAndroid.create("", null, eglBase.getEglBaseContext()));
+    eglBase.release();
+  }
+
+  @SmallTest
+  public void testCreateNonExistingCamera() {
+    VideoCapturerAndroid capturer = VideoCapturerAndroid.create(
+        "non-existing camera", null);
+    assertNull(capturer);
+  }
+
+  @SmallTest
+  // This tests that the camera can be started and that frames are forwarded
+  // to a Java video renderer using a "default" capturer.
+  // It tests both the Java and the C++ layers.
+  public void testStartVideoCapturer() throws InterruptedException {
+    VideoCapturerAndroid capturer =
+        VideoCapturerAndroid.create("", null);
+    VideoCapturerAndroidTestFixtures.startCapturerAndRender(capturer);
+  }
+
+  @SmallTest
+  public void testStartVideoCapturerUsingTextures() throws InterruptedException {
+    EglBase eglBase = EglBase.create();
+    VideoCapturerAndroid capturer =
+        VideoCapturerAndroid.create("", null, eglBase.getEglBaseContext());
+    VideoCapturerAndroidTestFixtures.startCapturerAndRender(capturer);
+    eglBase.release();
+  }
+
+  @SmallTest
+  // This tests that the camera can be started and that frames are forwarded
+  // to a Java video renderer using the front facing video capturer.
+  // It tests both the Java and the C++ layers.
+  public void testStartFrontFacingVideoCapturer() throws InterruptedException {
+    String deviceName = CameraEnumerationAndroid.getNameOfFrontFacingDevice();
+    VideoCapturerAndroid capturer =
+        VideoCapturerAndroid.create(deviceName, null);
+    VideoCapturerAndroidTestFixtures.startCapturerAndRender(capturer);
+  }
+
+  @SmallTest
+  // This tests that the camera can be started and that frames are forwarded
+  // to a Java video renderer using the back facing video capturer.
+  // It tests both the Java and the C++ layers.
+  public void testStartBackFacingVideoCapturer() throws InterruptedException {
+    if (!VideoCapturerAndroidTestFixtures.HaveTwoCameras()) {
+      return;
+    }
+
+    String deviceName = CameraEnumerationAndroid.getNameOfBackFacingDevice();
+    VideoCapturerAndroid capturer =
+        VideoCapturerAndroid.create(deviceName, null);
+    VideoCapturerAndroidTestFixtures.startCapturerAndRender(capturer);
+  }
+
+  @SmallTest
+  // This tests that the default camera can be started and that the camera can
+  // later be switched to another camera.
+  // It tests both the Java and the C++ layers.
+  public void testSwitchVideoCapturer() throws InterruptedException {
+    VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null);
+    VideoCapturerAndroidTestFixtures.switchCamera(capturer);
+  }
+
+  @SmallTest
+  public void testSwitchVideoCapturerUsingTextures() throws InterruptedException {
+    EglBase eglBase = EglBase.create();
+    VideoCapturerAndroid capturer =
+        VideoCapturerAndroid.create("", null, eglBase.getEglBaseContext());
+    VideoCapturerAndroidTestFixtures.switchCamera(capturer);
+    eglBase.release();
+  }
+
+  @MediumTest
+  public void testCameraEvents() throws InterruptedException {
+    VideoCapturerAndroidTestFixtures.CameraEvents cameraEvents =
+        VideoCapturerAndroidTestFixtures.createCameraEvents();
+    VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", cameraEvents);
+    VideoCapturerAndroidTestFixtures.cameraEventsInvoked(
+        capturer, cameraEvents, getInstrumentation().getContext());
+  }
+
+  @MediumTest
+  // Test what happens when attempting to call e.g. switchCamera() after the
+  // camera has been stopped.
+  public void testCameraCallsAfterStop() throws InterruptedException {
+    final String deviceName = CameraEnumerationAndroid.getDeviceName(0);
+    final VideoCapturerAndroid capturer = VideoCapturerAndroid.create(deviceName, null);
+
+    VideoCapturerAndroidTestFixtures.cameraCallsAfterStop(capturer,
+        getInstrumentation().getContext());
+  }
+
+  @MediumTest
+  public void testCameraCallsAfterStopUsingTextures() throws InterruptedException {
+    EglBase eglBase = EglBase.create();
+    final String deviceName = CameraEnumerationAndroid.getDeviceName(0);
+    final VideoCapturerAndroid capturer = VideoCapturerAndroid.create(deviceName, null,
+        eglBase.getEglBaseContext());
+
+    VideoCapturerAndroidTestFixtures.cameraCallsAfterStop(capturer,
+        getInstrumentation().getContext());
+    eglBase.release();
+  }
+
+  @SmallTest
+  // This tests that the VideoSource that the VideoCapturer is connected to can
+  // be stopped and restarted. It tests both the Java and the C++ layers.
+  public void testStopRestartVideoSource() throws InterruptedException {
+    VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null);
+    VideoCapturerAndroidTestFixtures.stopRestartVideoSource(capturer);
+  }
+
+  @SmallTest
+  public void testStopRestartVideoSourceUsingTextures() throws InterruptedException {
+    EglBase eglBase = EglBase.create();
+    VideoCapturerAndroid capturer =
+        VideoCapturerAndroid.create("", null, eglBase.getEglBaseContext());
+    VideoCapturerAndroidTestFixtures.stopRestartVideoSource(capturer);
+    eglBase.release();
+  }
+
+  @SmallTest
+  // This tests that the camera can be started at different resolutions.
+  // It does not test or use the C++ layer.
+  public void testStartStopWithDifferentResolutions() throws InterruptedException {
+    String deviceName = CameraEnumerationAndroid.getDeviceName(0);
+    VideoCapturerAndroid capturer =
+        VideoCapturerAndroid.create(deviceName, null);
+    VideoCapturerAndroidTestFixtures.startStopWithDifferentResolutions(capturer,
+        getInstrumentation().getContext());
+  }
+
+  @SmallTest
+  public void testStartStopWithDifferentResolutionsUsingTextures() throws InterruptedException {
+    EglBase eglBase = EglBase.create();
+    String deviceName = CameraEnumerationAndroid.getDeviceName(0);
+    VideoCapturerAndroid capturer =
+        VideoCapturerAndroid.create(deviceName, null, eglBase.getEglBaseContext());
+    VideoCapturerAndroidTestFixtures.startStopWithDifferentResolutions(capturer,
+        getInstrumentation().getContext());
+    eglBase.release();
+  }
+
+  @SmallTest
+  // This tests that an error is reported if the camera is already open
+  // when VideoCapturerAndroid is started.
+  public void testStartWhileCameraAlreadyOpened() throws InterruptedException {
+    String deviceName = CameraEnumerationAndroid.getDeviceName(0);
+    VideoCapturerAndroid capturer =
+        VideoCapturerAndroid.create(deviceName, null);
+    VideoCapturerAndroidTestFixtures.startWhileCameraIsAlreadyOpen(
+        capturer, getInstrumentation().getContext());
+  }
+
+  @SmallTest
+  // This tests that VideoCapturerAndroid can be started even if the camera is
+  // already open, provided the camera is closed while VideoCapturerAndroid is
+  // retrying to start.
+  public void testStartWhileCameraIsAlreadyOpenAndCloseCamera() throws InterruptedException {
+    String deviceName = CameraEnumerationAndroid.getDeviceName(0);
+    VideoCapturerAndroid capturer =
+        VideoCapturerAndroid.create(deviceName, null);
+    VideoCapturerAndroidTestFixtures.startWhileCameraIsAlreadyOpenAndCloseCamera(
+        capturer, getInstrumentation().getContext());
+  }
+
+  @SmallTest
+  // This tests that VideoCapturerAndroid.stop() can be called while
+  // VideoCapturerAndroid is retrying to start.
+  public void testStartWhileCameraIsAlreadyOpenAndStop() throws InterruptedException {
+    String deviceName = CameraEnumerationAndroid.getDeviceName(0);
+    VideoCapturerAndroid capturer =
+        VideoCapturerAndroid.create(deviceName, null);
+    VideoCapturerAndroidTestFixtures.startWhileCameraIsAlreadyOpenAndStop(
+        capturer, getInstrumentation().getContext());
+  }
+
+  @SmallTest
+  // This tests what happens if buffers are returned after the capturer has
+  // been stopped and restarted. It does not test or use the C++ layer.
+  public void testReturnBufferLate() throws InterruptedException {
+    String deviceName = CameraEnumerationAndroid.getDeviceName(0);
+    VideoCapturerAndroid capturer =
+        VideoCapturerAndroid.create(deviceName, null);
+    VideoCapturerAndroidTestFixtures.returnBufferLate(capturer,
+        getInstrumentation().getContext());
+  }
+
+  @SmallTest
+  public void testReturnBufferLateUsingTextures() throws InterruptedException {
+    EglBase eglBase = EglBase.create();
+    String deviceName = CameraEnumerationAndroid.getDeviceName(0);
+    VideoCapturerAndroid capturer =
+        VideoCapturerAndroid.create(deviceName, null, eglBase.getEglBaseContext());
+    VideoCapturerAndroidTestFixtures.returnBufferLate(capturer,
+        getInstrumentation().getContext());
+    eglBase.release();
+  }
+
+  @MediumTest
+  // This tests that we can capture frames, keep them in a local renderer, stop
+  // capturing, and then return the frames. The difference from
+  // testReturnBufferLate() is that we also test the JNI and C++
+  // AndroidVideoCapturer parts.
+  public void testReturnBufferLateEndToEnd() throws InterruptedException {
+    final VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null);
+    VideoCapturerAndroidTestFixtures.returnBufferLateEndToEnd(capturer);
+  }
+
+  @MediumTest
+  public void testReturnBufferLateEndToEndUsingTextures() throws InterruptedException {
+    EglBase eglBase = EglBase.create();
+    final VideoCapturerAndroid capturer =
+        VideoCapturerAndroid.create("", null, eglBase.getEglBaseContext());
+    VideoCapturerAndroidTestFixtures.returnBufferLateEndToEnd(capturer);
+    eglBase.release();
+  }
+
+  @MediumTest
+  // This tests that CameraEventsHandler.onCameraFreezed is triggered if video
+  // buffers are not returned to the capturer.
+  public void testCameraFreezedEventOnBufferStarvationUsingTextures() throws InterruptedException {
+    EglBase eglBase = EglBase.create();
+    VideoCapturerAndroidTestFixtures.CameraEvents cameraEvents =
+        VideoCapturerAndroidTestFixtures.createCameraEvents();
+    VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", cameraEvents,
+        eglBase.getEglBaseContext());
+    VideoCapturerAndroidTestFixtures.cameraFreezedEventOnBufferStarvationUsingTextures(capturer,
+        cameraEvents, getInstrumentation().getContext());
+    eglBase.release();
+  }
+
+  @MediumTest
+  // This tests that frames forwarded to a renderer are scaled if
+  // onOutputFormatRequest is called. This tests both the Java and C++ parts of
+  // the stack.
+  public void testScaleCameraOutput() throws InterruptedException {
+    VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null);
+    VideoCapturerAndroidTestFixtures.scaleCameraOutput(capturer);
+  }
+
+  @MediumTest
+  // This tests that frames forwarded to a renderer are scaled if
+  // onOutputFormatRequest is called. This tests both the Java and C++ parts of
+  // the stack.
+  public void testScaleCameraOutputUsingTextures() throws InterruptedException {
+    EglBase eglBase = EglBase.create();
+    VideoCapturerAndroid capturer =
+        VideoCapturerAndroid.create("", null, eglBase.getEglBaseContext());
+    VideoCapturerAndroidTestFixtures.scaleCameraOutput(capturer);
+    eglBase.release();
+  }
+}
diff --git a/webrtc/api/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java b/webrtc/api/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java
new file mode 100644
index 0000000..0b42e33
--- /dev/null
+++ b/webrtc/api/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java
@@ -0,0 +1,615 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.webrtc;
+
+import android.content.Context;
+import android.hardware.Camera;
+
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+import org.webrtc.VideoRenderer.I420Frame;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.CountDownLatch;
+
+import static junit.framework.Assert.*;
+
+public class VideoCapturerAndroidTestFixtures {
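+  // Renderer callbacks that count rendered frames, record the most recent
+  // frame dimensions, and let tests block until the next frame is rendered.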
+  static class RendererCallbacks implements VideoRenderer.Callbacks {
+    private int framesRendered = 0;
+    private final Object frameLock = new Object();
+    private int width = 0;
+    private int height = 0;
+
+    @Override
+    public void renderFrame(I420Frame frame) {
+      synchronized (frameLock) {
+        ++framesRendered;
+        width = frame.rotatedWidth();
+        height = frame.rotatedHeight();
+        frameLock.notify();
+      }
+      VideoRenderer.renderFrameDone(frame);
+    }
+
+    public int frameWidth() {
+      synchronized (frameLock) {
+        return width;
+      }
+    }
+
+    public int frameHeight() {
+      synchronized (frameLock) {
+        return height;
+      }
+    }
+
+    public int WaitForNextFrameToRender() throws InterruptedException {
+      synchronized (frameLock) {
+        frameLock.wait();
+        return framesRendered;
+      }
+    }
+  }
+
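+  // Renderer that collects incoming frames without returning them, so tests
+  // can exercise late buffer returns.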
+  static class FakeAsyncRenderer implements VideoRenderer.Callbacks {
+    private final List<I420Frame> pendingFrames = new ArrayList<I420Frame>();
+
+    @Override
+    public void renderFrame(I420Frame frame) {
+      synchronized (pendingFrames) {
+        pendingFrames.add(frame);
+        pendingFrames.notifyAll();
+      }
+    }
+
+    // Wait until at least one frame has been received, then return the pending frames.
+    public List<I420Frame> waitForPendingFrames() throws InterruptedException {
+      synchronized (pendingFrames) {
+        while (pendingFrames.isEmpty()) {
+          pendingFrames.wait();
+        }
+        return new ArrayList<I420Frame>(pendingFrames);
+      }
+    }
+  }
+
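+  // Capturer observer that records metadata for captured frames and lets
+  // tests block until the capturer has started or the next frame arrives.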
+  static class FakeCapturerObserver implements
+      VideoCapturerAndroid.CapturerObserver {
+    private int framesCaptured = 0;
+    private int frameSize = 0;
+    private int frameWidth = 0;
+    private int frameHeight = 0;
+    private final Object frameLock = new Object();
+    private final Object capturerStartLock = new Object();
+    private boolean captureStartResult = false;
+    private List<Long> timestamps = new ArrayList<Long>();
+
+    @Override
+    public void onCapturerStarted(boolean success) {
+      synchronized (capturerStartLock) {
+        captureStartResult = success;
+        capturerStartLock.notify();
+      }
+    }
+
+    @Override
+    public void onByteBufferFrameCaptured(byte[] frame, int width, int height, int rotation,
+        long timeStamp) {
+      synchronized (frameLock) {
+        ++framesCaptured;
+        frameSize = frame.length;
+        frameWidth = width;
+        frameHeight = height;
+        timestamps.add(timeStamp);
+        frameLock.notify();
+      }
+    }
+
+    @Override
+    public void onTextureFrameCaptured(
+        int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
+        long timeStamp) {
+      synchronized (frameLock) {
+        ++framesCaptured;
+        frameWidth = width;
+        frameHeight = height;
+        frameSize = 0;
+        timestamps.add(timeStamp);
+        frameLock.notify();
+      }
+    }
+
+    @Override
+    public void onOutputFormatRequest(int width, int height, int fps) {}
+
+    public boolean WaitForCapturerToStart() throws InterruptedException {
+      synchronized (capturerStartLock) {
+        capturerStartLock.wait();
+        return captureStartResult;
+      }
+    }
+
+    public int WaitForNextCapturedFrame() throws InterruptedException {
+      synchronized (frameLock) {
+        frameLock.wait();
+        return framesCaptured;
+      }
+    }
+
+    int frameSize() {
+      synchronized (frameLock) {
+        return frameSize;
+      }
+    }
+
+    int frameWidth() {
+      synchronized (frameLock) {
+        return frameWidth;
+      }
+    }
+
+    int frameHeight() {
+      synchronized (frameLock) {
+        return frameHeight;
+      }
+    }
+
+    List<Long> getCopyAndResetListOfTimestamps() {
+      synchronized (frameLock) {
+        ArrayList<Long> list = new ArrayList<Long>(timestamps);
+        timestamps.clear();
+        return list;
+      }
+    }
+  }
+
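+  // CameraEventsHandler that records which callbacks have fired and lets
+  // tests block until onCameraFreezed is reported.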
+  static class CameraEvents implements
+      VideoCapturerAndroid.CameraEventsHandler {
+    public boolean onCameraOpeningCalled;
+    public boolean onFirstFrameAvailableCalled;
+    public final Object onCameraFreezedLock = new Object();
+    private String onCameraFreezedDescription;
+
+    @Override
+    public void onCameraError(String errorDescription) {
+    }
+
+    @Override
+    public void onCameraFreezed(String errorDescription) {
+      synchronized (onCameraFreezedLock) {
+        onCameraFreezedDescription = errorDescription;
+        onCameraFreezedLock.notifyAll();
+      }
+    }
+
+    @Override
+    public void onCameraOpening(int cameraId) {
+      onCameraOpeningCalled = true;
+    }
+
+    @Override
+    public void onFirstFrameAvailable() {
+      onFirstFrameAvailableCalled = true;
+    }
+
+    @Override
+    public void onCameraClosed() { }
+
+    public String WaitForCameraFreezed() throws InterruptedException {
+      synchronized (onCameraFreezedLock) {
+        onCameraFreezedLock.wait();
+        return onCameraFreezedDescription;
+      }
+    }
+  }
+
+  static public CameraEvents createCameraEvents() {
+    return new CameraEvents();
+  }
+
+  // Returns true if the device under test has at least two cameras.
+  @SuppressWarnings("deprecation")
+  static public boolean HaveTwoCameras() {
+    return (Camera.getNumberOfCameras() >= 2);
+  }
+
+  static public void release(VideoCapturerAndroid capturer) {
+    assertNotNull(capturer);
+    capturer.dispose();
+    assertTrue(capturer.isReleased());
+  }
+
+  static public void startCapturerAndRender(VideoCapturerAndroid capturer)
+      throws InterruptedException {
+    PeerConnectionFactory factory = new PeerConnectionFactory();
+    VideoSource source =
+        factory.createVideoSource(capturer, new MediaConstraints());
+    VideoTrack track = factory.createVideoTrack("dummy", source);
+    RendererCallbacks callbacks = new RendererCallbacks();
+    track.addRenderer(new VideoRenderer(callbacks));
+    assertTrue(callbacks.WaitForNextFrameToRender() > 0);
+    track.dispose();
+    source.dispose();
+    factory.dispose();
+    assertTrue(capturer.isReleased());
+  }
+
+  static public void switchCamera(VideoCapturerAndroid capturer) throws InterruptedException {
+    PeerConnectionFactory factory = new PeerConnectionFactory();
+    VideoSource source =
+        factory.createVideoSource(capturer, new MediaConstraints());
+    VideoTrack track = factory.createVideoTrack("dummy", source);
+
+    // Array with one element as a workaround for the requirement that local
+    // variables accessed from anonymous classes must be final.
+    final boolean[] cameraSwitchSuccessful = new boolean[1];
+    final CountDownLatch barrier = new CountDownLatch(1);
+    capturer.switchCamera(new VideoCapturerAndroid.CameraSwitchHandler() {
+      @Override
+      public void onCameraSwitchDone(boolean isFrontCamera) {
+        cameraSwitchSuccessful[0] = true;
+        barrier.countDown();
+      }
+      @Override
+      public void onCameraSwitchError(String errorDescription) {
+        cameraSwitchSuccessful[0] = false;
+        barrier.countDown();
+      }
+    });
+    // Wait until the camera has been switched.
+    barrier.await();
+
+    // Check result.
+    if (HaveTwoCameras()) {
+      assertTrue(cameraSwitchSuccessful[0]);
+    } else {
+      assertFalse(cameraSwitchSuccessful[0]);
+    }
+    // Ensure that frames are received.
+    RendererCallbacks callbacks = new RendererCallbacks();
+    track.addRenderer(new VideoRenderer(callbacks));
+    assertTrue(callbacks.WaitForNextFrameToRender() > 0);
+    track.dispose();
+    source.dispose();
+    factory.dispose();
+    assertTrue(capturer.isReleased());
+  }
+
+  static public void cameraEventsInvoked(VideoCapturerAndroid capturer, CameraEvents events,
+      Context appContext) throws InterruptedException {
+    final List<CaptureFormat> formats = capturer.getSupportedFormats();
+    final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
+
+    final FakeCapturerObserver observer = new FakeCapturerObserver();
+    capturer.startCapture(format.width, format.height, format.maxFramerate,
+        appContext, observer);
+    // Make sure camera is started and first frame is received and then stop it.
+    assertTrue(observer.WaitForCapturerToStart());
+    observer.WaitForNextCapturedFrame();
+    capturer.stopCapture();
+    if (capturer.isCapturingToTexture()) {
+      capturer.surfaceHelper.returnTextureFrame();
+    }
+    capturer.dispose();
+
+    assertTrue(capturer.isReleased());
+    assertTrue(events.onCameraOpeningCalled);
+    assertTrue(events.onFirstFrameAvailableCalled);
+  }
+
+  static public void cameraCallsAfterStop(
+      VideoCapturerAndroid capturer, Context appContext) throws InterruptedException {
+    final List<CaptureFormat> formats = capturer.getSupportedFormats();
+    final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
+
+    final FakeCapturerObserver observer = new FakeCapturerObserver();
+    capturer.startCapture(format.width, format.height, format.maxFramerate,
+        appContext, observer);
+    // Make sure camera is started and then stop it.
+    assertTrue(observer.WaitForCapturerToStart());
+    capturer.stopCapture();
+    if (capturer.isCapturingToTexture()) {
+      capturer.surfaceHelper.returnTextureFrame();
+    }
+
+    // We can't change |capturer| at this point, but we should not crash.
+    capturer.switchCamera(null);
+    capturer.onOutputFormatRequest(640, 480, 15);
+    capturer.changeCaptureFormat(640, 480, 15);
+
+    capturer.dispose();
+    assertTrue(capturer.isReleased());
+  }
+
+  static public void stopRestartVideoSource(VideoCapturerAndroid capturer)
+      throws InterruptedException {
+    PeerConnectionFactory factory = new PeerConnectionFactory();
+    VideoSource source =
+        factory.createVideoSource(capturer, new MediaConstraints());
+    VideoTrack track = factory.createVideoTrack("dummy", source);
+    RendererCallbacks callbacks = new RendererCallbacks();
+    track.addRenderer(new VideoRenderer(callbacks));
+    assertTrue(callbacks.WaitForNextFrameToRender() > 0);
+    assertEquals(MediaSource.State.LIVE, source.state());
+
+    source.stop();
+    assertEquals(MediaSource.State.ENDED, source.state());
+
+    source.restart();
+    assertTrue(callbacks.WaitForNextFrameToRender() > 0);
+    assertEquals(MediaSource.State.LIVE, source.state());
+    track.dispose();
+    source.dispose();
+    factory.dispose();
+    assertTrue(capturer.isReleased());
+  }
+
+  static public void startStopWithDifferentResolutions(VideoCapturerAndroid capturer,
+      Context appContext) throws InterruptedException {
+    FakeCapturerObserver observer = new FakeCapturerObserver();
+    List<CaptureFormat> formats = capturer.getSupportedFormats();
+
+    for (int i = 0; i < 3; ++i) {
+      CameraEnumerationAndroid.CaptureFormat format = formats.get(i);
+      capturer.startCapture(format.width, format.height, format.maxFramerate,
+          appContext, observer);
+      assertTrue(observer.WaitForCapturerToStart());
+      observer.WaitForNextCapturedFrame();
+
+      // Check the frame size. The actual width and height depend on how the capturer is mounted.
+      final boolean identicalResolution = (observer.frameWidth() == format.width
+          && observer.frameHeight() == format.height);
+      final boolean flippedResolution = (observer.frameWidth() == format.height
+          && observer.frameHeight() == format.width);
+      if (!identicalResolution && !flippedResolution) {
+        fail("Wrong resolution, got: " + observer.frameWidth() + "x" + observer.frameHeight()
+            + " expected: " + format.width + "x" + format.height + " or " + format.height + "x"
+            + format.width);
+      }
+
+      if (capturer.isCapturingToTexture()) {
+        assertEquals(0, observer.frameSize());
+      } else {
+        assertTrue(format.frameSize() <= observer.frameSize());
+      }
+      capturer.stopCapture();
+      if (capturer.isCapturingToTexture()) {
+        capturer.surfaceHelper.returnTextureFrame();
+      }
+    }
+    capturer.dispose();
+    assertTrue(capturer.isReleased());
+  }
+
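+  // Posts an empty task to the capturer's camera thread and blocks until it
+  // has run, i.e. until all previously posted camera operations have finished.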
+  static void waitUntilIdle(VideoCapturerAndroid capturer) throws InterruptedException {
+    final CountDownLatch barrier = new CountDownLatch(1);
+    capturer.getCameraThreadHandler().post(new Runnable() {
+        @Override public void run() {
+          barrier.countDown();
+        }
+    });
+    barrier.await();
+  }
+
+  static public void startWhileCameraIsAlreadyOpen(
+      VideoCapturerAndroid capturer, Context appContext) throws InterruptedException {
+    Camera camera = Camera.open(capturer.getCurrentCameraId());
+
+    final List<CaptureFormat> formats = capturer.getSupportedFormats();
+    final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
+
+    final FakeCapturerObserver observer = new FakeCapturerObserver();
+    capturer.startCapture(format.width, format.height, format.maxFramerate,
+        appContext, observer);
+
+    if (android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.LOLLIPOP_MR1) {
+      // The first opened camera client will be evicted.
+      assertTrue(observer.WaitForCapturerToStart());
+      capturer.stopCapture();
+    } else {
+      assertFalse(observer.WaitForCapturerToStart());
+    }
+
+    capturer.dispose();
+    camera.release();
+  }
+
+  static public void startWhileCameraIsAlreadyOpenAndCloseCamera(
+      VideoCapturerAndroid capturer, Context appContext) throws InterruptedException {
+    Camera camera = Camera.open(capturer.getCurrentCameraId());
+
+    final List<CaptureFormat> formats = capturer.getSupportedFormats();
+    final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
+
+    final FakeCapturerObserver observer = new FakeCapturerObserver();
+    capturer.startCapture(format.width, format.height, format.maxFramerate,
+        appContext, observer);
+    waitUntilIdle(capturer);
+
+    camera.release();
+
+    // Make sure camera is started and first frame is received and then stop it.
+    assertTrue(observer.WaitForCapturerToStart());
+    observer.WaitForNextCapturedFrame();
+    capturer.stopCapture();
+    if (capturer.isCapturingToTexture()) {
+      capturer.surfaceHelper.returnTextureFrame();
+    }
+    capturer.dispose();
+    assertTrue(capturer.isReleased());
+  }
+
+  static public void startWhileCameraIsAlreadyOpenAndStop(
+      VideoCapturerAndroid capturer, Context appContext) throws InterruptedException {
+    Camera camera = Camera.open(capturer.getCurrentCameraId());
+    final List<CaptureFormat> formats = capturer.getSupportedFormats();
+    final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
+
+    final FakeCapturerObserver observer = new FakeCapturerObserver();
+    capturer.startCapture(format.width, format.height, format.maxFramerate,
+        appContext, observer);
+    capturer.stopCapture();
+    capturer.dispose();
+    assertTrue(capturer.isReleased());
+    camera.release();
+  }
+
+  static public void returnBufferLate(VideoCapturerAndroid capturer,
+      Context appContext) throws InterruptedException {
+    FakeCapturerObserver observer = new FakeCapturerObserver();
+
+    List<CaptureFormat> formats = capturer.getSupportedFormats();
+    CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
+    capturer.startCapture(format.width, format.height, format.maxFramerate,
+        appContext, observer);
+    assertTrue(observer.WaitForCapturerToStart());
+
+    observer.WaitForNextCapturedFrame();
+    capturer.stopCapture();
+    List<Long> listOfTimestamps = observer.getCopyAndResetListOfTimestamps();
+    assertTrue(listOfTimestamps.size() >= 1);
+
+    format = formats.get(1);
+    capturer.startCapture(format.width, format.height, format.maxFramerate,
+        appContext, observer);
+    observer.WaitForCapturerToStart();
+    if (capturer.isCapturingToTexture()) {
+      capturer.surfaceHelper.returnTextureFrame();
+    }
+
+    observer.WaitForNextCapturedFrame();
+    capturer.stopCapture();
+
+    listOfTimestamps = observer.getCopyAndResetListOfTimestamps();
+    assertTrue(listOfTimestamps.size() >= 1);
+    if (capturer.isCapturingToTexture()) {
+      capturer.surfaceHelper.returnTextureFrame();
+    }
+
+    capturer.dispose();
+    assertTrue(capturer.isReleased());
+  }
+
+  static public void returnBufferLateEndToEnd(VideoCapturerAndroid capturer)
+      throws InterruptedException {
+    final PeerConnectionFactory factory = new PeerConnectionFactory();
+    final VideoSource source = factory.createVideoSource(capturer, new MediaConstraints());
+    final VideoTrack track = factory.createVideoTrack("dummy", source);
+    final FakeAsyncRenderer renderer = new FakeAsyncRenderer();
+
+    track.addRenderer(new VideoRenderer(renderer));
+    // Wait for at least one frame that has not been returned.
+    assertFalse(renderer.waitForPendingFrames().isEmpty());
+
+    capturer.stopCapture();
+
+    // Dispose everything.
+    track.dispose();
+    source.dispose();
+    factory.dispose();
+    assertTrue(capturer.isReleased());
+
+    // Return the frame(s), on a different thread out of spite.
+    final List<I420Frame> pendingFrames = renderer.waitForPendingFrames();
+    final Thread returnThread = new Thread(new Runnable() {
+      @Override
+      public void run() {
+        for (I420Frame frame : pendingFrames) {
+          VideoRenderer.renderFrameDone(frame);
+        }
+      }
+    });
+    returnThread.start();
+    returnThread.join();
+  }
+
+  static public void cameraFreezedEventOnBufferStarvationUsingTextures(
+      VideoCapturerAndroid capturer,
+      CameraEvents events, Context appContext) throws InterruptedException {
+    assertTrue("Not capturing to textures.", capturer.isCapturingToTexture());
+
+    final List<CaptureFormat> formats = capturer.getSupportedFormats();
+    final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
+
+    final FakeCapturerObserver observer = new FakeCapturerObserver();
+    capturer.startCapture(format.width, format.height, format.maxFramerate,
+        appContext, observer);
+    // Make sure camera is started.
+    assertTrue(observer.WaitForCapturerToStart());
+    // Since we don't return the buffer, we should get a starvation message if we are
+    // capturing to a texture.
+    assertEquals("Camera failure. Client must return video buffers.",
+        events.WaitForCameraFreezed());
+
+    capturer.stopCapture();
+    if (capturer.isCapturingToTexture()) {
+      capturer.surfaceHelper.returnTextureFrame();
+    }
+
+    capturer.dispose();
+    assertTrue(capturer.isReleased());
+  }
+
+  static public void scaleCameraOutput(VideoCapturerAndroid capturer) throws InterruptedException {
+    PeerConnectionFactory factory = new PeerConnectionFactory();
+    VideoSource source =
+        factory.createVideoSource(capturer, new MediaConstraints());
+    VideoTrack track = factory.createVideoTrack("dummy", source);
+    RendererCallbacks renderer = new RendererCallbacks();
+    track.addRenderer(new VideoRenderer(renderer));
+    assertTrue(renderer.WaitForNextFrameToRender() > 0);
+
+    final int startWidth = renderer.frameWidth();
+    final int startHeight = renderer.frameHeight();
+    final int frameRate = 30;
+    final int scaledWidth = startWidth / 2;
+    final int scaledHeight = startHeight / 2;
+
+    // Request the captured frames to be scaled.
+    capturer.onOutputFormatRequest(scaledWidth, scaledHeight, frameRate);
+
+    boolean gotExpectedResolution = false;
+    int numberOfInspectedFrames = 0;
+
+    do {
+      renderer.WaitForNextFrameToRender();
+      ++numberOfInspectedFrames;
+
+      gotExpectedResolution = (renderer.frameWidth() == scaledWidth
+          && renderer.frameHeight() == scaledHeight);
+    } while (!gotExpectedResolution && numberOfInspectedFrames < 30);
+
+    source.stop();
+    track.dispose();
+    source.dispose();
+    factory.dispose();
+    assertTrue(capturer.isReleased());
+
+    assertTrue(gotExpectedResolution);
+  }
+
+}
diff --git a/webrtc/api/androidvideocapturer.cc b/webrtc/api/androidvideocapturer.cc
new file mode 100644
index 0000000..276067a
--- /dev/null
+++ b/webrtc/api/androidvideocapturer.cc
@@ -0,0 +1,251 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/androidvideocapturer.h"
+
+#include "webrtc/api/java/jni/native_handle_impl.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/json.h"
+#include "webrtc/base/timeutils.h"
+#include "webrtc/media/webrtc/webrtcvideoframe.h"
+
+namespace webrtc {
+
+// A hack for avoiding deep frame copies in
+// cricket::VideoCapturer::SignalFrameCaptured() using a custom FrameFactory.
+// A frame is injected using UpdateCapturedFrame(), and converted into a
+// cricket::VideoFrame with CreateAliasedFrame(). UpdateCapturedFrame() should
+// be called before CreateAliasedFrame() for every frame.
+// TODO(magjed): Add an interface cricket::VideoCapturer::OnFrameCaptured()
+// for ref counted I420 frames instead of this hack.
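+//
+// A minimal usage sketch (mirroring OnIncomingFrame() below; |buffer|,
+// |rotation| and |time_stamp| are the values delivered from the Java layer):
+//
+//   frame_factory_->UpdateCapturedFrame(buffer, rotation, time_stamp);
+//   SignalFrameCaptured(this, frame_factory_->GetCapturedFrame());
+//   frame_factory_->ClearCapturedFrame();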
+class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory {
+ public:
+  explicit FrameFactory(
+      const rtc::scoped_refptr<AndroidVideoCapturerDelegate>& delegate)
+      : delegate_(delegate) {
+    // Create a CapturedFrame that only contains header information, not the
+    // actual pixel data.
+    captured_frame_.pixel_height = 1;
+    captured_frame_.pixel_width = 1;
+    captured_frame_.data = nullptr;
+    captured_frame_.data_size = cricket::CapturedFrame::kUnknownDataSize;
+    captured_frame_.fourcc = static_cast<uint32_t>(cricket::FOURCC_ANY);
+  }
+
+  void UpdateCapturedFrame(
+      const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
+      int rotation,
+      int64_t time_stamp_in_ns) {
+    RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
+               rotation == 270);
+    buffer_ = buffer;
+    captured_frame_.width = buffer->width();
+    captured_frame_.height = buffer->height();
+    captured_frame_.time_stamp = time_stamp_in_ns;
+    captured_frame_.rotation = static_cast<webrtc::VideoRotation>(rotation);
+  }
+
+  void ClearCapturedFrame() {
+    buffer_ = nullptr;
+    captured_frame_.width = 0;
+    captured_frame_.height = 0;
+    captured_frame_.time_stamp = 0;
+  }
+
+  const cricket::CapturedFrame* GetCapturedFrame() const {
+    return &captured_frame_;
+  }
+
+  cricket::VideoFrame* CreateAliasedFrame(
+      const cricket::CapturedFrame* captured_frame,
+      int dst_width,
+      int dst_height) const override {
+    // Check that captured_frame is actually our frame.
+    RTC_CHECK(captured_frame == &captured_frame_);
+    RTC_CHECK(buffer_->native_handle() == nullptr);
+
+    rtc::scoped_ptr<cricket::VideoFrame> frame(new cricket::WebRtcVideoFrame(
+        ShallowCenterCrop(buffer_, dst_width, dst_height),
+        captured_frame->time_stamp, captured_frame->rotation));
+    // Caller takes ownership.
+    // TODO(magjed): Change CreateAliasedFrame() to return a rtc::scoped_ptr.
+    return apply_rotation_ ? frame->GetCopyWithRotationApplied()->Copy()
+                           : frame.release();
+  }
+
+  cricket::VideoFrame* CreateAliasedFrame(
+      const cricket::CapturedFrame* input_frame,
+      int cropped_input_width,
+      int cropped_input_height,
+      int output_width,
+      int output_height) const override {
+    if (buffer_->native_handle() != nullptr) {
+      // TODO(perkj) Implement cropping.
+      RTC_CHECK_EQ(cropped_input_width, buffer_->width());
+      RTC_CHECK_EQ(cropped_input_height, buffer_->height());
+      rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer(
+          static_cast<webrtc_jni::AndroidTextureBuffer*>(buffer_.get())
+              ->ScaleAndRotate(output_width, output_height,
+                               apply_rotation_ ? input_frame->rotation :
+                                   webrtc::kVideoRotation_0));
+      return new cricket::WebRtcVideoFrame(
+          scaled_buffer, input_frame->time_stamp,
+          apply_rotation_ ? webrtc::kVideoRotation_0 : input_frame->rotation);
+    }
+    return VideoFrameFactory::CreateAliasedFrame(input_frame,
+                                                 cropped_input_width,
+                                                 cropped_input_height,
+                                                 output_width,
+                                                 output_height);
+  }
+
+ private:
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer_;
+  cricket::CapturedFrame captured_frame_;
+  rtc::scoped_refptr<AndroidVideoCapturerDelegate> delegate_;
+};
+
+AndroidVideoCapturer::AndroidVideoCapturer(
+    const rtc::scoped_refptr<AndroidVideoCapturerDelegate>& delegate)
+    : running_(false),
+      delegate_(delegate),
+      frame_factory_(nullptr),
+      current_state_(cricket::CS_STOPPED) {
+  thread_checker_.DetachFromThread();
+  std::string json_string = delegate_->GetSupportedFormats();
+  LOG(LS_INFO) << json_string;
+
+  Json::Value json_values;
+  Json::Reader reader(Json::Features::strictMode());
+  if (!reader.parse(json_string, json_values)) {
+    LOG(LS_ERROR) << "Failed to parse formats.";
+  }
+
+  std::vector<cricket::VideoFormat> formats;
+  for (Json::ArrayIndex i = 0; i < json_values.size(); ++i) {
+      const Json::Value& json_value = json_values[i];
+      RTC_CHECK(!json_value["width"].isNull() &&
+                !json_value["height"].isNull() &&
+                !json_value["framerate"].isNull());
+      cricket::VideoFormat format(
+          json_value["width"].asInt(),
+          json_value["height"].asInt(),
+          cricket::VideoFormat::FpsToInterval(json_value["framerate"].asInt()),
+          cricket::FOURCC_YV12);
+      formats.push_back(format);
+  }
+  SetSupportedFormats(formats);
+  // Do not apply frame rotation by default.
+  SetApplyRotation(false);
+}
+
+AndroidVideoCapturer::~AndroidVideoCapturer() {
+  RTC_CHECK(!running_);
+}
+
+cricket::CaptureState AndroidVideoCapturer::Start(
+    const cricket::VideoFormat& capture_format) {
+  RTC_CHECK(thread_checker_.CalledOnValidThread());
+  RTC_CHECK(!running_);
+  const int fps = cricket::VideoFormat::IntervalToFps(capture_format.interval);
+  LOG(LS_INFO) << " AndroidVideoCapturer::Start " << capture_format.width << "x"
+               << capture_format.height << "@" << fps;
+
+  frame_factory_ = new AndroidVideoCapturer::FrameFactory(delegate_.get());
+  set_frame_factory(frame_factory_);
+
+  running_ = true;
+  delegate_->Start(capture_format.width, capture_format.height, fps, this);
+  SetCaptureFormat(&capture_format);
+  current_state_ = cricket::CS_STARTING;
+  return current_state_;
+}
+
+void AndroidVideoCapturer::Stop() {
+  LOG(LS_INFO) << " AndroidVideoCapturer::Stop ";
+  RTC_CHECK(thread_checker_.CalledOnValidThread());
+  RTC_CHECK(running_);
+  running_ = false;
+  SetCaptureFormat(nullptr);
+
+  delegate_->Stop();
+  current_state_ = cricket::CS_STOPPED;
+  SignalStateChange(this, current_state_);
+}
+
+bool AndroidVideoCapturer::IsRunning() {
+  RTC_CHECK(thread_checker_.CalledOnValidThread());
+  return running_;
+}
+
+bool AndroidVideoCapturer::GetPreferredFourccs(std::vector<uint32_t>* fourccs) {
+  RTC_CHECK(thread_checker_.CalledOnValidThread());
+  fourccs->push_back(cricket::FOURCC_YV12);
+  return true;
+}
+
+void AndroidVideoCapturer::OnCapturerStarted(bool success) {
+  RTC_CHECK(thread_checker_.CalledOnValidThread());
+  cricket::CaptureState new_state =
+      success ? cricket::CS_RUNNING : cricket::CS_FAILED;
+  if (new_state == current_state_)
+    return;
+  current_state_ = new_state;
+
+  // TODO(perkj): SetCaptureState cannot be used since it posts to |thread_|,
+  // but |thread_| is currently just the thread that happened to create the
+  // cricket::VideoCapturer.
+  SignalStateChange(this, new_state);
+}
+
+void AndroidVideoCapturer::OnIncomingFrame(
+    const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
+    int rotation,
+    int64_t time_stamp) {
+  RTC_CHECK(thread_checker_.CalledOnValidThread());
+  frame_factory_->UpdateCapturedFrame(buffer, rotation, time_stamp);
+  SignalFrameCaptured(this, frame_factory_->GetCapturedFrame());
+  frame_factory_->ClearCapturedFrame();
+}
+
+void AndroidVideoCapturer::OnOutputFormatRequest(
+    int width, int height, int fps) {
+  RTC_CHECK(thread_checker_.CalledOnValidThread());
+  const cricket::VideoFormat& current = video_adapter()->output_format();
+  cricket::VideoFormat format(
+      width, height, cricket::VideoFormat::FpsToInterval(fps), current.fourcc);
+  video_adapter()->OnOutputFormatRequest(format);
+}
+
+bool AndroidVideoCapturer::GetBestCaptureFormat(
+    const cricket::VideoFormat& desired,
+    cricket::VideoFormat* best_format) {
+  // Delegate this choice to VideoCapturerAndroid.startCapture().
+  *best_format = desired;
+  return true;
+}
+
+}  // namespace webrtc
diff --git a/webrtc/api/androidvideocapturer.h b/webrtc/api/androidvideocapturer.h
new file mode 100644
index 0000000..cba5224
--- /dev/null
+++ b/webrtc/api/androidvideocapturer.h
@@ -0,0 +1,109 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_ANDROIDVIDEOCAPTURER_H_
+#define WEBRTC_API_ANDROIDVIDEOCAPTURER_H_
+
+#include <string>
+#include <vector>
+
+#include "webrtc/base/thread_checker.h"
+#include "webrtc/common_video/include/video_frame_buffer.h"
+#include "webrtc/media/base/videocapturer.h"
+
+namespace webrtc {
+
+class AndroidVideoCapturer;
+
+class AndroidVideoCapturerDelegate : public rtc::RefCountInterface {
+ public:
+  virtual ~AndroidVideoCapturerDelegate() {}
+  // Start capturing. The implementation of the delegate must call
+  // AndroidVideoCapturer::OnCapturerStarted with the result of this request.
+  virtual void Start(int width, int height, int framerate,
+                     AndroidVideoCapturer* capturer) = 0;
+
+  // Stops capturing.
+  // The delegate may not call into AndroidVideoCapturer after this call.
+  virtual void Stop() = 0;
+
+  // Must return a JSON string describing the supported formats, e.g.
+  // [{"width": 640, "height": 480, "framerate": 30}, ...].
+  virtual std::string GetSupportedFormats() = 0;
+};
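+
+// A minimal sketch of a conforming delegate. |FakeDelegate| is hypothetical
+// and only illustrates the expected call pattern; the real delegates are
+// implemented in the JNI layer.
+//
+//   class FakeDelegate : public AndroidVideoCapturerDelegate {
+//    public:
+//     void Start(int width, int height, int framerate,
+//                AndroidVideoCapturer* capturer) override {
+//       // Report the start result back to the capturer.
+//       capturer->OnCapturerStarted(true);
+//     }
+//     void Stop() override {}
+//     std::string GetSupportedFormats() override {
+//       return "[{\"width\": 640, \"height\": 480, \"framerate\": 30}]";
+//     }
+//   };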
+
+// Android implementation of cricket::VideoCapturer for use with WebRTC
+// PeerConnection.
+class AndroidVideoCapturer : public cricket::VideoCapturer {
+ public:
+  explicit AndroidVideoCapturer(
+      const rtc::scoped_refptr<AndroidVideoCapturerDelegate>& delegate);
+  virtual ~AndroidVideoCapturer();
+
+  // Called from JNI when the capturer has been started.
+  void OnCapturerStarted(bool success);
+
+  // Called from JNI when a new frame has been captured.
+  // |buffer| is ref counted, so it is cheap to copy when the call is
+  // posted between threads (e.g. with rtc::Bind).
+  void OnIncomingFrame(
+      const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
+      int rotation,
+      int64_t time_stamp);
+
+  // Called from JNI to request a new video format.
+  void OnOutputFormatRequest(int width, int height, int fps);
+
+  AndroidVideoCapturerDelegate* delegate() { return delegate_.get(); }
+
+  // cricket::VideoCapturer implementation.
+  bool GetBestCaptureFormat(const cricket::VideoFormat& desired,
+                            cricket::VideoFormat* best_format) override;
+
+ private:
+  // cricket::VideoCapturer implementation.
+  // Video frames will be delivered using
+  // cricket::VideoCapturer::SignalFrameCaptured on the thread that calls Start.
+  cricket::CaptureState Start(
+      const cricket::VideoFormat& capture_format) override;
+  void Stop() override;
+  bool IsRunning() override;
+  bool IsScreencast() const override { return false; }
+  bool GetPreferredFourccs(std::vector<uint32_t>* fourccs) override;
+
+  bool running_;
+  rtc::scoped_refptr<AndroidVideoCapturerDelegate> delegate_;
+
+  rtc::ThreadChecker thread_checker_;
+
+  class FrameFactory;
+  FrameFactory* frame_factory_;  // Owned by cricket::VideoCapturer.
+
+  cricket::CaptureState current_state_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_ANDROIDVIDEOCAPTURER_H_
diff --git a/webrtc/api/api.gyp b/webrtc/api/api.gyp
index ac4ea84..fe7cd0e 100644
--- a/webrtc/api/api.gyp
+++ b/webrtc/api/api.gyp
@@ -9,6 +9,131 @@
 {
   'includes': [ '../build/common.gypi', ],
   'conditions': [
+    ['os_posix == 1 and OS != "mac" and OS != "ios"', {
+      'conditions': [
+        ['sysroot!=""', {
+          'variables': {
+            'pkg-config': '../../../build/linux/pkg-config-wrapper "<(sysroot)" "<(target_arch)"',
+          },
+        }, {
+          'variables': {
+            'pkg-config': 'pkg-config'
+          },
+        }],
+      ],
+    }],
+    ['OS=="android"', {
+      'targets': [
+        {
+          'target_name': 'libjingle_peerconnection_jni',
+          'type': 'static_library',
+          'dependencies': [
+            '<(webrtc_root)/system_wrappers/system_wrappers.gyp:field_trial_default',
+            'libjingle_peerconnection',
+          ],
+          'sources': [
+            'androidvideocapturer.cc',
+            'androidvideocapturer.h',
+            'java/jni/androidmediacodeccommon.h',
+            'java/jni/androidmediadecoder_jni.cc',
+            'java/jni/androidmediadecoder_jni.h',
+            'java/jni/androidmediaencoder_jni.cc',
+            'java/jni/androidmediaencoder_jni.h',
+            'java/jni/androidnetworkmonitor_jni.cc',
+            'java/jni/androidnetworkmonitor_jni.h',
+            'java/jni/androidvideocapturer_jni.cc',
+            'java/jni/androidvideocapturer_jni.h',
+            'java/jni/classreferenceholder.cc',
+            'java/jni/classreferenceholder.h',
+            'java/jni/eglbase_jni.cc',
+            'java/jni/eglbase_jni.h',
+            'java/jni/jni_helpers.cc',
+            'java/jni/jni_helpers.h',
+            'java/jni/native_handle_impl.cc',
+            'java/jni/native_handle_impl.h',
+            'java/jni/peerconnection_jni.cc',
+            'java/jni/surfacetexturehelper_jni.cc',
+            'java/jni/surfacetexturehelper_jni.h',
+          ],
+          'include_dirs': [
+            '<(libyuv_dir)/include',
+          ],
+          # TODO(kjellander): Make the code compile without disabling these flags.
+          # See https://bugs.chromium.org/p/webrtc/issues/detail?id=3307
+          'cflags': [
+            '-Wno-sign-compare',
+            '-Wno-unused-variable',
+          ],
+          'cflags!': [
+            '-Wextra',
+          ],
+          'cflags_cc!': [
+            '-Wnon-virtual-dtor',
+            '-Woverloaded-virtual',
+          ],
+          'msvs_disabled_warnings': [
+            4245,  # conversion from 'int' to 'size_t', signed/unsigned mismatch.
+            4267,  # conversion from 'size_t' to 'int', possible loss of data.
+            4389,  # signed/unsigned mismatch.
+          ],
+          'conditions': [
+            ['build_json==1', {
+              'dependencies': [
+                '<(DEPTH)/third_party/jsoncpp/jsoncpp.gyp:jsoncpp',
+              ],
+              'export_dependent_settings': [
+                '<(DEPTH)/third_party/jsoncpp/jsoncpp.gyp:jsoncpp',
+              ],
+            }],
+          ],
+        },
+        {
+          'target_name': 'libjingle_peerconnection_so',
+          'type': 'shared_library',
+          'dependencies': [
+            'libjingle_peerconnection',
+            'libjingle_peerconnection_jni',
+          ],
+          'sources': [
+            'java/jni/jni_onload.cc',
+          ],
+          'variables': {
+            # This library uses native JNI exports; tell GYP so that the
+            # required symbols will be kept.
+            'use_native_jni_exports': 1,
+          },
+        },
+        {
+          # |libjingle_peerconnection_java| builds a jar file with name
+          # libjingle_peerconnection_java.jar using Chrome's build system.
+          # It includes all Java files needed to set up a PeerConnection call
+          # from Android.
+          'target_name': 'libjingle_peerconnection_java',
+          'type': 'none',
+          'dependencies': [
+            'libjingle_peerconnection_so',
+          ],
+          'variables': {
+            # Designate as Chromium code and point to our lint settings to
+            # enable linting of the WebRTC code (this is the only way to make
+            # lint_action invoke the Android linter).
+            'android_manifest_path': '<(webrtc_root)/build/android/AndroidManifest.xml',
+            'suppressions_file': '<(webrtc_root)/build/android/suppressions.xml',
+            'chromium_code': 1,
+            'java_in_dir': 'java',
+            'webrtc_base_dir': '<(webrtc_root)/base',
+            'webrtc_modules_dir': '<(webrtc_root)/modules',
+            'additional_src_dirs' : [
+              'java/android',
+              '<(webrtc_base_dir)/java/src',
+              '<(webrtc_modules_dir)/audio_device/android/java/src',
+              '<(webrtc_modules_dir)/video_render/android/java/src',
+            ],
+          },
+          'includes': ['../../build/java.gypi'],
+        }, # libjingle_peerconnection_java
+      ]
+    }],
     ['OS=="ios"', {
       'targets': [
         {
@@ -16,7 +141,7 @@
           'type': 'static_library',
           'dependencies': [
             '<(webrtc_root)/base/base.gyp:rtc_base_objc',
-            '../../talk/libjingle.gyp:libjingle_peerconnection',
+            'libjingle_peerconnection',
           ],
           'sources': [
             'objc/RTCAVFoundationVideoSource+Private.h',
@@ -117,6 +242,128 @@
           },
         }
       ],
-    }], # OS=="ios"
-  ],
+    }],  # OS=="ios"
+  ],  # conditions
+  'targets': [
+    {
+      'target_name': 'libjingle_peerconnection',
+      'type': 'static_library',
+      'dependencies': [
+        '<(webrtc_root)/media/media.gyp:rtc_media',
+        '../../talk/libjingle.gyp:libjingle_p2p',
+      ],
+      'sources': [
+        'audiotrack.cc',
+        'audiotrack.h',
+        'datachannel.cc',
+        'datachannel.h',
+        'datachannelinterface.h',
+        'dtlsidentitystore.cc',
+        'dtlsidentitystore.h',
+        'dtmfsender.cc',
+        'dtmfsender.h',
+        'dtmfsenderinterface.h',
+        'jsep.h',
+        'jsepicecandidate.cc',
+        'jsepicecandidate.h',
+        'jsepsessiondescription.cc',
+        'jsepsessiondescription.h',
+        'localaudiosource.cc',
+        'localaudiosource.h',
+        'mediaconstraintsinterface.cc',
+        'mediaconstraintsinterface.h',
+        'mediacontroller.cc',
+        'mediacontroller.h',
+        'mediastream.cc',
+        'mediastream.h',
+        'mediastreaminterface.h',
+        'mediastreamobserver.cc',
+        'mediastreamobserver.h',
+        'mediastreamprovider.h',
+        'mediastreamproxy.h',
+        'mediastreamtrack.h',
+        'mediastreamtrackproxy.h',
+        'notifier.h',
+        'peerconnection.cc',
+        'peerconnection.h',
+        'peerconnectionfactory.cc',
+        'peerconnectionfactory.h',
+        'peerconnectionfactoryproxy.h',
+        'peerconnectioninterface.h',
+        'peerconnectionproxy.h',
+        'proxy.h',
+        'remoteaudiosource.cc',
+        'remoteaudiosource.h',
+        'remotevideocapturer.cc',
+        'remotevideocapturer.h',
+        'rtpreceiver.cc',
+        'rtpreceiver.h',
+        'rtpreceiverinterface.h',
+        'rtpsender.cc',
+        'rtpsender.h',
+        'rtpsenderinterface.h',
+        'sctputils.cc',
+        'sctputils.h',
+        'statscollector.cc',
+        'statscollector.h',
+        'statstypes.cc',
+        'statstypes.h',
+        'streamcollection.h',
+        'videosource.cc',
+        'videosource.h',
+        'videosourceinterface.h',
+        'videosourceproxy.h',
+        'videotrack.cc',
+        'videotrack.h',
+        'videotrackrenderers.cc',
+        'videotrackrenderers.h',
+        'webrtcsdp.cc',
+        'webrtcsdp.h',
+        'webrtcsession.cc',
+        'webrtcsession.h',
+        'webrtcsessiondescriptionfactory.cc',
+        'webrtcsessiondescriptionfactory.h',
+      ],
+      # TODO(kjellander): Make the code compile without disabling these flags.
+      # See https://bugs.chromium.org/p/webrtc/issues/detail?id=3307
+      'cflags': [
+        '-Wno-sign-compare',
+      ],
+      'cflags_cc!': [
+        '-Wnon-virtual-dtor',
+        '-Woverloaded-virtual',
+      ],
+      'conditions': [
+        ['clang==1', {
+          'cflags!': [
+            '-Wextra',
+          ],
+          'xcode_settings': {
+            'WARNING_CFLAGS!': ['-Wextra'],
+          },
+        }, {
+          'cflags': [
+            '-Wno-maybe-uninitialized',  # Only exists for GCC.
+          ],
+        }],
+        ['OS=="win"', {
+          # Disable warning for signed/unsigned mismatch.
+          'msvs_settings': {
+            'VCCLCompilerTool': {
+              'AdditionalOptions!': ['/we4389'],
+            },
+          },
+        }],
+        ['OS=="win" and clang==1', {
+          'msvs_settings': {
+            'VCCLCompilerTool': {
+              'AdditionalOptions': [
+                '-Wno-sign-compare',
+              ],
+            },
+          },
+        }],
+      ],
+    },  # target libjingle_peerconnection
+  ],  # targets
 }
diff --git a/webrtc/api/api_tests.gyp b/webrtc/api/api_tests.gyp
index cdb23fb..31bc699 100644
--- a/webrtc/api/api_tests.gyp
+++ b/webrtc/api/api_tests.gyp
@@ -8,7 +8,135 @@
 
 {
   'includes': [ '../build/common.gypi', ],
+  'targets': [
+    {
+      'target_name': 'peerconnection_unittests',
+      'type': '<(gtest_target_type)',
+      'dependencies': [
+        '<(DEPTH)/testing/gmock.gyp:gmock',
+        '<(webrtc_root)/api/api.gyp:libjingle_peerconnection',
+        '<(webrtc_root)/base/base_tests.gyp:rtc_base_tests_utils',
+        '<(webrtc_root)/common.gyp:webrtc_common',
+        '<(webrtc_root)/webrtc.gyp:rtc_unittest_main',
+        '../../talk/libjingle.gyp:libjingle_p2p',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '<(DEPTH)/testing/gmock/include',
+        ],
+      },
+      'defines': [
+        # Feature selection.
+        'HAVE_SCTP',
+      ],
+      'sources': [
+        'datachannel_unittest.cc',
+        'dtlsidentitystore_unittest.cc',
+        'dtmfsender_unittest.cc',
+        'fakemetricsobserver.cc',
+        'fakemetricsobserver.h',
+        'jsepsessiondescription_unittest.cc',
+        'localaudiosource_unittest.cc',
+        'mediastream_unittest.cc',
+        'peerconnection_unittest.cc',
+        'peerconnectionendtoend_unittest.cc',
+        'peerconnectionfactory_unittest.cc',
+        'peerconnectioninterface_unittest.cc',
+        # 'peerconnectionproxy_unittest.cc',
+        'remotevideocapturer_unittest.cc',
+        'rtpsenderreceiver_unittest.cc',
+        'statscollector_unittest.cc',
+        'test/fakeaudiocapturemodule.cc',
+        'test/fakeaudiocapturemodule.h',
+        'test/fakeaudiocapturemodule_unittest.cc',
+        'test/fakeconstraints.h',
+        'test/fakedatachannelprovider.h',
+        'test/fakedtlsidentitystore.h',
+        'test/fakeperiodicvideocapturer.h',
+        'test/fakevideotrackrenderer.h',
+        'test/mockpeerconnectionobservers.h',
+        'test/peerconnectiontestwrapper.h',
+        'test/peerconnectiontestwrapper.cc',
+        'test/testsdpstrings.h',
+        'videosource_unittest.cc',
+        'videotrack_unittest.cc',
+        'webrtcsdp_unittest.cc',
+        'webrtcsession_unittest.cc',
+      ],
+      # TODO(kjellander): Make the code compile without disabling these flags.
+      # See https://bugs.chromium.org/p/webrtc/issues/detail?id=3307
+      'cflags': [
+        '-Wno-sign-compare',
+      ],
+      'cflags!': [
+        '-Wextra',
+      ],
+      'cflags_cc!': [
+        '-Wnon-virtual-dtor',
+        '-Woverloaded-virtual',
+      ],
+      'msvs_disabled_warnings': [
+        4245,  # conversion from 'int' to 'size_t', signed/unsigned mismatch.
+        4267,  # conversion from 'size_t' to 'int', possible loss of data.
+        4389,  # signed/unsigned mismatch.
+      ],
+      'conditions': [
+        ['clang==1', {
+          # TODO(kjellander): Make the code compile without disabling these flags.
+          # See https://bugs.chromium.org/p/webrtc/issues/detail?id=3307
+          'cflags!': [
+            '-Wextra',
+          ],
+          'xcode_settings': {
+            'WARNING_CFLAGS!': ['-Wextra'],
+          },
+        }],
+        ['OS=="android"', {
+          'sources': [
+            'test/androidtestinitializer.cc',
+            'test/androidtestinitializer.h',
+          ],
+          'dependencies': [
+            '<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
+            '<(webrtc_root)/api/api.gyp:libjingle_peerconnection_jni',
+          ],
+        }],
+        ['OS=="win" and clang==1', {
+          'msvs_settings': {
+            'VCCLCompilerTool': {
+              'AdditionalOptions': [
+                # Disable warnings failing when compiling with Clang on Windows.
+                # https://bugs.chromium.org/p/webrtc/issues/detail?id=5366
+                '-Wno-sign-compare',
+                '-Wno-unused-function',
+              ],
+            },
+          },
+        }],
+      ],  # conditions
+    },  # target peerconnection_unittests
+  ],  # targets
   'conditions': [
+    ['OS=="android"', {
+      'targets': [
+        {
+          'target_name': 'libjingle_peerconnection_android_unittest',
+          'type': 'none',
+          'dependencies': [
+            '<(webrtc_root)/api/api.gyp:libjingle_peerconnection_java',
+          ],
+          'variables': {
+            'apk_name': 'libjingle_peerconnection_android_unittest',
+            'java_in_dir': 'androidtests',
+            'resource_dir': 'androidtests/res',
+            'native_lib_target': 'libjingle_peerconnection_so',
+            'is_test_apk': 1,
+            'never_lint': 1,
+          },
+          'includes': [ '../../build/java_apk.gypi' ],
+        },
+      ],  # targets
+    }],  # OS=="android"
     ['OS=="ios"', {
       'targets': [
         {
@@ -35,8 +163,36 @@
             # https://developer.apple.com/library/mac/qa/qa1490/_index.html
             'OTHER_LDFLAGS': ['-ObjC'],
           },
-        }
+        },
       ],
-    }], # OS=="ios"
-  ],
+    }],  # OS=="ios"
+    ['OS=="android"', {
+      'targets': [
+        {
+          'target_name': 'peerconnection_unittests_apk_target',
+          'type': 'none',
+          'dependencies': [
+            '<(apk_tests_path):peerconnection_unittests_apk',
+          ],
+        },
+      ],
+    }],  # OS=="android"
+    ['test_isolation_mode != "noop"', {
+      'targets': [
+        {
+          'target_name': 'peerconnection_unittests_run',
+          'type': 'none',
+          'dependencies': [
+            'peerconnection_unittests',
+          ],
+          'includes': [
+            '../build/isolate.gypi',
+          ],
+          'sources': [
+            'peerconnection_unittests.isolate',
+          ],
+        },
+      ],  # targets
+    }],  # test_isolation_mode != "noop"
+  ],  # conditions
 }
diff --git a/webrtc/api/audiotrack.cc b/webrtc/api/audiotrack.cc
new file mode 100644
index 0000000..1ae0535
--- /dev/null
+++ b/webrtc/api/audiotrack.cc
@@ -0,0 +1,108 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/audiotrack.h"
+
+#include "webrtc/base/checks.h"
+
+using rtc::scoped_refptr;
+
+namespace webrtc {
+
+const char MediaStreamTrackInterface::kAudioKind[] = "audio";
+
+// static
+scoped_refptr<AudioTrack> AudioTrack::Create(
+    const std::string& id,
+    const scoped_refptr<AudioSourceInterface>& source) {
+  return new rtc::RefCountedObject<AudioTrack>(id, source);
+}
+
+AudioTrack::AudioTrack(const std::string& label,
+                       const scoped_refptr<AudioSourceInterface>& source)
+    : MediaStreamTrack<AudioTrackInterface>(label), audio_source_(source) {
+  if (audio_source_) {
+    audio_source_->RegisterObserver(this);
+    OnChanged();
+  }
+}
+
+AudioTrack::~AudioTrack() {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  set_state(MediaStreamTrackInterface::kEnded);
+  if (audio_source_)
+    audio_source_->UnregisterObserver(this);
+}
+
+std::string AudioTrack::kind() const {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  return kAudioKind;
+}
+
+AudioSourceInterface* AudioTrack::GetSource() const {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  return audio_source_.get();
+}
+
+void AudioTrack::AddSink(AudioTrackSinkInterface* sink) {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  if (audio_source_)
+    audio_source_->AddSink(sink);
+}
+
+void AudioTrack::RemoveSink(AudioTrackSinkInterface* sink) {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  if (audio_source_)
+    audio_source_->RemoveSink(sink);
+}
+
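+// Forwards the source state to the track state: a live or muted source maps
+// to kLive, an ended source maps to kEnded, and any other source state leaves
+// the track in kInitializing.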
+void AudioTrack::OnChanged() {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  if (state() == kFailed)
+    return;  // We can't recover from this state (do we ever set it?).
+
+  TrackState new_state = kInitializing;
+
+  // |audio_source_| must be non-null if we ever get here.
+  switch (audio_source_->state()) {
+    case MediaSourceInterface::kLive:
+    case MediaSourceInterface::kMuted:
+      new_state = kLive;
+      break;
+    case MediaSourceInterface::kEnded:
+      new_state = kEnded;
+      break;
+    case MediaSourceInterface::kInitializing:
+    default:
+      // Keep new_state at kInitializing.
+      break;
+  }
+
+  set_state(new_state);
+}
+
+}  // namespace webrtc
diff --git a/webrtc/api/audiotrack.h b/webrtc/api/audiotrack.h
new file mode 100644
index 0000000..87fc41f
--- /dev/null
+++ b/webrtc/api/audiotrack.h
@@ -0,0 +1,76 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_AUDIOTRACK_H_
+#define WEBRTC_API_AUDIOTRACK_H_
+
+#include <string>
+
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/api/mediastreamtrack.h"
+#include "webrtc/api/notifier.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/thread_checker.h"
+
+namespace webrtc {
+
+class AudioTrack : public MediaStreamTrack<AudioTrackInterface>,
+                   public ObserverInterface {
+ protected:
+  // Protected ctor to force use of factory method.
+  AudioTrack(const std::string& label,
+             const rtc::scoped_refptr<AudioSourceInterface>& source);
+  ~AudioTrack() override;
+
+ public:
+  static rtc::scoped_refptr<AudioTrack> Create(
+      const std::string& id,
+      const rtc::scoped_refptr<AudioSourceInterface>& source);
+
+ private:
+  // MediaStreamTrack implementation.
+  std::string kind() const override;
+
+  // AudioTrackInterface implementation.
+  AudioSourceInterface* GetSource() const override;
+
+  void AddSink(AudioTrackSinkInterface* sink) override;
+  void RemoveSink(AudioTrackSinkInterface* sink) override;
+
+  // ObserverInterface implementation.
+  void OnChanged() override;
+
+ private:
+  const rtc::scoped_refptr<AudioSourceInterface> audio_source_;
+  rtc::ThreadChecker thread_checker_;
+  RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(AudioTrack);
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_AUDIOTRACK_H_
diff --git a/webrtc/api/datachannel.cc b/webrtc/api/datachannel.cc
new file mode 100644
index 0000000..855831a
--- /dev/null
+++ b/webrtc/api/datachannel.cc
@@ -0,0 +1,655 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/datachannel.h"
+
+#include <string>
+
+#include "webrtc/api/mediastreamprovider.h"
+#include "webrtc/api/sctputils.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/media/sctp/sctpdataengine.h"
+
+namespace webrtc {
+
+static const size_t kMaxQueuedReceivedDataBytes = 16 * 1024 * 1024;
+static const size_t kMaxQueuedSendDataBytes = 16 * 1024 * 1024;
+
+enum {
+  MSG_CHANNELREADY,
+};
+
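+// Example of the even/odd split: with role == rtc::SSL_CLIENT and sids 0 and
+// 2 already in use, AllocateSid() probes 0 and 2 and allocates 4; with
+// role == rtc::SSL_SERVER under the same conditions it allocates 1, since the
+// odd sids are still free.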
+bool SctpSidAllocator::AllocateSid(rtc::SSLRole role, int* sid) {
+  int potential_sid = (role == rtc::SSL_CLIENT) ? 0 : 1;
+  while (!IsSidAvailable(potential_sid)) {
+    potential_sid += 2;
+    if (potential_sid > static_cast<int>(cricket::kMaxSctpSid)) {
+      return false;
+    }
+  }
+
+  *sid = potential_sid;
+  used_sids_.insert(potential_sid);
+  return true;
+}
+
+bool SctpSidAllocator::ReserveSid(int sid) {
+  if (!IsSidAvailable(sid)) {
+    return false;
+  }
+  used_sids_.insert(sid);
+  return true;
+}
+
+void SctpSidAllocator::ReleaseSid(int sid) {
+  auto it = used_sids_.find(sid);
+  if (it != used_sids_.end()) {
+    used_sids_.erase(it);
+  }
+}
+
+bool SctpSidAllocator::IsSidAvailable(int sid) const {
+  if (sid < 0 || sid > static_cast<int>(cricket::kMaxSctpSid)) {
+    return false;
+  }
+  return used_sids_.find(sid) == used_sids_.end();
+}
+
+DataChannel::PacketQueue::PacketQueue() : byte_count_(0) {}
+
+DataChannel::PacketQueue::~PacketQueue() {
+  Clear();
+}
+
+bool DataChannel::PacketQueue::Empty() const {
+  return packets_.empty();
+}
+
+DataBuffer* DataChannel::PacketQueue::Front() {
+  return packets_.front();
+}
+
+void DataChannel::PacketQueue::Pop() {
+  if (packets_.empty()) {
+    return;
+  }
+
+  byte_count_ -= packets_.front()->size();
+  packets_.pop_front();
+}
+
+void DataChannel::PacketQueue::Push(DataBuffer* packet) {
+  byte_count_ += packet->size();
+  packets_.push_back(packet);
+}
+
+void DataChannel::PacketQueue::Clear() {
+  while (!packets_.empty()) {
+    delete packets_.front();
+    packets_.pop_front();
+  }
+  byte_count_ = 0;
+}
+
+void DataChannel::PacketQueue::Swap(PacketQueue* other) {
+  size_t other_byte_count = other->byte_count_;
+  other->byte_count_ = byte_count_;
+  byte_count_ = other_byte_count;
+
+  other->packets_.swap(packets_);
+}
+
+rtc::scoped_refptr<DataChannel> DataChannel::Create(
+    DataChannelProviderInterface* provider,
+    cricket::DataChannelType dct,
+    const std::string& label,
+    const InternalDataChannelInit& config) {
+  rtc::scoped_refptr<DataChannel> channel(
+      new rtc::RefCountedObject<DataChannel>(provider, dct, label));
+  if (!channel->Init(config)) {
+    return NULL;
+  }
+  return channel;
+}
+
+DataChannel::DataChannel(
+    DataChannelProviderInterface* provider,
+    cricket::DataChannelType dct,
+    const std::string& label)
+    : label_(label),
+      observer_(NULL),
+      state_(kConnecting),
+      data_channel_type_(dct),
+      provider_(provider),
+      handshake_state_(kHandshakeInit),
+      connected_to_provider_(false),
+      send_ssrc_set_(false),
+      receive_ssrc_set_(false),
+      writable_(false),
+      send_ssrc_(0),
+      receive_ssrc_(0) {
+}
+
+bool DataChannel::Init(const InternalDataChannelInit& config) {
+  if (data_channel_type_ == cricket::DCT_RTP) {
+    if (config.reliable ||
+        config.id != -1 ||
+        config.maxRetransmits != -1 ||
+        config.maxRetransmitTime != -1) {
+      LOG(LS_ERROR) << "Failed to initialize the RTP data channel due to "
+                    << "invalid DataChannelInit.";
+      return false;
+    }
+    handshake_state_ = kHandshakeReady;
+  } else if (data_channel_type_ == cricket::DCT_SCTP) {
+    if (config.id < -1 ||
+        config.maxRetransmits < -1 ||
+        config.maxRetransmitTime < -1) {
+      LOG(LS_ERROR) << "Failed to initialize the SCTP data channel due to "
+                    << "invalid DataChannelInit.";
+      return false;
+    }
+    if (config.maxRetransmits != -1 && config.maxRetransmitTime != -1) {
+      LOG(LS_ERROR) <<
+          "maxRetransmits and maxRetransmitTime should not both be set.";
+      return false;
+    }
+    config_ = config;
+
+    switch (config_.open_handshake_role) {
+      case webrtc::InternalDataChannelInit::kNone:  // pre-negotiated
+        handshake_state_ = kHandshakeReady;
+        break;
+      case webrtc::InternalDataChannelInit::kOpener:
+        handshake_state_ = kHandshakeShouldSendOpen;
+        break;
+      case webrtc::InternalDataChannelInit::kAcker:
+        handshake_state_ = kHandshakeShouldSendAck;
+        break;
+    }
+
+    // Try to connect to the transport in case the transport channel already
+    // exists.
+    OnTransportChannelCreated();
+
+    // Check if the transport is ready to send, because the initial channel
+    // ready signal may have been sent before this DataChannel was created.
+    // This has to be done asynchronously because the upper layer objects
+    // (e.g. Chrome glue and WebKit) are not wired up properly until after
+    // this function returns.
+    if (provider_->ReadyToSendData()) {
+      rtc::Thread::Current()->Post(this, MSG_CHANNELREADY, NULL);
+    }
+  }
+
+  return true;
+}
+
+DataChannel::~DataChannel() {}
+
+void DataChannel::RegisterObserver(DataChannelObserver* observer) {
+  observer_ = observer;
+  DeliverQueuedReceivedData();
+}
+
+void DataChannel::UnregisterObserver() {
+  observer_ = NULL;
+}
+
+bool DataChannel::reliable() const {
+  if (data_channel_type_ == cricket::DCT_RTP) {
+    return false;
+  } else {
+    return config_.maxRetransmits == -1 &&
+           config_.maxRetransmitTime == -1;
+  }
+}
+
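+// Returns the number of bytes of application data that have been queued for
+// sending but not yet handed to the transport.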
+uint64_t DataChannel::buffered_amount() const {
+  return queued_send_data_.byte_count();
+}
+
+void DataChannel::Close() {
+  if (state_ == kClosed)
+    return;
+  send_ssrc_ = 0;
+  send_ssrc_set_ = false;
+  SetState(kClosing);
+  UpdateState();
+}
+
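+// A minimal caller-side sketch of the Send() contract:
+//   webrtc::DataBuffer buffer("hello");
+//   bool ok = channel->Send(buffer);
+// For an SCTP channel |ok| is true even when the data is merely queued (per
+// the spec); for an RTP channel it reflects the actual send result. It is
+// false whenever the channel is not open.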
+bool DataChannel::Send(const DataBuffer& buffer) {
+  if (state_ != kOpen) {
+    return false;
+  }
+
+  // TODO(jiayl): the spec is unclear about whether the remote side should get
+  // the onmessage event. We need to figure out the expected behavior and
+  // change the code accordingly.
+  if (buffer.size() == 0) {
+    return true;
+  }
+
+  // If the queue is non-empty, we're waiting for SignalReadyToSend,
+  // so just add to the end of the queue and keep waiting.
+  if (!queued_send_data_.Empty()) {
+    // Only SCTP DataChannel queues the outgoing data when the transport is
+    // blocked.
+    ASSERT(data_channel_type_ == cricket::DCT_SCTP);
+    if (!QueueSendDataMessage(buffer)) {
+      Close();
+    }
+    return true;
+  }
+
+  bool success = SendDataMessage(buffer, true);
+  if (data_channel_type_ == cricket::DCT_RTP) {
+    return success;
+  }
+
+  // Always return true for SCTP DataChannel per the spec.
+  return true;
+}
+
+void DataChannel::SetReceiveSsrc(uint32_t receive_ssrc) {
+  ASSERT(data_channel_type_ == cricket::DCT_RTP);
+
+  if (receive_ssrc_set_) {
+    return;
+  }
+  receive_ssrc_ = receive_ssrc;
+  receive_ssrc_set_ = true;
+  UpdateState();
+}
+
+// The remote peer requested that this channel be closed.
+void DataChannel::RemotePeerRequestClose() {
+  DoClose();
+}
+
+void DataChannel::SetSctpSid(int sid) {
+  ASSERT(config_.id < 0 && sid >= 0 && data_channel_type_ == cricket::DCT_SCTP);
+  if (config_.id == sid) {
+    return;
+  }
+
+  config_.id = sid;
+  provider_->AddSctpDataStream(sid);
+}
+
+void DataChannel::OnTransportChannelCreated() {
+  ASSERT(data_channel_type_ == cricket::DCT_SCTP);
+  if (!connected_to_provider_) {
+    connected_to_provider_ = provider_->ConnectDataChannel(this);
+  }
+  // The sid may not have been assigned yet when provider_->ConnectDataChannel
+  // was called, so always add the stream even if connected_to_provider_ is
+  // true.
+  if (config_.id >= 0) {
+    provider_->AddSctpDataStream(config_.id);
+  }
+}
+
+// The underlying transport channel was destroyed.
+// This function makes sure the DataChannel is disconnected and changes state to
+// kClosed.
+void DataChannel::OnTransportChannelDestroyed() {
+  DoClose();
+}
+
+void DataChannel::SetSendSsrc(uint32_t send_ssrc) {
+  ASSERT(data_channel_type_ == cricket::DCT_RTP);
+  if (send_ssrc_set_) {
+    return;
+  }
+  send_ssrc_ = send_ssrc;
+  send_ssrc_set_ = true;
+  UpdateState();
+}
+
+void DataChannel::OnMessage(rtc::Message* msg) {
+  switch (msg->message_id) {
+    case MSG_CHANNELREADY:
+      OnChannelReady(true);
+      break;
+  }
+}
+
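+// Incoming data is matched against this channel: for RTP channels by the
+// receive SSRC, and for SCTP channels by the sid (which cricket delivers in
+// the |ssrc| field of ReceiveDataParams).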
+void DataChannel::OnDataReceived(cricket::DataChannel* channel,
+                                 const cricket::ReceiveDataParams& params,
+                                 const rtc::Buffer& payload) {
+  uint32_t expected_ssrc =
+      (data_channel_type_ == cricket::DCT_RTP) ? receive_ssrc_ : config_.id;
+  if (params.ssrc != expected_ssrc) {
+    return;
+  }
+
+  if (params.type == cricket::DMT_CONTROL) {
+    ASSERT(data_channel_type_ == cricket::DCT_SCTP);
+    if (handshake_state_ != kHandshakeWaitingForAck) {
+      // Ignore it if we are not expecting an ACK message.
+      LOG(LS_WARNING) << "DataChannel received unexpected CONTROL message, "
+                      << "sid = " << params.ssrc;
+      return;
+    }
+    if (ParseDataChannelOpenAckMessage(payload)) {
+      // We can send unordered as soon as we receive the ACK message.
+      handshake_state_ = kHandshakeReady;
+      LOG(LS_INFO) << "DataChannel received OPEN_ACK message, sid = "
+                   << params.ssrc;
+    } else {
+      LOG(LS_WARNING) << "DataChannel failed to parse OPEN_ACK message, sid = "
+                      << params.ssrc;
+    }
+    return;
+  }
+
+  ASSERT(params.type == cricket::DMT_BINARY ||
+         params.type == cricket::DMT_TEXT);
+
+  LOG(LS_VERBOSE) << "DataChannel received DATA message, sid = " << params.ssrc;
+  // We can send unordered as soon as we receive any DATA message since the
+  // remote side must have received the OPEN (and old clients do not send
+  // OPEN_ACK).
+  if (handshake_state_ == kHandshakeWaitingForAck) {
+    handshake_state_ = kHandshakeReady;
+  }
+
+  bool binary = (params.type == cricket::DMT_BINARY);
+  rtc::scoped_ptr<DataBuffer> buffer(new DataBuffer(payload, binary));
+  if (state_ == kOpen && observer_) {
+    observer_->OnMessage(*buffer.get());
+  } else {
+    if (queued_received_data_.byte_count() + payload.size() >
+        kMaxQueuedReceivedDataBytes) {
+      LOG(LS_ERROR) << "Queued received data exceeds the max buffer size.";
+
+      queued_received_data_.Clear();
+      if (data_channel_type_ != cricket::DCT_RTP) {
+        Close();
+      }
+
+      return;
+    }
+    queued_received_data_.Push(buffer.release());
+  }
+}
+
+void DataChannel::OnStreamClosedRemotely(uint32_t sid) {
+  if (data_channel_type_ == cricket::DCT_SCTP && sid == config_.id) {
+    Close();
+  }
+}
+
+void DataChannel::OnChannelReady(bool writable) {
+  writable_ = writable;
+  if (!writable) {
+    return;
+  }
+
+  SendQueuedControlMessages();
+  SendQueuedDataMessages();
+  UpdateState();
+}
+
+void DataChannel::DoClose() {
+  if (state_ == kClosed)
+    return;
+
+  receive_ssrc_set_ = false;
+  send_ssrc_set_ = false;
+  SetState(kClosing);
+  UpdateState();
+}
+
+void DataChannel::UpdateState() {
+  // UpdateState determines what to do from a few state variables.  Include
+  // all conditions required for each state transition here for
+  // clarity. OnChannelReady(true) will send any queued data and then invoke
+  // UpdateState().
+  switch (state_) {
+    case kConnecting: {
+      if (send_ssrc_set_ == receive_ssrc_set_) {
+        if (data_channel_type_ == cricket::DCT_RTP && !connected_to_provider_) {
+          connected_to_provider_ = provider_->ConnectDataChannel(this);
+        }
+        if (connected_to_provider_) {
+          if (handshake_state_ == kHandshakeShouldSendOpen) {
+            rtc::Buffer payload;
+            WriteDataChannelOpenMessage(label_, config_, &payload);
+            SendControlMessage(payload);
+          } else if (handshake_state_ == kHandshakeShouldSendAck) {
+            rtc::Buffer payload;
+            WriteDataChannelOpenAckMessage(&payload);
+            SendControlMessage(payload);
+          }
+          if (writable_ &&
+              (handshake_state_ == kHandshakeReady ||
+               handshake_state_ == kHandshakeWaitingForAck)) {
+            SetState(kOpen);
+            // If we received buffers before the channel became writable,
+            // deliver them now.
+            DeliverQueuedReceivedData();
+          }
+        }
+      }
+      break;
+    }
+    case kOpen: {
+      break;
+    }
+    case kClosing: {
+      if (queued_send_data_.Empty() && queued_control_data_.Empty()) {
+        if (connected_to_provider_) {
+          DisconnectFromProvider();
+        }
+
+        if (!connected_to_provider_ && !send_ssrc_set_ && !receive_ssrc_set_) {
+          SetState(kClosed);
+        }
+      }
+      break;
+    }
+    case kClosed:
+      break;
+  }
+}
+
+void DataChannel::SetState(DataState state) {
+  if (state_ == state) {
+    return;
+  }
+
+  state_ = state;
+  if (observer_) {
+    observer_->OnStateChange();
+  }
+  if (state_ == kClosed) {
+    SignalClosed(this);
+  }
+}
+
+void DataChannel::DisconnectFromProvider() {
+  if (!connected_to_provider_)
+    return;
+
+  provider_->DisconnectDataChannel(this);
+  connected_to_provider_ = false;
+
+  if (data_channel_type_ == cricket::DCT_SCTP && config_.id >= 0) {
+    provider_->RemoveSctpDataStream(config_.id);
+  }
+}
+
+void DataChannel::DeliverQueuedReceivedData() {
+  if (!observer_) {
+    return;
+  }
+
+  while (!queued_received_data_.Empty()) {
+    rtc::scoped_ptr<DataBuffer> buffer(queued_received_data_.Front());
+    observer_->OnMessage(*buffer);
+    queued_received_data_.Pop();
+  }
+}
+
+void DataChannel::SendQueuedDataMessages() {
+  if (queued_send_data_.Empty()) {
+    return;
+  }
+
+  ASSERT(state_ == kOpen || state_ == kClosing);
+
+  uint64_t start_buffered_amount = buffered_amount();
+  while (!queued_send_data_.Empty()) {
+    DataBuffer* buffer = queued_send_data_.Front();
+    if (!SendDataMessage(*buffer, false)) {
+      // Leave the message in the queue if sending is aborted.
+      break;
+    }
+    queued_send_data_.Pop();
+    delete buffer;
+  }
+
+  if (observer_ && buffered_amount() < start_buffered_amount) {
+    observer_->OnBufferedAmountChange(start_buffered_amount);
+  }
+}
+
+bool DataChannel::SendDataMessage(const DataBuffer& buffer,
+                                  bool queue_if_blocked) {
+  cricket::SendDataParams send_params;
+
+  if (data_channel_type_ == cricket::DCT_SCTP) {
+    send_params.ordered = config_.ordered;
+    // Send as ordered if it is still going through OPEN/ACK signaling.
+    if (handshake_state_ != kHandshakeReady && !config_.ordered) {
+      send_params.ordered = true;
+      LOG(LS_VERBOSE) << "Sending data as ordered for unordered DataChannel "
+                      << "because the OPEN_ACK message has not been received.";
+    }
+
+    send_params.max_rtx_count = config_.maxRetransmits;
+    send_params.max_rtx_ms = config_.maxRetransmitTime;
+    send_params.ssrc = config_.id;
+  } else {
+    send_params.ssrc = send_ssrc_;
+  }
+  send_params.type = buffer.binary ? cricket::DMT_BINARY : cricket::DMT_TEXT;
+
+  cricket::SendDataResult send_result = cricket::SDR_SUCCESS;
+  bool success = provider_->SendData(send_params, buffer.data, &send_result);
+
+  if (success) {
+    return true;
+  }
+
+  if (data_channel_type_ != cricket::DCT_SCTP) {
+    return false;
+  }
+
+  if (send_result == cricket::SDR_BLOCK) {
+    if (!queue_if_blocked || QueueSendDataMessage(buffer)) {
+      return false;
+    }
+  }
+  // Close the channel if the error is not SDR_BLOCK, or if queuing the
+  // message failed.
+  LOG(LS_ERROR) << "Closing the DataChannel due to a failure to send data, "
+                << "send_result = " << send_result;
+  Close();
+
+  return false;
+}
+
+bool DataChannel::QueueSendDataMessage(const DataBuffer& buffer) {
+  size_t start_buffered_amount = buffered_amount();
+  if (start_buffered_amount >= kMaxQueuedSendDataBytes) {
+    LOG(LS_ERROR) << "Can't buffer any more data for the data channel.";
+    return false;
+  }
+  queued_send_data_.Push(new DataBuffer(buffer));
+
+  // The buffer can have length zero, in which case there is no change.
+  if (observer_ && buffered_amount() > start_buffered_amount) {
+    observer_->OnBufferedAmountChange(start_buffered_amount);
+  }
+  return true;
+}
+
+void DataChannel::SendQueuedControlMessages() {
+  PacketQueue control_packets;
+  control_packets.Swap(&queued_control_data_);
+
+  while (!control_packets.Empty()) {
+    rtc::scoped_ptr<DataBuffer> buf(control_packets.Front());
+    SendControlMessage(buf->data);
+    control_packets.Pop();
+  }
+}
+
+void DataChannel::QueueControlMessage(const rtc::Buffer& buffer) {
+  queued_control_data_.Push(new DataBuffer(buffer, true));
+}
+
+bool DataChannel::SendControlMessage(const rtc::Buffer& buffer) {
+  bool is_open_message = handshake_state_ == kHandshakeShouldSendOpen;
+
+  ASSERT(data_channel_type_ == cricket::DCT_SCTP &&
+         writable_ &&
+         config_.id >= 0 &&
+         (!is_open_message || !config_.negotiated));
+
+  cricket::SendDataParams send_params;
+  send_params.ssrc = config_.id;
+  // Send data as ordered before we receive any message from the remote peer to
+  // make sure the remote peer will not receive any data before it receives the
+  // OPEN message.
+  send_params.ordered = config_.ordered || is_open_message;
+  send_params.type = cricket::DMT_CONTROL;
+
+  cricket::SendDataResult send_result = cricket::SDR_SUCCESS;
+  bool retval = provider_->SendData(send_params, buffer, &send_result);
+  if (retval) {
+    LOG(LS_INFO) << "Sent CONTROL message on channel " << config_.id;
+
+    if (handshake_state_ == kHandshakeShouldSendAck) {
+      handshake_state_ = kHandshakeReady;
+    } else if (handshake_state_ == kHandshakeShouldSendOpen) {
+      handshake_state_ = kHandshakeWaitingForAck;
+    }
+  } else if (send_result == cricket::SDR_BLOCK) {
+    QueueControlMessage(buffer);
+  } else {
+    LOG(LS_ERROR) << "Closing the DataChannel due to a failure to send"
+                  << " the CONTROL message, send_result = " << send_result;
+    Close();
+  }
+  return retval;
+}
+
+}  // namespace webrtc
diff --git a/webrtc/api/datachannel.h b/webrtc/api/datachannel.h
new file mode 100644
index 0000000..649cb24
--- /dev/null
+++ b/webrtc/api/datachannel.h
@@ -0,0 +1,299 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_DATACHANNEL_H_
+#define WEBRTC_API_DATACHANNEL_H_
+
+#include <deque>
+#include <set>
+#include <string>
+
+#include "talk/session/media/channel.h"
+#include "webrtc/api/datachannelinterface.h"
+#include "webrtc/api/proxy.h"
+#include "webrtc/base/messagehandler.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/sigslot.h"
+#include "webrtc/media/base/mediachannel.h"
+
+namespace webrtc {
+
+class DataChannel;
+
+class DataChannelProviderInterface {
+ public:
+  // Sends the data to the transport.
+  virtual bool SendData(const cricket::SendDataParams& params,
+                        const rtc::Buffer& payload,
+                        cricket::SendDataResult* result) = 0;
+  // Connects to the transport signals.
+  virtual bool ConnectDataChannel(DataChannel* data_channel) = 0;
+  // Disconnects from the transport signals.
+  virtual void DisconnectDataChannel(DataChannel* data_channel) = 0;
+  // Adds the data channel SID to the transport for SCTP.
+  virtual void AddSctpDataStream(int sid) = 0;
+  // Removes the data channel SID from the transport for SCTP.
+  virtual void RemoveSctpDataStream(int sid) = 0;
+  // Returns true if the transport channel is ready to send data.
+  virtual bool ReadyToSendData() const = 0;
+
+ protected:
+  virtual ~DataChannelProviderInterface() {}
+};
+
+struct InternalDataChannelInit : public DataChannelInit {
+  enum OpenHandshakeRole {
+    kOpener,
+    kAcker,
+    kNone
+  };
+  // The default role is kOpener because the default |negotiated| is false.
+  InternalDataChannelInit() : open_handshake_role(kOpener) {}
+  explicit InternalDataChannelInit(const DataChannelInit& base)
+      : DataChannelInit(base), open_handshake_role(kOpener) {
+    // If the channel is externally negotiated, do not send the OPEN message.
+    if (base.negotiated) {
+      open_handshake_role = kNone;
+    }
+  }
+
+  OpenHandshakeRole open_handshake_role;
+};
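+
+// For example, constructing an InternalDataChannelInit from an externally
+// negotiated DataChannelInit skips the OPEN handshake:
+//   DataChannelInit base;
+//   base.negotiated = true;
+//   InternalDataChannelInit init(base);  // init.open_handshake_role == kNone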
+
+// Helper class to allocate unique IDs for SCTP DataChannels
+class SctpSidAllocator {
+ public:
+  // Gets the first unused odd/even id based on the DTLS role. If |role| is
+  // SSL_CLIENT, the allocated id starts from 0 and takes even numbers;
+  // otherwise, the id starts from 1 and takes odd numbers.
+  // Returns false if no id can be allocated.
+  bool AllocateSid(rtc::SSLRole role, int* sid);
+
+  // Attempts to reserve a specific sid. Returns false if it's unavailable.
+  bool ReserveSid(int sid);
+
+  // Indicates that |sid| isn't in use any more, and is thus available again.
+  void ReleaseSid(int sid);
+
+ private:
+  // Checks if |sid| is available to be assigned to a new SCTP data channel.
+  bool IsSidAvailable(int sid) const;
+
+  std::set<int> used_sids_;
+};
+
+// DataChannel is an implementation of the DataChannelInterface based on
+// libjingle's data engine. It provides an implementation of unreliable or
+// reliable data channels. Currently this class is specifically designed to
+// work with both RtpDataEngine and SctpDataEngine.
+
+// DataChannel states:
+// kConnecting: The channel has been created, but the transport might not yet
+//              be ready.
+// kOpen: The channel has a local SSRC set by a call to UpdateSendSsrc and a
+//        remote SSRC set by a call to UpdateReceiveSsrc, and the transport
+//        has been writable at least once.
+// kClosing: DataChannelInterface::Close has been called, or UpdateReceiveSsrc
+//           has been called with SSRC == 0.
+// kClosed: Both UpdateReceiveSsrc and UpdateSendSsrc have been called with
+//          SSRC == 0.
+class DataChannel : public DataChannelInterface,
+                    public sigslot::has_slots<>,
+                    public rtc::MessageHandler {
+ public:
+  static rtc::scoped_refptr<DataChannel> Create(
+      DataChannelProviderInterface* provider,
+      cricket::DataChannelType dct,
+      const std::string& label,
+      const InternalDataChannelInit& config);
+
+  virtual void RegisterObserver(DataChannelObserver* observer);
+  virtual void UnregisterObserver();
+
+  virtual std::string label() const { return label_; }
+  virtual bool reliable() const;
+  virtual bool ordered() const { return config_.ordered; }
+  virtual uint16_t maxRetransmitTime() const {
+    return config_.maxRetransmitTime;
+  }
+  virtual uint16_t maxRetransmits() const { return config_.maxRetransmits; }
+  virtual std::string protocol() const { return config_.protocol; }
+  virtual bool negotiated() const { return config_.negotiated; }
+  virtual int id() const { return config_.id; }
+  virtual uint64_t buffered_amount() const;
+  virtual void Close();
+  virtual DataState state() const { return state_; }
+  virtual bool Send(const DataBuffer& buffer);
+
+  // rtc::MessageHandler override.
+  virtual void OnMessage(rtc::Message* msg);
+
+  // Called when the channel's ready to use.  That can happen when the
+  // underlying DataMediaChannel becomes ready, or when this channel is a new
+  // stream on an existing DataMediaChannel, and we've finished negotiation.
+  void OnChannelReady(bool writable);
+
+  // Sigslots from cricket::DataChannel
+  void OnDataReceived(cricket::DataChannel* channel,
+                      const cricket::ReceiveDataParams& params,
+                      const rtc::Buffer& payload);
+  void OnStreamClosedRemotely(uint32_t sid);
+
+  // The remote peer requested that this channel be closed.
+  void RemotePeerRequestClose();
+
+  // The following methods are for SCTP only.
+
+  // Sets the SCTP sid and adds to transport layer if not set yet. Should only
+  // be called once.
+  void SetSctpSid(int sid);
+  // Called when the transport channel is created.
+  // Only needs to be called for SCTP data channels.
+  void OnTransportChannelCreated();
+  // Called when the transport channel is destroyed.
+  void OnTransportChannelDestroyed();
+
+  // The following methods are for RTP only.
+
+  // Set the SSRC this channel should use to send data on the
+  // underlying data engine. |send_ssrc| == 0 means that the channel is no
+  // longer part of the session negotiation.
+  void SetSendSsrc(uint32_t send_ssrc);
+  // Set the SSRC this channel should use to receive data from the
+  // underlying data engine.
+  void SetReceiveSsrc(uint32_t receive_ssrc);
+
+  cricket::DataChannelType data_channel_type() const {
+    return data_channel_type_;
+  }
+
+  // Emitted when state transitions to kClosed.
+  // In the case of SCTP channels, this signal can be used to tell when the
+  // channel's sid is free.
+  sigslot::signal1<DataChannel*> SignalClosed;
+
+ protected:
+  DataChannel(DataChannelProviderInterface* provider,
+              cricket::DataChannelType dct,
+              const std::string& label);
+  virtual ~DataChannel();
+
+ private:
+  // A packet queue which tracks the total queued bytes. Queued packets are
+  // owned by this class.
+  class PacketQueue {
+   public:
+    PacketQueue();
+    ~PacketQueue();
+
+    size_t byte_count() const {
+      return byte_count_;
+    }
+
+    bool Empty() const;
+
+    DataBuffer* Front();
+
+    void Pop();
+
+    void Push(DataBuffer* packet);
+
+    void Clear();
+
+    void Swap(PacketQueue* other);
+
+   private:
+    std::deque<DataBuffer*> packets_;
+    size_t byte_count_;
+  };
+
+  // The OPEN(_ACK) signaling state.
+  enum HandshakeState {
+    kHandshakeInit,
+    kHandshakeShouldSendOpen,
+    kHandshakeShouldSendAck,
+    kHandshakeWaitingForAck,
+    kHandshakeReady
+  };
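+
+  // Typical progression: the opening side goes kHandshakeShouldSendOpen ->
+  // kHandshakeWaitingForAck -> kHandshakeReady (OPEN sent, then OPEN_ACK or
+  // any DATA message received), while the acking side goes
+  // kHandshakeShouldSendAck -> kHandshakeReady. Pre-negotiated channels start
+  // at kHandshakeReady.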
+
+  bool Init(const InternalDataChannelInit& config);
+  void DoClose();
+  void UpdateState();
+  void SetState(DataState state);
+  void DisconnectFromProvider();
+
+  void DeliverQueuedReceivedData();
+
+  void SendQueuedDataMessages();
+  bool SendDataMessage(const DataBuffer& buffer, bool queue_if_blocked);
+  bool QueueSendDataMessage(const DataBuffer& buffer);
+
+  void SendQueuedControlMessages();
+  void QueueControlMessage(const rtc::Buffer& buffer);
+  bool SendControlMessage(const rtc::Buffer& buffer);
+
+  std::string label_;
+  InternalDataChannelInit config_;
+  DataChannelObserver* observer_;
+  DataState state_;
+  cricket::DataChannelType data_channel_type_;
+  DataChannelProviderInterface* provider_;
+  HandshakeState handshake_state_;
+  bool connected_to_provider_;
+  bool send_ssrc_set_;
+  bool receive_ssrc_set_;
+  bool writable_;
+  uint32_t send_ssrc_;
+  uint32_t receive_ssrc_;
+  // Control messages that always have to get sent out before any queued
+  // data.
+  PacketQueue queued_control_data_;
+  PacketQueue queued_received_data_;
+  PacketQueue queued_send_data_;
+};
+
+// Define proxy for DataChannelInterface.
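+// The generated DataChannelProxy forwards each call to the signaling thread
+// (see proxy.h), so the wrapped channel can be used from other application
+// threads.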
+BEGIN_PROXY_MAP(DataChannel)
+  PROXY_METHOD1(void, RegisterObserver, DataChannelObserver*)
+  PROXY_METHOD0(void, UnregisterObserver)
+  PROXY_CONSTMETHOD0(std::string, label)
+  PROXY_CONSTMETHOD0(bool, reliable)
+  PROXY_CONSTMETHOD0(bool, ordered)
+  PROXY_CONSTMETHOD0(uint16_t, maxRetransmitTime)
+  PROXY_CONSTMETHOD0(uint16_t, maxRetransmits)
+  PROXY_CONSTMETHOD0(std::string, protocol)
+  PROXY_CONSTMETHOD0(bool, negotiated)
+  PROXY_CONSTMETHOD0(int, id)
+  PROXY_CONSTMETHOD0(DataState, state)
+  PROXY_CONSTMETHOD0(uint64_t, buffered_amount)
+  PROXY_METHOD0(void, Close)
+  PROXY_METHOD1(bool, Send, const DataBuffer&)
+END_PROXY()
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_DATACHANNEL_H_
diff --git a/webrtc/api/datachannel_unittest.cc b/webrtc/api/datachannel_unittest.cc
new file mode 100644
index 0000000..d5711e8
--- /dev/null
+++ b/webrtc/api/datachannel_unittest.cc
@@ -0,0 +1,581 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/datachannel.h"
+#include "webrtc/api/sctputils.h"
+#include "webrtc/api/test/fakedatachannelprovider.h"
+#include "webrtc/base/gunit.h"
+
+using webrtc::DataChannel;
+using webrtc::SctpSidAllocator;
+
+class FakeDataChannelObserver : public webrtc::DataChannelObserver {
+ public:
+  FakeDataChannelObserver()
+      : messages_received_(0),
+        on_state_change_count_(0),
+        on_buffered_amount_change_count_(0) {}
+
+  void OnStateChange() {
+    ++on_state_change_count_;
+  }
+
+  void OnBufferedAmountChange(uint64_t previous_amount) {
+    ++on_buffered_amount_change_count_;
+  }
+
+  void OnMessage(const webrtc::DataBuffer& buffer) {
+    ++messages_received_;
+  }
+
+  size_t messages_received() const {
+    return messages_received_;
+  }
+
+  void ResetOnStateChangeCount() {
+    on_state_change_count_ = 0;
+  }
+
+  void ResetOnBufferedAmountChangeCount() {
+    on_buffered_amount_change_count_ = 0;
+  }
+
+  size_t on_state_change_count() const {
+    return on_state_change_count_;
+  }
+
+  size_t on_buffered_amount_change_count() const {
+    return on_buffered_amount_change_count_;
+  }
+
+ private:
+  size_t messages_received_;
+  size_t on_state_change_count_;
+  size_t on_buffered_amount_change_count_;
+};
+
+class SctpDataChannelTest : public testing::Test {
+ protected:
+  SctpDataChannelTest()
+      : webrtc_data_channel_(
+          DataChannel::Create(
+              &provider_, cricket::DCT_SCTP, "test", init_)) {
+  }
+
+  void SetChannelReady() {
+    provider_.set_transport_available(true);
+    webrtc_data_channel_->OnTransportChannelCreated();
+    if (webrtc_data_channel_->id() < 0) {
+      webrtc_data_channel_->SetSctpSid(0);
+    }
+    provider_.set_ready_to_send(true);
+  }
+
+  void AddObserver() {
+    observer_.reset(new FakeDataChannelObserver());
+    webrtc_data_channel_->RegisterObserver(observer_.get());
+  }
+
+  webrtc::InternalDataChannelInit init_;
+  FakeDataChannelProvider provider_;
+  rtc::scoped_ptr<FakeDataChannelObserver> observer_;
+  rtc::scoped_refptr<DataChannel> webrtc_data_channel_;
+};
+
+// Verifies that the data channel is connected to the transport after creation.
+TEST_F(SctpDataChannelTest, ConnectedToTransportOnCreated) {
+  provider_.set_transport_available(true);
+  rtc::scoped_refptr<DataChannel> dc = DataChannel::Create(
+      &provider_, cricket::DCT_SCTP, "test1", init_);
+
+  EXPECT_TRUE(provider_.IsConnected(dc.get()));
+  // The sid is not set yet, so it should not have added the streams.
+  EXPECT_FALSE(provider_.IsSendStreamAdded(dc->id()));
+  EXPECT_FALSE(provider_.IsRecvStreamAdded(dc->id()));
+
+  dc->SetSctpSid(0);
+  EXPECT_TRUE(provider_.IsSendStreamAdded(dc->id()));
+  EXPECT_TRUE(provider_.IsRecvStreamAdded(dc->id()));
+}
+
+// Verifies that the data channel is connected to the transport if the
+// transport is not available initially and becomes available later.
+TEST_F(SctpDataChannelTest, ConnectedAfterTransportBecomesAvailable) {
+  EXPECT_FALSE(provider_.IsConnected(webrtc_data_channel_.get()));
+
+  provider_.set_transport_available(true);
+  webrtc_data_channel_->OnTransportChannelCreated();
+  EXPECT_TRUE(provider_.IsConnected(webrtc_data_channel_.get()));
+}
+
+// Tests the state of the data channel.
+TEST_F(SctpDataChannelTest, StateTransition) {
+  EXPECT_EQ(webrtc::DataChannelInterface::kConnecting,
+            webrtc_data_channel_->state());
+  SetChannelReady();
+
+  EXPECT_EQ(webrtc::DataChannelInterface::kOpen, webrtc_data_channel_->state());
+  webrtc_data_channel_->Close();
+  EXPECT_EQ(webrtc::DataChannelInterface::kClosed,
+            webrtc_data_channel_->state());
+  // Verifies that it's disconnected from the transport.
+  EXPECT_FALSE(provider_.IsConnected(webrtc_data_channel_.get()));
+}
+
+// Tests that DataChannel::buffered_amount() is correct after the channel is
+// blocked.
+TEST_F(SctpDataChannelTest, BufferedAmountWhenBlocked) {
+  AddObserver();
+  SetChannelReady();
+  webrtc::DataBuffer buffer("abcd");
+  EXPECT_TRUE(webrtc_data_channel_->Send(buffer));
+
+  EXPECT_EQ(0U, webrtc_data_channel_->buffered_amount());
+  EXPECT_EQ(0U, observer_->on_buffered_amount_change_count());
+
+  provider_.set_send_blocked(true);
+
+  const int number_of_packets = 3;
+  for (int i = 0; i < number_of_packets; ++i) {
+    EXPECT_TRUE(webrtc_data_channel_->Send(buffer));
+  }
+  EXPECT_EQ(buffer.data.size() * number_of_packets,
+            webrtc_data_channel_->buffered_amount());
+  EXPECT_EQ(number_of_packets, observer_->on_buffered_amount_change_count());
+}
+
+// Tests that the queued data are sent when the channel transitions from blocked
+// to unblocked.
+TEST_F(SctpDataChannelTest, QueuedDataSentWhenUnblocked) {
+  AddObserver();
+  SetChannelReady();
+  webrtc::DataBuffer buffer("abcd");
+  provider_.set_send_blocked(true);
+  EXPECT_TRUE(webrtc_data_channel_->Send(buffer));
+
+  EXPECT_EQ(1U, observer_->on_buffered_amount_change_count());
+
+  provider_.set_send_blocked(false);
+  SetChannelReady();
+  EXPECT_EQ(0U, webrtc_data_channel_->buffered_amount());
+  EXPECT_EQ(2U, observer_->on_buffered_amount_change_count());
+}
+
+// Tests that there is no crash when the channel is blocked right away while
+// trying to send queued data.
+TEST_F(SctpDataChannelTest, BlockedWhenSendQueuedDataNoCrash) {
+  AddObserver();
+  SetChannelReady();
+  webrtc::DataBuffer buffer("abcd");
+  provider_.set_send_blocked(true);
+  EXPECT_TRUE(webrtc_data_channel_->Send(buffer));
+  EXPECT_EQ(1U, observer_->on_buffered_amount_change_count());
+
+  // Set channel ready while it is still blocked.
+  SetChannelReady();
+  EXPECT_EQ(buffer.size(), webrtc_data_channel_->buffered_amount());
+  EXPECT_EQ(1U, observer_->on_buffered_amount_change_count());
+
+  // Unblock the channel to send queued data again, there should be no crash.
+  provider_.set_send_blocked(false);
+  SetChannelReady();
+  EXPECT_EQ(0U, webrtc_data_channel_->buffered_amount());
+  EXPECT_EQ(2U, observer_->on_buffered_amount_change_count());
+}
+
+// Tests that the queued control message is sent when the channel is ready.
+TEST_F(SctpDataChannelTest, OpenMessageSent) {
+  // Initially the id is unassigned.
+  EXPECT_EQ(-1, webrtc_data_channel_->id());
+
+  SetChannelReady();
+  EXPECT_GE(webrtc_data_channel_->id(), 0);
+  EXPECT_EQ(cricket::DMT_CONTROL, provider_.last_send_data_params().type);
+  EXPECT_EQ(provider_.last_send_data_params().ssrc,
+            static_cast<uint32_t>(webrtc_data_channel_->id()));
+}
+
+TEST_F(SctpDataChannelTest, QueuedOpenMessageSent) {
+  provider_.set_send_blocked(true);
+  SetChannelReady();
+  provider_.set_send_blocked(false);
+
+  EXPECT_EQ(cricket::DMT_CONTROL, provider_.last_send_data_params().type);
+  EXPECT_EQ(provider_.last_send_data_params().ssrc,
+            static_cast<uint32_t>(webrtc_data_channel_->id()));
+}
+
+// Tests that the DataChannel created after transport gets ready can enter OPEN
+// state.
+TEST_F(SctpDataChannelTest, LateCreatedChannelTransitionToOpen) {
+  SetChannelReady();
+  webrtc::InternalDataChannelInit init;
+  init.id = 1;
+  rtc::scoped_refptr<DataChannel> dc = DataChannel::Create(
+      &provider_, cricket::DCT_SCTP, "test1", init);
+  EXPECT_EQ(webrtc::DataChannelInterface::kConnecting, dc->state());
+  EXPECT_TRUE_WAIT(webrtc::DataChannelInterface::kOpen == dc->state(),
+                   1000);
+}
+
+// Tests that an unordered DataChannel sends data as ordered until the OPEN_ACK
+// message is received.
+TEST_F(SctpDataChannelTest, SendUnorderedAfterReceivesOpenAck) {
+  SetChannelReady();
+  webrtc::InternalDataChannelInit init;
+  init.id = 1;
+  init.ordered = false;
+  rtc::scoped_refptr<DataChannel> dc = DataChannel::Create(
+      &provider_, cricket::DCT_SCTP, "test1", init);
+
+  EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000);
+
+  // Sends a message and verifies it's ordered.
+  webrtc::DataBuffer buffer("some data");
+  ASSERT_TRUE(dc->Send(buffer));
+  EXPECT_TRUE(provider_.last_send_data_params().ordered);
+
+  // Emulates receiving an OPEN_ACK message.
+  cricket::ReceiveDataParams params;
+  params.ssrc = init.id;
+  params.type = cricket::DMT_CONTROL;
+  rtc::Buffer payload;
+  webrtc::WriteDataChannelOpenAckMessage(&payload);
+  dc->OnDataReceived(NULL, params, payload);
+
+  // Sends another message and verifies it's unordered.
+  ASSERT_TRUE(dc->Send(buffer));
+  EXPECT_FALSE(provider_.last_send_data_params().ordered);
+}
+
+// Tests that an unordered DataChannel sends unordered data after any DATA
+// message is received.
+TEST_F(SctpDataChannelTest, SendUnorderedAfterReceiveData) {
+  SetChannelReady();
+  webrtc::InternalDataChannelInit init;
+  init.id = 1;
+  init.ordered = false;
+  rtc::scoped_refptr<DataChannel> dc = DataChannel::Create(
+      &provider_, cricket::DCT_SCTP, "test1", init);
+
+  EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000);
+
+  // Emulates receiving a DATA message.
+  cricket::ReceiveDataParams params;
+  params.ssrc = init.id;
+  params.type = cricket::DMT_TEXT;
+  webrtc::DataBuffer buffer("data");
+  dc->OnDataReceived(NULL, params, buffer.data);
+
+  // Sends a message and verifies it's unordered.
+  ASSERT_TRUE(dc->Send(buffer));
+  EXPECT_FALSE(provider_.last_send_data_params().ordered);
+}
+
+// Tests that the channel can't open until it has successfully sent the OPEN
+// message.
+TEST_F(SctpDataChannelTest, OpenWaitsForOpenMessage) {
+  webrtc::DataBuffer buffer("foo");
+
+  provider_.set_send_blocked(true);
+  SetChannelReady();
+  EXPECT_EQ(webrtc::DataChannelInterface::kConnecting,
+            webrtc_data_channel_->state());
+  provider_.set_send_blocked(false);
+  EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen,
+                 webrtc_data_channel_->state(), 1000);
+  EXPECT_EQ(cricket::DMT_CONTROL, provider_.last_send_data_params().type);
+}
+
+// Tests that close first makes sure all queued data gets sent.
+TEST_F(SctpDataChannelTest, QueuedCloseFlushes) {
+  webrtc::DataBuffer buffer("foo");
+
+  provider_.set_send_blocked(true);
+  SetChannelReady();
+  EXPECT_EQ(webrtc::DataChannelInterface::kConnecting,
+            webrtc_data_channel_->state());
+  provider_.set_send_blocked(false);
+  EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen,
+                 webrtc_data_channel_->state(), 1000);
+  provider_.set_send_blocked(true);
+  webrtc_data_channel_->Send(buffer);
+  webrtc_data_channel_->Close();
+  provider_.set_send_blocked(false);
+  EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kClosed,
+                 webrtc_data_channel_->state(), 1000);
+  EXPECT_EQ(cricket::DMT_TEXT, provider_.last_send_data_params().type);
+}
+
+// Tests that messages are sent with the right ssrc.
+TEST_F(SctpDataChannelTest, SendDataSsrc) {
+  webrtc_data_channel_->SetSctpSid(1);
+  SetChannelReady();
+  webrtc::DataBuffer buffer("data");
+  EXPECT_TRUE(webrtc_data_channel_->Send(buffer));
+  EXPECT_EQ(1U, provider_.last_send_data_params().ssrc);
+}
+
+// Tests that incoming messages with the wrong ssrc are rejected.
+TEST_F(SctpDataChannelTest, ReceiveDataWithInvalidSsrc) {
+  webrtc_data_channel_->SetSctpSid(1);
+  SetChannelReady();
+
+  AddObserver();
+
+  cricket::ReceiveDataParams params;
+  params.ssrc = 0;
+  webrtc::DataBuffer buffer("abcd");
+  webrtc_data_channel_->OnDataReceived(NULL, params, buffer.data);
+
+  EXPECT_EQ(0U, observer_->messages_received());
+}
+
+// Tests that incoming messages with the right ssrc are accepted.
+TEST_F(SctpDataChannelTest, ReceiveDataWithValidSsrc) {
+  webrtc_data_channel_->SetSctpSid(1);
+  SetChannelReady();
+
+  AddObserver();
+
+  cricket::ReceiveDataParams params;
+  params.ssrc = 1;
+  webrtc::DataBuffer buffer("abcd");
+
+  webrtc_data_channel_->OnDataReceived(NULL, params, buffer.data);
+  EXPECT_EQ(1U, observer_->messages_received());
+}
+
+// Tests that no CONTROL message is sent if the datachannel is negotiated and
+// not created from an OPEN message.
+TEST_F(SctpDataChannelTest, NoMsgSentIfNegotiatedAndNotFromOpenMsg) {
+  webrtc::InternalDataChannelInit config;
+  config.id = 1;
+  config.negotiated = true;
+  config.open_handshake_role = webrtc::InternalDataChannelInit::kNone;
+
+  SetChannelReady();
+  rtc::scoped_refptr<DataChannel> dc = DataChannel::Create(
+      &provider_, cricket::DCT_SCTP, "test1", config);
+
+  EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000);
+  EXPECT_EQ(0U, provider_.last_send_data_params().ssrc);
+}
+
+// Tests that an OPEN_ACK message is sent if the datachannel is created from
+// an OPEN message.
+TEST_F(SctpDataChannelTest, OpenAckSentIfCreatedFromOpenMessage) {
+  webrtc::InternalDataChannelInit config;
+  config.id = 1;
+  config.negotiated = true;
+  config.open_handshake_role = webrtc::InternalDataChannelInit::kAcker;
+
+  SetChannelReady();
+  rtc::scoped_refptr<DataChannel> dc = DataChannel::Create(
+      &provider_, cricket::DCT_SCTP, "test1", config);
+
+  EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, dc->state(), 1000);
+
+  EXPECT_EQ(static_cast<unsigned int>(config.id),
+            provider_.last_send_data_params().ssrc);
+  EXPECT_EQ(cricket::DMT_CONTROL, provider_.last_send_data_params().type);
+}
+
+// Tests the OPEN_ACK role assigned by InternalDataChannelInit.
+TEST_F(SctpDataChannelTest, OpenAckRoleInitialization) {
+  webrtc::InternalDataChannelInit init;
+  EXPECT_EQ(webrtc::InternalDataChannelInit::kOpener, init.open_handshake_role);
+  EXPECT_FALSE(init.negotiated);
+
+  webrtc::DataChannelInit base;
+  base.negotiated = true;
+  webrtc::InternalDataChannelInit init2(base);
+  EXPECT_EQ(webrtc::InternalDataChannelInit::kNone, init2.open_handshake_role);
+}
+
+// Tests that the DataChannel is closed if the sending buffer is full.
+TEST_F(SctpDataChannelTest, ClosedWhenSendBufferFull) {
+  SetChannelReady();
+
+  rtc::Buffer buffer(1024);
+  memset(buffer.data(), 0, buffer.size());
+
+  webrtc::DataBuffer packet(buffer, true);
+  provider_.set_send_blocked(true);
+
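+  // 16 * 1024 sends of a 1 KB packet queue 16 MB of data; one more send then
+  // exceeds the channel's internal send-buffer cap (assumed here to be the
+  // 16 MB kMaxQueuedSendDataBytes limit), which should close the channel.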
+  for (size_t i = 0; i < 16 * 1024 + 1; ++i) {
+    EXPECT_TRUE(webrtc_data_channel_->Send(packet));
+  }
+
+  EXPECT_TRUE(
+      webrtc::DataChannelInterface::kClosed == webrtc_data_channel_->state() ||
+      webrtc::DataChannelInterface::kClosing == webrtc_data_channel_->state());
+}
+
+// Tests that the DataChannel is closed on transport errors.
+TEST_F(SctpDataChannelTest, ClosedOnTransportError) {
+  SetChannelReady();
+  webrtc::DataBuffer buffer("abcd");
+  provider_.set_transport_error();
+
+  EXPECT_TRUE(webrtc_data_channel_->Send(buffer));
+
+  EXPECT_EQ(webrtc::DataChannelInterface::kClosed,
+            webrtc_data_channel_->state());
+}
+
+// Tests that an already closed DataChannel does not fire onStateChange again.
+TEST_F(SctpDataChannelTest, ClosedDataChannelDoesNotFireOnStateChange) {
+  AddObserver();
+  webrtc_data_channel_->Close();
+  // OnStateChange called for kClosing and kClosed.
+  EXPECT_EQ(2U, observer_->on_state_change_count());
+
+  observer_->ResetOnStateChangeCount();
+  webrtc_data_channel_->RemotePeerRequestClose();
+  EXPECT_EQ(0U, observer_->on_state_change_count());
+}
+
+// Tests that RemotePeerRequestClose closes the local DataChannel.
+TEST_F(SctpDataChannelTest, RemotePeerRequestClose) {
+  AddObserver();
+  webrtc_data_channel_->RemotePeerRequestClose();
+
+  // OnStateChange called for kClosing and kClosed.
+  EXPECT_EQ(2U, observer_->on_state_change_count());
+  EXPECT_EQ(webrtc::DataChannelInterface::kClosed,
+            webrtc_data_channel_->state());
+}
+
+// Tests that the DataChannel is closed if the receive buffer is full.
+TEST_F(SctpDataChannelTest, ClosedWhenReceivedBufferFull) {
+  SetChannelReady();
+  rtc::Buffer buffer(1024);
+  memset(buffer.data(), 0, buffer.size());
+
+  cricket::ReceiveDataParams params;
+  params.ssrc = 0;
+
+  // Receiving data without having an observer will overflow the buffer.
+  for (size_t i = 0; i < 16 * 1024 + 1; ++i) {
+    webrtc_data_channel_->OnDataReceived(NULL, params, buffer);
+  }
+  EXPECT_EQ(webrtc::DataChannelInterface::kClosed,
+            webrtc_data_channel_->state());
+}
+
+// Tests that sending empty data returns no error and keeps the channel open.
+TEST_F(SctpDataChannelTest, SendEmptyData) {
+  webrtc_data_channel_->SetSctpSid(1);
+  SetChannelReady();
+  EXPECT_EQ(webrtc::DataChannelInterface::kOpen,
+            webrtc_data_channel_->state());
+
+  webrtc::DataBuffer buffer("");
+  EXPECT_TRUE(webrtc_data_channel_->Send(buffer));
+  EXPECT_EQ(webrtc::DataChannelInterface::kOpen,
+            webrtc_data_channel_->state());
+}
+
+// Tests that a channel can be closed without being opened or assigned an sid.
+TEST_F(SctpDataChannelTest, NeverOpened) {
+  provider_.set_transport_available(true);
+  webrtc_data_channel_->OnTransportChannelCreated();
+  webrtc_data_channel_->Close();
+}
+
+class SctpSidAllocatorTest : public testing::Test {
+ protected:
+  SctpSidAllocator allocator_;
+};
+
+// Verifies that an even SCTP id is allocated for SSL_CLIENT and an odd id for
+// SSL_SERVER.
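+// (The even/odd split mirrors the SCTP data channel establishment protocol:
+// one endpoint allocates even stream ids and the other odd ones, so the two
+// sides never collide when creating channels independently.)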
+TEST_F(SctpSidAllocatorTest, SctpIdAllocationBasedOnRole) {
+  int id;
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_SERVER, &id));
+  EXPECT_EQ(1, id);
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_CLIENT, &id));
+  EXPECT_EQ(0, id);
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_SERVER, &id));
+  EXPECT_EQ(3, id);
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_CLIENT, &id));
+  EXPECT_EQ(2, id);
+}
+
+// Verifies that SCTP ids of existing DataChannels are not reused.
+TEST_F(SctpSidAllocatorTest, SctpIdAllocationNoReuse) {
+  int old_id = 1;
+  EXPECT_TRUE(allocator_.ReserveSid(old_id));
+
+  int new_id;
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_SERVER, &new_id));
+  EXPECT_NE(old_id, new_id);
+
+  old_id = 0;
+  EXPECT_TRUE(allocator_.ReserveSid(old_id));
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_CLIENT, &new_id));
+  EXPECT_NE(old_id, new_id);
+}
+
+// Verifies that SCTP ids of removed DataChannels can be reused.
+TEST_F(SctpSidAllocatorTest, SctpIdReusedForRemovedDataChannel) {
+  int odd_id = 1;
+  int even_id = 0;
+  EXPECT_TRUE(allocator_.ReserveSid(odd_id));
+  EXPECT_TRUE(allocator_.ReserveSid(even_id));
+
+  int allocated_id = -1;
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_SERVER, &allocated_id));
+  EXPECT_EQ(odd_id + 2, allocated_id);
+
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_CLIENT, &allocated_id));
+  EXPECT_EQ(even_id + 2, allocated_id);
+
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_SERVER, &allocated_id));
+  EXPECT_EQ(odd_id + 4, allocated_id);
+
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_CLIENT, &allocated_id));
+  EXPECT_EQ(even_id + 4, allocated_id);
+
+  allocator_.ReleaseSid(odd_id);
+  allocator_.ReleaseSid(even_id);
+
+  // Verifies that removed ids are reused.
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_SERVER, &allocated_id));
+  EXPECT_EQ(odd_id, allocated_id);
+
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_CLIENT, &allocated_id));
+  EXPECT_EQ(even_id, allocated_id);
+
+  // Verifies that used higher ids are not reused.
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_SERVER, &allocated_id));
+  EXPECT_EQ(odd_id + 6, allocated_id);
+
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_CLIENT, &allocated_id));
+  EXPECT_EQ(even_id + 6, allocated_id);
+}
diff --git a/webrtc/api/datachannelinterface.h b/webrtc/api/datachannelinterface.h
new file mode 100644
index 0000000..e291328
--- /dev/null
+++ b/webrtc/api/datachannelinterface.h
@@ -0,0 +1,159 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains interfaces for DataChannels
+// http://dev.w3.org/2011/webrtc/editor/webrtc.html#rtcdatachannel
+
+#ifndef WEBRTC_API_DATACHANNELINTERFACE_H_
+#define WEBRTC_API_DATACHANNELINTERFACE_H_
+
+#include <string>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/buffer.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/refcount.h"
+
+
+namespace webrtc {
+
+struct DataChannelInit {
+  DataChannelInit()
+      : reliable(false),
+        ordered(true),
+        maxRetransmitTime(-1),
+        maxRetransmits(-1),
+        negotiated(false),
+        id(-1) {
+  }
+
+  bool reliable;           // Deprecated.
+  bool ordered;            // True if ordered delivery is required.
+  int maxRetransmitTime;   // The max period of time in milliseconds in which
+                           // retransmissions will be sent.  After this time, no
+                           // more retransmissions will be sent. -1 if unset.
+  int maxRetransmits;      // The max number of retransmissions. -1 if unset.
+  std::string protocol;    // This is set by the application and opaque to the
+                           // WebRTC implementation.
+  bool negotiated;         // True if the channel has been externally negotiated
+                           // and we do not send in-band signalling in the
+                           // form of an "open" message.
+  int id;                  // The stream id, or SID, for SCTP data channels. -1
+                           // if unset.
+};
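+
+// Example (illustrative, not part of this header): an unordered, partially
+// reliable channel that gives up after three retransmissions:
+//
+//   webrtc::DataChannelInit init;
+//   init.ordered = false;
+//   init.maxRetransmits = 3;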
+
+struct DataBuffer {
+  DataBuffer(const rtc::Buffer& data, bool binary)
+      : data(data),
+        binary(binary) {
+  }
+  // For convenience for unit tests.
+  explicit DataBuffer(const std::string& text)
+      : data(text.data(), text.length()),
+        binary(false) {
+  }
+  size_t size() const { return data.size(); }
+
+  rtc::Buffer data;
+  // Indicates if the received data contains UTF-8 or binary data.
+  // Note that the upper layers are left to verify the UTF-8 encoding.
+  // TODO(jiayl): prefer to use an enum instead of a bool.
+  bool binary;
+};
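+
+// Example (illustrative): the two ways to construct a buffer. |payload| is a
+// hypothetical rtc::Buffer holding application bytes.
+//
+//   webrtc::DataBuffer binary_message(payload, true);  // Binary data.
+//   webrtc::DataBuffer text_message("hello");          // UTF-8 text.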
+
+class DataChannelObserver {
+ public:
+  // The data channel state has changed.
+  virtual void OnStateChange() = 0;
+  // A data buffer was successfully received.
+  virtual void OnMessage(const DataBuffer& buffer) = 0;
+  // The data channel's buffered_amount has changed.
+  virtual void OnBufferedAmountChange(uint64_t previous_amount) {}
+
+ protected:
+  virtual ~DataChannelObserver() {}
+};
+
+class DataChannelInterface : public rtc::RefCountInterface {
+ public:
+  // Keep in sync with DataChannel.java:State and
+  // RTCDataChannel.h:RTCDataChannelState.
+  enum DataState {
+    kConnecting,
+    kOpen,  // The DataChannel is ready to send data.
+    kClosing,
+    kClosed
+  };
+
+  static const char* DataStateString(DataState state) {
+    switch (state) {
+      case kConnecting:
+        return "connecting";
+      case kOpen:
+        return "open";
+      case kClosing:
+        return "closing";
+      case kClosed:
+        return "closed";
+    }
+    RTC_CHECK(false) << "Unknown DataChannel state: " << state;
+    return "";
+  }
+
+  virtual void RegisterObserver(DataChannelObserver* observer) = 0;
+  virtual void UnregisterObserver() = 0;
+  // The label attribute represents a label that can be used to distinguish this
+  // DataChannel object from other DataChannel objects.
+  virtual std::string label() const = 0;
+  virtual bool reliable() const = 0;
+
+  // TODO(tommyw): Remove these dummy implementations when all classes have
+  // implemented these APIs. They should all just return the values the
+  // DataChannel was created with.
+  virtual bool ordered() const { return false; }
+  virtual uint16_t maxRetransmitTime() const { return 0; }
+  virtual uint16_t maxRetransmits() const { return 0; }
+  virtual std::string protocol() const { return std::string(); }
+  virtual bool negotiated() const { return false; }
+
+  virtual int id() const = 0;
+  virtual DataState state() const = 0;
+  // The buffered_amount returns the number of bytes of application data
+  // (UTF-8 text and binary data) that have been queued using SendBuffer but
+  // have not yet been transmitted to the network.
+  virtual uint64_t buffered_amount() const = 0;
+  virtual void Close() = 0;
+  // Sends |data| to the remote peer.
+  virtual bool Send(const DataBuffer& buffer) = 0;
+
+ protected:
+  virtual ~DataChannelInterface() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_DATACHANNELINTERFACE_H_
diff --git a/webrtc/api/dtlsidentitystore.cc b/webrtc/api/dtlsidentitystore.cc
new file mode 100644
index 0000000..79c2075
--- /dev/null
+++ b/webrtc/api/dtlsidentitystore.cc
@@ -0,0 +1,250 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/dtlsidentitystore.h"
+
+#include <utility>
+
+#include "webrtc/api/webrtcsessiondescriptionfactory.h"
+#include "webrtc/base/logging.h"
+
+using webrtc::DtlsIdentityRequestObserver;
+
+namespace webrtc {
+
+// The name passed to SSLIdentity::Generate ("WebRTC"). Used for the
+// certificates' subject and issuer name.
+const char kIdentityName[] = "WebRTC";
+
+namespace {
+
+enum {
+  MSG_DESTROY,
+  MSG_GENERATE_IDENTITY,
+  MSG_GENERATE_IDENTITY_RESULT
+};
+
+}  // namespace
+
+// This class runs on the worker thread to generate the identity. It's necessary
+// to separate this class from DtlsIdentityStore so that it can live on the
+// worker thread after DtlsIdentityStore is destroyed.
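+// Message flow, in brief: the store posts MSG_GENERATE_IDENTITY to the worker
+// thread, where the identity is generated; the task then posts
+// MSG_GENERATE_IDENTITY_RESULT (carrying the identity) back to the signaling
+// thread, followed by MSG_DESTROY, which deletes the task there.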
+class DtlsIdentityStoreImpl::WorkerTask : public sigslot::has_slots<>,
+                                          public rtc::MessageHandler {
+ public:
+  WorkerTask(DtlsIdentityStoreImpl* store, rtc::KeyType key_type)
+      : signaling_thread_(rtc::Thread::Current()),
+        store_(store),
+        key_type_(key_type) {
+    store_->SignalDestroyed.connect(this, &WorkerTask::OnStoreDestroyed);
+  }
+
+  virtual ~WorkerTask() { RTC_DCHECK(signaling_thread_->IsCurrent()); }
+
+ private:
+  void GenerateIdentity_w() {
+    LOG(LS_INFO) << "Generating identity, using keytype " << key_type_;
+    rtc::scoped_ptr<rtc::SSLIdentity> identity(
+        rtc::SSLIdentity::Generate(kIdentityName, key_type_));
+
+    // Posting to |this| avoids touching |store_| on threads other than
+    // |signaling_thread_| and thus avoids having to use locks.
+    IdentityResultMessageData* msg = new IdentityResultMessageData(
+        new IdentityResult(key_type_, std::move(identity)));
+    signaling_thread_->Post(this, MSG_GENERATE_IDENTITY_RESULT, msg);
+  }
+
+  void OnMessage(rtc::Message* msg) override {
+    switch (msg->message_id) {
+      case MSG_GENERATE_IDENTITY:
+        // This message always runs on the worker thread.
+        GenerateIdentity_w();
+
+        // Must delete |this|, owned by msg->pdata, on the signaling thread to
+        // avoid races on disconnecting the signal.
+        signaling_thread_->Post(this, MSG_DESTROY, msg->pdata);
+        break;
+      case MSG_GENERATE_IDENTITY_RESULT:
+        RTC_DCHECK(signaling_thread_->IsCurrent());
+        {
+          rtc::scoped_ptr<IdentityResultMessageData> pdata(
+              static_cast<IdentityResultMessageData*>(msg->pdata));
+          if (store_) {
+            store_->OnIdentityGenerated(pdata->data()->key_type_,
+                                        std::move(pdata->data()->identity_));
+          }
+        }
+        break;
+      case MSG_DESTROY:
+        RTC_DCHECK(signaling_thread_->IsCurrent());
+        delete msg->pdata;
+        // |this| has now been deleted. Don't touch member variables.
+        break;
+      default:
+        RTC_CHECK(false) << "Unexpected message type";
+    }
+  }
+
+  void OnStoreDestroyed() {
+    RTC_DCHECK(signaling_thread_->IsCurrent());
+    store_ = nullptr;
+  }
+
+  rtc::Thread* const signaling_thread_;
+  DtlsIdentityStoreImpl* store_;  // Only touched on |signaling_thread_|.
+  const rtc::KeyType key_type_;
+};
+
+DtlsIdentityStoreImpl::DtlsIdentityStoreImpl(rtc::Thread* signaling_thread,
+                                             rtc::Thread* worker_thread)
+    : signaling_thread_(signaling_thread),
+      worker_thread_(worker_thread),
+      request_info_() {
+  RTC_DCHECK(signaling_thread_->IsCurrent());
+  // Preemptively generate identities unless the worker thread and signaling
+  // thread are the same (only do preemptive work in the background).
+  if (worker_thread_ != signaling_thread_) {
+    // Only necessary for RSA.
+    GenerateIdentity(rtc::KT_RSA, nullptr);
+  }
+}
+
+DtlsIdentityStoreImpl::~DtlsIdentityStoreImpl() {
+  RTC_DCHECK(signaling_thread_->IsCurrent());
+  SignalDestroyed();
+}
+
+void DtlsIdentityStoreImpl::RequestIdentity(
+    rtc::KeyType key_type,
+    const rtc::scoped_refptr<webrtc::DtlsIdentityRequestObserver>& observer) {
+  RTC_DCHECK(signaling_thread_->IsCurrent());
+  RTC_DCHECK(observer);
+
+  GenerateIdentity(key_type, observer);
+}
+
+void DtlsIdentityStoreImpl::OnMessage(rtc::Message* msg) {
+  RTC_DCHECK(signaling_thread_->IsCurrent());
+  switch (msg->message_id) {
+    case MSG_GENERATE_IDENTITY_RESULT: {
+      rtc::scoped_ptr<IdentityResultMessageData> pdata(
+          static_cast<IdentityResultMessageData*>(msg->pdata));
+      OnIdentityGenerated(pdata->data()->key_type_,
+                          std::move(pdata->data()->identity_));
+      break;
+    }
+  }
+}
+
+bool DtlsIdentityStoreImpl::HasFreeIdentityForTesting(
+    rtc::KeyType key_type) const {
+  RTC_DCHECK(signaling_thread_->IsCurrent());
+  return request_info_[key_type].free_identity_.get() != nullptr;
+}
+
+void DtlsIdentityStoreImpl::GenerateIdentity(
+    rtc::KeyType key_type,
+    const rtc::scoped_refptr<webrtc::DtlsIdentityRequestObserver>& observer) {
+  RTC_DCHECK(signaling_thread_->IsCurrent());
+
+  // Enqueue observer to be informed when generation of |key_type| is completed.
+  if (observer.get()) {
+    request_info_[key_type].request_observers_.push(observer);
+
+    // Already have a free identity generated?
+    if (request_info_[key_type].free_identity_.get()) {
+      // Return identity async - post even though we are on |signaling_thread_|.
+      LOG(LS_VERBOSE) << "Using a free DTLS identity.";
+      ++request_info_[key_type].gen_in_progress_counts_;
+      IdentityResultMessageData* msg =
+          new IdentityResultMessageData(new IdentityResult(
+              key_type, std::move(request_info_[key_type].free_identity_)));
+      signaling_thread_->Post(this, MSG_GENERATE_IDENTITY_RESULT, msg);
+      return;
+    }
+
+    // Free identity in the process of being generated?
+    if (request_info_[key_type].gen_in_progress_counts_ ==
+            request_info_[key_type].request_observers_.size()) {
+      // No need to do anything, the free identity will be returned to the
+      // observer in a MSG_GENERATE_IDENTITY_RESULT.
+      return;
+    }
+  }
+
+  // Enqueue/Post a worker task to do the generation.
+  ++request_info_[key_type].gen_in_progress_counts_;
+  WorkerTask* task = new WorkerTask(this, key_type);  // Post 1 task/request.
+  // The WorkerTask is owned by the message data to make sure it will not be
+  // leaked even if the task does not get run.
+  WorkerTaskMessageData* msg = new WorkerTaskMessageData(task);
+  worker_thread_->Post(task, MSG_GENERATE_IDENTITY, msg);
+}
+
+void DtlsIdentityStoreImpl::OnIdentityGenerated(
+    rtc::KeyType key_type, rtc::scoped_ptr<rtc::SSLIdentity> identity) {
+  RTC_DCHECK(signaling_thread_->IsCurrent());
+
+  RTC_DCHECK(request_info_[key_type].gen_in_progress_counts_);
+  --request_info_[key_type].gen_in_progress_counts_;
+
+  rtc::scoped_refptr<webrtc::DtlsIdentityRequestObserver> observer;
+  if (!request_info_[key_type].request_observers_.empty()) {
+    observer = request_info_[key_type].request_observers_.front();
+    request_info_[key_type].request_observers_.pop();
+  }
+
+  if (observer.get() == nullptr) {
+    // No observer - store the result in |free_identity_|.
+    RTC_DCHECK(!request_info_[key_type].free_identity_.get());
+    request_info_[key_type].free_identity_.swap(identity);
+    if (request_info_[key_type].free_identity_.get())
+      LOG(LS_VERBOSE) << "A free DTLS identity was saved.";
+    else
+      LOG(LS_WARNING) << "Failed to generate DTLS identity (preemptively).";
+  } else {
+    // Return the result to the observer.
+    if (identity.get()) {
+      LOG(LS_VERBOSE) << "A DTLS identity is returned to an observer.";
+      observer->OnSuccess(std::move(identity));
+    } else {
+      LOG(LS_WARNING) << "Failed to generate DTLS identity.";
+      observer->OnFailure(0);
+    }
+
+    // Preemptively generate another identity of the same type?
+    if (worker_thread_ != signaling_thread_ && // Only do in background thread.
+        key_type == rtc::KT_RSA &&             // Only necessary for RSA.
+        !request_info_[key_type].free_identity_.get() &&
+        request_info_[key_type].request_observers_.size() <=
+            request_info_[key_type].gen_in_progress_counts_) {
+      GenerateIdentity(key_type, nullptr);
+    }
+  }
+}
+
+}  // namespace webrtc
diff --git a/webrtc/api/dtlsidentitystore.h b/webrtc/api/dtlsidentitystore.h
new file mode 100644
index 0000000..9313b996
--- /dev/null
+++ b/webrtc/api/dtlsidentitystore.h
@@ -0,0 +1,165 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_DTLSIDENTITYSTORE_H_
+#define WEBRTC_API_DTLSIDENTITYSTORE_H_
+
+#include <queue>
+#include <string>
+#include <utility>
+
+#include "webrtc/base/messagehandler.h"
+#include "webrtc/base/messagequeue.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/sslidentity.h"
+#include "webrtc/base/thread.h"
+
+namespace webrtc {
+
+// Passed to SSLIdentity::Generate.
+extern const char kIdentityName[];
+
+class SSLIdentity;
+class Thread;
+
+// Used to receive callbacks of DTLS identity requests.
+class DtlsIdentityRequestObserver : public rtc::RefCountInterface {
+ public:
+  virtual void OnFailure(int error) = 0;
+  // TODO(hbos): Unify the OnSuccess method once Chrome code is updated.
+  virtual void OnSuccess(const std::string& der_cert,
+                         const std::string& der_private_key) = 0;
+  // |identity| is a scoped_ptr because rtc::SSLIdentity is not copyable and
+  // the client has to take ownership of the object to make use of it.
+  virtual void OnSuccess(rtc::scoped_ptr<rtc::SSLIdentity> identity) = 0;
+
+ protected:
+  virtual ~DtlsIdentityRequestObserver() {}
+};
+
+// This interface defines an in-memory DTLS identity store, which generates DTLS
+// identities.
+// API calls must be made on the signaling thread, and the callbacks are also
+// called on the signaling thread.
+class DtlsIdentityStoreInterface {
+ public:
+  virtual ~DtlsIdentityStoreInterface() { }
+
+  // The |observer| will be called when the requested identity is ready, or when
+  // identity generation fails.
+  // TODO(torbjorng,hbos): The following RequestIdentity is about to be removed,
+  // see below todo.
+  virtual void RequestIdentity(
+      rtc::KeyType key_type,
+      const rtc::scoped_refptr<DtlsIdentityRequestObserver>& observer) {
+    // Add default parameterization.
+    RequestIdentity(rtc::KeyParams(key_type), observer);
+  }
+  // TODO(torbjorng,hbos): Parameterized key types! The following
+  // RequestIdentity should replace the old one that takes rtc::KeyType. When
+  // the new one is implemented by Chromium and WebRTC the old one should be
+  // removed. crbug.com/544902, webrtc:5092.
+  virtual void RequestIdentity(
+      rtc::KeyParams key_params,
+      const rtc::scoped_refptr<DtlsIdentityRequestObserver>& observer) {
+    // Drop parameterization.
+    RequestIdentity(key_params.type(), observer);
+  }
+};
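+
+// Note: the two RequestIdentity overloads above delegate to each other by
+// default, so a concrete store must override at least one of them; overriding
+// neither would recurse without terminating.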
+
+// The WebRTC default implementation of DtlsIdentityStoreInterface.
+// Identity generation is performed on the worker thread.
+class DtlsIdentityStoreImpl : public DtlsIdentityStoreInterface,
+                              public rtc::MessageHandler {
+ public:
+  // This will start preemptively generating an RSA identity in the
+  // background if the worker thread is not the same as the signaling thread.
+  DtlsIdentityStoreImpl(rtc::Thread* signaling_thread,
+                        rtc::Thread* worker_thread);
+  ~DtlsIdentityStoreImpl() override;
+
+  // DtlsIdentityStoreInterface override.
+  void RequestIdentity(
+      rtc::KeyType key_type,
+      const rtc::scoped_refptr<DtlsIdentityRequestObserver>& observer) override;
+
+  // rtc::MessageHandler override.
+  void OnMessage(rtc::Message* msg) override;
+
+  // Returns true if there is a free identity of |key_type|; used in unit tests.
+  bool HasFreeIdentityForTesting(rtc::KeyType key_type) const;
+
+ private:
+  void GenerateIdentity(
+      rtc::KeyType key_type,
+      const rtc::scoped_refptr<DtlsIdentityRequestObserver>& observer);
+  void OnIdentityGenerated(rtc::KeyType key_type,
+                           rtc::scoped_ptr<rtc::SSLIdentity> identity);
+
+  class WorkerTask;
+  typedef rtc::ScopedMessageData<DtlsIdentityStoreImpl::WorkerTask>
+      WorkerTaskMessageData;
+
+  // A key type-identity pair.
+  struct IdentityResult {
+    IdentityResult(rtc::KeyType key_type,
+                   rtc::scoped_ptr<rtc::SSLIdentity> identity)
+        : key_type_(key_type), identity_(std::move(identity)) {}
+
+    rtc::KeyType key_type_;
+    rtc::scoped_ptr<rtc::SSLIdentity> identity_;
+  };
+
+  typedef rtc::ScopedMessageData<IdentityResult> IdentityResultMessageData;
+
+  sigslot::signal0<> SignalDestroyed;
+
+  rtc::Thread* const signaling_thread_;
+  // TODO(hbos): RSA generation is slow and would be VERY slow if we switch
+  // over to 2048-bit keys; DtlsIdentityStore should use a new thread and not
+  // the "general purpose" worker thread.
+  rtc::Thread* const worker_thread_;
+
+  struct RequestInfo {
+    RequestInfo()
+        : request_observers_(), gen_in_progress_counts_(0), free_identity_() {}
+
+    std::queue<rtc::scoped_refptr<DtlsIdentityRequestObserver>>
+        request_observers_;
+    size_t gen_in_progress_counts_;
+    rtc::scoped_ptr<rtc::SSLIdentity> free_identity_;
+  };
+
+  // One RequestInfo per KeyType. Only touched on the |signaling_thread_|.
+  RequestInfo request_info_[rtc::KT_LAST];
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_DTLSIDENTITYSTORE_H_
diff --git a/webrtc/api/dtlsidentitystore_unittest.cc b/webrtc/api/dtlsidentitystore_unittest.cc
new file mode 100644
index 0000000..f96cf57
--- /dev/null
+++ b/webrtc/api/dtlsidentitystore_unittest.cc
@@ -0,0 +1,152 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/dtlsidentitystore.h"
+
+#include "webrtc/api/webrtcsessiondescriptionfactory.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/ssladapter.h"
+
+using webrtc::DtlsIdentityStoreImpl;
+
+static const int kTimeoutMs = 10000;
+
+class MockDtlsIdentityRequestObserver :
+    public webrtc::DtlsIdentityRequestObserver {
+ public:
+  MockDtlsIdentityRequestObserver()
+      : call_back_called_(false), last_request_success_(false) {}
+  void OnFailure(int error) override {
+    EXPECT_FALSE(call_back_called_);
+    call_back_called_ = true;
+    last_request_success_ = false;
+  }
+  void OnSuccess(const std::string& der_cert,
+                 const std::string& der_private_key) override {
+    LOG(LS_WARNING) << "The string version of OnSuccess is called unexpectedly";
+    EXPECT_TRUE(false);
+  }
+  void OnSuccess(rtc::scoped_ptr<rtc::SSLIdentity> identity) override {
+    EXPECT_FALSE(call_back_called_);
+    call_back_called_ = true;
+    last_request_success_ = true;
+  }
+
+  void Reset() {
+    call_back_called_ = false;
+    last_request_success_ = false;
+  }
+
+  bool LastRequestSucceeded() const {
+    return call_back_called_ && last_request_success_;
+  }
+
+  bool call_back_called() const {
+    return call_back_called_;
+  }
+
+ private:
+  bool call_back_called_;
+  bool last_request_success_;
+};
+
+class DtlsIdentityStoreTest : public testing::Test {
+ protected:
+  DtlsIdentityStoreTest()
+      : worker_thread_(new rtc::Thread()),
+        store_(new DtlsIdentityStoreImpl(rtc::Thread::Current(),
+                                         worker_thread_.get())),
+        observer_(
+            new rtc::RefCountedObject<MockDtlsIdentityRequestObserver>()) {
+    RTC_CHECK(worker_thread_->Start());
+  }
+  ~DtlsIdentityStoreTest() {}
+
+  static void SetUpTestCase() {
+    rtc::InitializeSSL();
+  }
+  static void TearDownTestCase() {
+    rtc::CleanupSSL();
+  }
+
+  rtc::scoped_ptr<rtc::Thread> worker_thread_;
+  rtc::scoped_ptr<DtlsIdentityStoreImpl> store_;
+  rtc::scoped_refptr<MockDtlsIdentityRequestObserver> observer_;
+};
+
+TEST_F(DtlsIdentityStoreTest, RequestIdentitySuccessRSA) {
+  EXPECT_TRUE_WAIT(store_->HasFreeIdentityForTesting(rtc::KT_RSA), kTimeoutMs);
+
+  store_->RequestIdentity(rtc::KT_RSA, observer_.get());
+  EXPECT_TRUE_WAIT(observer_->LastRequestSucceeded(), kTimeoutMs);
+
+  EXPECT_TRUE_WAIT(store_->HasFreeIdentityForTesting(rtc::KT_RSA), kTimeoutMs);
+
+  observer_->Reset();
+
+  // Verifies that the callback is async when a free identity is ready.
+  store_->RequestIdentity(rtc::KT_RSA, observer_.get());
+  EXPECT_FALSE(observer_->call_back_called());
+  EXPECT_TRUE_WAIT(observer_->LastRequestSucceeded(), kTimeoutMs);
+}
+
+TEST_F(DtlsIdentityStoreTest, RequestIdentitySuccessECDSA) {
+  // Since the store currently does not preemptively generate free ECDSA
+  // identities, we do not invoke HasFreeIdentityForTesting between requests.
+
+  store_->RequestIdentity(rtc::KT_ECDSA, observer_.get());
+  EXPECT_TRUE_WAIT(observer_->LastRequestSucceeded(), kTimeoutMs);
+
+  observer_->Reset();
+
+  // Verifies that the callback is async for the second request as well.
+  store_->RequestIdentity(rtc::KT_ECDSA, observer_.get());
+  EXPECT_FALSE(observer_->call_back_called());
+  EXPECT_TRUE_WAIT(observer_->LastRequestSucceeded(), kTimeoutMs);
+}
+
+TEST_F(DtlsIdentityStoreTest, DeleteStoreEarlyNoCrashRSA) {
+  EXPECT_FALSE(store_->HasFreeIdentityForTesting(rtc::KT_RSA));
+
+  store_->RequestIdentity(rtc::KT_RSA, observer_.get());
+  store_.reset();
+
+  worker_thread_->Stop();
+  EXPECT_FALSE(observer_->call_back_called());
+}
+
+TEST_F(DtlsIdentityStoreTest, DeleteStoreEarlyNoCrashECDSA) {
+  EXPECT_FALSE(store_->HasFreeIdentityForTesting(rtc::KT_ECDSA));
+
+  store_->RequestIdentity(rtc::KT_ECDSA, observer_.get());
+  store_.reset();
+
+  worker_thread_->Stop();
+  EXPECT_FALSE(observer_->call_back_called());
+}
+
diff --git a/webrtc/api/dtmfsender.cc b/webrtc/api/dtmfsender.cc
new file mode 100644
index 0000000..a10305c
--- /dev/null
+++ b/webrtc/api/dtmfsender.cc
@@ -0,0 +1,255 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/dtmfsender.h"
+
+#include <ctype.h>
+
+#include <string>
+
+#include "webrtc/base/logging.h"
+#include "webrtc/base/thread.h"
+
+namespace webrtc {
+
+enum {
+  MSG_DO_INSERT_DTMF = 0,
+};
+
+// RFC4733
+//  +-------+--------+------+---------+
+//  | Event | Code   | Type | Volume? |
+//  +-------+--------+------+---------+
+//  | 0--9  | 0--9   | tone | yes     |
+//  | *     | 10     | tone | yes     |
+//  | #     | 11     | tone | yes     |
+//  | A--D  | 12--15 | tone | yes     |
+//  +-------+--------+------+---------+
+// The "," is a special event defined by the WebRTC spec. It means to delay for
+// 2 seconds before processing the next tone. We use -1 as its code.
+static const int kDtmfCodeTwoSecondDelay = -1;
+static const int kDtmfTwoSecondInMs = 2000;
+static const char kDtmfValidTones[] = ",0123456789*#ABCDabcd";
+static const char kDtmfTonesTable[] = ",0123456789*#ABCD";
+// The duration cannot be more than 6000 ms or less than 70 ms. The gap between
+// tones must be at least 50 ms.
+static const int kDtmfDefaultDurationMs = 100;
+static const int kDtmfMinDurationMs = 70;
+static const int kDtmfMaxDurationMs = 6000;
+static const int kDtmfDefaultGapMs = 50;
+static const int kDtmfMinGapMs = 50;
+
+// Get DTMF code from the DTMF event character.
+bool GetDtmfCode(char tone, int* code) {
+  // Convert a-d to A-D.
+  char event = toupper(tone);
+  const char* p = strchr(kDtmfTonesTable, event);
+  if (!p) {
+    return false;
+  }
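+  // kDtmfTonesTable places ',' at index 0, so the subtraction below maps ','
+  // to kDtmfCodeTwoSecondDelay (-1), '0'-'9' to 0-9, '*' to 10, '#' to 11 and
+  // 'A'-'D' to 12-15, matching the RFC 4733 table above.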
+  *code = p - kDtmfTonesTable - 1;
+  return true;
+}
+
+rtc::scoped_refptr<DtmfSender> DtmfSender::Create(
+    AudioTrackInterface* track,
+    rtc::Thread* signaling_thread,
+    DtmfProviderInterface* provider) {
+  if (!track || !signaling_thread) {
+    return NULL;
+  }
+  rtc::scoped_refptr<DtmfSender> dtmf_sender(
+      new rtc::RefCountedObject<DtmfSender>(track, signaling_thread,
+                                            provider));
+  return dtmf_sender;
+}
+
+DtmfSender::DtmfSender(AudioTrackInterface* track,
+                       rtc::Thread* signaling_thread,
+                       DtmfProviderInterface* provider)
+    : track_(track),
+      observer_(NULL),
+      signaling_thread_(signaling_thread),
+      provider_(provider),
+      duration_(kDtmfDefaultDurationMs),
+      inter_tone_gap_(kDtmfDefaultGapMs) {
+  ASSERT(track_ != NULL);
+  ASSERT(signaling_thread_ != NULL);
+  // TODO(deadbeef): Once we can use shared_ptr and weak_ptr,
+  // do that instead of relying on a "destroyed" signal.
+  if (provider_) {
+    ASSERT(provider_->GetOnDestroyedSignal() != NULL);
+    provider_->GetOnDestroyedSignal()->connect(
+        this, &DtmfSender::OnProviderDestroyed);
+  }
+}
+
+DtmfSender::~DtmfSender() {
+  StopSending();
+}
+
+void DtmfSender::RegisterObserver(DtmfSenderObserverInterface* observer) {
+  observer_ = observer;
+}
+
+void DtmfSender::UnregisterObserver() {
+  observer_ = NULL;
+}
+
+bool DtmfSender::CanInsertDtmf() {
+  ASSERT(signaling_thread_->IsCurrent());
+  if (!provider_) {
+    return false;
+  }
+  return provider_->CanInsertDtmf(track_->id());
+}
+
+bool DtmfSender::InsertDtmf(const std::string& tones, int duration,
+                            int inter_tone_gap) {
+  ASSERT(signaling_thread_->IsCurrent());
+
+  if (duration > kDtmfMaxDurationMs ||
+      duration < kDtmfMinDurationMs ||
+      inter_tone_gap < kDtmfMinGapMs) {
+    LOG(LS_ERROR) << "InsertDtmf is called with invalid duration or tones gap. "
+        << "The duration cannot be more than " << kDtmfMaxDurationMs
+        << "ms or less than " << kDtmfMinDurationMs << "ms. "
+        << "The gap between tones must be at least " << kDtmfMinGapMs << "ms.";
+    return false;
+  }
+
+  if (!CanInsertDtmf()) {
+    LOG(LS_ERROR)
+        << "InsertDtmf is called on DtmfSender that can't send DTMF.";
+    return false;
+  }
+
+  tones_ = tones;
+  duration_ = duration;
+  inter_tone_gap_ = inter_tone_gap;
+  // Clear the previous queue.
+  signaling_thread_->Clear(this, MSG_DO_INSERT_DTMF);
+  // Kick off a new DTMF task queue.
+  signaling_thread_->Post(this, MSG_DO_INSERT_DTMF);
+  return true;
+}
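+
+// Example (illustrative): InsertDtmf("12,#", 100, 50) plays '1', '2' and '#'
+// for 100 ms each with 50 ms gaps, and the ',' inserts a two-second pause
+// before '#' is processed.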
+
+const AudioTrackInterface* DtmfSender::track() const {
+  return track_;
+}
+
+std::string DtmfSender::tones() const {
+  return tones_;
+}
+
+int DtmfSender::duration() const {
+  return duration_;
+}
+
+int DtmfSender::inter_tone_gap() const {
+  return inter_tone_gap_;
+}
+
+void DtmfSender::OnMessage(rtc::Message* msg) {
+  switch (msg->message_id) {
+    case MSG_DO_INSERT_DTMF: {
+      DoInsertDtmf();
+      break;
+    }
+    default: {
+      ASSERT(false);
+      break;
+    }
+  }
+}
+
+void DtmfSender::DoInsertDtmf() {
+  ASSERT(signaling_thread_->IsCurrent());
+
+  // Get the first DTMF tone from the tone buffer. Unrecognized characters will
+  // be ignored and skipped.
+  size_t first_tone_pos = tones_.find_first_of(kDtmfValidTones);
+  int code = 0;
+  if (first_tone_pos == std::string::npos) {
+    tones_.clear();
+    // Fire an "OnToneChange" event with an empty string and stop.
+    if (observer_) {
+      observer_->OnToneChange(std::string());
+    }
+    return;
+  } else {
+    char tone = tones_[first_tone_pos];
+    if (!GetDtmfCode(tone, &code)) {
+      // The find_first_of(kDtmfValidTones) call above should have guaranteed
+      // that |tone| is a valid DTMF tone.
+      ASSERT(false);
+    }
+  }
+
+  int tone_gap = inter_tone_gap_;
+  if (code == kDtmfCodeTwoSecondDelay) {
+    // Special case defined by WebRTC: the character ',' indicates a delay of
+    // 2 seconds before processing the next character in the tones parameter.
+    tone_gap = kDtmfTwoSecondInMs;
+  } else {
+    if (!provider_) {
+      LOG(LS_ERROR) << "The DtmfProvider has been destroyed.";
+      return;
+    }
+    // The provider starts playout of the given tone on the
+    // associated RTP media stream, using the appropriate codec.
+    if (!provider_->InsertDtmf(track_->id(), code, duration_)) {
+      LOG(LS_ERROR) << "The DtmfProvider can no longer send DTMF.";
+      return;
+    }
+    // Wait for the number of milliseconds specified by |duration_|.
+    tone_gap += duration_;
+  }
+
+  // Fire an "OnToneChange" event with the tone that was just processed.
+  if (observer_) {
+    observer_->OnToneChange(tones_.substr(first_tone_pos, 1));
+  }
+
+  // Erase the unrecognized characters plus the tone that was just processed.
+  tones_.erase(0, first_tone_pos + 1);
+
+  // Continue with the next tone.
+  signaling_thread_->PostDelayed(tone_gap, this, MSG_DO_INSERT_DTMF);
+}
+
+void DtmfSender::OnProviderDestroyed() {
+  LOG(LS_INFO) << "The Dtmf provider is deleted. Clear the sending queue.";
+  StopSending();
+  provider_ = NULL;
+}
+
+void DtmfSender::StopSending() {
+  signaling_thread_->Clear(this);
+}
+
+}  // namespace webrtc
diff --git a/webrtc/api/dtmfsender.h b/webrtc/api/dtmfsender.h
new file mode 100644
index 0000000..f0f0e68
--- /dev/null
+++ b/webrtc/api/dtmfsender.h
@@ -0,0 +1,139 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_DTMFSENDER_H_
+#define WEBRTC_API_DTMFSENDER_H_
+
+#include <string>
+
+#include "webrtc/api/dtmfsenderinterface.h"
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/api/proxy.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/messagehandler.h"
+#include "webrtc/base/refcount.h"
+
+// DtmfSender is the native implementation of the RTCDTMFSender defined by
+// the WebRTC W3C Editor's Draft.
+// http://dev.w3.org/2011/webrtc/editor/webrtc.html
+
+namespace rtc {
+class Thread;
+}
+
+namespace webrtc {
+
+// This interface is called by DtmfSender to talk to the actual audio channel
+// to send DTMF.
+class DtmfProviderInterface {
+ public:
+  // Returns true if the audio track with the given id (|track_id|) is capable
+  // of sending DTMF. Otherwise returns false.
+  virtual bool CanInsertDtmf(const std::string& track_id) = 0;
+  // Sends DTMF |code| via the audio track with the given id (|track_id|).
+  // The |duration| indicates the length of the DTMF tone in ms.
+  // Returns true on success and false on failure.
+  virtual bool InsertDtmf(const std::string& track_id,
+                          int code, int duration) = 0;
+  // Returns a |sigslot::signal0<>| signal. The signal should fire before
+  // the provider is destroyed.
+  virtual sigslot::signal0<>* GetOnDestroyedSignal() = 0;
+
+ protected:
+  virtual ~DtmfProviderInterface() {}
+};
+
+class DtmfSender
+    : public DtmfSenderInterface,
+      public sigslot::has_slots<>,
+      public rtc::MessageHandler {
+ public:
+  static rtc::scoped_refptr<DtmfSender> Create(
+      AudioTrackInterface* track,
+      rtc::Thread* signaling_thread,
+      DtmfProviderInterface* provider);
+
+  // Implements DtmfSenderInterface.
+  void RegisterObserver(DtmfSenderObserverInterface* observer) override;
+  void UnregisterObserver() override;
+  bool CanInsertDtmf() override;
+  bool InsertDtmf(const std::string& tones,
+                  int duration,
+                  int inter_tone_gap) override;
+  const AudioTrackInterface* track() const override;
+  std::string tones() const override;
+  int duration() const override;
+  int inter_tone_gap() const override;
+
+ protected:
+  DtmfSender(AudioTrackInterface* track,
+             rtc::Thread* signaling_thread,
+             DtmfProviderInterface* provider);
+  virtual ~DtmfSender();
+
+ private:
+  DtmfSender();
+
+  // Implements MessageHandler.
+  void OnMessage(rtc::Message* msg) override;
+
+  // The DTMF sending task.
+  void DoInsertDtmf();
+
+  void OnProviderDestroyed();
+
+  void StopSending();
+
+  rtc::scoped_refptr<AudioTrackInterface> track_;
+  DtmfSenderObserverInterface* observer_;
+  rtc::Thread* signaling_thread_;
+  DtmfProviderInterface* provider_;
+  std::string tones_;
+  int duration_;
+  int inter_tone_gap_;
+
+  RTC_DISALLOW_COPY_AND_ASSIGN(DtmfSender);
+};
+
+// Define proxy for DtmfSenderInterface.
+BEGIN_PROXY_MAP(DtmfSender)
+  PROXY_METHOD1(void, RegisterObserver, DtmfSenderObserverInterface*)
+  PROXY_METHOD0(void, UnregisterObserver)
+  PROXY_METHOD0(bool, CanInsertDtmf)
+  PROXY_METHOD3(bool, InsertDtmf, const std::string&, int, int)
+  PROXY_CONSTMETHOD0(const AudioTrackInterface*, track)
+  PROXY_CONSTMETHOD0(std::string, tones)
+  PROXY_CONSTMETHOD0(int, duration)
+  PROXY_CONSTMETHOD0(int, inter_tone_gap)
+END_PROXY()
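+
+// The generated DtmfSenderProxy forwards each of these calls to the signaling
+// thread, so the sender can be safely used from other threads via the proxy.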
+
+// Get DTMF code from the DTMF event character.
+bool GetDtmfCode(char tone, int* code);
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_DTMFSENDER_H_
diff --git a/webrtc/api/dtmfsender_unittest.cc b/webrtc/api/dtmfsender_unittest.cc
new file mode 100644
index 0000000..e754ca2
--- /dev/null
+++ b/webrtc/api/dtmfsender_unittest.cc
@@ -0,0 +1,359 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/dtmfsender.h"
+
+#include <set>
+#include <string>
+#include <vector>
+
+#include "webrtc/api/audiotrack.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/timeutils.h"
+
+using webrtc::AudioTrackInterface;
+using webrtc::AudioTrack;
+using webrtc::DtmfProviderInterface;
+using webrtc::DtmfSender;
+using webrtc::DtmfSenderObserverInterface;
+
+static const char kTestAudioLabel[] = "test_audio_track";
+static const int kMaxWaitMs = 3000;
+
+class FakeDtmfObserver : public DtmfSenderObserverInterface {
+ public:
+  FakeDtmfObserver() : completed_(false) {}
+
+  // Implements DtmfSenderObserverInterface.
+  void OnToneChange(const std::string& tone) override {
+    LOG(LS_VERBOSE) << "FakeDtmfObserver::OnToneChange '" << tone << "'.";
+    tones_.push_back(tone);
+    if (tone.empty()) {
+      completed_ = true;
+    }
+  }
+
+  // getters
+  const std::vector<std::string>& tones() const {
+    return tones_;
+  }
+  bool completed() const {
+    return completed_;
+  }
+
+ private:
+  std::vector<std::string> tones_;
+  bool completed_;
+};
+
+class FakeDtmfProvider : public DtmfProviderInterface {
+ public:
+  struct DtmfInfo {
+    DtmfInfo(int code, int duration, int gap)
+      : code(code),
+        duration(duration),
+        gap(gap) {}
+    int code;
+    int duration;
+    int gap;
+  };
+
+  FakeDtmfProvider() : last_insert_dtmf_call_(0) {}
+
+  ~FakeDtmfProvider() {
+    SignalDestroyed();
+  }
+
+  // Implements DtmfProviderInterface.
+  bool CanInsertDtmf(const std::string& track_label) override {
+    return (can_insert_dtmf_tracks_.count(track_label) != 0);
+  }
+
+  bool InsertDtmf(const std::string& track_label,
+                  int code,
+                  int duration) override {
+    int gap = 0;
+    // TODO(ronghuawu): Make the timer (basically the rtc::TimeNanos)
+    // mockable and use a fake timer in the unit tests.
+    if (last_insert_dtmf_call_ > 0) {
+      gap = static_cast<int>(rtc::Time() - last_insert_dtmf_call_);
+    }
+    last_insert_dtmf_call_ = rtc::Time();
+
+    LOG(LS_VERBOSE) << "FakeDtmfProvider::InsertDtmf code=" << code
+                    << " duration=" << duration
+                    << " gap=" << gap << ".";
+    dtmf_info_queue_.push_back(DtmfInfo(code, duration, gap));
+    return true;
+  }
+
+  sigslot::signal0<>* GetOnDestroyedSignal() override {
+    return &SignalDestroyed;
+  }
+
+  // getter and setter
+  const std::vector<DtmfInfo>& dtmf_info_queue() const {
+    return dtmf_info_queue_;
+  }
+
+  // helper functions
+  void AddCanInsertDtmfTrack(const std::string& label) {
+    can_insert_dtmf_tracks_.insert(label);
+  }
+  void RemoveCanInsertDtmfTrack(const std::string& label) {
+    can_insert_dtmf_tracks_.erase(label);
+  }
+
+ private:
+  std::set<std::string> can_insert_dtmf_tracks_;
+  std::vector<DtmfInfo> dtmf_info_queue_;
+  int64_t last_insert_dtmf_call_;
+  sigslot::signal0<> SignalDestroyed;
+};
+
+class DtmfSenderTest : public testing::Test {
+ protected:
+  DtmfSenderTest()
+      : track_(AudioTrack::Create(kTestAudioLabel, NULL)),
+        observer_(new rtc::RefCountedObject<FakeDtmfObserver>()),
+        provider_(new FakeDtmfProvider()) {
+    provider_->AddCanInsertDtmfTrack(kTestAudioLabel);
+    dtmf_ = DtmfSender::Create(track_, rtc::Thread::Current(),
+                               provider_.get());
+    dtmf_->RegisterObserver(observer_.get());
+  }
+
+  ~DtmfSenderTest() {
+    if (dtmf_.get()) {
+      dtmf_->UnregisterObserver();
+    }
+  }
+
+  // Constructs a list of DtmfInfo from |tones|, |duration| and
+  // |inter_tone_gap|.
+  void GetDtmfInfoFromString(const std::string& tones, int duration,
+                             int inter_tone_gap,
+                             std::vector<FakeDtmfProvider::DtmfInfo>* dtmfs) {
+    // Init extra_delay as -inter_tone_gap - duration to ensure the first
+    // DtmfInfo's gap field will be 0.
+    int extra_delay = -1 * (inter_tone_gap + duration);
+
+    std::string::const_iterator it = tones.begin();
+    for (; it != tones.end(); ++it) {
+      char tone = *it;
+      int code = 0;
+      webrtc::GetDtmfCode(tone, &code);
+      if (tone == ',') {
+        extra_delay = 2000;  // 2 seconds
+      } else {
+        dtmfs->push_back(FakeDtmfProvider::DtmfInfo(code, duration,
+                         duration + inter_tone_gap + extra_delay));
+        extra_delay = 0;
+      }
+    }
+  }
+
+  void VerifyExpectedState(AudioTrackInterface* track,
+                           const std::string& tones,
+                           int duration, int inter_tone_gap) {
+    EXPECT_EQ(track, dtmf_->track());
+    EXPECT_EQ(tones, dtmf_->tones());
+    EXPECT_EQ(duration, dtmf_->duration());
+    EXPECT_EQ(inter_tone_gap, dtmf_->inter_tone_gap());
+  }
+
+  // Verify the provider got all the expected calls.
+  void VerifyOnProvider(const std::string& tones, int duration,
+                        int inter_tone_gap) {
+    std::vector<FakeDtmfProvider::DtmfInfo> dtmf_queue_ref;
+    GetDtmfInfoFromString(tones, duration, inter_tone_gap, &dtmf_queue_ref);
+    VerifyOnProvider(dtmf_queue_ref);
+  }
+
+  void VerifyOnProvider(
+      const std::vector<FakeDtmfProvider::DtmfInfo>& dtmf_queue_ref) {
+    const std::vector<FakeDtmfProvider::DtmfInfo>& dtmf_queue =
+        provider_->dtmf_info_queue();
+    ASSERT_EQ(dtmf_queue_ref.size(), dtmf_queue.size());
+    std::vector<FakeDtmfProvider::DtmfInfo>::const_iterator it_ref =
+        dtmf_queue_ref.begin();
+    std::vector<FakeDtmfProvider::DtmfInfo>::const_iterator it =
+        dtmf_queue.begin();
+    while (it_ref != dtmf_queue_ref.end() && it != dtmf_queue.end()) {
+      EXPECT_EQ(it_ref->code, it->code);
+      EXPECT_EQ(it_ref->duration, it->duration);
+      // Allow ~100ms error.
+      EXPECT_GE(it_ref->gap, it->gap - 100);
+      EXPECT_LE(it_ref->gap, it->gap + 100);
+      ++it_ref;
+      ++it;
+    }
+  }
+
+  // Verify the observer got all the expected callbacks.
+  void VerifyOnObserver(const std::string& tones_ref) {
+    const std::vector<std::string>& tones = observer_->tones();
+    // The observer will get an empty string at the end.
+    EXPECT_EQ(tones_ref.size() + 1, tones.size());
+    EXPECT_TRUE(tones.back().empty());
+    std::string::const_iterator it_ref = tones_ref.begin();
+    std::vector<std::string>::const_iterator it = tones.begin();
+    while (it_ref != tones_ref.end() && it != tones.end()) {
+      EXPECT_EQ(*it_ref, it->at(0));
+      ++it_ref;
+      ++it;
+    }
+  }
+
+  rtc::scoped_refptr<AudioTrackInterface> track_;
+  rtc::scoped_ptr<FakeDtmfObserver> observer_;
+  rtc::scoped_ptr<FakeDtmfProvider> provider_;
+  rtc::scoped_refptr<DtmfSender> dtmf_;
+};
+
+TEST_F(DtmfSenderTest, CanInsertDtmf) {
+  EXPECT_TRUE(dtmf_->CanInsertDtmf());
+  provider_->RemoveCanInsertDtmfTrack(kTestAudioLabel);
+  EXPECT_FALSE(dtmf_->CanInsertDtmf());
+}
+
+TEST_F(DtmfSenderTest, InsertDtmf) {
+  std::string tones = "@1%a&*$";
+  int duration = 100;
+  int inter_tone_gap = 50;
+  EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap));
+  EXPECT_TRUE_WAIT(observer_->completed(), kMaxWaitMs);
+
+  // The unrecognized characters should be ignored.
+  std::string known_tones = "1a*";
+  VerifyOnProvider(known_tones, duration, inter_tone_gap);
+  VerifyOnObserver(known_tones);
+}
+
+TEST_F(DtmfSenderTest, InsertDtmfTwice) {
+  std::string tones1 = "12";
+  std::string tones2 = "ab";
+  int duration = 100;
+  int inter_tone_gap = 50;
+  EXPECT_TRUE(dtmf_->InsertDtmf(tones1, duration, inter_tone_gap));
+  VerifyExpectedState(track_, tones1, duration, inter_tone_gap);
+  // Wait until the first tone got sent.
+  EXPECT_TRUE_WAIT(observer_->tones().size() == 1, kMaxWaitMs);
+  VerifyExpectedState(track_, "2", duration, inter_tone_gap);
+  // Insert with another tone buffer.
+  EXPECT_TRUE(dtmf_->InsertDtmf(tones2, duration, inter_tone_gap));
+  VerifyExpectedState(track_, tones2, duration, inter_tone_gap);
+  // Wait until it's completed.
+  EXPECT_TRUE_WAIT(observer_->completed(), kMaxWaitMs);
+
+  std::vector<FakeDtmfProvider::DtmfInfo> dtmf_queue_ref;
+  GetDtmfInfoFromString("1", duration, inter_tone_gap, &dtmf_queue_ref);
+  GetDtmfInfoFromString("ab", duration, inter_tone_gap, &dtmf_queue_ref);
+  VerifyOnProvider(dtmf_queue_ref);
+  VerifyOnObserver("1ab");
+}
+
+TEST_F(DtmfSenderTest, InsertDtmfWhileProviderIsDeleted) {
+  std::string tones = "@1%a&*$";
+  int duration = 100;
+  int inter_tone_gap = 50;
+  EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap));
+  // Wait until the first tone got sent.
+  EXPECT_TRUE_WAIT(observer_->tones().size() == 1, kMaxWaitMs);
+  // Delete provider.
+  provider_.reset();
+  // The queue should be discontinued so no more tone callbacks.
+  WAIT(false, 200);
+  EXPECT_EQ(1U, observer_->tones().size());
+}
+
+TEST_F(DtmfSenderTest, InsertDtmfWhileSenderIsDeleted) {
+  std::string tones = "@1%a&*$";
+  int duration = 100;
+  int inter_tone_gap = 50;
+  EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap));
+  // Wait until the first tone got sent.
+  EXPECT_TRUE_WAIT(observer_->tones().size() == 1, kMaxWaitMs);
+  // Delete the sender.
+  dtmf_ = NULL;
+  // The queue should be discontinued so no more tone callbacks.
+  WAIT(false, 200);
+  EXPECT_EQ(1U, observer_->tones().size());
+}
+
+TEST_F(DtmfSenderTest, InsertEmptyTonesToCancelPreviousTask) {
+  std::string tones1 = "12";
+  std::string tones2 = "";
+  int duration = 100;
+  int inter_tone_gap = 50;
+  EXPECT_TRUE(dtmf_->InsertDtmf(tones1, duration, inter_tone_gap));
+  // Wait until the first tone got sent.
+  EXPECT_TRUE_WAIT(observer_->tones().size() == 1, kMaxWaitMs);
+  // Insert with another tone buffer.
+  EXPECT_TRUE(dtmf_->InsertDtmf(tones2, duration, inter_tone_gap));
+  // Wait until it's completed.
+  EXPECT_TRUE_WAIT(observer_->completed(), kMaxWaitMs);
+
+  std::vector<FakeDtmfProvider::DtmfInfo> dtmf_queue_ref;
+  GetDtmfInfoFromString("1", duration, inter_tone_gap, &dtmf_queue_ref);
+  VerifyOnProvider(dtmf_queue_ref);
+  VerifyOnObserver("1");
+}
+
+// Flaky when run in parallel.
+// See https://code.google.com/p/webrtc/issues/detail?id=4219.
+TEST_F(DtmfSenderTest, DISABLED_InsertDtmfWithCommaAsDelay) {
+  std::string tones = "3,4";
+  int duration = 100;
+  int inter_tone_gap = 50;
+  EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap));
+  EXPECT_TRUE_WAIT(observer_->completed(), kMaxWaitMs);
+
+  VerifyOnProvider(tones, duration, inter_tone_gap);
+  VerifyOnObserver(tones);
+}
+
+TEST_F(DtmfSenderTest, TryInsertDtmfWhenItDoesNotWork) {
+  std::string tones = "3,4";
+  int duration = 100;
+  int inter_tone_gap = 50;
+  provider_->RemoveCanInsertDtmfTrack(kTestAudioLabel);
+  EXPECT_FALSE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap));
+}
+
+TEST_F(DtmfSenderTest, InsertDtmfWithInvalidDurationOrGap) {
+  std::string tones = "3,4";
+  int duration = 100;
+  int inter_tone_gap = 50;
+
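+  // dtmfsenderinterface.h documents duration limits of [70, 6000] ms and a
+  // minimum inter-tone gap of 50 ms; values just outside should be rejected.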
+  EXPECT_FALSE(dtmf_->InsertDtmf(tones, 6001, inter_tone_gap));
+  EXPECT_FALSE(dtmf_->InsertDtmf(tones, 69, inter_tone_gap));
+  EXPECT_FALSE(dtmf_->InsertDtmf(tones, duration, 49));
+
+  EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap));
+}
diff --git a/webrtc/api/dtmfsenderinterface.h b/webrtc/api/dtmfsenderinterface.h
new file mode 100644
index 0000000..327c673
--- /dev/null
+++ b/webrtc/api/dtmfsenderinterface.h
@@ -0,0 +1,105 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_DTMFSENDERINTERFACE_H_
+#define WEBRTC_API_DTMFSENDERINTERFACE_H_
+
+#include <string>
+
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/refcount.h"
+
+// This file contains interfaces for DtmfSender.
+
+namespace webrtc {
+
+// DtmfSender callback interface. Applications should implement this interface
+// to get notifications from the DtmfSender.
+class DtmfSenderObserverInterface {
+ public:
+  // Triggered when DTMF |tone| is sent.
+  // If |tone| is empty that means the DtmfSender has sent out all the given
+  // tones.
+  virtual void OnToneChange(const std::string& tone) = 0;
+
+ protected:
+  virtual ~DtmfSenderObserverInterface() {}
+};
+
+// The interface of native implementation of the RTCDTMFSender defined by the
+// WebRTC W3C Editor's Draft.
+class DtmfSenderInterface : public rtc::RefCountInterface {
+ public:
+  virtual void RegisterObserver(DtmfSenderObserverInterface* observer) = 0;
+  virtual void UnregisterObserver() = 0;
+
+  // Returns true if this DtmfSender is capable of sending DTMF.
+  // Otherwise returns false.
+  virtual bool CanInsertDtmf() = 0;
+
+  // Queues a task that sends the DTMF |tones|. The |tones| parameter is treated
+  // as a series of characters. The characters 0 through 9, A through D, #, and
+  // * generate the associated DTMF tones. The characters a to d are equivalent
+  // to A to D. The character ',' indicates a delay of 2 seconds before
+  // processing the next character in the tones parameter.
+  // Unrecognized characters are ignored.
+  // The |duration| parameter indicates the duration in ms to use for each
+  // character passed in the |tones| parameter.
+  // The duration cannot be more than 6000 or less than 70.
+  // The |inter_tone_gap| parameter indicates the gap between tones in ms.
+  // The |inter_tone_gap| must be at least 50 ms but should be as short as
+  // possible.
+  // If InsertDtmf is called on the same object while an existing task for this
+  // object to generate DTMF is still running, the previous task is canceled.
+  // Returns true on success and false on failure.
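+  //
+  // Illustrative sketch (not part of this CL): assuming |dtmf_sender| holds a
+  // rtc::scoped_refptr<DtmfSenderInterface> for an audio track,
+  //   if (dtmf_sender->CanInsertDtmf())
+  //     dtmf_sender->InsertDtmf("1199", 100, 50);
+  // queues four 100 ms tones separated by 50 ms gaps.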
+  virtual bool InsertDtmf(const std::string& tones, int duration,
+                          int inter_tone_gap) = 0;
+
+  // Returns the track given as argument to the constructor.
+  virtual const AudioTrackInterface* track() const = 0;
+
+  // Returns the tones remaining to be played out.
+  virtual std::string tones() const = 0;
+
+  // Returns the current tone duration value in ms.
+  // This value will be the value last set via the InsertDtmf() method, or the
+  // default value of 100 ms if InsertDtmf() was never called.
+  virtual int duration() const = 0;
+
+  // Returns the current value of the between-tone gap in ms.
+  // This value will be the value last set via the InsertDtmf() method, or the
+  // default value of 50 ms if InsertDtmf() was never called.
+  virtual int inter_tone_gap() const = 0;
+
+ protected:
+  virtual ~DtmfSenderInterface() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_DTMFSENDERINTERFACE_H_
diff --git a/webrtc/api/fakemediacontroller.h b/webrtc/api/fakemediacontroller.h
new file mode 100644
index 0000000..ec1bd12
--- /dev/null
+++ b/webrtc/api/fakemediacontroller.h
@@ -0,0 +1,55 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_FAKEMEDIACONTROLLER_H_
+#define WEBRTC_API_FAKEMEDIACONTROLLER_H_
+
+#include "webrtc/api/mediacontroller.h"
+#include "webrtc/base/checks.h"
+
+namespace cricket {
+
+class FakeMediaController : public webrtc::MediaControllerInterface {
+ public:
+  explicit FakeMediaController(cricket::ChannelManager* channel_manager,
+                               webrtc::Call* call)
+      : channel_manager_(channel_manager), call_(call) {
+    RTC_DCHECK(nullptr != channel_manager_);
+    RTC_DCHECK(nullptr != call_);
+  }
+  ~FakeMediaController() override {}
+  webrtc::Call* call_w() override { return call_; }
+  cricket::ChannelManager* channel_manager() const override {
+    return channel_manager_;
+  }
+
+ private:
+  cricket::ChannelManager* channel_manager_;
+  webrtc::Call* call_;
+};
+}  // namespace cricket
+#endif  // WEBRTC_API_FAKEMEDIACONTROLLER_H_
diff --git a/webrtc/api/fakemetricsobserver.cc b/webrtc/api/fakemetricsobserver.cc
new file mode 100644
index 0000000..6070ff41
--- /dev/null
+++ b/webrtc/api/fakemetricsobserver.cc
@@ -0,0 +1,79 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/fakemetricsobserver.h"
+#include "webrtc/base/checks.h"
+
+namespace webrtc {
+
+FakeMetricsObserver::FakeMetricsObserver() {
+  Reset();
+}
+
+void FakeMetricsObserver::Reset() {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  counters_.clear();
+  memset(histogram_samples_, 0, sizeof(histogram_samples_));
+}
+
+void FakeMetricsObserver::IncrementEnumCounter(
+    PeerConnectionEnumCounterType type,
+    int counter,
+    int counter_max) {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  if (counters_.size() <= static_cast<size_t>(type)) {
+    counters_.resize(type + 1);
+  }
+  auto& counters = counters_[type];
+  ++counters[counter];
+}
+
+void FakeMetricsObserver::AddHistogramSample(PeerConnectionMetricsName type,
+    int value) {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  RTC_DCHECK_EQ(histogram_samples_[type], 0);
+  histogram_samples_[type] = value;
+}
+
+int FakeMetricsObserver::GetEnumCounter(PeerConnectionEnumCounterType type,
+                                        int counter) const {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  RTC_CHECK(counters_.size() > static_cast<size_t>(type));
+  const auto& it = counters_[type].find(counter);
+  if (it == counters_[type].end()) {
+    return 0;
+  }
+  return it->second;
+}
+
+int FakeMetricsObserver::GetHistogramSample(
+    PeerConnectionMetricsName type) const {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  return histogram_samples_[type];
+}
+
+}  // namespace webrtc
diff --git a/webrtc/api/fakemetricsobserver.h b/webrtc/api/fakemetricsobserver.h
new file mode 100644
index 0000000..1f4c2ab
--- /dev/null
+++ b/webrtc/api/fakemetricsobserver.h
@@ -0,0 +1,68 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_FAKEMETRICSOBSERVER_H_
+#define WEBRTC_API_FAKEMETRICSOBSERVER_H_
+
+#include <map>
+#include <string>
+#include <vector>
+
+#include "webrtc/api/peerconnectioninterface.h"
+#include "webrtc/base/thread_checker.h"
+
+namespace webrtc {
+
+class FakeMetricsObserver : public MetricsObserverInterface {
+ public:
+  FakeMetricsObserver();
+  void Reset();
+
+  void IncrementEnumCounter(PeerConnectionEnumCounterType,
+                            int counter,
+                            int counter_max) override;
+  void AddHistogramSample(PeerConnectionMetricsName type,
+                          int value) override;
+
+  // Accessors to be used by the tests.
+  int GetEnumCounter(PeerConnectionEnumCounterType type, int counter) const;
+  int GetHistogramSample(PeerConnectionMetricsName type) const;
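+
+  // Test sketch (|type|, |counter| and |counter_max| are whatever values the
+  // test exercises):
+  //   observer->IncrementEnumCounter(type, counter, counter_max);
+  //   EXPECT_EQ(1, observer->GetEnumCounter(type, counter));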
+
+ protected:
+  ~FakeMetricsObserver() {}
+
+ private:
+  rtc::ThreadChecker thread_checker_;
+  // The vector contains one map per counter type. Each map maps an individual
+  // counter to its count, which keeps memory use low for sparse enum types,
+  // such as the SSL ciphers in the IANA registry.
+  std::vector<std::map<int, int>> counters_;
+  int histogram_samples_[kPeerConnectionMetricsName_Max];
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_FAKEMETRICSOBSERVER_H_
diff --git a/webrtc/api/java/README b/webrtc/api/java/README
new file mode 100644
index 0000000..f367556
--- /dev/null
+++ b/webrtc/api/java/README
@@ -0,0 +1,10 @@
+This directory holds a Java implementation of the webrtc::PeerConnection API, as
+well as the JNI glue C++ code that lets the Java implementation reuse the C++
+implementation of the same API.
+
+To build the Java API and related tests, build with OS=android in $GYP_DEFINES.
+
+To use the Java API, start by looking at the public interface of
+org.webrtc.PeerConnection{,Factory} and the org.webrtc.PeerConnectionTest.
+
+To understand the implementation of the API, see the native code in jni/.
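+
+For example, in a typical checkout (the exact output directory may vary):
+  export GYP_DEFINES="OS=android"
+  gclient runhooks
+  ninja -C out/Debug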
diff --git a/webrtc/api/java/android/org/webrtc/Camera2Enumerator.java b/webrtc/api/java/android/org/webrtc/Camera2Enumerator.java
new file mode 100644
index 0000000..3444529
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/Camera2Enumerator.java
@@ -0,0 +1,122 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.graphics.ImageFormat;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.os.Build;
+import android.os.SystemClock;
+import android.util.Range;
+import android.util.Size;
+
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+import org.webrtc.Logging;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+@TargetApi(21)
+public class Camera2Enumerator implements CameraEnumerationAndroid.Enumerator {
+  private static final String TAG = "Camera2Enumerator";
+  private static final double NANO_SECONDS_PER_SECOND = 1.0e9;
+
+  private final CameraManager cameraManager;
+  // Each entry contains the supported formats for a given camera index. The formats are enumerated
+  // lazily in getSupportedFormats(), and cached for future reference.
+  private final Map<Integer, List<CaptureFormat>> cachedSupportedFormats =
+      new HashMap<Integer, List<CaptureFormat>>();
+
+  public static boolean isSupported() {
+    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP;
+  }
+
+  public Camera2Enumerator(Context context) {
+    cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+  }
+
+  @Override
+  public List<CaptureFormat> getSupportedFormats(int cameraId) {
+    synchronized (cachedSupportedFormats) {
+      if (cachedSupportedFormats.containsKey(cameraId)) {
+        return cachedSupportedFormats.get(cameraId);
+      }
+      Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
+      final long startTimeMs = SystemClock.elapsedRealtime();
+
+      final CameraCharacteristics cameraCharacteristics;
+      try {
+        cameraCharacteristics = cameraManager.getCameraCharacteristics(Integer.toString(cameraId));
+      } catch (Exception ex) {
+        Logging.e(TAG, "getCameraCharacteristics(): " + ex);
+        return new ArrayList<CaptureFormat>();
+      }
+
+      // Calculate default max fps from auto-exposure ranges in case getOutputMinFrameDuration() is
+      // not supported.
+      final Range<Integer>[] fpsRanges =
+          cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
+      int defaultMaxFps = 0;
+      for (Range<Integer> fpsRange : fpsRanges) {
+        defaultMaxFps = Math.max(defaultMaxFps, fpsRange.getUpper());
+      }
+
+      final StreamConfigurationMap streamMap =
+          cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+      final Size[] sizes = streamMap.getOutputSizes(ImageFormat.YUV_420_888);
+      if (sizes == null) {
+        throw new RuntimeException("ImageFormat.YUV_420_888 not supported.");
+      }
+
+      final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
+      for (Size size : sizes) {
+        long minFrameDurationNs = 0;
+        try {
+          minFrameDurationNs = streamMap.getOutputMinFrameDuration(ImageFormat.YUV_420_888, size);
+        } catch (Exception e) {
+          // getOutputMinFrameDuration() is not supported on all devices. Ignore silently.
+        }
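+        // E.g. a min frame duration of 33,333,333 ns gives
+        // round(1e9 / 33,333,333) = 30 fps; CaptureFormat stores frame rates
+        // as fps * 1000, hence the scaling below.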
+        final int maxFps = (minFrameDurationNs == 0)
+                               ? defaultMaxFps
+                               : (int) Math.round(NANO_SECONDS_PER_SECOND / minFrameDurationNs);
+        formatList.add(new CaptureFormat(size.getWidth(), size.getHeight(), 0, maxFps * 1000));
+      }
+      cachedSupportedFormats.put(cameraId, formatList);
+      final long endTimeMs = SystemClock.elapsedRealtime();
+      Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+          + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
+      return formatList;
+    }
+  }
+}
diff --git a/webrtc/api/java/android/org/webrtc/CameraEnumerationAndroid.java b/webrtc/api/java/android/org/webrtc/CameraEnumerationAndroid.java
new file mode 100644
index 0000000..5f68c37
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/CameraEnumerationAndroid.java
@@ -0,0 +1,227 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import static java.lang.Math.abs;
+import android.graphics.ImageFormat;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+
+import org.webrtc.Logging;
+
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+
+@SuppressWarnings("deprecation")
+public class CameraEnumerationAndroid {
+  private static final String TAG = "CameraEnumerationAndroid";
+  // Synchronized on |CameraEnumerationAndroid.class|, since the accessors
+  // below are static.
+  private static Enumerator enumerator = new CameraEnumerator();
+
+  public interface Enumerator {
+    /**
+     * Returns a list of supported CaptureFormats for the camera with index |cameraId|.
+     */
+    List<CaptureFormat> getSupportedFormats(int cameraId);
+  }
+
+  public static synchronized void setEnumerator(Enumerator enumerator) {
+    CameraEnumerationAndroid.enumerator = enumerator;
+  }
+
+  public static synchronized List<CaptureFormat> getSupportedFormats(int cameraId) {
+    return enumerator.getSupportedFormats(cameraId);
+  }
+
+  public static class CaptureFormat {
+    public final int width;
+    public final int height;
+    public final int maxFramerate;
+    public final int minFramerate;
+    // TODO(hbos): If VideoCapturerAndroid.startCapture is updated to support
+    // other image formats then this needs to be updated and
+    // VideoCapturerAndroid.getSupportedFormats need to return CaptureFormats of
+    // all imageFormats.
+    public final int imageFormat = ImageFormat.NV21;
+
+    public CaptureFormat(int width, int height, int minFramerate,
+        int maxFramerate) {
+      this.width = width;
+      this.height = height;
+      this.minFramerate = minFramerate;
+      this.maxFramerate = maxFramerate;
+    }
+
+    // Calculates the frame size of this capture format.
+    public int frameSize() {
+      return frameSize(width, height, imageFormat);
+    }
+
+    // Calculates the frame size of the specified image format. Currently only
+    // supporting ImageFormat.NV21.
+    // The size is width * height * number of bytes per pixel.
+    // http://developer.android.com/reference/android/hardware/Camera.html#addCallbackBuffer(byte[])
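+    // E.g. 640x480 NV21: ImageFormat.getBitsPerPixel(NV21) is 12, so the
+    // frame size is 640 * 480 * 12 / 8 = 460800 bytes.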
+    public static int frameSize(int width, int height, int imageFormat) {
+      if (imageFormat != ImageFormat.NV21) {
+        throw new UnsupportedOperationException("Don't know how to calculate "
+            + "the frame size of non-NV21 image formats.");
+      }
+      return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
+    }
+
+    @Override
+    public String toString() {
+      return width + "x" + height + "@[" + minFramerate + ":" + maxFramerate + "]";
+    }
+
+    public boolean isSameFormat(final CaptureFormat that) {
+      if (that == null) {
+        return false;
+      }
+      return width == that.width && height == that.height && maxFramerate == that.maxFramerate
+          && minFramerate == that.minFramerate;
+    }
+  }
+
+  // Returns device names that can be used to create a new VideoCapturerAndroid.
+  public static String[] getDeviceNames() {
+    String[] names = new String[android.hardware.Camera.getNumberOfCameras()];
+    for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
+      names[i] = getDeviceName(i);
+    }
+    return names;
+  }
+
+  // Returns number of cameras on device.
+  public static int getDeviceCount() {
+    return android.hardware.Camera.getNumberOfCameras();
+  }
+
+  // Returns the name of the camera with camera index. Returns null if the
+  // camera can not be used.
+  public static String getDeviceName(int index) {
+    android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
+    try {
+      android.hardware.Camera.getCameraInfo(index, info);
+    } catch (Exception e) {
+      Logging.e(TAG, "getCameraInfo failed on index " + index,e);
+      return null;
+    }
+
+    String facing =
+        (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
+    return "Camera " + index + ", Facing " + facing
+        + ", Orientation " + info.orientation;
+  }
+
+  // Returns the name of the front facing camera. Returns null if the
+  // camera can not be used or does not exist.
+  public static String getNameOfFrontFacingDevice() {
+    return getNameOfDevice(android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT);
+  }
+
+  // Returns the name of the back facing camera. Returns null if the
+  // camera can not be used or does not exist.
+  public static String getNameOfBackFacingDevice() {
+    return getNameOfDevice(android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK);
+  }
+
+  public static String getSupportedFormatsAsJson(int id) throws JSONException {
+    List<CaptureFormat> formats = getSupportedFormats(id);
+    JSONArray json_formats = new JSONArray();
+    for (CaptureFormat format : formats) {
+      JSONObject json_format = new JSONObject();
+      json_format.put("width", format.width);
+      json_format.put("height", format.height);
+      json_format.put("framerate", (format.maxFramerate + 999) / 1000);
+      json_formats.put(json_format);
+    }
+    Logging.d(TAG, "Supported formats for camera " + id + ": "
+        +  json_formats.toString(2));
+    return json_formats.toString();
+  }
+
+  // Helper class for finding the closest supported format for the two functions below.
+  private abstract static class ClosestComparator<T> implements Comparator<T> {
+    // Difference between supported and requested parameter.
+    abstract int diff(T supportedParameter);
+
+    @Override
+    public int compare(T t1, T t2) {
+      return diff(t1) - diff(t2);
+    }
+  }
+
+  public static int[] getFramerateRange(android.hardware.Camera.Parameters parameters,
+      final int framerate) {
+    List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
+    if (listFpsRange.isEmpty()) {
+      Logging.w(TAG, "No supported preview fps range");
+      return new int[]{0, 0};
+    }
+    return Collections.min(listFpsRange,
+        new ClosestComparator<int[]>() {
+          @Override int diff(int[] range) {
+            final int maxFpsWeight = 10;
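+            // Prefer a range with a low minimum fps whose maximum fps is
+            // close to |framerate|; the max-fps closeness term is weighted
+            // 10x the min-fps term.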
+            return range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX]
+                + maxFpsWeight * abs(framerate
+                    - range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
+          }
+     });
+  }
+
+  public static android.hardware.Camera.Size getClosestSupportedSize(
+      List<android.hardware.Camera.Size> supportedSizes, final int requestedWidth,
+      final int requestedHeight) {
+    return Collections.min(supportedSizes,
+        new ClosestComparator<android.hardware.Camera.Size>() {
+          @Override int diff(android.hardware.Camera.Size size) {
+            return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
+          }
+     });
+  }
+
+  private static String getNameOfDevice(int facing) {
+    final android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
+    for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
+      try {
+        android.hardware.Camera.getCameraInfo(i, info);
+        if (info.facing == facing) {
+          return getDeviceName(i);
+        }
+      } catch (Exception e) {
+        Logging.e(TAG, "getCameraInfo() failed on index " + i, e);
+      }
+    }
+    return null;
+  }
+}
diff --git a/webrtc/api/java/android/org/webrtc/CameraEnumerator.java b/webrtc/api/java/android/org/webrtc/CameraEnumerator.java
new file mode 100644
index 0000000..54469cc
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/CameraEnumerator.java
@@ -0,0 +1,101 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.os.SystemClock;
+
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+import org.webrtc.Logging;
+
+import java.util.ArrayList;
+import java.util.List;
+
+@SuppressWarnings("deprecation")
+public class CameraEnumerator implements CameraEnumerationAndroid.Enumerator {
+  private static final String TAG = "CameraEnumerator";
+  // Each entry contains the supported formats for corresponding camera index. The formats for all
+  // cameras are enumerated on the first call to getSupportedFormats(), and cached for future
+  // reference.
+  private List<List<CaptureFormat>> cachedSupportedFormats;
+
+  @Override
+  public List<CaptureFormat> getSupportedFormats(int cameraId) {
+    synchronized (this) {
+      if (cachedSupportedFormats == null) {
+        cachedSupportedFormats = new ArrayList<List<CaptureFormat>>();
+        for (int i = 0; i < CameraEnumerationAndroid.getDeviceCount(); ++i) {
+          cachedSupportedFormats.add(enumerateFormats(i));
+        }
+      }
+    }
+    return cachedSupportedFormats.get(cameraId);
+  }
+
+  private List<CaptureFormat> enumerateFormats(int cameraId) {
+    Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
+    final long startTimeMs = SystemClock.elapsedRealtime();
+    final android.hardware.Camera.Parameters parameters;
+    android.hardware.Camera camera = null;
+    try {
+      Logging.d(TAG, "Opening camera with index " + cameraId);
+      camera = android.hardware.Camera.open(cameraId);
+      parameters = camera.getParameters();
+    } catch (RuntimeException e) {
+      Logging.e(TAG, "Open camera failed on camera index " + cameraId, e);
+      return new ArrayList<CaptureFormat>();
+    } finally {
+      if (camera != null) {
+        camera.release();
+      }
+    }
+
+    final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
+    try {
+      int minFps = 0;
+      int maxFps = 0;
+      final List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
+      if (listFpsRange != null) {
+        // getSupportedPreviewFpsRange() returns a sorted list. Take the fps range
+        // corresponding to the highest fps.
+        final int[] range = listFpsRange.get(listFpsRange.size() - 1);
+        minFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
+        maxFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
+      }
+      for (android.hardware.Camera.Size size : parameters.getSupportedPreviewSizes()) {
+        formatList.add(new CaptureFormat(size.width, size.height, minFps, maxFps));
+      }
+    } catch (Exception e) {
+      Logging.e(TAG, "getSupportedFormats() failed on camera index " + cameraId, e);
+    }
+
+    final long endTimeMs = SystemClock.elapsedRealtime();
+    Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+        + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
+    return formatList;
+  }
+}
diff --git a/webrtc/api/java/android/org/webrtc/EglBase.java b/webrtc/api/java/android/org/webrtc/EglBase.java
new file mode 100644
index 0000000..b675d09
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/EglBase.java
@@ -0,0 +1,139 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.graphics.SurfaceTexture;
+import android.view.Surface;
+
+import javax.microedition.khronos.egl.EGL10;
+
+
+/**
+ * Holds EGL state and utility methods for handling an EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+public abstract class EglBase {
+  // EGL wrapper for an actual EGLContext.
+  public static class Context {
+  }
+
+  // These constants are taken from EGL14.EGL_OPENGL_ES2_BIT and EGL14.EGL_CONTEXT_CLIENT_VERSION.
+  // https://android.googlesource.com/platform/frameworks/base/+/master/opengl/java/android/opengl/EGL14.java
+  // This is similar to what GLSurfaceView does:
+  // http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/opengl/GLSurfaceView.java#760
+  private static final int EGL_OPENGL_ES2_BIT = 4;
+  // Android-specific extension.
+  private static final int EGL_RECORDABLE_ANDROID = 0x3142;
+
+  public static final int[] CONFIG_PLAIN = {
+    EGL10.EGL_RED_SIZE, 8,
+    EGL10.EGL_GREEN_SIZE, 8,
+    EGL10.EGL_BLUE_SIZE, 8,
+    EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+    EGL10.EGL_NONE
+  };
+  public static final int[] CONFIG_RGBA = {
+    EGL10.EGL_RED_SIZE, 8,
+    EGL10.EGL_GREEN_SIZE, 8,
+    EGL10.EGL_BLUE_SIZE, 8,
+    EGL10.EGL_ALPHA_SIZE, 8,
+    EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+    EGL10.EGL_NONE
+  };
+  public static final int[] CONFIG_PIXEL_BUFFER = {
+    EGL10.EGL_RED_SIZE, 8,
+    EGL10.EGL_GREEN_SIZE, 8,
+    EGL10.EGL_BLUE_SIZE, 8,
+    EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+    EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
+    EGL10.EGL_NONE
+  };
+  public static final int[] CONFIG_PIXEL_RGBA_BUFFER = {
+    EGL10.EGL_RED_SIZE, 8,
+    EGL10.EGL_GREEN_SIZE, 8,
+    EGL10.EGL_BLUE_SIZE, 8,
+    EGL10.EGL_ALPHA_SIZE, 8,
+    EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+    EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
+    EGL10.EGL_NONE
+  };
+  public static final int[] CONFIG_RECORDABLE = {
+    EGL10.EGL_RED_SIZE, 8,
+    EGL10.EGL_GREEN_SIZE, 8,
+    EGL10.EGL_BLUE_SIZE, 8,
+    EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+    EGL_RECORDABLE_ANDROID, 1,
+    EGL10.EGL_NONE
+  };
+
+  // Create a new context with the specified config attributes, sharing data with sharedContext.
+  // |sharedContext| can be null.
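+  // Uses the EGL14-based implementation when it is supported and the shared
+  // context (if any) is compatible; otherwise falls back to EGL10.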
+  public static EglBase create(Context sharedContext, int[] configAttributes) {
+    return (EglBase14.isEGL14Supported()
+        && (sharedContext == null || sharedContext instanceof EglBase14.Context))
+            ? new EglBase14((EglBase14.Context) sharedContext, configAttributes)
+            : new EglBase10((EglBase10.Context) sharedContext, configAttributes);
+  }
+
+  public static EglBase create() {
+    return create(null, CONFIG_PLAIN);
+  }
+
+  public static EglBase create(Context sharedContext) {
+    return create(sharedContext, CONFIG_PLAIN);
+  }
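+
+  // Usage sketch (off-screen rendering):
+  //   EglBase eglBase = EglBase.create();
+  //   eglBase.createDummyPbufferSurface();
+  //   eglBase.makeCurrent();
+  //   // ... issue GL calls ...
+  //   eglBase.release();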
+
+  public abstract void createSurface(Surface surface);
+
+  // Create EGLSurface from the Android SurfaceTexture.
+  public abstract void createSurface(SurfaceTexture surfaceTexture);
+
+  // Create dummy 1x1 pixel buffer surface so the context can be made current.
+  public abstract void createDummyPbufferSurface();
+
+  public abstract void createPbufferSurface(int width, int height);
+
+  public abstract Context getEglBaseContext();
+
+  public abstract boolean hasSurface();
+
+  public abstract int surfaceWidth();
+
+  public abstract int surfaceHeight();
+
+  public abstract void releaseSurface();
+
+  public abstract void release();
+
+  public abstract void makeCurrent();
+
+  // Detach the current EGL context, so that it can be made current on another thread.
+  public abstract void detachCurrent();
+
+  public abstract void swapBuffers();
+}
diff --git a/webrtc/api/java/android/org/webrtc/EglBase10.java b/webrtc/api/java/android/org/webrtc/EglBase10.java
new file mode 100644
index 0000000..d11292d
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/EglBase10.java
@@ -0,0 +1,302 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.graphics.Canvas;
+import android.graphics.SurfaceTexture;
+import android.graphics.Rect;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGLDisplay;
+import javax.microedition.khronos.egl.EGLSurface;
+
+/**
+ * Holds EGL state and utility methods for handling an EGL 1.0 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+final class EglBase10 extends EglBase {
+  // This constant is taken from EGL14.EGL_CONTEXT_CLIENT_VERSION.
+  private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
+
+  private final EGL10 egl;
+  private EGLContext eglContext;
+  private EGLConfig eglConfig;
+  private EGLDisplay eglDisplay;
+  private EGLSurface eglSurface = EGL10.EGL_NO_SURFACE;
+
+  // EGL wrapper for an actual EGLContext.
+  public static class Context extends EglBase.Context {
+    private final EGLContext eglContext;
+
+    public Context(EGLContext eglContext) {
+      this.eglContext = eglContext;
+    }
+  }
+
+  // Create a new context with the specified config type, sharing data with sharedContext.
+  EglBase10(Context sharedContext, int[] configAttributes) {
+    this.egl = (EGL10) EGLContext.getEGL();
+    eglDisplay = getEglDisplay();
+    eglConfig = getEglConfig(eglDisplay, configAttributes);
+    eglContext = createEglContext(sharedContext, eglDisplay, eglConfig);
+  }
+
+  @Override
+  public void createSurface(Surface surface) {
+    /**
+     * We have to wrap Surface in a SurfaceHolder because for some reason eglCreateWindowSurface
+     * couldn't actually take a Surface object until API 17. Older versions fortunately just call
+     * SurfaceHolder.getSurface(), so we'll do that. No other methods are relevant.
+     */
+    class FakeSurfaceHolder implements SurfaceHolder {
+      private final Surface surface;
+
+      FakeSurfaceHolder(Surface surface) {
+        this.surface = surface;
+      }
+
+      @Override
+      public void addCallback(Callback callback) {}
+
+      @Override
+      public void removeCallback(Callback callback) {}
+
+      @Override
+      public boolean isCreating() {
+        return false;
+      }
+
+      @Deprecated
+      @Override
+      public void setType(int i) {}
+
+      @Override
+      public void setFixedSize(int i, int i2) {}
+
+      @Override
+      public void setSizeFromLayout() {}
+
+      @Override
+      public void setFormat(int i) {}
+
+      @Override
+      public void setKeepScreenOn(boolean b) {}
+
+      @Override
+      public Canvas lockCanvas() {
+        return null;
+      }
+
+      @Override
+      public Canvas lockCanvas(Rect rect) {
+        return null;
+      }
+
+      @Override
+      public void unlockCanvasAndPost(Canvas canvas) {}
+
+      @Override
+      public Rect getSurfaceFrame() {
+        return null;
+      }
+
+      @Override
+      public Surface getSurface() {
+        return surface;
+      }
+    }
+
+    createSurfaceInternal(new FakeSurfaceHolder(surface));
+  }
+
+  // Create EGLSurface from the Android SurfaceTexture.
+  @Override
+  public void createSurface(SurfaceTexture surfaceTexture) {
+    createSurfaceInternal(surfaceTexture);
+  }
+
+  // Create EGLSurface from either a SurfaceHolder or a SurfaceTexture.
+  private void createSurfaceInternal(Object nativeWindow) {
+    if (!(nativeWindow instanceof SurfaceHolder) && !(nativeWindow instanceof SurfaceTexture)) {
+      throw new IllegalStateException("Input must be either a SurfaceHolder or SurfaceTexture");
+    }
+    checkIsNotReleased();
+    if (eglSurface != EGL10.EGL_NO_SURFACE) {
+      throw new RuntimeException("Already has an EGLSurface");
+    }
+    int[] surfaceAttribs = {EGL10.EGL_NONE};
+    eglSurface = egl.eglCreateWindowSurface(eglDisplay, eglConfig, nativeWindow, surfaceAttribs);
+    if (eglSurface == EGL10.EGL_NO_SURFACE) {
+      throw new RuntimeException("Failed to create window surface");
+    }
+  }
+
+  // Create dummy 1x1 pixel buffer surface so the context can be made current.
+  @Override
+  public void createDummyPbufferSurface() {
+    createPbufferSurface(1, 1);
+  }
+
+  @Override
+  public void createPbufferSurface(int width, int height) {
+    checkIsNotReleased();
+    if (eglSurface != EGL10.EGL_NO_SURFACE) {
+      throw new RuntimeException("Already has an EGLSurface");
+    }
+    int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE};
+    eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs);
+    if (eglSurface == EGL10.EGL_NO_SURFACE) {
+      throw new RuntimeException("Failed to create pixel buffer surface");
+    }
+  }
+
+  @Override
+  public org.webrtc.EglBase.Context getEglBaseContext() {
+    return new EglBase10.Context(eglContext);
+  }
+
+  @Override
+  public boolean hasSurface() {
+    return eglSurface != EGL10.EGL_NO_SURFACE;
+  }
+
+  @Override
+  public int surfaceWidth() {
+    final int[] widthArray = new int[1];
+    egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_WIDTH, widthArray);
+    return widthArray[0];
+  }
+
+  @Override
+  public int surfaceHeight() {
+    final int[] heightArray = new int[1];
+    egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_HEIGHT, heightArray);
+    return heightArray[0];
+  }
+
+  @Override
+  public void releaseSurface() {
+    if (eglSurface != EGL10.EGL_NO_SURFACE) {
+      egl.eglDestroySurface(eglDisplay, eglSurface);
+      eglSurface = EGL10.EGL_NO_SURFACE;
+    }
+  }
+
+  private void checkIsNotReleased() {
+    if (eglDisplay == EGL10.EGL_NO_DISPLAY || eglContext == EGL10.EGL_NO_CONTEXT
+        || eglConfig == null) {
+      throw new RuntimeException("This object has been released");
+    }
+  }
+
+  @Override
+  public void release() {
+    checkIsNotReleased();
+    releaseSurface();
+    detachCurrent();
+    egl.eglDestroyContext(eglDisplay, eglContext);
+    egl.eglTerminate(eglDisplay);
+    eglContext = EGL10.EGL_NO_CONTEXT;
+    eglDisplay = EGL10.EGL_NO_DISPLAY;
+    eglConfig = null;
+  }
+
+  @Override
+  public void makeCurrent() {
+    checkIsNotReleased();
+    if (eglSurface == EGL10.EGL_NO_SURFACE) {
+      throw new RuntimeException("No EGLSurface - can't make current");
+    }
+    if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
+      throw new RuntimeException("eglMakeCurrent failed");
+    }
+  }
+
+  // Detach the current EGL context, so that it can be made current on another thread.
+  @Override
+  public void detachCurrent() {
+    if (!egl.eglMakeCurrent(
+        eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) {
+      throw new RuntimeException("eglMakeCurrent failed");
+    }
+  }
+
+  @Override
+  public void swapBuffers() {
+    checkIsNotReleased();
+    if (eglSurface == EGL10.EGL_NO_SURFACE) {
+      throw new RuntimeException("No EGLSurface - can't swap buffers");
+    }
+    egl.eglSwapBuffers(eglDisplay, eglSurface);
+  }
+
+  // Return an EGLDisplay, or die trying.
+  private EGLDisplay getEglDisplay() {
+    EGLDisplay eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
+    if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
+      throw new RuntimeException("Unable to get EGL10 display");
+    }
+    int[] version = new int[2];
+    if (!egl.eglInitialize(eglDisplay, version)) {
+      throw new RuntimeException("Unable to initialize EGL10");
+    }
+    return eglDisplay;
+  }
+
+  // Return an EGLConfig, or die trying.
+  private EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
+    EGLConfig[] configs = new EGLConfig[1];
+    int[] numConfigs = new int[1];
+    if (!egl.eglChooseConfig(
+        eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
+      throw new RuntimeException("Unable to find any matching EGL config");
+    }
+    return configs[0];
+  }
+
+  // Return an EGLContext, or die trying.
+  private EGLContext createEglContext(
+      Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
+    if (sharedContext != null && sharedContext.eglContext == EGL10.EGL_NO_CONTEXT) {
+      throw new RuntimeException("Invalid sharedContext");
+    }
+    int[] contextAttributes = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE};
+    EGLContext rootContext =
+        sharedContext == null ? EGL10.EGL_NO_CONTEXT : sharedContext.eglContext;
+    EGLContext eglContext =
+        egl.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes);
+    if (eglContext == EGL10.EGL_NO_CONTEXT) {
+      throw new RuntimeException("Failed to create EGL context");
+    }
+    return eglContext;
+  }
+}
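
For reference, a minimal off-screen usage sketch of the EglBase10 class above. The RGBA config attribute list and the class name are illustrative assumptions (real callers would also request EGL_SURFACE_TYPE and EGL_RENDERABLE_TYPE bits as needed), and this of course only runs on an Android device with EGL available:

    package org.webrtc;

    import javax.microedition.khronos.egl.EGL10;

    class EglBase10OffscreenSketch {
      void renderOffscreen() {
        // Illustrative RGBA8888 config request.
        final int[] configAttributes = {
            EGL10.EGL_RED_SIZE, 8,
            EGL10.EGL_GREEN_SIZE, 8,
            EGL10.EGL_BLUE_SIZE, 8,
            EGL10.EGL_ALPHA_SIZE, 8,
            EGL10.EGL_NONE};
        // No shared context: EglBase10 creates its own display, config, and context.
        EglBase10 eglBase = new EglBase10(null /* sharedContext */, configAttributes);
        // A 1x1 pbuffer surface is enough to make the context current off-screen.
        eglBase.createDummyPbufferSurface();
        eglBase.makeCurrent();
        // ... issue GLES20 calls here ...
        eglBase.release();  // Destroys the surface, context, and display state.
      }
    }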
diff --git a/webrtc/api/java/android/org/webrtc/EglBase14.java b/webrtc/api/java/android/org/webrtc/EglBase14.java
new file mode 100644
index 0000000..8768ed4
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/EglBase14.java
@@ -0,0 +1,257 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.annotation.TargetApi;
+import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLExt;
+import android.opengl.EGLSurface;
+import android.view.Surface;
+
+import org.webrtc.Logging;
+
+/**
+ * Holds EGL state and utility methods for handling an EGL14 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+@TargetApi(18)
+public final class EglBase14 extends EglBase {
+  private static final String TAG = "EglBase14";
+  private static final int EGLExt_SDK_VERSION = android.os.Build.VERSION_CODES.JELLY_BEAN_MR2;
+  private static final int CURRENT_SDK_VERSION = android.os.Build.VERSION.SDK_INT;
+  private EGLContext eglContext;
+  private EGLConfig eglConfig;
+  private EGLDisplay eglDisplay;
+  private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
+
+  // EGL 1.4 is supported from API 17, but EGLExt, which is used for setting the presentation
+  // timestamp on a surface, is supported from API 18, so we require 18.
+  public static boolean isEGL14Supported() {
+    Logging.d(TAG, "SDK version: " + CURRENT_SDK_VERSION
+        + ". isEGL14Supported: " + (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION));
+    return (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION);
+  }
+
+  public static class Context extends EglBase.Context {
+    private final android.opengl.EGLContext egl14Context;
+
+    public Context(android.opengl.EGLContext eglContext) {
+      this.egl14Context = eglContext;
+    }
+  }
+
+  // Create a new context with the specified config type, sharing data with sharedContext.
+  // |sharedContext| may be null.
+  public EglBase14(EglBase14.Context sharedContext, int[] configAttributes) {
+    eglDisplay = getEglDisplay();
+    eglConfig = getEglConfig(eglDisplay, configAttributes);
+    eglContext = createEglContext(sharedContext, eglDisplay, eglConfig);
+  }
+
+  // Create EGLSurface from the Android Surface.
+  @Override
+  public void createSurface(Surface surface) {
+    createSurfaceInternal(surface);
+  }
+
+  // Create EGLSurface from the Android SurfaceTexture.
+  @Override
+  public void createSurface(SurfaceTexture surfaceTexture) {
+    createSurfaceInternal(surfaceTexture);
+  }
+
+  // Create EGLSurface from either Surface or SurfaceTexture.
+  private void createSurfaceInternal(Object surface) {
+    if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
+      throw new IllegalStateException("Input must be either a Surface or SurfaceTexture");
+    }
+    checkIsNotReleased();
+    if (eglSurface != EGL14.EGL_NO_SURFACE) {
+      throw new RuntimeException("Already has an EGLSurface");
+    }
+    int[] surfaceAttribs = {EGL14.EGL_NONE};
+    eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceAttribs, 0);
+    if (eglSurface == EGL14.EGL_NO_SURFACE) {
+      throw new RuntimeException("Failed to create window surface");
+    }
+  }
+
+  @Override
+  public void createDummyPbufferSurface() {
+    createPbufferSurface(1, 1);
+  }
+
+  @Override
+  public void createPbufferSurface(int width, int height) {
+    checkIsNotReleased();
+    if (eglSurface != EGL14.EGL_NO_SURFACE) {
+      throw new RuntimeException("Already has an EGLSurface");
+    }
+    int[] surfaceAttribs = {EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE};
+    eglSurface = EGL14.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs, 0);
+    if (eglSurface == EGL14.EGL_NO_SURFACE) {
+      throw new RuntimeException("Failed to create pixel buffer surface");
+    }
+  }
+
+  @Override
+  public Context getEglBaseContext() {
+    return new EglBase14.Context(eglContext);
+  }
+
+  @Override
+  public boolean hasSurface() {
+    return eglSurface != EGL14.EGL_NO_SURFACE;
+  }
+
+  @Override
+  public int surfaceWidth() {
+    final int[] widthArray = new int[1];
+    EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_WIDTH, widthArray, 0);
+    return widthArray[0];
+  }
+
+  @Override
+  public int surfaceHeight() {
+    final int[] heightArray = new int[1];
+    EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_HEIGHT, heightArray, 0);
+    return heightArray[0];
+  }
+
+  @Override
+  public void releaseSurface() {
+    if (eglSurface != EGL14.EGL_NO_SURFACE) {
+      EGL14.eglDestroySurface(eglDisplay, eglSurface);
+      eglSurface = EGL14.EGL_NO_SURFACE;
+    }
+  }
+
+  private void checkIsNotReleased() {
+    if (eglDisplay == EGL14.EGL_NO_DISPLAY || eglContext == EGL14.EGL_NO_CONTEXT
+        || eglConfig == null) {
+      throw new RuntimeException("This object has been released");
+    }
+  }
+
+  @Override
+  public void release() {
+    checkIsNotReleased();
+    releaseSurface();
+    detachCurrent();
+    EGL14.eglDestroyContext(eglDisplay, eglContext);
+    EGL14.eglReleaseThread();
+    EGL14.eglTerminate(eglDisplay);
+    eglContext = EGL14.EGL_NO_CONTEXT;
+    eglDisplay = EGL14.EGL_NO_DISPLAY;
+    eglConfig = null;
+  }
+
+  @Override
+  public void makeCurrent() {
+    checkIsNotReleased();
+    if (eglSurface == EGL14.EGL_NO_SURFACE) {
+      throw new RuntimeException("No EGLSurface - can't make current");
+    }
+    if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
+      throw new RuntimeException("eglMakeCurrent failed");
+    }
+  }
+
+  // Detach the current EGL context, so that it can be made current on another thread.
+  @Override
+  public void detachCurrent() {
+    if (!EGL14.eglMakeCurrent(
+        eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
+      throw new RuntimeException("eglMakeCurrent failed");
+    }
+  }
+
+  @Override
+  public void swapBuffers() {
+    checkIsNotReleased();
+    if (eglSurface == EGL14.EGL_NO_SURFACE) {
+      throw new RuntimeException("No EGLSurface - can't swap buffers");
+    }
+    EGL14.eglSwapBuffers(eglDisplay, eglSurface);
+  }
+
+  public void swapBuffers(long timeStampNs) {
+    checkIsNotReleased();
+    if (eglSurface == EGL14.EGL_NO_SURFACE) {
+      throw new RuntimeException("No EGLSurface - can't swap buffers");
+    }
+    // See https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt
+    EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, timeStampNs);
+    EGL14.eglSwapBuffers(eglDisplay, eglSurface);
+  }
+
+  // Return an EGLDisplay, or die trying.
+  private static EGLDisplay getEglDisplay() {
+    EGLDisplay eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+    if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
+      throw new RuntimeException("Unable to get EGL14 display");
+    }
+    int[] version = new int[2];
+    if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
+      throw new RuntimeException("Unable to initialize EGL14");
+    }
+    return eglDisplay;
+  }
+
+  // Return an EGLConfig, or die trying.
+  private static EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
+    EGLConfig[] configs = new EGLConfig[1];
+    int[] numConfigs = new int[1];
+    if (!EGL14.eglChooseConfig(
+        eglDisplay, configAttributes, 0, configs, 0, configs.length, numConfigs, 0)) {
+      throw new RuntimeException("Unable to find any matching EGL config");
+    }
+    return configs[0];
+  }
+
+  // Return an EGLContext, or die trying.
+  private static EGLContext createEglContext(
+      EglBase14.Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
+    if (sharedContext != null && sharedContext.egl14Context == EGL14.EGL_NO_CONTEXT) {
+      throw new RuntimeException("Invalid sharedContext");
+    }
+    int[] contextAttributes = {EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE};
+    EGLContext rootContext =
+        sharedContext == null ? EGL14.EGL_NO_CONTEXT : sharedContext.egl14Context;
+    EGLContext eglContext =
+        EGL14.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes, 0);
+    if (eglContext == EGL14.EGL_NO_CONTEXT) {
+      throw new RuntimeException("Failed to create EGL context");
+    }
+    return eglContext;
+  }
+}
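
A sketch of the timestamped swap path, which is the reason EGLExt/API 18 is required for EglBase14. The encoder surface setup is assumed to happen elsewhere, and the class and parameter names are illustrative:

    package org.webrtc;

    import android.view.Surface;

    class TimestampedSwapSketch {
      void drawFrame(EglBase14 eglBase, Surface encoderInputSurface, long captureTimeNs) {
        if (!eglBase.hasSurface()) {
          eglBase.createSurface(encoderInputSurface);
        }
        eglBase.makeCurrent();
        // ... draw the frame with GLES20 here ...
        // eglPresentationTimeANDROID tags the back buffer before the swap, so a
        // MediaCodec consuming this surface sees captureTimeNs rather than the
        // wall-clock time of the swap.
        eglBase.swapBuffers(captureTimeNs);
      }
    }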
diff --git a/webrtc/api/java/android/org/webrtc/GlRectDrawer.java b/webrtc/api/java/android/org/webrtc/GlRectDrawer.java
new file mode 100644
index 0000000..6d3d5d2
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/GlRectDrawer.java
@@ -0,0 +1,230 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+
+import org.webrtc.GlShader;
+import org.webrtc.GlUtil;
+
+import java.nio.ByteBuffer;
+import java.nio.FloatBuffer;
+import java.util.Arrays;
+import java.util.IdentityHashMap;
+import java.util.Map;
+
+/**
+ * Helper class to draw an opaque quad on the target viewport location. Rotation, mirroring, and
+ * cropping are specified using a 4x4 texture coordinate transform matrix. The frame input can
+ * either be an OES texture or YUV textures in I420 format. GL state is intentionally preserved
+ * between draw calls to maximize performance. The function release() must be called manually to
+ * free the resources held by this object.
+ */
+public class GlRectDrawer implements RendererCommon.GlDrawer {
+  // Simple vertex shader, used for both YUV and OES.
+  private static final String VERTEX_SHADER_STRING =
+        "varying vec2 interp_tc;\n"
+      + "attribute vec4 in_pos;\n"
+      + "attribute vec4 in_tc;\n"
+      + "\n"
+      + "uniform mat4 texMatrix;\n"
+      + "\n"
+      + "void main() {\n"
+      + "    gl_Position = in_pos;\n"
+      + "    interp_tc = (texMatrix * in_tc).xy;\n"
+      + "}\n";
+
+  private static final String YUV_FRAGMENT_SHADER_STRING =
+        "precision mediump float;\n"
+      + "varying vec2 interp_tc;\n"
+      + "\n"
+      + "uniform sampler2D y_tex;\n"
+      + "uniform sampler2D u_tex;\n"
+      + "uniform sampler2D v_tex;\n"
+      + "\n"
+      + "void main() {\n"
+      // CSC according to http://www.fourcc.org/fccyvrgb.php
+      + "  float y = texture2D(y_tex, interp_tc).r;\n"
+      + "  float u = texture2D(u_tex, interp_tc).r - 0.5;\n"
+      + "  float v = texture2D(v_tex, interp_tc).r - 0.5;\n"
+      + "  gl_FragColor = vec4(y + 1.403 * v, "
+      + "                      y - 0.344 * u - 0.714 * v, "
+      + "                      y + 1.77 * u, 1);\n"
+      + "}\n";
+
+  private static final String RGB_FRAGMENT_SHADER_STRING =
+        "precision mediump float;\n"
+      + "varying vec2 interp_tc;\n"
+      + "\n"
+      + "uniform sampler2D rgb_tex;\n"
+      + "\n"
+      + "void main() {\n"
+      + "  gl_FragColor = texture2D(rgb_tex, interp_tc);\n"
+      + "}\n";
+
+  private static final String OES_FRAGMENT_SHADER_STRING =
+        "#extension GL_OES_EGL_image_external : require\n"
+      + "precision mediump float;\n"
+      + "varying vec2 interp_tc;\n"
+      + "\n"
+      + "uniform samplerExternalOES oes_tex;\n"
+      + "\n"
+      + "void main() {\n"
+      + "  gl_FragColor = texture2D(oes_tex, interp_tc);\n"
+      + "}\n";
+
+  // Vertex coordinates in Normalized Device Coordinates, i.e. (-1, -1) is bottom-left and (1, 1) is
+  // top-right.
+  private static final FloatBuffer FULL_RECTANGLE_BUF =
+      GlUtil.createFloatBuffer(new float[] {
+            -1.0f, -1.0f,  // Bottom left.
+             1.0f, -1.0f,  // Bottom right.
+            -1.0f,  1.0f,  // Top left.
+             1.0f,  1.0f,  // Top right.
+          });
+
+  // Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
+  private static final FloatBuffer FULL_RECTANGLE_TEX_BUF =
+      GlUtil.createFloatBuffer(new float[] {
+            0.0f, 0.0f,  // Bottom left.
+            1.0f, 0.0f,  // Bottom right.
+            0.0f, 1.0f,  // Top left.
+            1.0f, 1.0f   // Top right.
+          });
+
+  private static class Shader {
+    public final GlShader glShader;
+    public final int texMatrixLocation;
+
+    public Shader(String fragmentShader) {
+      this.glShader = new GlShader(VERTEX_SHADER_STRING, fragmentShader);
+      this.texMatrixLocation = glShader.getUniformLocation("texMatrix");
+    }
+  }
+
+  // The keys are the fragment shader strings defined above.
+  private final Map<String, Shader> shaders = new IdentityHashMap<String, Shader>();
+
+  /**
+   * Draw an OES texture frame with specified texture transformation matrix. Required resources are
+   * allocated at the first call to this function.
+   */
+  @Override
+  public void drawOes(int oesTextureId, float[] texMatrix, int x, int y, int width, int height) {
+    prepareShader(OES_FRAGMENT_SHADER_STRING, texMatrix);
+    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+    // updateTexImage() may be called from another thread in another EGL context, so we need to
+    // bind/unbind the texture in each draw call so that GLES understands it's a new texture.
+    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTextureId);
+    drawRectangle(x, y, width, height);
+    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
+  }
+
+  /**
+   * Draw a RGB(A) texture frame with specified texture transformation matrix. Required resources
+   * are allocated at the first call to this function.
+   */
+  @Override
+  public void drawRgb(int textureId, float[] texMatrix, int x, int y, int width, int height) {
+    prepareShader(RGB_FRAGMENT_SHADER_STRING, texMatrix);
+    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
+    drawRectangle(x, y, width, height);
+    // Unbind the texture as a precaution.
+    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+  }
+
+  /**
+   * Draw a YUV frame with specified texture transformation matrix. Required resources are
+   * allocated at the first call to this function.
+   */
+  @Override
+  public void drawYuv(int[] yuvTextures, float[] texMatrix, int x, int y, int width, int height) {
+    prepareShader(YUV_FRAGMENT_SHADER_STRING, texMatrix);
+    // Bind the textures.
+    for (int i = 0; i < 3; ++i) {
+      GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+      GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
+    }
+    drawRectangle(x, y, width, height);
+    // Unbind the textures as a precaution.
+    for (int i = 0; i < 3; ++i) {
+      GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+      GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+    }
+  }
+
+  private void drawRectangle(int x, int y, int width, int height) {
+    // Draw quad.
+    GLES20.glViewport(x, y, width, height);
+    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+  }
+
+  private void prepareShader(String fragmentShader, float[] texMatrix) {
+    final Shader shader;
+    if (shaders.containsKey(fragmentShader)) {
+      shader = shaders.get(fragmentShader);
+    } else {
+      // Lazy allocation.
+      shader = new Shader(fragmentShader);
+      shaders.put(fragmentShader, shader);
+      shader.glShader.useProgram();
+      // Initialize fragment shader uniform values.
+      if (fragmentShader == YUV_FRAGMENT_SHADER_STRING) {
+        GLES20.glUniform1i(shader.glShader.getUniformLocation("y_tex"), 0);
+        GLES20.glUniform1i(shader.glShader.getUniformLocation("u_tex"), 1);
+        GLES20.glUniform1i(shader.glShader.getUniformLocation("v_tex"), 2);
+      } else if (fragmentShader == RGB_FRAGMENT_SHADER_STRING) {
+        GLES20.glUniform1i(shader.glShader.getUniformLocation("rgb_tex"), 0);
+      } else if (fragmentShader == OES_FRAGMENT_SHADER_STRING) {
+        GLES20.glUniform1i(shader.glShader.getUniformLocation("oes_tex"), 0);
+      } else {
+        throw new IllegalStateException("Unknown fragment shader: " + fragmentShader);
+      }
+      GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
+      // Initialize vertex shader attributes.
+      shader.glShader.setVertexAttribArray("in_pos", 2, FULL_RECTANGLE_BUF);
+      shader.glShader.setVertexAttribArray("in_tc", 2, FULL_RECTANGLE_TEX_BUF);
+    }
+    shader.glShader.useProgram();
+    // Copy the texture transformation matrix over.
+    GLES20.glUniformMatrix4fv(shader.texMatrixLocation, 1, false, texMatrix, 0);
+  }
+
+  /**
+   * Release all GLES resources. This needs to be done manually, otherwise the resources are leaked.
+   */
+  @Override
+  public void release() {
+    for (Shader shader : shaders.values()) {
+      shader.glShader.release();
+    }
+    shaders.clear();
+  }
+}
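
For reference, a minimal draw loop against the drawer above. The OES texture id is assumed to come from a SurfaceTexture elsewhere, and the class name is illustrative:

    package org.webrtc;

    import android.opengl.Matrix;

    class OesFrameRendererSketch {
      private final GlRectDrawer drawer = new GlRectDrawer();
      private final float[] texMatrix = new float[16];

      // Draws |oesTextureId| over the full viewport with no rotation, mirror, or crop.
      void drawFullViewport(int oesTextureId, int viewportWidth, int viewportHeight) {
        Matrix.setIdentityM(texMatrix, 0);
        drawer.drawOes(oesTextureId, texMatrix, 0, 0, viewportWidth, viewportHeight);
      }

      void dispose() {
        drawer.release();  // GL resources are not garbage collected; release manually.
      }
    }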
diff --git a/webrtc/api/java/android/org/webrtc/GlShader.java b/webrtc/api/java/android/org/webrtc/GlShader.java
new file mode 100644
index 0000000..966f0f5
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/GlShader.java
@@ -0,0 +1,144 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES20;
+
+import org.webrtc.Logging;
+
+import java.nio.FloatBuffer;
+
+// Helper class for handling OpenGL shaders and shader programs.
+public class GlShader {
+  private static final String TAG = "GlShader";
+
+  private static int compileShader(int shaderType, String source) {
+    int[] result = new int[] {
+        GLES20.GL_FALSE
+    };
+    int shader = GLES20.glCreateShader(shaderType);
+    GLES20.glShaderSource(shader, source);
+    GLES20.glCompileShader(shader);
+    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, result, 0);
+    if (result[0] != GLES20.GL_TRUE) {
+      Logging.e(TAG, "Could not compile shader " + shaderType + ":" +
+          GLES20.glGetShaderInfoLog(shader));
+      throw new RuntimeException(GLES20.glGetShaderInfoLog(shader));
+    }
+    GlUtil.checkNoGLES2Error("compileShader");
+    return shader;
+  }
+
+  private int vertexShader;
+  private int fragmentShader;
+  private int program;
+
+  public GlShader(String vertexSource, String fragmentSource) {
+    vertexShader = compileShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+    fragmentShader = compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+    program = GLES20.glCreateProgram();
+    if (program == 0) {
+      throw new RuntimeException("Could not create program");
+    }
+    GLES20.glAttachShader(program, vertexShader);
+    GLES20.glAttachShader(program, fragmentShader);
+    GLES20.glLinkProgram(program);
+    int[] linkStatus = new int[] {
+      GLES20.GL_FALSE
+    };
+    GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+    if (linkStatus[0] != GLES20.GL_TRUE) {
+      Logging.e(TAG, "Could not link program: " +
+          GLES20.glGetProgramInfoLog(program));
+      throw new RuntimeException(GLES20.glGetProgramInfoLog(program));
+    }
+    GlUtil.checkNoGLES2Error("Creating GlShader");
+  }
+
+  public int getAttribLocation(String label) {
+    if (program == -1) {
+      throw new RuntimeException("The program has been released");
+    }
+    int location = GLES20.glGetAttribLocation(program, label);
+    if (location < 0) {
+      throw new RuntimeException("Could not locate '" + label + "' in program");
+    }
+    return location;
+  }
+
+  /**
+   * Enable and upload a vertex array for attribute |label|. The vertex data is specified in
+   * |buffer| with |dimension| number of components per vertex.
+   */
+  public void setVertexAttribArray(String label, int dimension, FloatBuffer buffer) {
+    if (program == -1) {
+      throw new RuntimeException("The program has been released");
+    }
+    int location = getAttribLocation(label);
+    GLES20.glEnableVertexAttribArray(location);
+    GLES20.glVertexAttribPointer(location, dimension, GLES20.GL_FLOAT, false, 0, buffer);
+    GlUtil.checkNoGLES2Error("setVertexAttribArray");
+  }
+
+  public int getUniformLocation(String label) {
+    if (program == -1) {
+      throw new RuntimeException("The program has been released");
+    }
+    int location = GLES20.glGetUniformLocation(program, label);
+    if (location < 0) {
+      throw new RuntimeException("Could not locate uniform '" + label + "' in program");
+    }
+    return location;
+  }
+
+  public void useProgram() {
+    if (program == -1) {
+      throw new RuntimeException("The program has been released");
+    }
+    GLES20.glUseProgram(program);
+    GlUtil.checkNoGLES2Error("glUseProgram");
+  }
+
+  public void release() {
+    Logging.d(TAG, "Deleting shader.");
+    // Flag shaders for deletion (does not delete until no longer attached to a program).
+    if (vertexShader != -1) {
+      GLES20.glDeleteShader(vertexShader);
+      vertexShader = -1;
+    }
+    if (fragmentShader != -1) {
+      GLES20.glDeleteShader(fragmentShader);
+      fragmentShader = -1;
+    }
+    // Delete program, automatically detaching any shaders from it.
+    if (program != -1) {
+      GLES20.glDeleteProgram(program);
+      program = -1;
+    }
+  }
+}
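
A compile-link-draw sketch against GlShader, mirroring how GlRectDrawer uses it above. The shader sources and class name are illustrative, and a current EGL context is assumed:

    package org.webrtc;

    import android.opengl.GLES20;

    import java.nio.FloatBuffer;

    class PassThroughProgramSketch {
      void drawOnce() {
        final String vertex =
            "attribute vec4 in_pos;\nvoid main() { gl_Position = in_pos; }\n";
        final String fragment =
            "precision mediump float;\nvoid main() { gl_FragColor = vec4(1.0); }\n";
        // The constructor compiles and links, or throws on failure.
        GlShader shader = new GlShader(vertex, fragment);
        shader.useProgram();
        // Upload a full-screen triangle strip for the "in_pos" attribute.
        FloatBuffer quad = GlUtil.createFloatBuffer(
            new float[] {-1f, -1f, 1f, -1f, -1f, 1f, 1f, 1f});
        shader.setVertexAttribArray("in_pos", 2, quad);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        shader.release();
      }
    }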
diff --git a/webrtc/api/java/android/org/webrtc/GlTextureFrameBuffer.java b/webrtc/api/java/android/org/webrtc/GlTextureFrameBuffer.java
new file mode 100644
index 0000000..fd52c37
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/GlTextureFrameBuffer.java
@@ -0,0 +1,142 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES20;
+
+/**
+ * Helper class for handling OpenGL framebuffer with only color attachment and no depth or stencil
+ * buffer. Intended for simple tasks such as texture copy, texture downscaling, and texture color
+ * conversion.
+ */
+// TODO(magjed): Add unittests for this class.
+public class GlTextureFrameBuffer {
+  private final int frameBufferId;
+  private final int textureId;
+  private final int pixelFormat;
+  private int width;
+  private int height;
+
+  /**
+   * Generate texture and framebuffer resources. An EGLContext must be bound on the current thread
+   * when calling this function. The framebuffer is not complete until setSize() is called.
+   */
+  public GlTextureFrameBuffer(int pixelFormat) {
+    switch (pixelFormat) {
+      case GLES20.GL_LUMINANCE:
+      case GLES20.GL_RGB:
+      case GLES20.GL_RGBA:
+        this.pixelFormat = pixelFormat;
+        break;
+      default:
+        throw new IllegalArgumentException("Invalid pixel format: " + pixelFormat);
+    }
+
+    textureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+    this.width = 0;
+    this.height = 0;
+
+    // Create framebuffer object and bind it.
+    final int[] frameBuffers = new int[1];
+    GLES20.glGenFramebuffers(1, frameBuffers, 0);
+    frameBufferId = frameBuffers[0];
+    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
+    GlUtil.checkNoGLES2Error("Generate framebuffer");
+
+    // Attach the texture to the framebuffer as color attachment.
+    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
+        GLES20.GL_TEXTURE_2D, textureId, 0);
+    GlUtil.checkNoGLES2Error("Attach texture to framebuffer");
+
+    // Restore normal framebuffer.
+    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+  }
+
+  /**
+   * (Re)allocate texture. Will do nothing if the requested size equals the current size. An
+   * EGLContext must be bound on the current thread when calling this function. Must be called at
+   * least once before using the framebuffer. May be called multiple times to change size.
+   */
+  public void setSize(int width, int height) {
+    if (width == 0 || height == 0) {
+      throw new IllegalArgumentException("Invalid size: " + width + "x" + height);
+    }
+    if (width == this.width && height == this.height) {
+      return;
+    }
+    this.width = width;
+    this.height = height;
+
+    // Bind our framebuffer.
+    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
+    GlUtil.checkNoGLES2Error("glBindFramebuffer");
+
+    // Allocate texture.
+    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
+    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, pixelFormat, width, height, 0, pixelFormat,
+        GLES20.GL_UNSIGNED_BYTE, null);
+
+    // Check that the framebuffer is in a good state.
+    final int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
+    if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) {
+      throw new IllegalStateException("Framebuffer not complete, status: " + status);
+    }
+
+    // Restore normal framebuffer.
+    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+  }
+
+  public int getWidth() {
+    return width;
+  }
+
+  public int getHeight() {
+    return height;
+  }
+
+  public int getFrameBufferId() {
+    return frameBufferId;
+  }
+
+  public int getTextureId() {
+    return textureId;
+  }
+
+  /**
+   * Release texture and framebuffer. An EGLContext must be bound on the current thread when calling
+   * this function. This object should not be used after this call.
+   */
+  public void release() {
+    GLES20.glDeleteTextures(1, new int[] {textureId}, 0);
+    GLES20.glDeleteFramebuffers(1, new int[] {frameBufferId}, 0);
+    width = 0;
+    height = 0;
+  }
+}
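
An off-screen render-and-readback sketch for the framebuffer helper above; it assumes an EGL context is already current on the calling thread, and the class name is illustrative:

    package org.webrtc;

    import android.opengl.GLES20;

    import java.nio.ByteBuffer;

    class FboReadbackSketch {
      ByteBuffer renderAndRead(int width, int height) {
        GlTextureFrameBuffer fbo = new GlTextureFrameBuffer(GLES20.GL_RGBA);
        fbo.setSize(width, height);  // Allocates the texture; framebuffer is now complete.
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fbo.getFrameBufferId());
        // ... draw into the framebuffer here, e.g. with GlRectDrawer ...
        ByteBuffer pixels = ByteBuffer.allocateDirect(width * height * 4);
        GLES20.glReadPixels(
            0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixels);
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);  // Restore default framebuffer.
        fbo.release();
        return pixels;
      }
    }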
diff --git a/webrtc/api/java/android/org/webrtc/GlUtil.java b/webrtc/api/java/android/org/webrtc/GlUtil.java
new file mode 100644
index 0000000..8b43579
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/GlUtil.java
@@ -0,0 +1,75 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES20;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+/**
+ * Some OpenGL static utility functions.
+ */
+public class GlUtil {
+  private GlUtil() {}
+
+  // Assert that no OpenGL ES 2.0 error has been raised.
+  public static void checkNoGLES2Error(String msg) {
+    int error = GLES20.glGetError();
+    if (error != GLES20.GL_NO_ERROR) {
+      throw new RuntimeException(msg + ": GLES20 error: " + error);
+    }
+  }
+
+  public static FloatBuffer createFloatBuffer(float[] coords) {
+    // Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it.
+    ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * 4);
+    bb.order(ByteOrder.nativeOrder());
+    FloatBuffer fb = bb.asFloatBuffer();
+    fb.put(coords);
+    fb.position(0);
+    return fb;
+  }
+
+  /**
+   * Generate texture with standard parameters.
+   */
+  public static int generateTexture(int target) {
+    final int[] textureArray = new int[1];
+    GLES20.glGenTextures(1, textureArray, 0);
+    final int textureId = textureArray[0];
+    GLES20.glBindTexture(target, textureId);
+    GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
+    GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
+    GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+    GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+    checkNoGLES2Error("generateTexture");
+    return textureId;
+  }
+}
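
Because generateTexture() leaves the new texture bound, a caller can allocate storage for it immediately afterwards. A sketch (the RGBA format choice and class name are illustrative):

    package org.webrtc;

    import android.opengl.GLES20;

    class TextureAllocatorSketch {
      int createRgbaTexture(int width, int height) {
        // generateTexture() binds the texture, so glTexImage2D targets it directly.
        int textureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height,
            0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
        GlUtil.checkNoGLES2Error("createRgbaTexture");
        return textureId;
      }
    }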
diff --git a/webrtc/api/java/android/org/webrtc/NetworkMonitor.java b/webrtc/api/java/android/org/webrtc/NetworkMonitor.java
new file mode 100644
index 0000000..a075b36
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/NetworkMonitor.java
@@ -0,0 +1,270 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import static org.webrtc.NetworkMonitorAutoDetect.ConnectionType;
+import static org.webrtc.NetworkMonitorAutoDetect.INVALID_NET_ID;
+import static org.webrtc.NetworkMonitorAutoDetect.NetworkInformation;
+
+import org.webrtc.Logging;
+
+import android.content.Context;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Borrowed from Chromium's src/net/android/java/src/org/chromium/net/NetworkChangeNotifier.java
+ *
+ * Triggers updates to the underlying network state from OS networking events.
+ *
+ * WARNING: This class is not thread-safe.
+ */
+public class NetworkMonitor {
+  /**
+   * Alerted when the connection type of the network changes.
+   * The alert is fired on the UI thread.
+   */
+  public interface NetworkObserver {
+    public void onConnectionTypeChanged(ConnectionType connectionType);
+  }
+
+  private static final String TAG = "NetworkMonitor";
+  private static NetworkMonitor instance;
+
+  private final Context applicationContext;
+
+  // Native observers of the connection type changes.
+  private final ArrayList<Long> nativeNetworkObservers;
+  // Java observers of the connection type changes.
+  private final ArrayList<NetworkObserver> networkObservers;
+
+  // Object that detects the connection type changes.
+  private NetworkMonitorAutoDetect autoDetector;
+
+  private ConnectionType currentConnectionType = ConnectionType.CONNECTION_UNKNOWN;
+
+  private NetworkMonitor(Context context) {
+    assertIsTrue(context != null);
+    applicationContext =
+        context.getApplicationContext() == null ? context : context.getApplicationContext();
+
+    nativeNetworkObservers = new ArrayList<Long>();
+    networkObservers = new ArrayList<NetworkObserver>();
+  }
+
+  /**
+   * Initializes the singleton once.
+   * Called from the native code.
+   */
+  public static NetworkMonitor init(Context context) {
+    if (!isInitialized()) {
+      instance = new NetworkMonitor(context);
+    }
+    return instance;
+  }
+
+  public static boolean isInitialized() {
+    return instance != null;
+  }
+
+  /**
+   * Returns the singleton instance.
+   */
+  public static NetworkMonitor getInstance() {
+    return instance;
+  }
+
+  /**
+   * Enables auto detection of the current network state based on notifications from the system.
+   * Note that passing true here requires that the embedding app have the platform
+   * ACCESS_NETWORK_STATE permission.
+   *
+   * @param shouldAutoDetect true if the NetworkMonitor should listen for system changes in
+   *  network connectivity.
+   */
+  public static void setAutoDetectConnectivityState(boolean shouldAutoDetect) {
+    getInstance().setAutoDetectConnectivityStateInternal(shouldAutoDetect);
+  }
+
+  private static void assertIsTrue(boolean condition) {
+    if (!condition) {
+      throw new AssertionError("Expected to be true");
+    }
+  }
+
+  // Called by the native code.
+  private void startMonitoring(long nativeObserver) {
+    Logging.d(TAG, "Start monitoring from native observer " + nativeObserver);
+    nativeNetworkObservers.add(nativeObserver);
+    setAutoDetectConnectivityStateInternal(true);
+  }
+
+  // Called by the native code.
+  private void stopMonitoring(long nativeObserver) {
+    Logging.d(TAG, "Stop monitoring from native observer " + nativeObserver);
+    setAutoDetectConnectivityStateInternal(false);
+    nativeNetworkObservers.remove(nativeObserver);
+  }
+
+  private ConnectionType getCurrentConnectionType() {
+    return currentConnectionType;
+  }
+
+  private int getCurrentDefaultNetId() {
+    return autoDetector == null ? INVALID_NET_ID : autoDetector.getDefaultNetId();
+  }
+
+  private void destroyAutoDetector() {
+    if (autoDetector != null) {
+      autoDetector.destroy();
+      autoDetector = null;
+    }
+  }
+
+  private void setAutoDetectConnectivityStateInternal(boolean shouldAutoDetect) {
+    if (!shouldAutoDetect) {
+      destroyAutoDetector();
+      return;
+    }
+    if (autoDetector == null) {
+      autoDetector = new NetworkMonitorAutoDetect(
+        new NetworkMonitorAutoDetect.Observer() {
+
+          @Override
+          public void onConnectionTypeChanged(ConnectionType newConnectionType) {
+            updateCurrentConnectionType(newConnectionType);
+          }
+
+          @Override
+          public void onNetworkConnect(NetworkInformation networkInfo) {
+            notifyObserversOfNetworkConnect(networkInfo);
+          }
+
+          @Override
+          public void onNetworkDisconnect(int networkHandle) {
+            notifyObserversOfNetworkDisconnect(networkHandle);
+          }
+        },
+        applicationContext);
+      final NetworkMonitorAutoDetect.NetworkState networkState =
+          autoDetector.getCurrentNetworkState();
+      updateCurrentConnectionType(NetworkMonitorAutoDetect.getConnectionType(networkState));
+      updateActiveNetworkList();
+    }
+  }
+
+  private void updateCurrentConnectionType(ConnectionType newConnectionType) {
+    currentConnectionType = newConnectionType;
+    notifyObserversOfConnectionTypeChange(newConnectionType);
+  }
+
+  /**
+   * Alerts all observers of a connection change.
+   */
+  private void notifyObserversOfConnectionTypeChange(ConnectionType newConnectionType) {
+    for (long nativeObserver : nativeNetworkObservers) {
+      nativeNotifyConnectionTypeChanged(nativeObserver);
+    }
+    for (NetworkObserver observer : networkObservers) {
+      observer.onConnectionTypeChanged(newConnectionType);
+    }
+  }
+
+  private void notifyObserversOfNetworkConnect(NetworkInformation networkInfo) {
+    for (long nativeObserver : nativeNetworkObservers) {
+      nativeNotifyOfNetworkConnect(nativeObserver, networkInfo);
+    }
+  }
+
+  private void notifyObserversOfNetworkDisconnect(int networkHandle) {
+    for (long nativeObserver : nativeNetworkObservers) {
+      nativeNotifyOfNetworkDisconnect(nativeObserver, networkHandle);
+    }
+  }
+
+  private void updateActiveNetworkList() {
+    List<NetworkInformation> networkInfoList = autoDetector.getActiveNetworkList();
+    if (networkInfoList == null || networkInfoList.size() == 0) {
+      return;
+    }
+
+    NetworkInformation[] networkInfos = new NetworkInformation[networkInfoList.size()];
+    networkInfos = networkInfoList.toArray(networkInfos);
+    for (long nativeObserver : nativeNetworkObservers) {
+      nativeNotifyOfActiveNetworkList(nativeObserver, networkInfos);
+    }
+  }
+
+  /**
+   * Adds an observer for any connection type changes.
+   */
+  public static void addNetworkObserver(NetworkObserver observer) {
+    getInstance().addNetworkObserverInternal(observer);
+  }
+
+  private void addNetworkObserverInternal(NetworkObserver observer) {
+    networkObservers.add(observer);
+  }
+
+  /**
+   * Removes an observer for any connection type changes.
+   */
+  public static void removeNetworkObserver(NetworkObserver observer) {
+    getInstance().removeNetworkObserverInternal(observer);
+  }
+
+  private void removeNetworkObserverInternal(NetworkObserver observer) {
+    networkObservers.remove(observer);
+  }
+
+  /**
+   * Checks if there currently is connectivity.
+   */
+  public static boolean isOnline() {
+    ConnectionType connectionType = getInstance().getCurrentConnectionType();
+    return connectionType != ConnectionType.CONNECTION_UNKNOWN
+        && connectionType != ConnectionType.CONNECTION_NONE;
+  }
+
+  private native void nativeNotifyConnectionTypeChanged(long nativePtr);
+  private native void nativeNotifyOfNetworkConnect(long nativePtr, NetworkInformation networkInfo);
+  private native void nativeNotifyOfNetworkDisconnect(long nativePtr, int networkHandle);
+  private native void nativeNotifyOfActiveNetworkList(long nativePtr,
+                                                      NetworkInformation[] networkInfos);
+
+  // For testing only.
+  static void resetInstanceForTests(Context context) {
+    instance = new NetworkMonitor(context);
+  }
+
+  // For testing only.
+  public static NetworkMonitorAutoDetect getAutoDetectorForTest() {
+    return getInstance().autoDetector;
+  }
+}
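
From the Java side, typical usage looks like the sketch below. init() is normally invoked from native code, so calling it directly as shown is an assumption for illustration, as is the class name:

    package org.webrtc;

    import android.content.Context;

    class ConnectivityWatcherSketch {
      void watch(Context applicationContext) {
        NetworkMonitor.init(applicationContext);  // Normally done by the native layer.
        NetworkMonitor.setAutoDetectConnectivityState(true);
        NetworkMonitor.addNetworkObserver(new NetworkMonitor.NetworkObserver() {
          @Override
          public void onConnectionTypeChanged(
              NetworkMonitorAutoDetect.ConnectionType connectionType) {
            // React to e.g. CONNECTION_WIFI -> CONNECTION_4G transitions here.
          }
        });
      }
    }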
diff --git a/webrtc/api/java/android/org/webrtc/NetworkMonitorAutoDetect.java b/webrtc/api/java/android/org/webrtc/NetworkMonitorAutoDetect.java
new file mode 100644
index 0000000..d71efc6
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/NetworkMonitorAutoDetect.java
@@ -0,0 +1,605 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import static android.net.NetworkCapabilities.NET_CAPABILITY_INTERNET;
+import static android.net.NetworkCapabilities.TRANSPORT_CELLULAR;
+
+
+import org.webrtc.Logging;
+
+import android.Manifest.permission;
+import android.annotation.SuppressLint;
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.content.pm.PackageManager;
+import android.net.ConnectivityManager;
+import android.net.ConnectivityManager.NetworkCallback;
+import android.net.LinkAddress;
+import android.net.LinkProperties;
+import android.net.Network;
+import android.net.NetworkCapabilities;
+import android.net.NetworkInfo;
+import android.net.NetworkRequest;
+import android.net.wifi.WifiInfo;
+import android.net.wifi.WifiManager;
+import android.os.Build;
+import android.telephony.TelephonyManager;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Borrowed from Chromium's
+ * src/net/android/java/src/org/chromium/net/NetworkChangeNotifierAutoDetect.java
+ *
+ * Used by the NetworkMonitor to listen to platform changes in connectivity.
+ * Note that use of this class requires that the app have the platform
+ * ACCESS_NETWORK_STATE permission.
+ */
+public class NetworkMonitorAutoDetect extends BroadcastReceiver {
+  public static enum ConnectionType {
+    CONNECTION_UNKNOWN,
+    CONNECTION_ETHERNET,
+    CONNECTION_WIFI,
+    CONNECTION_4G,
+    CONNECTION_3G,
+    CONNECTION_2G,
+    CONNECTION_BLUETOOTH,
+    CONNECTION_NONE
+  }
+
+  public static class IPAddress {
+    public final byte[] address;
+    public IPAddress(byte[] address) {
+      this.address = address;
+    }
+  }
+
+  /** Java version of NetworkMonitor.NetworkInformation */
+  public static class NetworkInformation {
+    public final String name;
+    public final ConnectionType type;
+    public final int handle;
+    public final IPAddress[] ipAddresses;
+    public NetworkInformation(String name, ConnectionType type, int handle,
+                              IPAddress[] addresses) {
+      this.name = name;
+      this.type = type;
+      this.handle = handle;
+      this.ipAddresses = addresses;
+    }
+  }
+
+  static class NetworkState {
+    private final boolean connected;
+    // Defined from ConnectivityManager.TYPE_XXX for non-mobile; for mobile, it is
+    // further divided into 2G, 3G, or 4G from the subtype.
+    private final int type;
+    // Defined from NetworkInfo.subtype, which is one of the TelephonyManager.NETWORK_TYPE_XXXs.
+    // Will be useful to find the maximum bandwidth.
+    private final int subtype;
+
+    public NetworkState(boolean connected, int type, int subtype) {
+      this.connected = connected;
+      this.type = type;
+      this.subtype = subtype;
+    }
+
+    public boolean isConnected() {
+      return connected;
+    }
+
+    public int getNetworkType() {
+      return type;
+    }
+
+    public int getNetworkSubType() {
+      return subtype;
+    }
+  }
+  /**
+   * The methods in this class get called when the network changes if the callback
+   * is registered with a proper network request. It is only available in Android Lollipop
+   * and above.
+   */
+  @SuppressLint("NewApi")
+  private class SimpleNetworkCallback extends NetworkCallback {
+
+    @Override
+    public void onAvailable(Network network) {
+      Logging.d(TAG, "Network becomes available: " + network.toString());
+      onNetworkChanged(network);
+    }
+
+    @Override
+    public void onCapabilitiesChanged(
+        Network network, NetworkCapabilities networkCapabilities) {
+      // A capabilities change may indicate the ConnectionType has changed,
+      // so forward the new NetworkInformation along to the observer.
+      Logging.d(TAG, "capabilities changed: " + networkCapabilities.toString());
+      onNetworkChanged(network);
+    }
+
+    @Override
+    public void onLinkPropertiesChanged(Network network, LinkProperties linkProperties) {
+      // A link property change may indicate that the IP addresses have changed,
+      // so forward the new NetworkInformation to the observer.
+      Logging.d(TAG, "link properties changed: " + linkProperties.toString());
+      onNetworkChanged(network);
+    }
+
+    @Override
+    public void onLosing(Network network, int maxMsToLive) {
+      // Indicates that the network is about to be lost within maxMsToLive milliseconds.
+      // We may use this signal later.
+      Logging.d(TAG, "Network with handle " + networkToNetId(network) +
+                " is about to lose in " + maxMsToLive + "ms");
+    }
+
+    @Override
+    public void onLost(Network network) {
+      int handle = networkToNetId(network);
+      Logging.d(TAG, "Network with handle " + handle + " is disconnected");
+      observer.onNetworkDisconnect(handle);
+    }
+
+    private void onNetworkChanged(Network network) {
+      NetworkInformation networkInformation = connectivityManagerDelegate.networkToInfo(network);
+      if (networkInformation.type != ConnectionType.CONNECTION_UNKNOWN
+          && networkInformation.type != ConnectionType.CONNECTION_NONE) {
+        observer.onNetworkConnect(networkInformation);
+      }
+    }
+  }
+
+  /** Queries the ConnectivityManager for information about the current connection. */
+  static class ConnectivityManagerDelegate {
+    /**
+     * Note: On some rare Android systems connectivityManager is null. We handle that
+     * gracefully below.
+     */
+    private final ConnectivityManager connectivityManager;
+
+    ConnectivityManagerDelegate(Context context) {
+      connectivityManager =
+          (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
+    }
+
+    // For testing.
+    ConnectivityManagerDelegate() {
+      // All the methods below should be overridden.
+      connectivityManager = null;
+    }
+
+    /**
+     * Returns connection type and status information about the current
+     * default network.
+     */
+    NetworkState getNetworkState() {
+      if (connectivityManager == null) {
+        return new NetworkState(false, -1, -1);
+      }
+      return getNetworkState(connectivityManager.getActiveNetworkInfo());
+    }
+
+    /**
+     * Returns connection type and status information about |network|.
+     * Only callable on Lollipop and newer releases.
+     */
+    @SuppressLint("NewApi")
+    NetworkState getNetworkState(Network network) {
+      if (connectivityManager == null) {
+        return new NetworkState(false, -1, -1);
+      }
+      return getNetworkState(connectivityManager.getNetworkInfo(network));
+    }
+
+    /**
+     * Returns connection type and status information gleaned from networkInfo.
+     */
+    NetworkState getNetworkState(NetworkInfo networkInfo) {
+      if (networkInfo == null || !networkInfo.isConnected()) {
+        return new NetworkState(false, -1, -1);
+      }
+      return new NetworkState(true, networkInfo.getType(), networkInfo.getSubtype());
+    }
+
+    /**
+     * Returns all connected networks.
+     * Only callable on Lollipop and newer releases.
+     */
+    @SuppressLint("NewApi")
+    Network[] getAllNetworks() {
+      if (connectivityManager == null) {
+        return new Network[0];
+      }
+      return connectivityManager.getAllNetworks();
+    }
+
+    List<NetworkInformation> getActiveNetworkList() {
+      if (!supportNetworkCallback()) {
+        return null;
+      }
+      ArrayList<NetworkInformation> netInfoList = new ArrayList<NetworkInformation>();
+      for (Network network : getAllNetworks()) {
+        NetworkInformation info = networkToInfo(network);
+        if (info.name != null && info.type != ConnectionType.CONNECTION_NONE
+            && info.type != ConnectionType.CONNECTION_UNKNOWN) {
+          netInfoList.add(info);
+        }
+      }
+      return netInfoList;
+    }
+
+    /**
+     * Returns the NetID of the current default network, or
+     * INVALID_NET_ID if no default network is currently connected.
+     * Only callable on Lollipop and newer releases.
+     */
+    @SuppressLint("NewApi")
+    int getDefaultNetId() {
+      if (!supportNetworkCallback()) {
+        return INVALID_NET_ID;
+      }
+      // Android Lollipop had no API to get the default network; only an
+      // API to return the NetworkInfo for the default network. To
+      // determine the default network we find the connected network whose
+      // type matches that of the default NetworkInfo.
+      final NetworkInfo defaultNetworkInfo = connectivityManager.getActiveNetworkInfo();
+      if (defaultNetworkInfo == null) {
+        return INVALID_NET_ID;
+      }
+      final Network[] networks = getAllNetworks();
+      int defaultNetId = INVALID_NET_ID;
+      for (Network network : networks) {
+        if (!hasInternetCapability(network)) {
+          continue;
+        }
+        final NetworkInfo networkInfo = connectivityManager.getNetworkInfo(network);
+        if (networkInfo != null && networkInfo.getType() == defaultNetworkInfo.getType()) {
+          // There should not be multiple connected networks of the
+          // same type. At least as of Android Marshmallow this is
+          // not supported. If this becomes supported this assertion
+          // may trigger. At that point we could consider using
+          // ConnectivityManager.getDefaultNetwork() though this
+          // may give confusing results with VPNs and is only
+          // available with Android Marshmallow.
+          assert defaultNetId == INVALID_NET_ID;
+          defaultNetId = networkToNetId(network);
+        }
+      }
+      return defaultNetId;
+    }
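+
+    // Illustrative sketch only (assumes API level 23): on Marshmallow and
+    // newer the default network can be queried directly, avoiding the
+    // type-matching loop above:
+    //
+    //   @TargetApi(23)
+    //   int getDefaultNetIdOnMarshmallow() {
+    //     Network network = connectivityManager.getActiveNetwork();
+    //     return network == null ? INVALID_NET_ID : networkToNetId(network);
+    //   }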
+
+    @SuppressLint("NewApi")
+    private NetworkInformation networkToInfo(Network network) {
+      LinkProperties linkProperties = connectivityManager.getLinkProperties(network);
+      NetworkInformation networkInformation = new NetworkInformation(
+          linkProperties.getInterfaceName(),
+          getConnectionType(getNetworkState(network)),
+          networkToNetId(network),
+          getIPAddresses(linkProperties));
+      return networkInformation;
+    }
+
+    /**
+     * Returns true if {@code network} can provide Internet access. Can be used to
+     * ignore specialized networks (e.g. IMS, FOTA).
+     */
+    @SuppressLint("NewApi")
+    boolean hasInternetCapability(Network network) {
+      if (connectivityManager == null) {
+        return false;
+      }
+      final NetworkCapabilities capabilities =
+          connectivityManager.getNetworkCapabilities(network);
+      return capabilities != null && capabilities.hasCapability(NET_CAPABILITY_INTERNET);
+    }
+
+    /** Only callable on Lollipop and newer releases. */
+    @SuppressLint("NewApi")
+    public void registerNetworkCallback(NetworkCallback networkCallback) {
+      connectivityManager.registerNetworkCallback(
+          new NetworkRequest.Builder().addCapability(NET_CAPABILITY_INTERNET).build(),
+          networkCallback);
+    }
+
+    /** Only callable on Lollipop and newer releases. */
+    @SuppressLint("NewApi")
+    public void requestMobileNetwork(NetworkCallback networkCallback) {
+      NetworkRequest.Builder builder = new NetworkRequest.Builder();
+      builder.addCapability(NET_CAPABILITY_INTERNET).addTransportType(TRANSPORT_CELLULAR);
+      connectivityManager.requestNetwork(builder.build(), networkCallback);
+    }
+
+    @SuppressLint("NewApi")
+    IPAddress[] getIPAddresses(LinkProperties linkProperties) {
+      IPAddress[] ipAddresses = new IPAddress[linkProperties.getLinkAddresses().size()];
+      int i = 0;
+      for (LinkAddress linkAddress : linkProperties.getLinkAddresses()) {
+        ipAddresses[i] = new IPAddress(linkAddress.getAddress().getAddress());
+        ++i;
+      }
+      return ipAddresses;
+    }
+
+    @SuppressLint("NewApi")
+    public void releaseCallback(NetworkCallback networkCallback) {
+      if (supportNetworkCallback()) {
+        Logging.d(TAG, "Unregister network callback");
+        connectivityManager.unregisterNetworkCallback(networkCallback);
+      }
+    }
+
+    public boolean supportNetworkCallback() {
+      return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && connectivityManager != null;
+    }
+  }
+
+  /** Queries the WifiManager for SSID of the current Wifi connection. */
+  static class WifiManagerDelegate {
+    private final Context context;
+    WifiManagerDelegate(Context context) {
+      this.context = context;
+    }
+
+    // For testing.
+    WifiManagerDelegate() {
+      // All the methods below should be overridden.
+      context = null;
+    }
+
+    String getWifiSSID() {
+      final Intent intent = context.registerReceiver(null,
+          new IntentFilter(WifiManager.NETWORK_STATE_CHANGED_ACTION));
+      if (intent != null) {
+        final WifiInfo wifiInfo = intent.getParcelableExtra(WifiManager.EXTRA_WIFI_INFO);
+        if (wifiInfo != null) {
+          final String ssid = wifiInfo.getSSID();
+          if (ssid != null) {
+            return ssid;
+          }
+        }
+      }
+      return "";
+    }
+
+  }
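+
+  // Minimal usage sketch (illustrative; variable names are hypothetical):
+  //
+  //   WifiManagerDelegate wifiDelegate = new WifiManagerDelegate(appContext);
+  //   String ssid = wifiDelegate.getWifiSSID();
+  //
+  // WifiInfo.getSSID() typically returns the name wrapped in quotes, e.g.
+  // "\"MyNetwork\"", and this helper returns "" when no Wifi info is available.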
+
+  static final int INVALID_NET_ID = -1;
+  private static final String TAG = "NetworkMonitorAutoDetect";
+
+  // Observer for the connection type change.
+  private final Observer observer;
+  private final IntentFilter intentFilter;
+  private final Context context;
+  // Used to request a mobile network. It does nothing except keep the
+  // callback so that the request can be released.
+  private final NetworkCallback mobileNetworkCallback;
+  // Used to receive updates on all networks.
+  private final NetworkCallback allNetworkCallback;
+  // connectivityManagerDelegate and wifiManagerDelegate are only non-final for testing.
+  private ConnectivityManagerDelegate connectivityManagerDelegate;
+  private WifiManagerDelegate wifiManagerDelegate;
+
+  private boolean isRegistered;
+  private ConnectionType connectionType;
+  private String wifiSSID;
+
+  /**
+   * Observer interface by which the observer is notified of network changes.
+   */
+  public static interface Observer {
+    /**
+     * Called when default network changes.
+     */
+    public void onConnectionTypeChanged(ConnectionType newConnectionType);
+    public void onNetworkConnect(NetworkInformation networkInfo);
+    public void onNetworkDisconnect(int networkHandle);
+  }
+
+  /**
+   * Constructs a NetworkMonitorAutoDetect. Should only be called on the UI thread.
+   */
+  @SuppressLint("NewApi")
+  public NetworkMonitorAutoDetect(Observer observer, Context context) {
+    this.observer = observer;
+    this.context = context;
+    connectivityManagerDelegate = new ConnectivityManagerDelegate(context);
+    wifiManagerDelegate = new WifiManagerDelegate(context);
+
+    final NetworkState networkState = connectivityManagerDelegate.getNetworkState();
+    connectionType = getConnectionType(networkState);
+    wifiSSID = getWifiSSID(networkState);
+    intentFilter = new IntentFilter(ConnectivityManager.CONNECTIVITY_ACTION);
+
+    registerReceiver();
+    if (connectivityManagerDelegate.supportNetworkCallback()) {
+      mobileNetworkCallback = new NetworkCallback();
+      connectivityManagerDelegate.requestMobileNetwork(mobileNetworkCallback);
+      allNetworkCallback = new SimpleNetworkCallback();
+      connectivityManagerDelegate.registerNetworkCallback(allNetworkCallback);
+    } else {
+      mobileNetworkCallback = null;
+      allNetworkCallback = null;
+    }
+  }
+
+  /**
+   * Allows overriding the ConnectivityManagerDelegate for tests.
+   */
+  void setConnectivityManagerDelegateForTests(ConnectivityManagerDelegate delegate) {
+    connectivityManagerDelegate = delegate;
+  }
+
+  /**
+   * Allows overriding the WifiManagerDelegate for tests.
+   */
+  void setWifiManagerDelegateForTests(WifiManagerDelegate delegate) {
+    wifiManagerDelegate = delegate;
+  }
+
+  /**
+   * Returns whether the object has registered to receive network connectivity intents.
+   * Visible for testing.
+   */
+  boolean isReceiverRegisteredForTesting() {
+    return isRegistered;
+  }
+
+  List<NetworkInformation> getActiveNetworkList() {
+    return connectivityManagerDelegate.getActiveNetworkList();
+  }
+
+  public void destroy() {
+    if (allNetworkCallback != null) {
+      connectivityManagerDelegate.releaseCallback(allNetworkCallback);
+    }
+    if (mobileNetworkCallback != null) {
+      connectivityManagerDelegate.releaseCallback(mobileNetworkCallback);
+    }
+    unregisterReceiver();
+  }
+
+  /**
+   * Registers a BroadcastReceiver in the given context.
+   */
+  private void registerReceiver() {
+    if (isRegistered) return;
+
+    isRegistered = true;
+    context.registerReceiver(this, intentFilter);
+  }
+
+  /**
+   * Unregisters the BroadcastReceiver in the given context.
+   */
+  private void unregisterReceiver() {
+    if (!isRegistered) return;
+
+    isRegistered = false;
+    context.unregisterReceiver(this);
+  }
+
+  public NetworkState getCurrentNetworkState() {
+    return connectivityManagerDelegate.getNetworkState();
+  }
+
+  /**
+   * Returns NetID of device's current default connected network used for
+   * communication.
+   * Only implemented on Lollipop and newer releases, returns INVALID_NET_ID
+   * when not implemented.
+   */
+  public int getDefaultNetId() {
+    return connectivityManagerDelegate.getDefaultNetId();
+  }
+
+  public static ConnectionType getConnectionType(NetworkState networkState) {
+    if (!networkState.isConnected()) {
+      return ConnectionType.CONNECTION_NONE;
+    }
+
+    switch (networkState.getNetworkType()) {
+      case ConnectivityManager.TYPE_ETHERNET:
+        return ConnectionType.CONNECTION_ETHERNET;
+      case ConnectivityManager.TYPE_WIFI:
+        return ConnectionType.CONNECTION_WIFI;
+      case ConnectivityManager.TYPE_WIMAX:
+        return ConnectionType.CONNECTION_4G;
+      case ConnectivityManager.TYPE_BLUETOOTH:
+        return ConnectionType.CONNECTION_BLUETOOTH;
+      case ConnectivityManager.TYPE_MOBILE:
+        // Use information from TelephonyManager to classify the connection.
+        switch (networkState.getNetworkSubType()) {
+          case TelephonyManager.NETWORK_TYPE_GPRS:
+          case TelephonyManager.NETWORK_TYPE_EDGE:
+          case TelephonyManager.NETWORK_TYPE_CDMA:
+          case TelephonyManager.NETWORK_TYPE_1xRTT:
+          case TelephonyManager.NETWORK_TYPE_IDEN:
+            return ConnectionType.CONNECTION_2G;
+          case TelephonyManager.NETWORK_TYPE_UMTS:
+          case TelephonyManager.NETWORK_TYPE_EVDO_0:
+          case TelephonyManager.NETWORK_TYPE_EVDO_A:
+          case TelephonyManager.NETWORK_TYPE_HSDPA:
+          case TelephonyManager.NETWORK_TYPE_HSUPA:
+          case TelephonyManager.NETWORK_TYPE_HSPA:
+          case TelephonyManager.NETWORK_TYPE_EVDO_B:
+          case TelephonyManager.NETWORK_TYPE_EHRPD:
+          case TelephonyManager.NETWORK_TYPE_HSPAP:
+            return ConnectionType.CONNECTION_3G;
+          case TelephonyManager.NETWORK_TYPE_LTE:
+            return ConnectionType.CONNECTION_4G;
+          default:
+            return ConnectionType.CONNECTION_UNKNOWN;
+        }
+      default:
+        return ConnectionType.CONNECTION_UNKNOWN;
+    }
+  }
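+
+  // For example, a connected LTE link reported as
+  // NetworkState(true, ConnectivityManager.TYPE_MOBILE, TelephonyManager.NETWORK_TYPE_LTE)
+  // maps to CONNECTION_4G above, while an unrecognized mobile subtype falls
+  // through to CONNECTION_UNKNOWN.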
+
+  private String getWifiSSID(NetworkState networkState) {
+    if (getConnectionType(networkState) != ConnectionType.CONNECTION_WIFI) return "";
+    return wifiManagerDelegate.getWifiSSID();
+  }
+
+  // BroadcastReceiver
+  @Override
+  public void onReceive(Context context, Intent intent) {
+    final NetworkState networkState = getCurrentNetworkState();
+    if (ConnectivityManager.CONNECTIVITY_ACTION.equals(intent.getAction())) {
+      connectionTypeChanged(networkState);
+    }
+  }
+
+  private void connectionTypeChanged(NetworkState networkState) {
+    ConnectionType newConnectionType = getConnectionType(networkState);
+    String newWifiSSID = getWifiSSID(networkState);
+    if (newConnectionType == connectionType && newWifiSSID.equals(wifiSSID)) return;
+
+    connectionType = newConnectionType;
+    wifiSSID = newWifiSSID;
+    Logging.d(TAG, "Network connectivity changed, type is: " + connectionType);
+    observer.onConnectionTypeChanged(newConnectionType);
+  }
+
+  /**
+   * Extracts NetID of network. Only available on Lollipop and newer releases.
+   */
+  @SuppressLint("NewApi")
+  private static int networkToNetId(Network network) {
+    // NOTE(pauljensen): This depends on Android framework implementation details.
+    // Fortunately this functionality is unlikely to ever change.
+    // TODO(honghaiz): When we update to Android M SDK, use Network.getNetworkHandle().
+    return Integer.parseInt(network.toString());
+  }
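+
+  // For example, on Lollipop Network.toString() is simply the decimal netId
+  // (e.g. "100"), so the parse above yields 100. Because this relies on an
+  // implementation detail, the TODO above prefers Network.getNetworkHandle()
+  // once the M SDK is available.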
+}
diff --git a/webrtc/api/java/android/org/webrtc/OWNERS b/webrtc/api/java/android/org/webrtc/OWNERS
new file mode 100644
index 0000000..4d31ffb
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/OWNERS
@@ -0,0 +1 @@
+magjed@webrtc.org
diff --git a/webrtc/api/java/android/org/webrtc/RendererCommon.java b/webrtc/api/java/android/org/webrtc/RendererCommon.java
new file mode 100644
index 0000000..5ada4cc
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/RendererCommon.java
@@ -0,0 +1,260 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.graphics.Point;
+import android.opengl.GLES20;
+import android.opengl.Matrix;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Static helper functions for renderer implementations.
+ */
+public class RendererCommon {
+  /** Interface for reporting rendering events. */
+  public static interface RendererEvents {
+    /**
+     * Callback fired once first frame is rendered.
+     */
+    public void onFirstFrameRendered();
+
+    /**
+     * Callback fired when rendered frame resolution or rotation has changed.
+     */
+    public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation);
+  }
+
+  /** Interface for rendering frames on an EGLSurface. */
+  public static interface GlDrawer {
+    /**
+     * Functions for drawing frames with different sources. The rendering surface target is
+     * implied by the current EGL context of the calling thread and requires no explicit argument.
+     * The coordinates specify the viewport location on the surface target.
+     */
+    void drawOes(int oesTextureId, float[] texMatrix, int x, int y, int width, int height);
+    void drawRgb(int textureId, float[] texMatrix, int x, int y, int width, int height);
+    void drawYuv(int[] yuvTextures, float[] texMatrix, int x, int y, int width, int height);
+
+    /**
+     * Release all GL resources. This needs to be done manually, otherwise resources may leak.
+     */
+    void release();
+  }
+
+  /**
+   * Helper class for uploading YUV bytebuffer frames to textures that handles stride > width. This
+   * class keeps an internal ByteBuffer to avoid unnecessary allocations for intermediate copies.
+   */
+  public static class YuvUploader {
+    // Intermediate copy buffer for uploading YUV frames that are not packed, i.e. stride > width.
+    // TODO(magjed): Investigate when GL_UNPACK_ROW_LENGTH is available, or make a custom shader
+    // that handles stride and compare performance with intermediate copy.
+    private ByteBuffer copyBuffer;
+
+    /**
+     * Upload |planes| into |outputYuvTextures|, taking stride into consideration.
+     * |outputYuvTextures| must have been generated in advance.
+     */
+    public void uploadYuvData(
+        int[] outputYuvTextures, int width, int height, int[] strides, ByteBuffer[] planes) {
+      final int[] planeWidths = new int[] {width, width / 2, width / 2};
+      final int[] planeHeights = new int[] {height, height / 2, height / 2};
+      // Make a first pass to see if we need a temporary copy buffer.
+      int copyCapacityNeeded = 0;
+      for (int i = 0; i < 3; ++i) {
+        if (strides[i] > planeWidths[i]) {
+          copyCapacityNeeded = Math.max(copyCapacityNeeded, planeWidths[i] * planeHeights[i]);
+        }
+      }
+      // Allocate copy buffer if necessary.
+      if (copyCapacityNeeded > 0
+          && (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) {
+        copyBuffer = ByteBuffer.allocateDirect(copyCapacityNeeded);
+      }
+      // Upload each plane.
+      for (int i = 0; i < 3; ++i) {
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, outputYuvTextures[i]);
+        // GLES only accepts packed data, i.e. stride == planeWidth.
+        final ByteBuffer packedByteBuffer;
+        if (strides[i] == planeWidths[i]) {
+          // Input is packed already.
+          packedByteBuffer = planes[i];
+        } else {
+          VideoRenderer.nativeCopyPlane(
+              planes[i], planeWidths[i], planeHeights[i], strides[i], copyBuffer, planeWidths[i]);
+          packedByteBuffer = copyBuffer;
+        }
+        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidths[i],
+            planeHeights[i], 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer);
+      }
+    }
+  }
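+
+  // Minimal usage sketch for YuvUploader (illustrative; assumes |yuvTextures|
+  // holds three already-generated GL_TEXTURE_2D ids and |frame| is a
+  // VideoRenderer.I420Frame):
+  //
+  //   YuvUploader uploader = new YuvUploader();
+  //   uploader.uploadYuvData(yuvTextures, frame.width, frame.height,
+  //       frame.yuvStrides, frame.yuvPlanes);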
+
+  // Types of video scaling:
+  // SCALE_ASPECT_FIT - video frame is scaled to fit the size of the view by
+  //    maintaining the aspect ratio (black borders may be displayed).
+  // SCALE_ASPECT_FILL - video frame is scaled to fill the size of the view by
+  //    maintaining the aspect ratio. Some portion of the video frame may be
+  //    clipped.
+  // SCALE_ASPECT_BALANCED - Compromise between FIT and FILL. Video frame will fill as much as
+  // possible of the view while maintaining aspect ratio, under the constraint that at least
+  // |BALANCED_VISIBLE_FRACTION| of the frame content will be shown.
+  public static enum ScalingType { SCALE_ASPECT_FIT, SCALE_ASPECT_FILL, SCALE_ASPECT_BALANCED }
+  // The minimum fraction of the frame content that will be shown for |SCALE_ASPECT_BALANCED|.
+  // This limits excessive cropping when adjusting display size.
+  private static final float BALANCED_VISIBLE_FRACTION = 0.5625f;
+
+  public static final float[] identityMatrix() {
+    return new float[] {
+        1, 0, 0, 0,
+        0, 1, 0, 0,
+        0, 0, 1, 0,
+        0, 0, 0, 1};
+  }
+
+  // Matrix with transform y' = 1 - y.
+  public static final float[] verticalFlipMatrix() {
+    return new float[] {
+        1,  0, 0, 0,
+        0, -1, 0, 0,
+        0,  0, 1, 0,
+        0,  1, 0, 1};
+  }
+
+  // Matrix with transform x' = 1 - x.
+  public static final float[] horizontalFlipMatrix() {
+    return new float[] {
+        -1, 0, 0, 0,
+         0, 1, 0, 0,
+         0, 0, 1, 0,
+         1, 0, 0, 1};
+  }
+
+  /**
+   * Returns texture matrix that will have the effect of rotating the frame |rotationDegree|
+   * clockwise when rendered.
+   */
+  public static float[] rotateTextureMatrix(float[] textureMatrix, float rotationDegree) {
+    final float[] rotationMatrix = new float[16];
+    Matrix.setRotateM(rotationMatrix, 0, rotationDegree, 0, 0, 1);
+    adjustOrigin(rotationMatrix);
+    return multiplyMatrices(textureMatrix, rotationMatrix);
+  }
+
+  /**
+   * Returns new matrix with the result of a * b.
+   */
+  public static float[] multiplyMatrices(float[] a, float[] b) {
+    final float[] resultMatrix = new float[16];
+    Matrix.multiplyMM(resultMatrix, 0, a, 0, b, 0);
+    return resultMatrix;
+  }
+
+  /**
+   * Returns layout transformation matrix that applies an optional mirror effect and compensates
+   * for video vs display aspect ratio.
+   */
+  public static float[] getLayoutMatrix(
+      boolean mirror, float videoAspectRatio, float displayAspectRatio) {
+    float scaleX = 1;
+    float scaleY = 1;
+    // Scale X or Y dimension so that video and display size have same aspect ratio.
+    if (displayAspectRatio > videoAspectRatio) {
+      scaleY = videoAspectRatio / displayAspectRatio;
+    } else {
+      scaleX = displayAspectRatio / videoAspectRatio;
+    }
+    // Apply optional horizontal flip.
+    if (mirror) {
+      scaleX *= -1;
+    }
+    final float matrix[] = new float[16];
+    Matrix.setIdentityM(matrix, 0);
+    Matrix.scaleM(matrix, 0, scaleX, scaleY, 1);
+    adjustOrigin(matrix);
+    return matrix;
+  }
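+
+  // Worked example: a 4:3 video (aspect ~1.33) on a 16:9 display (aspect
+  // ~1.78) hits the displayAspectRatio > videoAspectRatio branch, giving
+  // scaleY = 1.33 / 1.78 = 0.75: the video fills the display width and is
+  // letterboxed vertically.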
+
+  /**
+   * Calculate display size based on scaling type, video aspect ratio, and maximum display size.
+   */
+  public static Point getDisplaySize(ScalingType scalingType, float videoAspectRatio,
+      int maxDisplayWidth, int maxDisplayHeight) {
+    return getDisplaySize(convertScalingTypeToVisibleFraction(scalingType), videoAspectRatio,
+        maxDisplayWidth, maxDisplayHeight);
+  }
+
+  /**
+   * Move |matrix| transformation origin to (0.5, 0.5). This is the origin for texture coordinates
+   * that are in the range 0 to 1.
+   */
+  private static void adjustOrigin(float[] matrix) {
+    // Note that OpenGL is using column-major order.
+    // Pre translate with -0.5 to move coordinates to range [-0.5, 0.5].
+    matrix[12] -= 0.5f * (matrix[0] + matrix[4]);
+    matrix[13] -= 0.5f * (matrix[1] + matrix[5]);
+    // Post translate with 0.5 to move coordinates to range [0, 1].
+    matrix[12] += 0.5f;
+    matrix[13] += 0.5f;
+  }
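+
+  // Sanity check: feeding the plain y' = -y scale (matrix[5] = -1,
+  // matrix[13] = 0) through adjustOrigin() gives
+  // matrix[13] = 0 - 0.5 * (0 + (-1)) + 0.5 = 1, i.e. exactly
+  // verticalFlipMatrix() above: y' = 1 - y over the texture range [0, 1].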
+
+  /**
+   * Each scaling type has a one-to-one correspondence to a numeric minimum fraction of the video
+   * that must remain visible.
+   */
+  private static float convertScalingTypeToVisibleFraction(ScalingType scalingType) {
+    switch (scalingType) {
+      case SCALE_ASPECT_FIT:
+        return 1.0f;
+      case SCALE_ASPECT_FILL:
+        return 0.0f;
+      case SCALE_ASPECT_BALANCED:
+        return BALANCED_VISIBLE_FRACTION;
+      default:
+        throw new IllegalArgumentException();
+    }
+  }
+
+  /**
+   * Calculate display size based on minimum fraction of the video that must remain visible,
+   * video aspect ratio, and maximum display size.
+   */
+  private static Point getDisplaySize(float minVisibleFraction, float videoAspectRatio,
+      int maxDisplayWidth, int maxDisplayHeight) {
+    // If there is no constraint on the amount of cropping, fill the allowed display area.
+    if (minVisibleFraction == 0 || videoAspectRatio == 0) {
+      return new Point(maxDisplayWidth, maxDisplayHeight);
+    }
+    // Each dimension is constrained on max display size and how much we are allowed to crop.
+    final int width = Math.min(maxDisplayWidth,
+        Math.round(maxDisplayHeight / minVisibleFraction * videoAspectRatio));
+    final int height = Math.min(maxDisplayHeight,
+        Math.round(maxDisplayWidth / minVisibleFraction / videoAspectRatio));
+    return new Point(width, height);
+  }
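+
+  // Worked example: SCALE_ASPECT_FIT (minVisibleFraction = 1.0) with a 16:9
+  // video in a 1000x1000 area yields width = min(1000, round(1000 * 16 / 9f))
+  // = 1000 and height = min(1000, round(1000 * 9 / 16f)) = 563, so the whole
+  // frame is visible with borders above and below.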
+}
diff --git a/webrtc/api/java/android/org/webrtc/SurfaceTextureHelper.java b/webrtc/api/java/android/org/webrtc/SurfaceTextureHelper.java
new file mode 100644
index 0000000..b001d2a
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/SurfaceTextureHelper.java
@@ -0,0 +1,488 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.os.Build;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.SystemClock;
+
+import java.nio.ByteBuffer;
+import java.nio.FloatBuffer;
+import java.util.concurrent.Callable;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Helper class to create and synchronize access to a SurfaceTexture. The caller will get notified
+ * of new frames in onTextureFrameAvailable(), and should call returnTextureFrame() when done with
+ * the frame. Only one texture frame can be in flight at once, so returnTextureFrame() must be
+ * called in order to receive a new frame. Call disconnect() to stop receiving new frames and
+ * release all resources.
+ * Note that there is a C++ counterpart of this class that optionally can be used. It is used for
+ * wrapping texture frames into webrtc::VideoFrames and also handles calling returnTextureFrame()
+ * when the webrtc::VideoFrame is no longer used.
+ */
+class SurfaceTextureHelper {
+  private static final String TAG = "SurfaceTextureHelper";
+  /**
+   * Callback interface for being notified that a new texture frame is available. The calls will be
+   * made on a dedicated thread with a bound EGLContext. The thread will be the same throughout the
+   * lifetime of the SurfaceTextureHelper instance, but different from the thread calling the
+   * SurfaceTextureHelper constructor. The callee is not allowed to make another EGLContext current
+   * on the calling thread.
+   */
+  public interface OnTextureFrameAvailableListener {
+    void onTextureFrameAvailable(
+        int oesTextureId, float[] transformMatrix, long timestampNs);
+  }
+
+  public static SurfaceTextureHelper create(EglBase.Context sharedContext) {
+    return create(sharedContext, null);
+  }
+
+  /**
+   * Construct a new SurfaceTextureHelper sharing OpenGL resources with |sharedContext|. If
+   * |handler| is non-null, the callback will be executed on that handler's thread. If |handler| is
+   * null, a dedicated private thread is created for the callbacks.
+   */
+  public static SurfaceTextureHelper create(final EglBase.Context sharedContext,
+      final Handler handler) {
+    final Handler finalHandler;
+    if (handler != null) {
+      finalHandler = handler;
+    } else {
+      final HandlerThread thread = new HandlerThread(TAG);
+      thread.start();
+      finalHandler = new Handler(thread.getLooper());
+    }
+    // The onFrameAvailable() callback will be executed on the SurfaceTexture ctor thread. See:
+    // http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/graphics/SurfaceTexture.java#195.
+    // Therefore, in order to control the callback thread on API lvl < 21, the SurfaceTextureHelper
+    // is constructed on the |handler| thread.
+    return ThreadUtils.invokeUninterruptibly(finalHandler, new Callable<SurfaceTextureHelper>() {
+      @Override public SurfaceTextureHelper call() {
+        return new SurfaceTextureHelper(sharedContext, finalHandler, (handler == null));
+      }
+    });
+  }
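+
+  // Minimal usage sketch (illustrative; |eglContext| and |camera| are assumed
+  // to exist): attach a listener and hand the SurfaceTexture to a producer.
+  // Each delivered frame must be returned before a new one can be delivered.
+  //
+  //   final SurfaceTextureHelper helper = SurfaceTextureHelper.create(eglContext);
+  //   helper.setListener(new OnTextureFrameAvailableListener() {
+  //     @Override
+  //     public void onTextureFrameAvailable(
+  //         int oesTextureId, float[] transformMatrix, long timestampNs) {
+  //       // Use the OES texture, then hand it back.
+  //       helper.returnTextureFrame();
+  //     }
+  //   });
+  //   camera.setPreviewTexture(helper.getSurfaceTexture());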
+
+  // State for YUV conversion, instantiated on demand.
+  private static class YuvConverter {
+    private final EglBase eglBase;
+    private final GlShader shader;
+    private boolean released = false;
+
+    // Vertex coordinates in Normalized Device Coordinates, i.e.
+    // (-1, -1) is bottom-left and (1, 1) is top-right.
+    private static final FloatBuffer DEVICE_RECTANGLE =
+        GlUtil.createFloatBuffer(new float[] {
+              -1.0f, -1.0f,  // Bottom left.
+               1.0f, -1.0f,  // Bottom right.
+              -1.0f,  1.0f,  // Top left.
+               1.0f,  1.0f,  // Top right.
+            });
+
+    // Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
+    private static final FloatBuffer TEXTURE_RECTANGLE =
+        GlUtil.createFloatBuffer(new float[] {
+              0.0f, 0.0f,  // Bottom left.
+              1.0f, 0.0f,  // Bottom right.
+              0.0f, 1.0f,  // Top left.
+              1.0f, 1.0f   // Top right.
+            });
+
+    private static final String VERTEX_SHADER =
+        "varying vec2 interp_tc;\n"
+      + "attribute vec4 in_pos;\n"
+      + "attribute vec4 in_tc;\n"
+      + "\n"
+      + "uniform mat4 texMatrix;\n"
+      + "\n"
+      + "void main() {\n"
+      + "    gl_Position = in_pos;\n"
+      + "    interp_tc = (texMatrix * in_tc).xy;\n"
+      + "}\n";
+
+    private static final String FRAGMENT_SHADER =
+        "#extension GL_OES_EGL_image_external : require\n"
+      + "precision mediump float;\n"
+      + "varying vec2 interp_tc;\n"
+      + "\n"
+      + "uniform samplerExternalOES oesTex;\n"
+      // Difference in texture coordinate corresponding to one
+      // sub-pixel in the x direction.
+      + "uniform vec2 xUnit;\n"
+      // Color conversion coefficients, including constant term
+      + "uniform vec4 coeffs;\n"
+      + "\n"
+      + "void main() {\n"
+      // Since the alpha read from the texture is always 1, this could
+      // be written as a mat4 x vec4 multiply. However, that seems to
+      // give a worse framerate, possibly because the additional
+      // multiplies by 1.0 consume resources. TODO(nisse): Could also
+      // try to do it as a vec3 x mat3x4, followed by an add in of a
+      // constant vector.
+      + "  gl_FragColor.r = coeffs.a + dot(coeffs.rgb,\n"
+      + "      texture2D(oesTex, interp_tc - 1.5 * xUnit).rgb);\n"
+      + "  gl_FragColor.g = coeffs.a + dot(coeffs.rgb,\n"
+      + "      texture2D(oesTex, interp_tc - 0.5 * xUnit).rgb);\n"
+      + "  gl_FragColor.b = coeffs.a + dot(coeffs.rgb,\n"
+      + "      texture2D(oesTex, interp_tc + 0.5 * xUnit).rgb);\n"
+      + "  gl_FragColor.a = coeffs.a + dot(coeffs.rgb,\n"
+      + "      texture2D(oesTex, interp_tc + 1.5 * xUnit).rgb);\n"
+      + "}\n";
+
+    private int texMatrixLoc;
+    private int xUnitLoc;
+    private int coeffsLoc;
+
+    YuvConverter(EglBase.Context sharedContext) {
+      eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_RGBA_BUFFER);
+      eglBase.createDummyPbufferSurface();
+      eglBase.makeCurrent();
+
+      shader = new GlShader(VERTEX_SHADER, FRAGMENT_SHADER);
+      shader.useProgram();
+      texMatrixLoc = shader.getUniformLocation("texMatrix");
+      xUnitLoc = shader.getUniformLocation("xUnit");
+      coeffsLoc = shader.getUniformLocation("coeffs");
+      GLES20.glUniform1i(shader.getUniformLocation("oesTex"), 0);
+      GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
+      // Initialize vertex shader attributes.
+      shader.setVertexAttribArray("in_pos", 2, DEVICE_RECTANGLE);
+      // If the width is not a multiple of 4 pixels, the texture
+      // will be scaled up slightly and clipped at the right border.
+      shader.setVertexAttribArray("in_tc", 2, TEXTURE_RECTANGLE);
+      eglBase.detachCurrent();
+    }
+
+    synchronized void convert(ByteBuffer buf,
+        int width, int height, int stride, int textureId, float[] transformMatrix) {
+      if (released) {
+        throw new IllegalStateException(
+            "YuvConverter.convert called on released object");
+      }
+
+      // We draw into a buffer laid out like
+      //
+      //    +---------+
+      //    |         |
+      //    |  Y      |
+      //    |         |
+      //    |         |
+      //    +----+----+
+      //    | U  | V  |
+      //    |    |    |
+      //    +----+----+
+      //
+      // In memory, we use the same stride for all of Y, U and V. The
+      // U data starts at offset |height| * |stride| from the Y data,
+      // and the V data starts at offset |stride/2| from the U
+      // data, with rows of U and V data alternating.
+      //
+      // Now, it would have made sense to allocate a pixel buffer with
+      // a single byte per pixel (EGL10.EGL_COLOR_BUFFER_TYPE,
+      // EGL10.EGL_LUMINANCE_BUFFER), but that seems to be
+      // unsupported by devices. So do the following hack: Allocate an
+      // RGBA buffer, of width |stride|/4. To render each of these
+      // large pixels, sample the texture at 4 different x coordinates
+      // and store the results in the four components.
+      //
+      // Since the V data needs to start on a boundary of such a
+      // larger pixel, it is not sufficient that |stride| is even, it
+      // has to be a multiple of 8 pixels.
+
+      if (stride % 8 != 0) {
+        throw new IllegalArgumentException(
+            "Invalid stride, must be a multiple of 8");
+      }
+      if (stride < width) {
+        throw new IllegalArgumentException(
+            "Invalid stride, must be >= width");
+      }
+
+      int y_width = (width+3) / 4;
+      int uv_width = (width+7) / 8;
+      int uv_height = (height+1)/2;
+      int total_height = height + uv_height;
+      int size = stride * total_height;
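+      // Worked example: width = 640, height = 480, stride = 640 gives
+      // y_width = 160, uv_width = 80, uv_height = 240, total_height = 720 and
+      // size = 640 * 720 = 460800 bytes, i.e. exactly the size of an I420
+      // frame (width * height * 3 / 2).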
+
+      if (buf.capacity() < size) {
+        throw new IllegalArgumentException("YuvConverter.convert called with too small buffer");
+      }
+      // Produce a frame buffer starting at top-left corner, not
+      // bottom-left.
+      transformMatrix =
+          RendererCommon.multiplyMatrices(transformMatrix,
+              RendererCommon.verticalFlipMatrix());
+
+      // Create a new pbuffer surface with the correct size if needed.
+      if (eglBase.hasSurface()) {
+        if (eglBase.surfaceWidth() != stride/4 ||
+            eglBase.surfaceHeight() != total_height) {
+          eglBase.releaseSurface();
+          eglBase.createPbufferSurface(stride/4, total_height);
+        }
+      } else {
+        eglBase.createPbufferSurface(stride/4, total_height);
+      }
+
+      eglBase.makeCurrent();
+
+      GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+      GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
+      GLES20.glUniformMatrix4fv(texMatrixLoc, 1, false, transformMatrix, 0);
+
+      // Draw Y
+      GLES20.glViewport(0, 0, y_width, height);
+      // Matrix * (1;0;0;0) / width. Note that OpenGL uses column-major order.
+      GLES20.glUniform2f(xUnitLoc,
+          transformMatrix[0] / width,
+          transformMatrix[1] / width);
+      // Y'UV444 to RGB888, see
+      // https://en.wikipedia.org/wiki/YUV#Y.27UV444_to_RGB888_conversion.
+      // We use the ITU-R coefficients for U and V.
+      GLES20.glUniform4f(coeffsLoc, 0.299f, 0.587f, 0.114f, 0.0f);
+      GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+
+      // Draw U
+      GLES20.glViewport(0, height, uv_width, uv_height);
+      // Matrix * (1;0;0;0) / (2*width). Note that OpenGL uses column-major order.
+      GLES20.glUniform2f(xUnitLoc,
+          transformMatrix[0] / (2.0f*width),
+          transformMatrix[1] / (2.0f*width));
+      GLES20.glUniform4f(coeffsLoc, -0.169f, -0.331f, 0.499f, 0.5f);
+      GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+
+      // Draw V
+      GLES20.glViewport(stride/8, height, uv_width, uv_height);
+      GLES20.glUniform4f(coeffsLoc, 0.499f, -0.418f, -0.0813f, 0.5f);
+      GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+
+      GLES20.glReadPixels(0, 0, stride/4, total_height, GLES20.GL_RGBA,
+          GLES20.GL_UNSIGNED_BYTE, buf);
+
+      GlUtil.checkNoGLES2Error("YuvConverter.convert");
+
+      // Unbind texture. Reportedly needed on some devices to get
+      // the texture updated from the camera.
+      GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
+      eglBase.detachCurrent();
+    }
+
+    synchronized void release() {
+      released = true;
+      eglBase.makeCurrent();
+      shader.release();
+      eglBase.release();
+    }
+  }
+
+  private final Handler handler;
+  private boolean isOwningThread;
+  private final EglBase eglBase;
+  private final SurfaceTexture surfaceTexture;
+  private final int oesTextureId;
+  private YuvConverter yuvConverter;
+
+  private OnTextureFrameAvailableListener listener;
+  // The possible states of this class.
+  private boolean hasPendingTexture = false;
+  private volatile boolean isTextureInUse = false;
+  private boolean isQuitting = false;
+
+  private SurfaceTextureHelper(EglBase.Context sharedContext,
+      Handler handler, boolean isOwningThread) {
+    if (handler.getLooper().getThread() != Thread.currentThread()) {
+      throw new IllegalStateException("SurfaceTextureHelper must be created on the handler thread");
+    }
+    this.handler = handler;
+    this.isOwningThread = isOwningThread;
+
+    eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER);
+    eglBase.createDummyPbufferSurface();
+    eglBase.makeCurrent();
+
+    oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+    surfaceTexture = new SurfaceTexture(oesTextureId);
+  }
+
+  private YuvConverter getYuvConverter() {
+    // yuvConverter is assigned once
+    if (yuvConverter != null)
+      return yuvConverter;
+
+    synchronized(this) {
+      if (yuvConverter == null)
+        yuvConverter = new YuvConverter(eglBase.getEglBaseContext());
+      return yuvConverter;
+    }
+  }
+
+  /**
+   *  Start to stream textures to the given |listener|.
+   *  A Listener can only be set once.
+   */
+  public void setListener(OnTextureFrameAvailableListener listener) {
+    if (this.listener != null) {
+      throw new IllegalStateException("SurfaceTextureHelper listener has already been set.");
+    }
+    this.listener = listener;
+    surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
+      @Override
+      public void onFrameAvailable(SurfaceTexture surfaceTexture) {
+        hasPendingTexture = true;
+        tryDeliverTextureFrame();
+      }
+    });
+  }
+
+  /**
+   * Retrieve the underlying SurfaceTexture. The SurfaceTexture should be passed in to a video
+   * producer such as a camera or decoder.
+   */
+  public SurfaceTexture getSurfaceTexture() {
+    return surfaceTexture;
+  }
+
+  /**
+   * Call this function to signal that you are done with the frame received in
+   * onTextureFrameAvailable(). Only one texture frame can be in flight at once, so you must call
+   * this function in order to receive a new frame.
+   */
+  public void returnTextureFrame() {
+    handler.post(new Runnable() {
+      @Override public void run() {
+        isTextureInUse = false;
+        if (isQuitting) {
+          release();
+        } else {
+          tryDeliverTextureFrame();
+        }
+      }
+    });
+  }
+
+  public boolean isTextureInUse() {
+    return isTextureInUse;
+  }
+
+  /**
+   * Call disconnect() to stop receiving frames. Resources are released when the texture frame has
+   * been returned by a call to returnTextureFrame(). You are guaranteed to not receive any more
+   * onTextureFrameAvailable() after this function returns.
+   */
+  public void disconnect() {
+    if (!isOwningThread) {
+      throw new IllegalStateException("Must call disconnect(handler).");
+    }
+    if (handler.getLooper().getThread() == Thread.currentThread()) {
+      isQuitting = true;
+      if (!isTextureInUse) {
+        release();
+      }
+      return;
+    }
+    final CountDownLatch barrier = new CountDownLatch(1);
+    handler.postAtFrontOfQueue(new Runnable() {
+      @Override public void run() {
+        isQuitting = true;
+        barrier.countDown();
+        if (!isTextureInUse) {
+          release();
+        }
+      }
+    });
+    ThreadUtils.awaitUninterruptibly(barrier);
+  }
+
+  /**
+   * Call disconnect() to stop receiving frames and quit the looper used by |handler|.
+   * Resources are released when the texture frame has been returned by a call to
+   * returnTextureFrame(). You are guaranteed to not receive any more
+   * onTextureFrameAvailable() after this function returns.
+   */
+  public void disconnect(Handler handler) {
+    if (this.handler != handler) {
+      throw new IllegalStateException("Wrong handler.");
+    }
+    isOwningThread = true;
+    disconnect();
+  }
+
+  public void textureToYUV(ByteBuffer buf,
+      int width, int height, int stride, int textureId, float[] transformMatrix) {
+    if (textureId != oesTextureId) {
+      throw new IllegalStateException("textureToYUV called with unexpected textureId");
+    }
+
+    getYuvConverter().convert(buf, width, height, stride, textureId, transformMatrix);
+  }
+
+  private void tryDeliverTextureFrame() {
+    if (handler.getLooper().getThread() != Thread.currentThread()) {
+      throw new IllegalStateException("Wrong thread.");
+    }
+    if (isQuitting || !hasPendingTexture || isTextureInUse) {
+      return;
+    }
+    isTextureInUse = true;
+    hasPendingTexture = false;
+
+    eglBase.makeCurrent();
+    surfaceTexture.updateTexImage();
+
+    final float[] transformMatrix = new float[16];
+    surfaceTexture.getTransformMatrix(transformMatrix);
+    final long timestampNs = (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH)
+        ? surfaceTexture.getTimestamp()
+        : TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
+    listener.onTextureFrameAvailable(oesTextureId, transformMatrix, timestampNs);
+  }
+
+  private void release() {
+    if (handler.getLooper().getThread() != Thread.currentThread()) {
+      throw new IllegalStateException("Wrong thread.");
+    }
+    if (isTextureInUse || !isQuitting) {
+      throw new IllegalStateException("Unexpected release.");
+    }
+    synchronized (this) {
+      if (yuvConverter != null)
+        yuvConverter.release();
+    }
+    eglBase.makeCurrent();
+    GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
+    surfaceTexture.release();
+    eglBase.release();
+    handler.getLooper().quit();
+  }
+}
diff --git a/webrtc/api/java/android/org/webrtc/SurfaceViewRenderer.java b/webrtc/api/java/android/org/webrtc/SurfaceViewRenderer.java
new file mode 100644
index 0000000..fa199b3
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/SurfaceViewRenderer.java
@@ -0,0 +1,580 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.content.res.Resources.NotFoundException;
+import android.graphics.Point;
+import android.opengl.GLES20;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.util.AttributeSet;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+
+import org.webrtc.Logging;
+
+import java.util.concurrent.CountDownLatch;
+
+import javax.microedition.khronos.egl.EGLContext;
+
+/**
+ * Implements org.webrtc.VideoRenderer.Callbacks by displaying the video stream on a SurfaceView.
+ * renderFrame() is asynchronous to avoid blocking the calling thread.
+ * This class is thread safe and handles access from potentially four different threads:
+ * Interaction from the main app in init, release, setMirror, and setScalingType.
+ * Interaction from C++ webrtc::VideoRendererInterface in renderFrame and canApplyRotation.
+ * Interaction from the Activity lifecycle in surfaceCreated, surfaceChanged, and surfaceDestroyed.
+ * Interaction with the layout framework in onMeasure and onSizeChanged.
+ */
+public class SurfaceViewRenderer extends SurfaceView
+    implements SurfaceHolder.Callback, VideoRenderer.Callbacks {
+  private static final String TAG = "SurfaceViewRenderer";
+
+  // Dedicated render thread.
+  private HandlerThread renderThread;
+  // |renderThreadHandler| is a handler for communicating with |renderThread|, and is synchronized
+  // on |handlerLock|.
+  private final Object handlerLock = new Object();
+  private Handler renderThreadHandler;
+
+  // EGL and GL resources for drawing YUV/OES textures. After initialization, these are only accessed
+  // from the render thread.
+  private EglBase eglBase;
+  private final RendererCommon.YuvUploader yuvUploader = new RendererCommon.YuvUploader();
+  private RendererCommon.GlDrawer drawer;
+  // Texture ids for YUV frames. Allocated on first arrival of a YUV frame.
+  private int[] yuvTextures = null;
+
+  // Pending frame to render. Serves as a queue with size 1. Synchronized on |frameLock|.
+  private final Object frameLock = new Object();
+  private VideoRenderer.I420Frame pendingFrame;
+
+  // These variables are synchronized on |layoutLock|.
+  private final Object layoutLock = new Object();
+  // These dimension values are used to keep track of the state in these functions: onMeasure(),
+  // onLayout(), and surfaceChanged(). A new layout is triggered with requestLayout(). This happens
+  // internally when the incoming frame size changes. requestLayout() can also be triggered
+  // externally. The layout change is a two pass process: first onMeasure() is called in a top-down
+  // traversal of the View tree, followed by an onLayout() pass that is also top-down. During the
+  // onLayout() pass, each parent is responsible for positioning its children using the sizes
+  // computed in the measure pass.
+  // |desiredLayoutSize| is the layout size we have requested in onMeasure() and are waiting to
+  // take effect.
+  private Point desiredLayoutSize = new Point();
+  // |layoutSize|/|surfaceSize| is the actual current layout/surface size. They are updated in
+  // onLayout() and surfaceChanged() respectively.
+  private final Point layoutSize = new Point();
+  // TODO(magjed): Enable hardware scaler with SurfaceHolder.setFixedSize(). This will decouple
+  // layout and surface size.
+  private final Point surfaceSize = new Point();
+  // |isSurfaceCreated| keeps track of the current status in surfaceCreated()/surfaceDestroyed().
+  private boolean isSurfaceCreated;
+  // Last rendered frame dimensions, or 0 if no frame has been rendered yet.
+  private int frameWidth;
+  private int frameHeight;
+  private int frameRotation;
+  // |scalingType| determines how the video will fill the allowed layout area in onMeasure().
+  private RendererCommon.ScalingType scalingType = RendererCommon.ScalingType.SCALE_ASPECT_BALANCED;
+  // If true, mirrors the video stream horizontally.
+  private boolean mirror;
+  // Callback for reporting renderer events.
+  private RendererCommon.RendererEvents rendererEvents;
+
+  // These variables are synchronized on |statisticsLock|.
+  private final Object statisticsLock = new Object();
+  // Total number of video frames received in renderFrame() call.
+  private int framesReceived;
+  // Number of video frames dropped by renderFrame() because previous frame has not been rendered
+  // yet.
+  private int framesDropped;
+  // Number of rendered video frames.
+  private int framesRendered;
+  // Time in ns when the first video frame was rendered.
+  private long firstFrameTimeNs;
+  // Time in ns spent in renderFrameOnRenderThread() function.
+  private long renderTimeNs;
+
+  // Runnable for posting frames to render thread.
+  private final Runnable renderFrameRunnable = new Runnable() {
+    @Override public void run() {
+      renderFrameOnRenderThread();
+    }
+  };
+  // Runnable for clearing Surface to black.
+  private final Runnable makeBlackRunnable = new Runnable() {
+    @Override public void run() {
+      makeBlack();
+    }
+  };
+
+  /**
+   * Standard View constructor. In order to render something, you must first call init().
+   */
+  public SurfaceViewRenderer(Context context) {
+    super(context);
+    getHolder().addCallback(this);
+  }
+
+  /**
+   * Standard View constructor. In order to render something, you must first call init().
+   */
+  public SurfaceViewRenderer(Context context, AttributeSet attrs) {
+    super(context, attrs);
+    getHolder().addCallback(this);
+  }
+
+  /**
+   * Initialize this class, sharing resources with |sharedContext|. It is allowed to call init() to
+   * reinitialize the renderer after a previous init()/release() cycle.
+   */
+  public void init(
+      EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) {
+    init(sharedContext, rendererEvents, EglBase.CONFIG_PLAIN, new GlRectDrawer());
+  }
+
+  /**
+   * Initialize this class, sharing resources with |sharedContext|. The custom |drawer| will be used
+   * for drawing frames on the EGLSurface. This class is responsible for calling release() on
+   * |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
+   * init()/release() cycle.
+   */
+  public void init(EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents,
+      int[] configAttributes, RendererCommon.GlDrawer drawer) {
+    synchronized (handlerLock) {
+      if (renderThreadHandler != null) {
+        throw new IllegalStateException(getResourceName() + "Already initialized");
+      }
+      Logging.d(TAG, getResourceName() + "Initializing.");
+      this.rendererEvents = rendererEvents;
+      this.drawer = drawer;
+      renderThread = new HandlerThread(TAG);
+      renderThread.start();
+      eglBase = EglBase.create(sharedContext, configAttributes);
+      renderThreadHandler = new Handler(renderThread.getLooper());
+    }
+    tryCreateEglSurface();
+  }
+
+  /**
+   * Create and make an EGLSurface current if both init() and surfaceCreated() have been called.
+   */
+  public void tryCreateEglSurface() {
+    // |renderThreadHandler| is only created after |eglBase| is created in init(), so the
+    // following code will only execute if eglBase != null.
+    runOnRenderThread(new Runnable() {
+      @Override public void run() {
+        synchronized (layoutLock) {
+          if (isSurfaceCreated && !eglBase.hasSurface()) {
+            eglBase.createSurface(getHolder().getSurface());
+            eglBase.makeCurrent();
+            // Necessary for YUV frames with odd width.
+            GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
+          }
+        }
+      }
+    });
+  }
+
+  /**
+   * Block until any pending frame is returned and all GL resources released, even if an interrupt
+   * occurs. If an interrupt occurs during release(), the interrupt flag will be set. This function
+   * should be called before the Activity is destroyed and the EGLContext is still valid. If you
+   * don't call this function, the GL resources might leak.
+   */
+  public void release() {
+    final CountDownLatch eglCleanupBarrier = new CountDownLatch(1);
+    synchronized (handlerLock) {
+      if (renderThreadHandler == null) {
+        Logging.d(TAG, getResourceName() + "Already released");
+        return;
+      }
+      // Release EGL and GL resources on render thread.
+      // TODO(magjed): This might not be necessary - all OpenGL resources are automatically deleted
+      // when the EGL context is lost. It might be dangerous to delete them manually in
+      // Activity.onDestroy().
+      renderThreadHandler.postAtFrontOfQueue(new Runnable() {
+        @Override public void run() {
+          drawer.release();
+          drawer = null;
+          if (yuvTextures != null) {
+            GLES20.glDeleteTextures(3, yuvTextures, 0);
+            yuvTextures = null;
+          }
+          // Clear last rendered image to black.
+          makeBlack();
+          eglBase.release();
+          eglBase = null;
+          eglCleanupBarrier.countDown();
+        }
+      });
+      // Don't accept any more frames or messages to the render thread.
+      renderThreadHandler = null;
+    }
+    // Make sure the EGL/GL cleanup posted above is executed.
+    ThreadUtils.awaitUninterruptibly(eglCleanupBarrier);
+    renderThread.quit();
+    synchronized (frameLock) {
+      if (pendingFrame != null) {
+        VideoRenderer.renderFrameDone(pendingFrame);
+        pendingFrame = null;
+      }
+    }
+    // The |renderThread| cleanup is not safe to cancel and we need to wait until it's done.
+    ThreadUtils.joinUninterruptibly(renderThread);
+    renderThread = null;
+    // Reset statistics and event reporting.
+    synchronized (layoutLock) {
+      frameWidth = 0;
+      frameHeight = 0;
+      frameRotation = 0;
+      rendererEvents = null;
+    }
+    resetStatistics();
+  }
+
+  /**
+   * Reset statistics. This will reset the logged statistics in logStatistics(), and
+   * RendererEvents.onFirstFrameRendered() will be called for the next frame.
+   */
+  public void resetStatistics() {
+    synchronized (statisticsLock) {
+      framesReceived = 0;
+      framesDropped = 0;
+      framesRendered = 0;
+      firstFrameTimeNs = 0;
+      renderTimeNs = 0;
+    }
+  }
+
+  /**
+   * Set whether the video stream should be mirrored or not.
+   */
+  public void setMirror(final boolean mirror) {
+    synchronized (layoutLock) {
+      this.mirror = mirror;
+    }
+  }
+
+  /**
+   * Set how the video will fill the allowed layout area.
+   */
+  public void setScalingType(RendererCommon.ScalingType scalingType) {
+    synchronized (layoutLock) {
+      this.scalingType = scalingType;
+    }
+  }
+
+  // VideoRenderer.Callbacks interface.
+  @Override
+  public void renderFrame(VideoRenderer.I420Frame frame) {
+    synchronized (statisticsLock) {
+      ++framesReceived;
+    }
+    synchronized (handlerLock) {
+      if (renderThreadHandler == null) {
+        Logging.d(TAG, getResourceName()
+            + "Dropping frame - Not initialized or already released.");
+        VideoRenderer.renderFrameDone(frame);
+        return;
+      }
+      synchronized (frameLock) {
+        if (pendingFrame != null) {
+          // Drop old frame.
+          synchronized (statisticsLock) {
+            ++framesDropped;
+          }
+          VideoRenderer.renderFrameDone(pendingFrame);
+        }
+        pendingFrame = frame;
+        updateFrameDimensionsAndReportEvents(frame);
+        renderThreadHandler.post(renderFrameRunnable);
+      }
+    }
+  }
+
+  // Returns desired layout size given current measure specification and video aspect ratio.
+  private Point getDesiredLayoutSize(int widthSpec, int heightSpec) {
+    synchronized (layoutLock) {
+      final int maxWidth = getDefaultSize(Integer.MAX_VALUE, widthSpec);
+      final int maxHeight = getDefaultSize(Integer.MAX_VALUE, heightSpec);
+      final Point size =
+          RendererCommon.getDisplaySize(scalingType, frameAspectRatio(), maxWidth, maxHeight);
+      if (MeasureSpec.getMode(widthSpec) == MeasureSpec.EXACTLY) {
+        size.x = maxWidth;
+      }
+      if (MeasureSpec.getMode(heightSpec) == MeasureSpec.EXACTLY) {
+        size.y = maxHeight;
+      }
+      return size;
+    }
+  }
+
+  // View layout interface.
+  @Override
+  protected void onMeasure(int widthSpec, int heightSpec) {
+    synchronized (layoutLock) {
+      if (frameWidth == 0 || frameHeight == 0) {
+        super.onMeasure(widthSpec, heightSpec);
+        return;
+      }
+      desiredLayoutSize = getDesiredLayoutSize(widthSpec, heightSpec);
+      if (desiredLayoutSize.x != getMeasuredWidth() || desiredLayoutSize.y != getMeasuredHeight()) {
+        // Clear the surface as soon as possible before the layout change to avoid stretched video
+        // and other render artifacts. Don't wait for it to finish because the UI thread should
+        // never be blocked, so it's a best-effort attempt.
+        synchronized (handlerLock) {
+          if (renderThreadHandler != null) {
+            renderThreadHandler.postAtFrontOfQueue(makeBlackRunnable);
+          }
+        }
+      }
+      setMeasuredDimension(desiredLayoutSize.x, desiredLayoutSize.y);
+    }
+  }
+
+  @Override
+  protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
+    synchronized (layoutLock) {
+      layoutSize.x = right - left;
+      layoutSize.y = bottom - top;
+    }
+    // Might have a pending frame waiting for a layout of the correct size.
+    runOnRenderThread(renderFrameRunnable);
+  }
+
+  // SurfaceHolder.Callback interface.
+  @Override
+  public void surfaceCreated(final SurfaceHolder holder) {
+    Logging.d(TAG, getResourceName() + "Surface created.");
+    synchronized (layoutLock) {
+      isSurfaceCreated = true;
+    }
+    tryCreateEglSurface();
+  }
+
+  @Override
+  public void surfaceDestroyed(SurfaceHolder holder) {
+    Logging.d(TAG, getResourceName() + "Surface destroyed.");
+    synchronized (layoutLock) {
+      isSurfaceCreated = false;
+      surfaceSize.x = 0;
+      surfaceSize.y = 0;
+    }
+    runOnRenderThread(new Runnable() {
+      @Override public void run() {
+        eglBase.releaseSurface();
+      }
+    });
+  }
+
+  @Override
+  public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
+    Logging.d(TAG, getResourceName() + "Surface changed: " + width + "x" + height);
+    synchronized (layoutLock) {
+      surfaceSize.x = width;
+      surfaceSize.y = height;
+    }
+    // Might have a pending frame waiting for a surface of the correct size.
+    runOnRenderThread(renderFrameRunnable);
+  }
+
+  /**
+   * Private helper function to post tasks safely.
+   */
+  private void runOnRenderThread(Runnable runnable) {
+    synchronized (handlerLock) {
+      if (renderThreadHandler != null) {
+        renderThreadHandler.post(runnable);
+      }
+    }
+  }
+
+  private String getResourceName() {
+    try {
+      return getResources().getResourceEntryName(getId()) + ": ";
+    } catch (NotFoundException e) {
+      return "";
+    }
+  }
+
+  private void makeBlack() {
+    if (Thread.currentThread() != renderThread) {
+      throw new IllegalStateException(getResourceName() + "Wrong thread.");
+    }
+    if (eglBase != null && eglBase.hasSurface()) {
+      GLES20.glClearColor(0, 0, 0, 0);
+      GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+      eglBase.swapBuffers();
+    }
+  }
+
+  /**
+   * Returns true if the layout and surface size are consistent.
+   */
+  private boolean checkConsistentLayout() {
+    if (Thread.currentThread() != renderThread) {
+      throw new IllegalStateException(getResourceName() + "Wrong thread.");
+    }
+    synchronized (layoutLock) {
+      // Return false while we are in the middle of a layout change.
+      return layoutSize.equals(desiredLayoutSize) && surfaceSize.equals(layoutSize);
+    }
+  }
+
+  /**
+   * Renders and releases |pendingFrame|.
+   */
+  private void renderFrameOnRenderThread() {
+    if (Thread.currentThread() != renderThread) {
+      throw new IllegalStateException(getResourceName() + "Wrong thread.");
+    }
+    // Fetch and render |pendingFrame|.
+    final VideoRenderer.I420Frame frame;
+    synchronized (frameLock) {
+      if (pendingFrame == null) {
+        return;
+      }
+      frame = pendingFrame;
+      pendingFrame = null;
+    }
+    if (eglBase == null || !eglBase.hasSurface()) {
+      Logging.d(TAG, getResourceName() + "No surface to draw on");
+      VideoRenderer.renderFrameDone(frame);
+      return;
+    }
+    if (!checkConsistentLayout()) {
+      // Output intermediate black frames while the layout is updated.
+      makeBlack();
+      VideoRenderer.renderFrameDone(frame);
+      return;
+    }
+    // After a surface size change, the EGLSurface might still have a buffer of the old size in the
+    // pipeline. Querying the EGLSurface will show if the underlying buffer dimensions haven't yet
+    // changed. Such a buffer will be rendered incorrectly, so flush it with a black frame.
+    synchronized (layoutLock) {
+      if (eglBase.surfaceWidth() != surfaceSize.x || eglBase.surfaceHeight() != surfaceSize.y) {
+        makeBlack();
+      }
+    }
+
+    final long startTimeNs = System.nanoTime();
+    final float[] texMatrix;
+    synchronized (layoutLock) {
+      final float[] rotatedSamplingMatrix =
+          RendererCommon.rotateTextureMatrix(frame.samplingMatrix, frame.rotationDegree);
+      final float[] layoutMatrix = RendererCommon.getLayoutMatrix(
+          mirror, frameAspectRatio(), (float) layoutSize.x / layoutSize.y);
+      texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
+    }
+
+    // TODO(magjed): glClear() shouldn't be necessary since every pixel is covered anyway, but it's
+    // a workaround for bug 5147. Performance will be slightly worse.
+    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+    if (frame.yuvFrame) {
+      // Make sure YUV textures are allocated.
+      if (yuvTextures == null) {
+        yuvTextures = new int[3];
+        for (int i = 0; i < 3; i++) {
+          yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+        }
+      }
+      yuvUploader.uploadYuvData(
+          yuvTextures, frame.width, frame.height, frame.yuvStrides, frame.yuvPlanes);
+      drawer.drawYuv(yuvTextures, texMatrix, 0, 0, surfaceSize.x, surfaceSize.y);
+    } else {
+      drawer.drawOes(frame.textureId, texMatrix, 0, 0, surfaceSize.x, surfaceSize.y);
+    }
+
+    eglBase.swapBuffers();
+    VideoRenderer.renderFrameDone(frame);
+    synchronized (statisticsLock) {
+      if (framesRendered == 0) {
+        firstFrameTimeNs = startTimeNs;
+        synchronized (layoutLock) {
+          Logging.d(TAG, getResourceName() + "Reporting first rendered frame.");
+          if (rendererEvents != null) {
+            rendererEvents.onFirstFrameRendered();
+          }
+        }
+      }
+      ++framesRendered;
+      renderTimeNs += (System.nanoTime() - startTimeNs);
+      if (framesRendered % 300 == 0) {
+        logStatistics();
+      }
+    }
+  }
+
+  // Return current frame aspect ratio, taking rotation into account.
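+  // E.g. a 640x480 frame with 90 degree rotation is rendered as 480x640 and yields
+  // 480.0f / 640 = 0.75.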
+  private float frameAspectRatio() {
+    synchronized (layoutLock) {
+      if (frameWidth == 0 || frameHeight == 0) {
+        return 0.0f;
+      }
+      return (frameRotation % 180 == 0) ? (float) frameWidth / frameHeight
+                                        : (float) frameHeight / frameWidth;
+    }
+  }
+
+  // Update frame dimensions and report any changes to |rendererEvents|.
+  private void updateFrameDimensionsAndReportEvents(VideoRenderer.I420Frame frame) {
+    synchronized (layoutLock) {
+      if (frameWidth != frame.width || frameHeight != frame.height
+          || frameRotation != frame.rotationDegree) {
+        Logging.d(TAG, getResourceName() + "Reporting frame resolution changed to "
+            + frame.width + "x" + frame.height + " with rotation " + frame.rotationDegree);
+        if (rendererEvents != null) {
+          rendererEvents.onFrameResolutionChanged(frame.width, frame.height, frame.rotationDegree);
+        }
+        frameWidth = frame.width;
+        frameHeight = frame.height;
+        frameRotation = frame.rotationDegree;
+        post(new Runnable() {
+          @Override public void run() {
+            requestLayout();
+          }
+        });
+      }
+    }
+  }
+
+  private void logStatistics() {
+    synchronized (statisticsLock) {
+      Logging.d(TAG, getResourceName() + "Frames received: "
+          + framesReceived + ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
+      if (framesReceived > 0 && framesRendered > 0) {
+        final long timeSinceFirstFrameNs = System.nanoTime() - firstFrameTimeNs;
+        Logging.d(TAG, getResourceName() + "Duration: " + (int) (timeSinceFirstFrameNs / 1e6) +
+            " ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
+        Logging.d(TAG, getResourceName() + "Average render time: "
+            + (int) (renderTimeNs / (1000 * framesRendered)) + " us.");
+      }
+    }
+  }
+}
diff --git a/webrtc/api/java/android/org/webrtc/ThreadUtils.java b/webrtc/api/java/android/org/webrtc/ThreadUtils.java
new file mode 100644
index 0000000..e60ead9
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/ThreadUtils.java
@@ -0,0 +1,192 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.os.Handler;
+import android.os.SystemClock;
+
+import java.util.concurrent.Callable;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+public class ThreadUtils {
+  /**
+   * Utility class to be used for checking that a method is called on the correct thread.
+   */
+  public static class ThreadChecker {
+    private Thread thread = Thread.currentThread();
+
+    public void checkIsOnValidThread() {
+      if (thread == null) {
+        thread = Thread.currentThread();
+      }
+      if (Thread.currentThread() != thread) {
+        throw new IllegalStateException("Wrong thread");
+      }
+    }
+
+    public void detachThread() {
+      thread = null;
+    }
+  }
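+
+  // Usage sketch (hypothetical caller code): create the checker on the owning thread - or call
+  // detachThread() to bind it to the first thread that uses it - and call
+  // checkIsOnValidThread() at every entry point that must stay on that thread:
+  //
+  //   private final ThreadUtils.ThreadChecker checker = new ThreadUtils.ThreadChecker();
+  //   void onFrame() {
+  //     checker.checkIsOnValidThread();
+  //     // ...
+  //   }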
+
+  /**
+   * Utility interface to be used with executeUninterruptibly() to wait for blocking operations
+   * to complete without getting interrupted.
+   */
+  public interface BlockingOperation {
+    void run() throws InterruptedException;
+  }
+
+  /**
+   * Utility method to make sure a blocking operation is executed to completion without getting
+   * interrupted. This should be used in cases where the operation is waiting for some critical
+   * work, e.g. cleanup, that must complete before returning. If the thread is interrupted during
+   * the blocking operation, this function will re-run the operation until completion, and only then
+   * re-interrupt the thread.
+   */
+  public static void executeUninterruptibly(BlockingOperation operation) {
+    boolean wasInterrupted = false;
+    while (true) {
+      try {
+        operation.run();
+        break;
+      } catch (InterruptedException e) {
+        // Someone is asking us to return early at our convenience. We can't cancel this operation,
+        // but we should preserve the information and pass it along.
+        wasInterrupted = true;
+      }
+    }
+    // Pass interruption information along.
+    if (wasInterrupted) {
+      Thread.currentThread().interrupt();
+    }
+  }
+
+  public static boolean joinUninterruptibly(final Thread thread, long timeoutMs) {
+    final long startTimeMs = SystemClock.elapsedRealtime();
+    long timeRemainingMs = timeoutMs;
+    boolean wasInterrupted = false;
+    while (timeRemainingMs > 0) {
+      try {
+        thread.join(timeRemainingMs);
+        break;
+      } catch (InterruptedException e) {
+        // Someone is asking us to return early at our convenience. We can't cancel this operation,
+        // but we should preserve the information and pass it along.
+        wasInterrupted = true;
+        final long elapsedTimeMs = SystemClock.elapsedRealtime() - startTimeMs;
+        timeRemainingMs = timeoutMs - elapsedTimeMs;
+      }
+    }
+    // Pass interruption information along.
+    if (wasInterrupted) {
+      Thread.currentThread().interrupt();
+    }
+    return !thread.isAlive();
+  }
+
+  public static void joinUninterruptibly(final Thread thread) {
+    executeUninterruptibly(new BlockingOperation() {
+      @Override
+      public void run() throws InterruptedException {
+        thread.join();
+      }
+    });
+  }
+
+  public static void awaitUninterruptibly(final CountDownLatch latch) {
+    executeUninterruptibly(new BlockingOperation() {
+      @Override
+      public void run() throws InterruptedException {
+        latch.await();
+      }
+    });
+  }
+
+  public static boolean awaitUninterruptibly(CountDownLatch barrier, long timeoutMs) {
+    final long startTimeMs = SystemClock.elapsedRealtime();
+    long timeRemainingMs = timeoutMs;
+    boolean wasInterrupted = false;
+    boolean result = false;
+    do {
+      try {
+        result = barrier.await(timeRemainingMs, TimeUnit.MILLISECONDS);
+        break;
+      } catch (InterruptedException e) {
+        // Someone is asking us to return early at our convenience. We can't cancel this operation,
+        // but we should preserve the information and pass it along.
+        wasInterrupted = true;
+        final long elapsedTimeMs = SystemClock.elapsedRealtime() - startTimeMs;
+        timeRemainingMs = timeoutMs - elapsedTimeMs;
+      }
+    } while (timeRemainingMs > 0);
+    // Pass interruption information along.
+    if (wasInterrupted) {
+      Thread.currentThread().interrupt();
+    }
+    return result;
+  }
+
+  /**
+   * Post |callable| to |handler| and wait for the result.
+   */
+  public static <V> V invokeUninterruptibly(final Handler handler, final Callable<V> callable) {
+    class Result {
+      public V value;
+    }
+    final Result result = new Result();
+    final CountDownLatch barrier = new CountDownLatch(1);
+    handler.post(new Runnable() {
+      @Override public void run() {
+        try {
+          result.value = callable.call();
+        } catch (Exception e) {
+          throw new RuntimeException("Callable threw exception.", e);
+        }
+        barrier.countDown();
+      }
+    });
+    awaitUninterruptibly(barrier);
+    return result.value;
+  }
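+
+  // E.g. to fetch a value owned by the handler's thread (hypothetical caller code;
+  // |captureFormat| stands for any state confined to that thread):
+  //
+  //   final int width = ThreadUtils.invokeUninterruptibly(handler, new Callable<Integer>() {
+  //     @Override public Integer call() { return captureFormat.width; }
+  //   });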
+
+  /**
+   * Post |runner| to |handler| and wait for the result.
+   */
+  public static void invokeUninterruptibly(final Handler handler, final Runnable runner) {
+    final CountDownLatch barrier = new CountDownLatch(1);
+    handler.post(new Runnable() {
+      @Override public void run() {
+        runner.run();
+        barrier.countDown();
+      }
+    });
+    awaitUninterruptibly(barrier);
+  }
+}
diff --git a/webrtc/api/java/android/org/webrtc/VideoCapturerAndroid.java b/webrtc/api/java/android/org/webrtc/VideoCapturerAndroid.java
new file mode 100644
index 0000000..36f60ed
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/VideoCapturerAndroid.java
@@ -0,0 +1,793 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.SystemClock;
+import android.view.Surface;
+import android.view.WindowManager;
+
+import org.json.JSONException;
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+import org.webrtc.Logging;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+// Android specific implementation of VideoCapturer.
+// An instance of this class can be created by an application using
+// VideoCapturerAndroid.create();
+// This class extends VideoCapturer with a method to easily switch between the
+// front and back camera. It also provides methods for enumerating valid device
+// names.
+//
+// Threading notes: this class is called from C++ code, Android Camera callbacks, and possibly
+// arbitrary Java threads. All public entry points are thread safe, and delegate the work to the
+// camera thread. The internal *OnCameraThread() methods must check |camera| for null to determine
+// if the camera has been stopped.
+@SuppressWarnings("deprecation")
+public class VideoCapturerAndroid extends VideoCapturer implements
+    android.hardware.Camera.PreviewCallback,
+    SurfaceTextureHelper.OnTextureFrameAvailableListener {
+  private final static String TAG = "VideoCapturerAndroid";
+  private final static int CAMERA_OBSERVER_PERIOD_MS = 2000;
+  private final static int CAMERA_FREEZE_REPORT_TIMEOUT_MS = 6000;
+
+  private android.hardware.Camera camera;  // Only non-null while capturing.
+  private HandlerThread cameraThread;
+  private final Handler cameraThreadHandler;
+  private Context applicationContext;
+  // Synchronization lock for |id|.
+  private final Object cameraIdLock = new Object();
+  private int id;
+  private android.hardware.Camera.CameraInfo info;
+  private final CameraStatistics cameraStatistics;
+  // Remember the requested format in case we want to switch cameras.
+  private int requestedWidth;
+  private int requestedHeight;
+  private int requestedFramerate;
+  // The capture format will be the closest supported format to the requested format.
+  private CaptureFormat captureFormat;
+  private final Object pendingCameraSwitchLock = new Object();
+  private volatile boolean pendingCameraSwitch;
+  private CapturerObserver frameObserver = null;
+  private final CameraEventsHandler eventsHandler;
+  private boolean firstFrameReported;
+  // Arbitrary queue depth.  Higher number means more memory allocated & held,
+  // lower number means more sensitivity to processing time in the client (and
+  // potentially stalling the capturer if it runs out of buffers to write to).
+  private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;
+  private final Set<byte[]> queuedBuffers = new HashSet<byte[]>();
+  private final boolean isCapturingToTexture;
+  final SurfaceTextureHelper surfaceHelper; // Package visible for testing purposes.
+  // The camera API can output one old frame after the camera has been switched or the resolution
+  // has been changed. This flag is used for dropping the first frame after camera restart.
+  private boolean dropNextFrame = false;
+  // |openCameraOnCodecThreadRunner| is used to retry opening the camera if it is in use by
+  // another application when startCaptureOnCameraThread() is called.
+  private Runnable openCameraOnCodecThreadRunner;
+  private final static int MAX_OPEN_CAMERA_ATTEMPTS = 3;
+  private final static int OPEN_CAMERA_DELAY_MS = 500;
+  private int openCameraAttempts;
+
+  // Camera error callback.
+  private final android.hardware.Camera.ErrorCallback cameraErrorCallback =
+      new android.hardware.Camera.ErrorCallback() {
+    @Override
+    public void onError(int error, android.hardware.Camera camera) {
+      String errorMessage;
+      if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
+        errorMessage = "Camera server died!";
+      } else {
+        errorMessage = "Camera error: " + error;
+      }
+      Logging.e(TAG, errorMessage);
+      if (eventsHandler != null) {
+        eventsHandler.onCameraError(errorMessage);
+      }
+    }
+  };
+
+  // Camera observer - monitors camera framerate. Observer is executed on camera thread.
+  private final Runnable cameraObserver = new Runnable() {
+    private int freezePeriodCount;
+    @Override
+    public void run() {
+      int cameraFramesCount = cameraStatistics.getAndResetFrameCount();
+      int cameraFps = (cameraFramesCount * 1000 + CAMERA_OBSERVER_PERIOD_MS / 2)
+          / CAMERA_OBSERVER_PERIOD_MS;
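+      // E.g. 7 frames during a 2000 ms period: (7 * 1000 + 1000) / 2000 = 4 fps,
+      // i.e. rounded to the nearest integer fps.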
+
+      Logging.d(TAG, "Camera fps: " + cameraFps +".");
+      if (cameraFramesCount == 0) {
+        ++freezePeriodCount;
+        if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount > CAMERA_FREEZE_REPORT_TIMEOUT_MS
+            && eventsHandler != null) {
+          Logging.e(TAG, "Camera freezed.");
+          if (surfaceHelper.isTextureInUse()) {
+            // This can only happen if we are capturing to textures.
+            eventsHandler.onCameraFreezed("Camera failure. Client must return video buffers.");
+          } else {
+            eventsHandler.onCameraFreezed("Camera failure.");
+          }
+          return;
+        }
+      } else {
+        freezePeriodCount = 0;
+      }
+      cameraThreadHandler.postDelayed(this, CAMERA_OBSERVER_PERIOD_MS);
+    }
+  };
+
+  private static class CameraStatistics {
+    private int frameCount = 0;
+    private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
+
+    CameraStatistics() {
+      threadChecker.detachThread();
+    }
+
+    public void addFrame() {
+      threadChecker.checkIsOnValidThread();
+      ++frameCount;
+    }
+
+    public int getAndResetFrameCount() {
+      threadChecker.checkIsOnValidThread();
+      int count = frameCount;
+      frameCount = 0;
+      return count;
+    }
+  }
+
+  public interface CameraEventsHandler {
+    // Camera error handler - invoked when the camera cannot be opened
+    // or any camera exception occurs on the camera thread.
+    void onCameraError(String errorDescription);
+
+    // Invoked when the camera stops receiving frames.
+    void onCameraFreezed(String errorDescription);
+
+    // Callback invoked when the camera is opening.
+    void onCameraOpening(int cameraId);
+
+    // Callback invoked when the first camera frame is available after the camera has been opened.
+    void onFirstFrameAvailable();
+
+    // Callback invoked when the camera is closed.
+    void onCameraClosed();
+  }
+
+  // Camera switch handler - one of these functions is invoked with the result of switchCamera().
+  // The callback may be called on an arbitrary thread.
+  public interface CameraSwitchHandler {
+    // Invoked on success. |isFrontCamera| is true if the new camera is front facing.
+    void onCameraSwitchDone(boolean isFrontCamera);
+    // Invoked on failure, e.g. camera is stopped or only one camera available.
+    void onCameraSwitchError(String errorDescription);
+  }
+
+  public static VideoCapturerAndroid create(String name,
+      CameraEventsHandler eventsHandler) {
+    return VideoCapturerAndroid.create(name, eventsHandler, null);
+  }
+
+  public static VideoCapturerAndroid create(String name,
+      CameraEventsHandler eventsHandler, EglBase.Context sharedEglContext) {
+    final int cameraId = lookupDeviceName(name);
+    if (cameraId == -1) {
+      return null;
+    }
+
+    final VideoCapturerAndroid capturer = new VideoCapturerAndroid(cameraId, eventsHandler,
+        sharedEglContext);
+    capturer.setNativeCapturer(
+        nativeCreateVideoCapturer(capturer, capturer.surfaceHelper));
+    return capturer;
+  }
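+
+  // Hypothetical usage; device names can be enumerated with CameraEnumerationAndroid, and an
+  // empty |name| selects the first camera (see lookupDeviceName() below):
+  //
+  //   VideoCapturerAndroid capturer =
+  //       VideoCapturerAndroid.create(CameraEnumerationAndroid.getDeviceName(0), eventsHandler);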
+
+  public void printStackTrace() {
+    if (cameraThread != null) {
+      StackTraceElement[] cameraStackTraces = cameraThread.getStackTrace();
+      if (cameraStackTraces.length > 0) {
+        Logging.d(TAG, "VideoCapturerAndroid stacks trace:");
+        for (StackTraceElement stackTrace : cameraStackTraces) {
+          Logging.d(TAG, stackTrace.toString());
+        }
+      }
+    }
+  }
+
+  // Switch camera to the next valid camera id. This can only be called while
+  // the camera is running.
+  public void switchCamera(final CameraSwitchHandler handler) {
+    if (android.hardware.Camera.getNumberOfCameras() < 2) {
+      if (handler != null) {
+        handler.onCameraSwitchError("No camera to switch to.");
+      }
+      return;
+    }
+    synchronized (pendingCameraSwitchLock) {
+      if (pendingCameraSwitch) {
+        // Do not handle multiple camera switch requests, to avoid blocking the
+        // camera thread with too many queued switch requests.
+        Logging.w(TAG, "Ignoring camera switch request.");
+        if (handler != null) {
+          handler.onCameraSwitchError("Pending camera switch already in progress.");
+        }
+        return;
+      }
+      pendingCameraSwitch = true;
+    }
+    cameraThreadHandler.post(new Runnable() {
+      @Override public void run() {
+        if (camera == null) {
+          if (handler != null) {
+            handler.onCameraSwitchError("Camera is stopped.");
+          }
+          return;
+        }
+        switchCameraOnCameraThread();
+        synchronized (pendingCameraSwitchLock) {
+          pendingCameraSwitch = false;
+        }
+        if (handler != null) {
+          handler.onCameraSwitchDone(
+              info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT);
+        }
+      }
+    });
+  }
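+
+  // Hypothetical caller code; the handler may be invoked on an arbitrary thread:
+  //
+  //   capturer.switchCamera(new CameraSwitchHandler() {
+  //     @Override public void onCameraSwitchDone(boolean isFrontCamera) { /* Update UI. */ }
+  //     @Override public void onCameraSwitchError(String errorDescription) { /* Log error. */ }
+  //   });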
+
+  // Requests a new output format from the video capturer. Frames captured by
+  // the camera will be scaled and/or dropped by the video capturer.
+  // It does not matter if width and height are flipped, i.e. |width| = 640, |height| = 480
+  // produces the same result as |width| = 480, |height| = 640.
+  // TODO(magjed/perkj): Document what this function does. Change name?
+  public void onOutputFormatRequest(final int width, final int height, final int framerate) {
+    cameraThreadHandler.post(new Runnable() {
+      @Override public void run() {
+        onOutputFormatRequestOnCameraThread(width, height, framerate);
+      }
+    });
+  }
+
+  // Reconfigure the camera to capture in a new format. This should only be called while the camera
+  // is running.
+  public void changeCaptureFormat(final int width, final int height, final int framerate) {
+    cameraThreadHandler.post(new Runnable() {
+      @Override public void run() {
+        startPreviewOnCameraThread(width, height, framerate);
+      }
+    });
+  }
+
+  // Helper function to retrieve the current camera id synchronously. Note that the camera id might
+  // change at any point due to switchCamera() calls.
+  int getCurrentCameraId() {
+    synchronized (cameraIdLock) {
+      return id;
+    }
+  }
+
+  public List<CaptureFormat> getSupportedFormats() {
+    return CameraEnumerationAndroid.getSupportedFormats(getCurrentCameraId());
+  }
+
+  // Returns true if this VideoCapturer is set up to capture video frames to a SurfaceTexture.
+  public boolean isCapturingToTexture() {
+    return isCapturingToTexture;
+  }
+
+  // Called from native code.
+  private String getSupportedFormatsAsJson() throws JSONException {
+    return CameraEnumerationAndroid.getSupportedFormatsAsJson(getCurrentCameraId());
+  }
+
+  // Called from native VideoCapturer_nativeCreateVideoCapturer.
+  private VideoCapturerAndroid(int cameraId) {
+    this(cameraId, null, null);
+  }
+
+  private VideoCapturerAndroid(int cameraId, CameraEventsHandler eventsHandler,
+      EglBase.Context sharedContext) {
+    this.id = cameraId;
+    this.eventsHandler = eventsHandler;
+    cameraThread = new HandlerThread(TAG);
+    cameraThread.start();
+    cameraThreadHandler = new Handler(cameraThread.getLooper());
+    isCapturingToTexture = (sharedContext != null);
+    cameraStatistics = new CameraStatistics();
+    surfaceHelper = SurfaceTextureHelper.create(sharedContext, cameraThreadHandler);
+    if (isCapturingToTexture) {
+      surfaceHelper.setListener(this);
+    }
+    Logging.d(TAG, "VideoCapturerAndroid isCapturingToTexture : " + isCapturingToTexture);
+  }
+
+  private void checkIsOnCameraThread() {
+    if (Thread.currentThread() != cameraThread) {
+      throw new IllegalStateException("Wrong thread");
+    }
+  }
+
+  // Returns the camera index for camera with name |deviceName|, or -1 if no such camera can be
+  // found. If |deviceName| is empty, the first available device is used.
+  private static int lookupDeviceName(String deviceName) {
+    Logging.d(TAG, "lookupDeviceName: " + deviceName);
+    if (deviceName == null || android.hardware.Camera.getNumberOfCameras() == 0) {
+      return -1;
+    }
+    if (deviceName.isEmpty()) {
+      return 0;
+    }
+    for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
+      if (deviceName.equals(CameraEnumerationAndroid.getDeviceName(i))) {
+        return i;
+      }
+    }
+    return -1;
+  }
+
+  // Called by native code to quit the camera thread. This needs to be done manually, otherwise the
+  // thread and handler will not be garbage collected.
+  private void release() {
+    Logging.d(TAG, "release");
+    if (isReleased()) {
+      throw new IllegalStateException("Already released");
+    }
+    ThreadUtils.invokeUninterruptibly(cameraThreadHandler, new Runnable() {
+      @Override
+      public void run() {
+        if (camera != null) {
+          throw new IllegalStateException("Release called while camera is running");
+        }
+      }
+    });
+    surfaceHelper.disconnect(cameraThreadHandler);
+    cameraThread = null;
+  }
+
+  // Used for testing purposes to check if release() has been called.
+  public boolean isReleased() {
+    return (cameraThread == null);
+  }
+
+  // Called by native code.
+  //
+  // Note that this actually opens the camera, and Camera callbacks run on the
+  // thread that calls open(), so this is done on the CameraThread.
+  void startCapture(
+      final int width, final int height, final int framerate,
+      final Context applicationContext, final CapturerObserver frameObserver) {
+    Logging.d(TAG, "startCapture requested: " + width + "x" + height
+        + "@" + framerate);
+    if (applicationContext == null) {
+      throw new RuntimeException("applicationContext not set.");
+    }
+    if (frameObserver == null) {
+      throw new RuntimeException("frameObserver not set.");
+    }
+
+    cameraThreadHandler.post(new Runnable() {
+      @Override public void run() {
+        startCaptureOnCameraThread(width, height, framerate, frameObserver,
+            applicationContext);
+      }
+    });
+  }
+
+  private void startCaptureOnCameraThread(
+      final int width, final int height, final int framerate, final CapturerObserver frameObserver,
+      final Context applicationContext) {
+    Throwable error = null;
+    checkIsOnCameraThread();
+    if (camera != null) {
+      throw new RuntimeException("Camera has already been started.");
+    }
+    this.applicationContext = applicationContext;
+    this.frameObserver = frameObserver;
+    this.firstFrameReported = false;
+
+    try {
+      try {
+        synchronized (cameraIdLock) {
+          Logging.d(TAG, "Opening camera " + id);
+          if (eventsHandler != null) {
+            eventsHandler.onCameraOpening(id);
+          }
+          camera = android.hardware.Camera.open(id);
+          info = new android.hardware.Camera.CameraInfo();
+          android.hardware.Camera.getCameraInfo(id, info);
+        }
+      } catch (RuntimeException e) {
+        openCameraAttempts++;
+        if (openCameraAttempts < MAX_OPEN_CAMERA_ATTEMPTS) {
+          Logging.e(TAG, "Camera.open failed, retrying", e);
+          openCameraOnCodecThreadRunner = new Runnable() {
+            @Override public void run() {
+              startCaptureOnCameraThread(width, height, framerate, frameObserver,
+                  applicationContext);
+            }
+          };
+          cameraThreadHandler.postDelayed(openCameraOnCodecThreadRunner, OPEN_CAMERA_DELAY_MS);
+          return;
+        }
+        openCameraAttempts = 0;
+        throw e;
+      }
+
+      try {
+        camera.setPreviewTexture(surfaceHelper.getSurfaceTexture());
+      } catch (IOException e) {
+        Logging.e(TAG, "setPreviewTexture failed", error);
+        throw new RuntimeException(e);
+      }
+
+      Logging.d(TAG, "Camera orientation: " + info.orientation +
+          " .Device orientation: " + getDeviceOrientation());
+      camera.setErrorCallback(cameraErrorCallback);
+      startPreviewOnCameraThread(width, height, framerate);
+      frameObserver.onCapturerStarted(true);
+
+      // Start camera observer.
+      cameraThreadHandler.postDelayed(cameraObserver, CAMERA_OBSERVER_PERIOD_MS);
+      return;
+    } catch (RuntimeException e) {
+      error = e;
+    }
+    Logging.e(TAG, "startCapture failed", error);
+    stopCaptureOnCameraThread();
+    frameObserver.onCapturerStarted(false);
+    if (eventsHandler != null) {
+      eventsHandler.onCameraError("Camera can not be started.");
+    }
+    return;
+  }
+
+  // (Re)start preview with the closest supported format to |width| x |height| @ |framerate|.
+  private void startPreviewOnCameraThread(int width, int height, int framerate) {
+    checkIsOnCameraThread();
+    Logging.d(
+        TAG, "startPreviewOnCameraThread requested: " + width + "x" + height + "@" + framerate);
+    if (camera == null) {
+      Logging.e(TAG, "Calling startPreviewOnCameraThread on stopped camera.");
+      return;
+    }
+
+    requestedWidth = width;
+    requestedHeight = height;
+    requestedFramerate = framerate;
+
+    // Find closest supported format for |width| x |height| @ |framerate|.
+    final android.hardware.Camera.Parameters parameters = camera.getParameters();
+    final int[] range = CameraEnumerationAndroid.getFramerateRange(parameters, framerate * 1000);
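+    // Note: the Camera API expresses fps ranges scaled by 1000, e.g. 30 fps as 30000, which is
+    // why |framerate * 1000| is passed here and the range values are fed directly to
+    // setPreviewFpsRange() below.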
+    final android.hardware.Camera.Size previewSize =
+        CameraEnumerationAndroid.getClosestSupportedSize(
+            parameters.getSupportedPreviewSizes(), width, height);
+    final CaptureFormat captureFormat = new CaptureFormat(
+        previewSize.width, previewSize.height,
+        range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
+        range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
+
+    // If we are already using this capture format, we don't need to do anything.
+    if (captureFormat.isSameFormat(this.captureFormat)) {
+      return;
+    }
+
+    // Update camera parameters.
+    Logging.d(TAG, "isVideoStabilizationSupported: " +
+        parameters.isVideoStabilizationSupported());
+    if (parameters.isVideoStabilizationSupported()) {
+      parameters.setVideoStabilization(true);
+    }
+    // Note: setRecordingHint(true) actually decreases the frame rate on N5.
+    // parameters.setRecordingHint(true);
+    if (captureFormat.maxFramerate > 0) {
+      parameters.setPreviewFpsRange(captureFormat.minFramerate, captureFormat.maxFramerate);
+    }
+    parameters.setPreviewSize(captureFormat.width, captureFormat.height);
+
+    if (!isCapturingToTexture) {
+      parameters.setPreviewFormat(captureFormat.imageFormat);
+    }
+    // Picture size is for taking pictures and not for preview/video, but we need to set it anyway
+    // as a workaround for an aspect ratio problem on Nexus 7.
+    final android.hardware.Camera.Size pictureSize =
+        CameraEnumerationAndroid.getClosestSupportedSize(
+            parameters.getSupportedPictureSizes(), width, height);
+    parameters.setPictureSize(pictureSize.width, pictureSize.height);
+
+    // Temporarily stop preview if it's already running.
+    if (this.captureFormat != null) {
+      camera.stopPreview();
+      dropNextFrame = true;
+      // Calling |setPreviewCallbackWithBuffer| with null should clear the internal camera buffer
+      // queue, but sometimes we receive a frame with the old resolution after this call anyway.
+      camera.setPreviewCallbackWithBuffer(null);
+    }
+
+    // (Re)start preview.
+    Logging.d(TAG, "Start capturing: " + captureFormat);
+    this.captureFormat = captureFormat;
+
+    List<String> focusModes = parameters.getSupportedFocusModes();
+    if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
+      parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
+    }
+
+    camera.setParameters(parameters);
+    if (!isCapturingToTexture) {
+      queuedBuffers.clear();
+      final int frameSize = captureFormat.frameSize();
+      for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
+        final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
+        queuedBuffers.add(buffer.array());
+        camera.addCallbackBuffer(buffer.array());
+      }
+      camera.setPreviewCallbackWithBuffer(this);
+    }
+    camera.startPreview();
+  }
+
+  // Called by native code. Blocks until the camera is known to be stopped.
+  void stopCapture() throws InterruptedException {
+    Logging.d(TAG, "stopCapture");
+    final CountDownLatch barrier = new CountDownLatch(1);
+    cameraThreadHandler.post(new Runnable() {
+      @Override public void run() {
+        stopCaptureOnCameraThread();
+        barrier.countDown();
+      }
+    });
+    barrier.await();
+    Logging.d(TAG, "stopCapture done");
+  }
+
+  private void stopCaptureOnCameraThread() {
+    checkIsOnCameraThread();
+    Logging.d(TAG, "stopCaptureOnCameraThread");
+    if (openCameraOnCodecThreadRunner != null) {
+      cameraThreadHandler.removeCallbacks(openCameraOnCodecThreadRunner);
+    }
+    openCameraAttempts = 0;
+    if (camera == null) {
+      Logging.e(TAG, "Calling stopCapture() for already stopped camera.");
+      return;
+    }
+
+    cameraThreadHandler.removeCallbacks(cameraObserver);
+    cameraStatistics.getAndResetFrameCount();
+    Logging.d(TAG, "Stop preview.");
+    camera.stopPreview();
+    camera.setPreviewCallbackWithBuffer(null);
+    queuedBuffers.clear();
+    captureFormat = null;
+
+    Logging.d(TAG, "Release camera.");
+    camera.release();
+    camera = null;
+    if (eventsHandler != null) {
+      eventsHandler.onCameraClosed();
+    }
+  }
+
+  private void switchCameraOnCameraThread() {
+    checkIsOnCameraThread();
+    Logging.d(TAG, "switchCameraOnCameraThread");
+    stopCaptureOnCameraThread();
+    synchronized (cameraIdLock) {
+      id = (id + 1) % android.hardware.Camera.getNumberOfCameras();
+    }
+    dropNextFrame = true;
+    startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate, frameObserver,
+        applicationContext);
+    Logging.d(TAG, "switchCameraOnCameraThread done");
+  }
+
+  private void onOutputFormatRequestOnCameraThread(int width, int height, int framerate) {
+    checkIsOnCameraThread();
+    if (camera == null) {
+      Logging.e(TAG, "Calling onOutputFormatRequest() on stopped camera.");
+      return;
+    }
+    Logging.d(TAG, "onOutputFormatRequestOnCameraThread: " + width + "x" + height +
+        "@" + framerate);
+    frameObserver.onOutputFormatRequest(width, height, framerate);
+  }
+
+  // Exposed for testing purposes only.
+  Handler getCameraThreadHandler() {
+    return cameraThreadHandler;
+  }
+
+  private int getDeviceOrientation() {
+    int orientation = 0;
+
+    WindowManager wm = (WindowManager) applicationContext.getSystemService(
+        Context.WINDOW_SERVICE);
+    switch (wm.getDefaultDisplay().getRotation()) {
+      case Surface.ROTATION_90:
+        orientation = 90;
+        break;
+      case Surface.ROTATION_180:
+        orientation = 180;
+        break;
+      case Surface.ROTATION_270:
+        orientation = 270;
+        break;
+      case Surface.ROTATION_0:
+      default:
+        orientation = 0;
+        break;
+    }
+    return orientation;
+  }
+
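+  // Composes the camera mount orientation and the device rotation into the frame rotation.
+  // E.g. a back-facing camera mounted at 90 degrees on a device rotated to ROTATION_270
+  // yields (90 + (360 - 270)) % 360 = 180.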
+  private int getFrameOrientation() {
+    int rotation = getDeviceOrientation();
+    if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
+      rotation = 360 - rotation;
+    }
+    return (info.orientation + rotation) % 360;
+  }
+
+  // Called on the camera thread, so it must not be synchronized.
+  @Override
+  public void onPreviewFrame(byte[] data, android.hardware.Camera callbackCamera) {
+    checkIsOnCameraThread();
+    if (camera == null || !queuedBuffers.contains(data)) {
+      // The camera has been stopped or |data| is an old invalid buffer.
+      return;
+    }
+    if (camera != callbackCamera) {
+      throw new RuntimeException("Unexpected camera in callback!");
+    }
+
+    final long captureTimeNs =
+        TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
+
+    if (eventsHandler != null && !firstFrameReported) {
+      eventsHandler.onFirstFrameAvailable();
+      firstFrameReported = true;
+    }
+
+    cameraStatistics.addFrame();
+    frameObserver.onByteBufferFrameCaptured(data, captureFormat.width, captureFormat.height,
+        getFrameOrientation(), captureTimeNs);
+    camera.addCallbackBuffer(data);
+  }
+
+  @Override
+  public void onTextureFrameAvailable(
+      int oesTextureId, float[] transformMatrix, long timestampNs) {
+    checkIsOnCameraThread();
+    if (camera == null) {
+      // Camera is stopped, we need to return the buffer immediately.
+      surfaceHelper.returnTextureFrame();
+      return;
+    }
+    if (dropNextFrame) {
+      surfaceHelper.returnTextureFrame();
+      dropNextFrame = false;
+      return;
+    }
+
+    int rotation = getFrameOrientation();
+    if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) {
+      // Undo the mirror that the OS "helps" us with.
+      // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
+      transformMatrix =
+          RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.horizontalFlipMatrix());
+    }
+    cameraStatistics.addFrame();
+    frameObserver.onTextureFrameCaptured(captureFormat.width, captureFormat.height, oesTextureId,
+        transformMatrix, rotation, timestampNs);
+  }
+
+  // Interface used for providing callbacks to an observer.
+  interface CapturerObserver {
+    // Notifies whether the camera has been started successfully or not.
+    // Called on a Java thread owned by VideoCapturerAndroid.
+    void onCapturerStarted(boolean success);
+
+    // Delivers a captured frame. Called on a Java thread owned by
+    // VideoCapturerAndroid.
+    void onByteBufferFrameCaptured(byte[] data, int width, int height, int rotation,
+        long timeStamp);
+
+    // Delivers a captured frame in a texture with id |oesTextureId|. Called on a Java thread
+    // owned by VideoCapturerAndroid.
+    void onTextureFrameCaptured(
+        int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
+        long timestamp);
+
+    // Requests an output format from the video capturer. Frames captured by
+    // the camera will be scaled and/or dropped by the video capturer.
+    // Called on a Java thread owned by VideoCapturerAndroid.
+    void onOutputFormatRequest(int width, int height, int framerate);
+  }
+
+  // An implementation of CapturerObserver that forwards all calls from
+  // Java to the C layer.
+  static class NativeObserver implements CapturerObserver {
+    private final long nativeCapturer;
+
+    public NativeObserver(long nativeCapturer) {
+      this.nativeCapturer = nativeCapturer;
+    }
+
+    @Override
+    public void onCapturerStarted(boolean success) {
+      nativeCapturerStarted(nativeCapturer, success);
+    }
+
+    @Override
+    public void onByteBufferFrameCaptured(byte[] data, int width, int height,
+        int rotation, long timeStamp) {
+      nativeOnByteBufferFrameCaptured(nativeCapturer, data, data.length, width, height, rotation,
+          timeStamp);
+    }
+
+    @Override
+    public void onTextureFrameCaptured(
+        int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
+        long timestamp) {
+      nativeOnTextureFrameCaptured(nativeCapturer, width, height, oesTextureId, transformMatrix,
+          rotation, timestamp);
+    }
+
+    @Override
+    public void onOutputFormatRequest(int width, int height, int framerate) {
+      nativeOnOutputFormatRequest(nativeCapturer, width, height, framerate);
+    }
+
+    private native void nativeCapturerStarted(long nativeCapturer,
+        boolean success);
+    private native void nativeOnByteBufferFrameCaptured(long nativeCapturer,
+        byte[] data, int length, int width, int height, int rotation, long timeStamp);
+    private native void nativeOnTextureFrameCaptured(long nativeCapturer, int width, int height,
+        int oesTextureId, float[] transformMatrix, int rotation, long timestamp);
+    private native void nativeOnOutputFormatRequest(long nativeCapturer,
+        int width, int height, int framerate);
+  }
+
+  private static native long nativeCreateVideoCapturer(
+      VideoCapturerAndroid videoCapturer,
+      SurfaceTextureHelper surfaceHelper);
+}
diff --git a/webrtc/api/java/android/org/webrtc/VideoRendererGui.java b/webrtc/api/java/android/org/webrtc/VideoRendererGui.java
new file mode 100644
index 0000000..bb6f01c
--- /dev/null
+++ b/webrtc/api/java/android/org/webrtc/VideoRendererGui.java
@@ -0,0 +1,666 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import java.util.ArrayList;
+import java.util.concurrent.CountDownLatch;
+
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.opengles.GL10;
+
+import android.annotation.SuppressLint;
+import android.graphics.Point;
+import android.graphics.Rect;
+import android.opengl.EGL14;
+import android.opengl.GLES20;
+import android.opengl.GLSurfaceView;
+
+import org.webrtc.Logging;
+import org.webrtc.VideoRenderer.I420Frame;
+
+/**
+ * Efficiently renders YUV frames using the GPU for CSC.
+ * Clients should first call setView() to pass the GLSurfaceView, and then, for each
+ * video stream, create either a VideoRenderer instance using createGui() or a
+ * VideoRenderer.Callbacks interface using create().
+ * Only one instance of the class can be created.
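+ *
+ * <p>A hypothetical setup sketch (exact signatures are defined by the static methods of this
+ * class; the GLSurfaceView comes from the application, and the position/size arguments are in
+ * percent of the view size):
+ * <pre>
+ * VideoRendererGui.setView(glSurfaceView, eglContextReadyRunnable);
+ * VideoRenderer.Callbacks callbacks = VideoRendererGui.create(
+ *     0, 0, 100, 100, RendererCommon.ScalingType.SCALE_ASPECT_FIT, false);
+ * </pre>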
+ */
+public class VideoRendererGui implements GLSurfaceView.Renderer {
+  // |instance|, |instance.surface|, |eglContext|, and |eglContextReady| are synchronized on
+  // |VideoRendererGui.class|.
+  private static VideoRendererGui instance = null;
+  private static Runnable eglContextReady = null;
+  private static final String TAG = "VideoRendererGui";
+  private GLSurfaceView surface;
+  private static EglBase.Context eglContext = null;
+  // Indicates if GLSurfaceView.Renderer.onSurfaceCreated was called.
+  // If true then for every newly created yuv image renderer createTexture()
+  // should be called. The variable is accessed on multiple threads and
+  // all accesses are synchronized on yuvImageRenderers' object lock.
+  private boolean onSurfaceCreatedCalled;
+  private int screenWidth;
+  private int screenHeight;
+  // List of yuv renderers.
+  private final ArrayList<YuvImageRenderer> yuvImageRenderers;
+  // Render and draw threads.
+  private static Thread renderFrameThread;
+  private static Thread drawThread;
+
+  private VideoRendererGui(GLSurfaceView surface) {
+    this.surface = surface;
+    // Create an OpenGL ES 2.0 context.
+    surface.setPreserveEGLContextOnPause(true);
+    surface.setEGLContextClientVersion(2);
+    surface.setRenderer(this);
+    surface.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
+
+    yuvImageRenderers = new ArrayList<YuvImageRenderer>();
+  }
+
+  /**
+   * Class used to display a stream of YUV420 frames at a particular location
+   * on the screen. New video frames are sent to the display using the
+   * renderFrame() call.
+   */
+  private static class YuvImageRenderer implements VideoRenderer.Callbacks {
+    // |surface| is synchronized on |this|.
+    private GLSurfaceView surface;
+    private int id;
+    // TODO(magjed): Delete GL resources in release(). Must be synchronized with draw(). We are
+    // currently leaking resources to avoid a rare crash in release() where the EGLContext has
+    // become invalid beforehand.
+    private int[] yuvTextures = { 0, 0, 0 };
+    private final RendererCommon.YuvUploader yuvUploader = new RendererCommon.YuvUploader();
+    private final RendererCommon.GlDrawer drawer;
+    // Resources for making a deep copy of incoming OES texture frame.
+    private GlTextureFrameBuffer textureCopy;
+
+    // Pending frame to render. Serves as a queue with size 1. |pendingFrame| is accessed by two
+    // threads - frames are received in renderFrame() and consumed in draw(). Frames are dropped in
+    // renderFrame() if the previous frame has not been rendered yet.
+    private I420Frame pendingFrame;
+    private final Object pendingFrameLock = new Object();
+    // Type of video frame used for recent frame rendering.
+    private enum RendererType { RENDERER_YUV, RENDERER_TEXTURE }
+    private RendererType rendererType;
+    private RendererCommon.ScalingType scalingType;
+    private boolean mirror;
+    private RendererCommon.RendererEvents rendererEvents;
+    // Flag if renderFrame() was ever called.
+    boolean seenFrame;
+    // Total number of video frames received in renderFrame() call.
+    private int framesReceived;
+    // Number of video frames dropped by renderFrame() because previous
+    // frame has not been rendered yet.
+    private int framesDropped;
+    // Number of rendered video frames.
+    private int framesRendered;
+    // Time in ns when the first video frame was rendered.
+    private long startTimeNs = -1;
+    // Time in ns spent in draw() function.
+    private long drawTimeNs;
+    // Time in ns spent in draw() copying resources from |pendingFrame| - including uploading frame
+    // data to rendering planes.
+    private long copyTimeNs;
+    // The allowed view area in percentage of screen size.
+    private final Rect layoutInPercentage;
+    // The actual view area in pixels. It is a centered subrectangle of the rectangle defined by
+    // |layoutInPercentage|.
+    private final Rect displayLayout = new Rect();
+    // Cached layout transformation matrix, calculated from current layout parameters.
+    private float[] layoutMatrix;
+    // Flag if layout transformation matrix update is needed.
+    private boolean updateLayoutProperties;
+    // Layout properties update lock. Guards |updateLayoutProperties|, |screenWidth|,
+    // |screenHeight|, |videoWidth|, |videoHeight|, |rotationDegree|, |scalingType|, and |mirror|.
+    private final Object updateLayoutLock = new Object();
+    // Texture sampling matrix.
+    private float[] rotatedSamplingMatrix;
+    // Viewport dimensions.
+    private int screenWidth;
+    private int screenHeight;
+    // Video dimension.
+    private int videoWidth;
+    private int videoHeight;
+
+    // Number of degrees the frame should be rotated clockwise to be rendered
+    // upright.
+    private int rotationDegree;
+
+    private YuvImageRenderer(
+        GLSurfaceView surface, int id,
+        int x, int y, int width, int height,
+        RendererCommon.ScalingType scalingType, boolean mirror, RendererCommon.GlDrawer drawer) {
+      Logging.d(TAG, "YuvImageRenderer.Create id: " + id);
+      this.surface = surface;
+      this.id = id;
+      this.scalingType = scalingType;
+      this.mirror = mirror;
+      this.drawer = drawer;
+      layoutInPercentage = new Rect(x, y, Math.min(100, x + width), Math.min(100, y + height));
+      updateLayoutProperties = false;
+      rotationDegree = 0;
+    }
+
+    public synchronized void reset() {
+      seenFrame = false;
+    }
+
+    private synchronized void release() {
+      surface = null;
+      drawer.release();
+      synchronized (pendingFrameLock) {
+        if (pendingFrame != null) {
+          VideoRenderer.renderFrameDone(pendingFrame);
+          pendingFrame = null;
+        }
+      }
+    }
+
+    private void createTextures() {
+      Logging.d(TAG, "  YuvImageRenderer.createTextures " + id + " on GL thread:" +
+          Thread.currentThread().getId());
+
+      // Generate 3 texture ids for Y/U/V and place them into |yuvTextures|.
+      for (int i = 0; i < 3; i++) {
+        yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+      }
+      // Generate texture and framebuffer for offscreen texture copy.
+      textureCopy = new GlTextureFrameBuffer(GLES20.GL_RGB);
+    }
+
+    private void updateLayoutMatrix() {
+      synchronized (updateLayoutLock) {
+        if (!updateLayoutProperties) {
+          return;
+        }
+        // Initialize to the maximum allowed area. Round to integer coordinates towards the
+        // inside of the layout bounding box (ceil left/top, floor right/bottom) so the
+        // percentage constraints are never exceeded.
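+        // For example (hypothetical numbers): screenWidth = 1080 and left = 33
+        // give a left edge of (1080 * 33 + 99) / 100 = 357 = ceil(356.4).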
+        displayLayout.set(
+            (screenWidth * layoutInPercentage.left + 99) / 100,
+            (screenHeight * layoutInPercentage.top + 99) / 100,
+            (screenWidth * layoutInPercentage.right) / 100,
+            (screenHeight * layoutInPercentage.bottom) / 100);
+        Logging.d(TAG, "ID: "  + id + ". AdjustTextureCoords. Allowed display size: "
+            + displayLayout.width() + " x " + displayLayout.height() + ". Video: " + videoWidth
+            + " x " + videoHeight + ". Rotation: " + rotationDegree + ". Mirror: " + mirror);
+        final float videoAspectRatio = (rotationDegree % 180 == 0)
+            ? (float) videoWidth / videoHeight
+            : (float) videoHeight / videoWidth;
+        // Adjust display size based on |scalingType|.
+        final Point displaySize = RendererCommon.getDisplaySize(scalingType,
+            videoAspectRatio, displayLayout.width(), displayLayout.height());
+        displayLayout.inset((displayLayout.width() - displaySize.x) / 2,
+                            (displayLayout.height() - displaySize.y) / 2);
+        Logging.d(TAG, "  Adjusted display size: " + displayLayout.width() + " x "
+            + displayLayout.height());
+        layoutMatrix = RendererCommon.getLayoutMatrix(
+            mirror, videoAspectRatio, (float) displayLayout.width() / displayLayout.height());
+        updateLayoutProperties = false;
+        Logging.d(TAG, "  AdjustTextureCoords done");
+      }
+    }
+
+    private void draw() {
+      if (!seenFrame) {
+        // No frame received yet - nothing to render.
+        return;
+      }
+      long now = System.nanoTime();
+
+      final boolean isNewFrame;
+      synchronized (pendingFrameLock) {
+        isNewFrame = (pendingFrame != null);
+        if (isNewFrame && startTimeNs == -1) {
+          startTimeNs = now;
+        }
+
+        if (isNewFrame) {
+          rotatedSamplingMatrix = RendererCommon.rotateTextureMatrix(
+              pendingFrame.samplingMatrix, pendingFrame.rotationDegree);
+          if (pendingFrame.yuvFrame) {
+            rendererType = RendererType.RENDERER_YUV;
+            yuvUploader.uploadYuvData(yuvTextures, pendingFrame.width, pendingFrame.height,
+                pendingFrame.yuvStrides, pendingFrame.yuvPlanes);
+          } else {
+            rendererType = RendererType.RENDERER_TEXTURE;
+            // External texture rendering. Make a deep copy of the external texture.
+            // Reallocate offscreen texture if necessary.
+            textureCopy.setSize(pendingFrame.rotatedWidth(), pendingFrame.rotatedHeight());
+
+            // Bind our offscreen framebuffer.
+            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, textureCopy.getFrameBufferId());
+            GlUtil.checkNoGLES2Error("glBindFramebuffer");
+
+            // Copy the OES texture content. This will also normalize the sampling matrix.
+            drawer.drawOes(pendingFrame.textureId, rotatedSamplingMatrix,
+                0, 0, textureCopy.getWidth(), textureCopy.getHeight());
+            rotatedSamplingMatrix = RendererCommon.identityMatrix();
+
+            // Restore normal framebuffer.
+            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+            GLES20.glFinish();
+          }
+          copyTimeNs += (System.nanoTime() - now);
+          VideoRenderer.renderFrameDone(pendingFrame);
+          pendingFrame = null;
+        }
+      }
+
+      updateLayoutMatrix();
+      final float[] texMatrix =
+          RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
+      // OpenGL defaults to lower left origin - flip viewport position vertically.
+      final int viewportY = screenHeight - displayLayout.bottom;
+      if (rendererType == RendererType.RENDERER_YUV) {
+        drawer.drawYuv(yuvTextures, texMatrix,
+            displayLayout.left, viewportY, displayLayout.width(), displayLayout.height());
+      } else {
+        drawer.drawRgb(textureCopy.getTextureId(), texMatrix,
+            displayLayout.left, viewportY, displayLayout.width(), displayLayout.height());
+      }
+
+      if (isNewFrame) {
+        framesRendered++;
+        drawTimeNs += (System.nanoTime() - now);
+        if ((framesRendered % 300) == 0) {
+          logStatistics();
+        }
+      }
+    }
+
+    private void logStatistics() {
+      long timeSinceFirstFrameNs = System.nanoTime() - startTimeNs;
+      Logging.d(TAG, "ID: " + id + ". Type: " + rendererType +
+          ". Frames received: " + framesReceived +
+          ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
+      if (framesReceived > 0 && framesRendered > 0) {
+        Logging.d(TAG, "Duration: " + (int)(timeSinceFirstFrameNs / 1e6) +
+            " ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
+        Logging.d(TAG, "Draw time: " +
+            (int) (drawTimeNs / (1000 * framesRendered)) + " us. Copy time: " +
+            (int) (copyTimeNs / (1000 * framesReceived)) + " us");
+      }
+    }
+
+    public void setScreenSize(final int screenWidth, final int screenHeight) {
+      synchronized (updateLayoutLock) {
+        if (screenWidth == this.screenWidth && screenHeight == this.screenHeight) {
+          return;
+        }
+        Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setScreenSize: " +
+            screenWidth + " x " + screenHeight);
+        this.screenWidth = screenWidth;
+        this.screenHeight = screenHeight;
+        updateLayoutProperties = true;
+      }
+    }
+
+    public void setPosition(int x, int y, int width, int height,
+        RendererCommon.ScalingType scalingType, boolean mirror) {
+      final Rect layoutInPercentage =
+          new Rect(x, y, Math.min(100, x + width), Math.min(100, y + height));
+      synchronized (updateLayoutLock) {
+        if (layoutInPercentage.equals(this.layoutInPercentage) && scalingType == this.scalingType
+            && mirror == this.mirror) {
+          return;
+        }
+        Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setPosition: (" + x + ", " + y +
+            ") " +  width + " x " + height + ". Scaling: " + scalingType +
+            ". Mirror: " + mirror);
+        this.layoutInPercentage.set(layoutInPercentage);
+        this.scalingType = scalingType;
+        this.mirror = mirror;
+        updateLayoutProperties = true;
+      }
+    }
+
+    private void setSize(final int videoWidth, final int videoHeight, final int rotation) {
+      if (videoWidth == this.videoWidth && videoHeight == this.videoHeight
+          && rotation == rotationDegree) {
+        return;
+      }
+      if (rendererEvents != null) {
+        Logging.d(TAG, "ID: " + id +
+            ". Reporting frame resolution changed to " + videoWidth + " x " + videoHeight);
+        rendererEvents.onFrameResolutionChanged(videoWidth, videoHeight, rotation);
+      }
+
+      synchronized (updateLayoutLock) {
+        Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setSize: " +
+            videoWidth + " x " + videoHeight + " rotation " + rotation);
+
+        this.videoWidth = videoWidth;
+        this.videoHeight = videoHeight;
+        rotationDegree = rotation;
+        updateLayoutProperties = true;
+        Logging.d(TAG, "  YuvImageRenderer.setSize done.");
+      }
+    }
+
+    @Override
+    public synchronized void renderFrame(I420Frame frame) {
+      if (surface == null) {
+        // This object has been released.
+        VideoRenderer.renderFrameDone(frame);
+        return;
+      }
+      if (renderFrameThread == null) {
+        renderFrameThread = Thread.currentThread();
+      }
+      if (!seenFrame && rendererEvents != null) {
+        Logging.d(TAG, "ID: " + id + ". Reporting first rendered frame.");
+        rendererEvents.onFirstFrameRendered();
+      }
+      framesReceived++;
+      synchronized (pendingFrameLock) {
+        // Check input frame parameters.
+        if (frame.yuvFrame) {
+          if (frame.yuvStrides[0] < frame.width ||
+              frame.yuvStrides[1] < frame.width / 2 ||
+              frame.yuvStrides[2] < frame.width / 2) {
+            Logging.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " +
+                frame.yuvStrides[1] + ", " + frame.yuvStrides[2]);
+            VideoRenderer.renderFrameDone(frame);
+            return;
+          }
+        }
+
+        if (pendingFrame != null) {
+          // Skip rendering of this frame if previous frame was not rendered yet.
+          framesDropped++;
+          VideoRenderer.renderFrameDone(frame);
+          seenFrame = true;
+          return;
+        }
+        pendingFrame = frame;
+      }
+      setSize(frame.width, frame.height, frame.rotationDegree);
+      seenFrame = true;
+
+      // Request rendering.
+      surface.requestRender();
+    }
+  }
+
+  /** Passes GLSurfaceView to video renderer. */
+  public static synchronized void setView(GLSurfaceView surface,
+      Runnable eglContextReadyCallback) {
+    Logging.d(TAG, "VideoRendererGui.setView");
+    instance = new VideoRendererGui(surface);
+    eglContextReady = eglContextReadyCallback;
+  }
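+  // Note: setView() must be called before create()/createGui(); both attach
+  // new renderers to the GLSurfaceView registered here.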
+
+  public static synchronized EglBase.Context getEglBaseContext() {
+    return eglContext;
+  }
+
+  /** Releases GLSurfaceView video renderer. */
+  public static synchronized void dispose() {
+    if (instance == null) {
+      return;
+    }
+    Logging.d(TAG, "VideoRendererGui.dispose");
+    synchronized (instance.yuvImageRenderers) {
+      for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
+        yuvImageRenderer.release();
+      }
+      instance.yuvImageRenderers.clear();
+    }
+    renderFrameThread = null;
+    drawThread = null;
+    instance.surface = null;
+    eglContext = null;
+    eglContextReady = null;
+    instance = null;
+  }
+
+  /**
+   * Creates VideoRenderer with top left corner at (x, y) and resolution
+   * (width, height). All parameters are in percentage of screen resolution.
+   */
+  public static VideoRenderer createGui(int x, int y, int width, int height,
+      RendererCommon.ScalingType scalingType, boolean mirror) throws Exception {
+    YuvImageRenderer javaGuiRenderer = create(
+        x, y, width, height, scalingType, mirror);
+    return new VideoRenderer(javaGuiRenderer);
+  }
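+  // Example usage (a sketch; assumes setView() has been called and |track| is
+  // a hypothetical org.webrtc.VideoTrack):
+  //   VideoRenderer renderer = VideoRendererGui.createGui(
+  //       0, 0, 100, 100, RendererCommon.ScalingType.SCALE_ASPECT_FIT, false);
+  //   track.addRenderer(renderer);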
+
+  public static VideoRenderer.Callbacks createGuiRenderer(
+      int x, int y, int width, int height,
+      RendererCommon.ScalingType scalingType, boolean mirror) {
+    return create(x, y, width, height, scalingType, mirror);
+  }
+
+  /**
+   * Creates VideoRenderer.Callbacks with top left corner at (x, y) and
+   * resolution (width, height). All parameters are in percentage of
+   * screen resolution.
+   */
+  public static synchronized YuvImageRenderer create(int x, int y, int width, int height,
+      RendererCommon.ScalingType scalingType, boolean mirror) {
+    return create(x, y, width, height, scalingType, mirror, new GlRectDrawer());
+  }
+
+  /**
+   * Creates VideoRenderer.Callbacks with top left corner at (x, y) and resolution (width, height).
+   * All parameters are in percentage of screen resolution. The custom |drawer| will be used for
+   * drawing frames on the EGLSurface. This class is responsible for calling release() on |drawer|.
+   */
+  public static synchronized YuvImageRenderer create(int x, int y, int width, int height,
+      RendererCommon.ScalingType scalingType, boolean mirror, RendererCommon.GlDrawer drawer) {
+    // Check display region parameters.
+    if (x < 0 || x > 100 || y < 0 || y > 100 ||
+        width < 0 || width > 100 || height < 0 || height > 100 ||
+        x + width > 100 || y + height > 100) {
+      throw new RuntimeException("Incorrect window parameters.");
+    }
+
+    if (instance == null) {
+      throw new RuntimeException(
+          "Attempt to create yuv renderer before setting GLSurfaceView");
+    }
+    final YuvImageRenderer yuvImageRenderer = new YuvImageRenderer(
+        instance.surface, instance.yuvImageRenderers.size(),
+        x, y, width, height, scalingType, mirror, drawer);
+    synchronized (instance.yuvImageRenderers) {
+      if (instance.onSurfaceCreatedCalled) {
+        // onSurfaceCreated has already been called for VideoRendererGui -
+        // need to create texture for new image and add image to the
+        // rendering list.
+        final CountDownLatch countDownLatch = new CountDownLatch(1);
+        instance.surface.queueEvent(new Runnable() {
+          @Override
+          public void run() {
+            yuvImageRenderer.createTextures();
+            yuvImageRenderer.setScreenSize(
+                instance.screenWidth, instance.screenHeight);
+            countDownLatch.countDown();
+          }
+        });
+        // Wait for task completion.
+        try {
+          countDownLatch.await();
+        } catch (InterruptedException e) {
+          throw new RuntimeException(e);
+        }
+      }
+      // Add yuv renderer to rendering list.
+      instance.yuvImageRenderers.add(yuvImageRenderer);
+    }
+    return yuvImageRenderer;
+  }
+
+  public static synchronized void update(
+      VideoRenderer.Callbacks renderer, int x, int y, int width, int height,
+      RendererCommon.ScalingType scalingType, boolean mirror) {
+    Logging.d(TAG, "VideoRendererGui.update");
+    if (instance == null) {
+      throw new RuntimeException(
+          "Attempt to update yuv renderer before setting GLSurfaceView");
+    }
+    synchronized (instance.yuvImageRenderers) {
+      for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
+        if (yuvImageRenderer == renderer) {
+          yuvImageRenderer.setPosition(x, y, width, height, scalingType, mirror);
+        }
+      }
+    }
+  }
+
+  public static synchronized void setRendererEvents(
+      VideoRenderer.Callbacks renderer, RendererCommon.RendererEvents rendererEvents) {
+    Logging.d(TAG, "VideoRendererGui.setRendererEvents");
+    if (instance == null) {
+      throw new RuntimeException(
+          "Attempt to set renderer events before setting GLSurfaceView");
+    }
+    synchronized (instance.yuvImageRenderers) {
+      for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
+        if (yuvImageRenderer == renderer) {
+          yuvImageRenderer.rendererEvents = rendererEvents;
+        }
+      }
+    }
+  }
+
+  public static synchronized void remove(VideoRenderer.Callbacks renderer) {
+    Logging.d(TAG, "VideoRendererGui.remove");
+    if (instance == null) {
+      throw new RuntimeException(
+          "Attempt to remove renderer before setting GLSurfaceView");
+    }
+    synchronized (instance.yuvImageRenderers) {
+      final int index = instance.yuvImageRenderers.indexOf(renderer);
+      if (index == -1) {
+        Logging.w(TAG, "Couldn't remove renderer (not present in current list)");
+      } else {
+        instance.yuvImageRenderers.remove(index).release();
+      }
+    }
+  }
+
+  public static synchronized void reset(VideoRenderer.Callbacks renderer) {
+    Logging.d(TAG, "VideoRendererGui.reset");
+    if (instance == null) {
+      throw new RuntimeException(
+          "Attempt to reset renderer before setting GLSurfaceView");
+    }
+    synchronized (instance.yuvImageRenderers) {
+      for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
+        if (yuvImageRenderer == renderer) {
+          yuvImageRenderer.reset();
+        }
+      }
+    }
+  }
+
+  private static void printStackTrace(Thread thread, String threadName) {
+    if (thread != null) {
+      StackTraceElement[] stackTraces = thread.getStackTrace();
+      if (stackTraces.length > 0) {
+        Logging.d(TAG, threadName + " stacks trace:");
+        for (StackTraceElement stackTrace : stackTraces) {
+          Logging.d(TAG, stackTrace.toString());
+        }
+      }
+    }
+  }
+
+  public static synchronized void printStackTraces() {
+    if (instance == null) {
+      return;
+    }
+    printStackTrace(renderFrameThread, "Render frame thread");
+    printStackTrace(drawThread, "Draw thread");
+  }
+
+  @SuppressLint("NewApi")
+  @Override
+  public void onSurfaceCreated(GL10 unused, EGLConfig config) {
+    Logging.d(TAG, "VideoRendererGui.onSurfaceCreated");
+    // Store render EGL context.
+    synchronized (VideoRendererGui.class) {
+      if (EglBase14.isEGL14Supported()) {
+        eglContext = new EglBase14.Context(EGL14.eglGetCurrentContext());
+      } else {
+        eglContext = new EglBase10.Context(((EGL10) EGLContext.getEGL()).eglGetCurrentContext());
+      }
+
+      Logging.d(TAG, "VideoRendererGui EGL Context: " + eglContext);
+    }
+
+    synchronized (yuvImageRenderers) {
+      // Create textures for all images.
+      for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
+        yuvImageRenderer.createTextures();
+      }
+      onSurfaceCreatedCalled = true;
+    }
+    GlUtil.checkNoGLES2Error("onSurfaceCreated done");
+    GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
+    GLES20.glClearColor(0.15f, 0.15f, 0.15f, 1.0f);
+
+    // Fire EGL context ready event.
+    synchronized (VideoRendererGui.class) {
+      if (eglContextReady != null) {
+        eglContextReady.run();
+      }
+    }
+  }
+
+  @Override
+  public void onSurfaceChanged(GL10 unused, int width, int height) {
+    Logging.d(TAG, "VideoRendererGui.onSurfaceChanged: " +
+        width + " x " + height + "  ");
+    screenWidth = width;
+    screenHeight = height;
+    synchronized (yuvImageRenderers) {
+      for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
+        yuvImageRenderer.setScreenSize(screenWidth, screenHeight);
+      }
+    }
+  }
+
+  @Override
+  public void onDrawFrame(GL10 unused) {
+    if (drawThread == null) {
+      drawThread = Thread.currentThread();
+    }
+    GLES20.glViewport(0, 0, screenWidth, screenHeight);
+    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+    synchronized (yuvImageRenderers) {
+      for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
+        yuvImageRenderer.draw();
+      }
+    }
+  }
+
+}
diff --git a/webrtc/api/java/jni/OWNERS b/webrtc/api/java/jni/OWNERS
new file mode 100644
index 0000000..4d31ffb
--- /dev/null
+++ b/webrtc/api/java/jni/OWNERS
@@ -0,0 +1 @@
+magjed@webrtc.org
diff --git a/webrtc/api/java/jni/androidmediacodeccommon.h b/webrtc/api/java/jni/androidmediacodeccommon.h
new file mode 100644
index 0000000..7044fb4
--- /dev/null
+++ b/webrtc/api/java/jni/androidmediacodeccommon.h
@@ -0,0 +1,113 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_ANDROIDMEDIACODECCOMMON_H_
+#define WEBRTC_API_JAVA_JNI_ANDROIDMEDIACODECCOMMON_H_
+
+#include <android/log.h>
+#include <string>
+
+#include "webrtc/base/thread.h"
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/api/java/jni/jni_helpers.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+
+namespace webrtc_jni {
+
+// Uncomment this define to enable verbose logging for every encoded/decoded
+// video frame.
+//#define TRACK_BUFFER_TIMING
+
+#define TAG_COMMON "MediaCodecVideo"
+
+// Color formats supported by encoder - should mirror supportedColorList
+// from MediaCodecVideoEncoder.java
+enum COLOR_FORMATTYPE {
+  COLOR_FormatYUV420Planar = 0x13,
+  COLOR_FormatYUV420SemiPlanar = 0x15,
+  COLOR_QCOM_FormatYUV420SemiPlanar = 0x7FA30C00,
+  // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
+  // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
+  // This format is presumably similar to COLOR_FormatYUV420SemiPlanar,
+  // but requires some (16, 32?) byte alignment.
+  COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04
+};
+
+// Arbitrary interval to poll the codec for new outputs.
+enum { kMediaCodecPollMs = 10 };
+// Media codec maximum output buffer ready timeout.
+enum { kMediaCodecTimeoutMs = 1000 };
+// Interval to print codec statistics (bitrate, fps, encoding/decoding time).
+enum { kMediaCodecStatisticsIntervalMs = 3000 };
+// Maximum number of pending frames for the VP8 decoder.
+enum { kMaxPendingFramesVp8 = 1 };
+// Maximum number of pending frames for the VP9 decoder.
+enum { kMaxPendingFramesVp9 = 1 };
+// Maximum number of pending frames for the H.264 decoder.
+enum { kMaxPendingFramesH264 = 8 };
+// Maximum number of decoded frames for which per-frame logging is enabled.
+enum { kMaxDecodedLogFrames = 10 };
+// Maximum number of encoded frames for which per-frame logging is enabled.
+enum { kMaxEncodedLogFrames = 10 };
+
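+// Returns the current time in milliseconds (TickTime ticks are in
+// nanoseconds, hence the division by 1000000).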
+static inline int64_t GetCurrentTimeMs() {
+  return webrtc::TickTime::Now().Ticks() / 1000000LL;
+}
+
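+// Allows blocking calls on the current thread, which may have been disallowed
+// elsewhere; the codec threads need this since MediaCodec operations block.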
+static inline void AllowBlockingCalls() {
+  rtc::Thread* current_thread = rtc::Thread::Current();
+  if (current_thread != NULL)
+    current_thread->SetAllowBlockingCalls(true);
+}
+
+// Return the (singleton) Java Enum object corresponding to |index|;
+// |state_class_fragment| is something like "MediaSource$State".
+static inline jobject JavaEnumFromIndexAndClassName(
+    JNIEnv* jni, const std::string& state_class_fragment, int index) {
+  const std::string state_class = "org/webrtc/" + state_class_fragment;
+  return JavaEnumFromIndex(jni, FindClass(jni, state_class.c_str()),
+                           state_class, index);
+}
+
+// Checks for any Java exception, prints stack backtrace and clears
+// currently thrown exception.
+static inline bool CheckException(JNIEnv* jni) {
+  if (jni->ExceptionCheck()) {
+    LOG_TAG(rtc::LS_ERROR, TAG_COMMON) << "Java JNI exception.";
+    jni->ExceptionDescribe();
+    jni->ExceptionClear();
+    return true;
+  }
+  return false;
+}
+
+}  // namespace webrtc_jni
+
+#endif  // WEBRTC_API_JAVA_JNI_ANDROIDMEDIACODECCOMMON_H_
diff --git a/webrtc/api/java/jni/androidmediadecoder_jni.cc b/webrtc/api/java/jni/androidmediadecoder_jni.cc
new file mode 100644
index 0000000..b9973be
--- /dev/null
+++ b/webrtc/api/java/jni/androidmediadecoder_jni.cc
@@ -0,0 +1,945 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <algorithm>
+#include <vector>
+
+// NOTICE: androidmediadecoder_jni.h must be included before
+// androidmediacodeccommon.h to avoid build errors.
+#include "webrtc/api/java/jni/androidmediadecoder_jni.h"
+
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/convert_from.h"
+#include "third_party/libyuv/include/libyuv/video_common.h"
+#include "webrtc/api/java/jni/androidmediacodeccommon.h"
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/api/java/jni/native_handle_impl.h"
+#include "webrtc/api/java/jni/surfacetexturehelper_jni.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/timeutils.h"
+#include "webrtc/common_video/include/i420_buffer_pool.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
+#include "webrtc/system_wrappers/include/logcat_trace_context.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+
+using rtc::Bind;
+using rtc::Thread;
+using rtc::ThreadManager;
+using rtc::scoped_ptr;
+
+using webrtc::CodecSpecificInfo;
+using webrtc::DecodedImageCallback;
+using webrtc::EncodedImage;
+using webrtc::VideoFrame;
+using webrtc::RTPFragmentationHeader;
+using webrtc::TickTime;
+using webrtc::VideoCodec;
+using webrtc::VideoCodecType;
+using webrtc::kVideoCodecH264;
+using webrtc::kVideoCodecVP8;
+using webrtc::kVideoCodecVP9;
+
+namespace webrtc_jni {
+
+// Logging macros.
+#define TAG_DECODER "MediaCodecVideoDecoder"
+#ifdef TRACK_BUFFER_TIMING
+#define ALOGV(...) \
+  __android_log_print(ANDROID_LOG_VERBOSE, TAG_DECODER, __VA_ARGS__)
+#else
+#define ALOGV(...)
+#endif
+#define ALOGD LOG_TAG(rtc::LS_INFO, TAG_DECODER)
+#define ALOGW LOG_TAG(rtc::LS_WARNING, TAG_DECODER)
+#define ALOGE LOG_TAG(rtc::LS_ERROR, TAG_DECODER)
+
+enum { kMaxWarningLogFrames = 2 };
+
+class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
+                               public rtc::MessageHandler {
+ public:
+  explicit MediaCodecVideoDecoder(
+      JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context);
+  virtual ~MediaCodecVideoDecoder();
+
+  int32_t InitDecode(const VideoCodec* codecSettings, int32_t numberOfCores)
+      override;
+
+  int32_t Decode(
+      const EncodedImage& inputImage, bool missingFrames,
+      const RTPFragmentationHeader* fragmentation,
+      const CodecSpecificInfo* codecSpecificInfo = NULL,
+      int64_t renderTimeMs = -1) override;
+
+  int32_t RegisterDecodeCompleteCallback(DecodedImageCallback* callback)
+      override;
+
+  int32_t Release() override;
+
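+  // Returning true indicates that this decoder prefers frames to be decoded
+  // as close to render time as possible.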
+  bool PrefersLateDecoding() const override { return true; }
+
+  // rtc::MessageHandler implementation.
+  void OnMessage(rtc::Message* msg) override;
+
+  const char* ImplementationName() const override;
+
+ private:
+  // CHECK-fail if not running on |codec_thread_|.
+  void CheckOnCodecThread();
+
+  int32_t InitDecodeOnCodecThread();
+  int32_t ReleaseOnCodecThread();
+  int32_t DecodeOnCodecThread(const EncodedImage& inputImage);
+  // Deliver any outputs pending in the MediaCodec to our |callback_| and
+  // return true on success.
+  bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_ms);
+  int32_t ProcessHWErrorOnCodecThread();
+  void EnableFrameLogOnWarning();
+
+  // Type of video codec.
+  VideoCodecType codecType_;
+
+  // Render EGL context - owned by factory, should not be allocated/destroyed
+  // by VideoDecoder.
+  jobject render_egl_context_;
+
+  bool key_frame_required_;
+  bool inited_;
+  bool sw_fallback_required_;
+  bool use_surface_;
+  VideoCodec codec_;
+  webrtc::I420BufferPool decoded_frame_pool_;
+  rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
+  DecodedImageCallback* callback_;
+  int frames_received_;  // Number of frames received by decoder.
+  int frames_decoded_;  // Number of frames decoded by decoder.
+  // Number of decoded frames for which log information is displayed.
+  int frames_decoded_logged_;
+  int64_t start_time_ms_;  // Start time for statistics.
+  int current_frames_;  // Number of frames in the current statistics interval.
+  int current_bytes_;  // Encoded bytes in the current statistics interval.
+  // Overall decoding time in the current statistics interval.
+  int current_decoding_time_ms_;
+  // Overall delay time in the current statistics interval.
+  int current_delay_time_ms_;
+  uint32_t max_pending_frames_;  // Maximum number of pending input frames.
+
+  // State that is constant for the lifetime of this object once the ctor
+  // returns.
+  scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec.
+  ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_;
+  ScopedGlobalRef<jobject> j_media_codec_video_decoder_;
+  jmethodID j_init_decode_method_;
+  jmethodID j_release_method_;
+  jmethodID j_dequeue_input_buffer_method_;
+  jmethodID j_queue_input_buffer_method_;
+  jmethodID j_dequeue_byte_buffer_method_;
+  jmethodID j_dequeue_texture_buffer_method_;
+  jmethodID j_return_decoded_byte_buffer_method_;
+  // MediaCodecVideoDecoder fields.
+  jfieldID j_input_buffers_field_;
+  jfieldID j_output_buffers_field_;
+  jfieldID j_color_format_field_;
+  jfieldID j_width_field_;
+  jfieldID j_height_field_;
+  jfieldID j_stride_field_;
+  jfieldID j_slice_height_field_;
+  // MediaCodecVideoDecoder.DecodedTextureBuffer fields.
+  jfieldID j_texture_id_field_;
+  jfieldID j_transform_matrix_field_;
+  jfieldID j_texture_presentation_timestamp_ms_field_;
+  jfieldID j_texture_timestamp_ms_field_;
+  jfieldID j_texture_ntp_timestamp_ms_field_;
+  jfieldID j_texture_decode_time_ms_field_;
+  jfieldID j_texture_frame_delay_ms_field_;
+  // MediaCodecVideoDecoder.DecodedOutputBuffer fields.
+  jfieldID j_info_index_field_;
+  jfieldID j_info_offset_field_;
+  jfieldID j_info_size_field_;
+  jfieldID j_presentation_timestamp_ms_field_;
+  jfieldID j_timestamp_ms_field_;
+  jfieldID j_ntp_timestamp_ms_field_;
+  jfieldID j_byte_buffer_decode_time_ms_field_;
+
+  // Global references; must be deleted in Release().
+  std::vector<jobject> input_buffers_;
+};
+
+MediaCodecVideoDecoder::MediaCodecVideoDecoder(
+    JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) :
+    codecType_(codecType),
+    render_egl_context_(render_egl_context),
+    key_frame_required_(true),
+    inited_(false),
+    sw_fallback_required_(false),
+    codec_thread_(new Thread()),
+    j_media_codec_video_decoder_class_(
+        jni,
+        FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")),
+    j_media_codec_video_decoder_(
+        jni,
+        jni->NewObject(*j_media_codec_video_decoder_class_,
+                       GetMethodID(jni,
+                                   *j_media_codec_video_decoder_class_,
+                                   "<init>",
+                                   "()V"))) {
+  ScopedLocalRefFrame local_ref_frame(jni);
+  codec_thread_->SetName("MediaCodecVideoDecoder", NULL);
+  RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder";
+
+  j_init_decode_method_ = GetMethodID(
+      jni, *j_media_codec_video_decoder_class_, "initDecode",
+      "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;"
+      "IILorg/webrtc/SurfaceTextureHelper;)Z");
+  j_release_method_ =
+      GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
+  j_dequeue_input_buffer_method_ = GetMethodID(
+      jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I");
+  j_queue_input_buffer_method_ = GetMethodID(
+      jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJJJ)Z");
+  j_dequeue_byte_buffer_method_ = GetMethodID(
+      jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
+      "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer;");
+  j_dequeue_texture_buffer_method_ = GetMethodID(
+      jni, *j_media_codec_video_decoder_class_, "dequeueTextureBuffer",
+      "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer;");
+  j_return_decoded_byte_buffer_method_ =
+      GetMethodID(jni, *j_media_codec_video_decoder_class_,
+                  "returnDecodedOutputBuffer", "(I)V");
+
+  j_input_buffers_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_,
+      "inputBuffers", "[Ljava/nio/ByteBuffer;");
+  j_output_buffers_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_,
+      "outputBuffers", "[Ljava/nio/ByteBuffer;");
+  j_color_format_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_, "colorFormat", "I");
+  j_width_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_, "width", "I");
+  j_height_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_, "height", "I");
+  j_stride_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_, "stride", "I");
+  j_slice_height_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
+
+  jclass j_decoded_texture_buffer_class = FindClass(jni,
+      "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
+  j_texture_id_field_ = GetFieldID(
+      jni, j_decoded_texture_buffer_class, "textureID", "I");
+  j_transform_matrix_field_ = GetFieldID(
+      jni, j_decoded_texture_buffer_class, "transformMatrix", "[F");
+  j_texture_presentation_timestamp_ms_field_ = GetFieldID(
+      jni, j_decoded_texture_buffer_class, "presentationTimeStampMs", "J");
+  j_texture_timestamp_ms_field_ = GetFieldID(
+      jni, j_decoded_texture_buffer_class, "timeStampMs", "J");
+  j_texture_ntp_timestamp_ms_field_ = GetFieldID(
+      jni, j_decoded_texture_buffer_class, "ntpTimeStampMs", "J");
+  j_texture_decode_time_ms_field_ = GetFieldID(
+      jni, j_decoded_texture_buffer_class, "decodeTimeMs", "J");
+  j_texture_frame_delay_ms_field_ = GetFieldID(
+      jni, j_decoded_texture_buffer_class, "frameDelayMs", "J");
+
+  jclass j_decoded_output_buffer_class = FindClass(jni,
+      "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer");
+  j_info_index_field_ = GetFieldID(
+      jni, j_decoded_output_buffer_class, "index", "I");
+  j_info_offset_field_ = GetFieldID(
+      jni, j_decoded_output_buffer_class, "offset", "I");
+  j_info_size_field_ = GetFieldID(
+      jni, j_decoded_output_buffer_class, "size", "I");
+  j_presentation_timestamp_ms_field_ = GetFieldID(
+      jni, j_decoded_output_buffer_class, "presentationTimeStampMs", "J");
+  j_timestamp_ms_field_ = GetFieldID(
+      jni, j_decoded_output_buffer_class, "timeStampMs", "J");
+  j_ntp_timestamp_ms_field_ = GetFieldID(
+      jni, j_decoded_output_buffer_class, "ntpTimeStampMs", "J");
+  j_byte_buffer_decode_time_ms_field_ = GetFieldID(
+      jni, j_decoded_output_buffer_class, "decodeTimeMs", "J");
+
+  CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
+  use_surface_ = (render_egl_context_ != NULL);
+  ALOGD << "MediaCodecVideoDecoder ctor. Use surface: " << use_surface_;
+  memset(&codec_, 0, sizeof(codec_));
+  AllowBlockingCalls();
+}
+
+MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
+  // Call Release() to ensure no more callbacks to us after we are deleted.
+  Release();
+}
+
+int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
+    int32_t numberOfCores) {
+  ALOGD << "InitDecode.";
+  if (inst == NULL) {
+    ALOGE << "NULL VideoCodec instance";
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  // Factory should guard against other codecs being used with us.
+  RTC_CHECK(inst->codecType == codecType_)
+      << "Unsupported codec " << inst->codecType << " for " << codecType_;
+
+  if (sw_fallback_required_) {
+    ALOGE << "InitDecode() - fallback to SW decoder";
+    return WEBRTC_VIDEO_CODEC_OK;
+  }
+  // Save VideoCodec instance for later.
+  if (&codec_ != inst) {
+    codec_ = *inst;
+  }
+  // If maxFramerate is not set then assume 30 fps.
+  codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 30;
+
+  // Call Java init.
+  return codec_thread_->Invoke<int32_t>(
+      Bind(&MediaCodecVideoDecoder::InitDecodeOnCodecThread, this));
+}
+
+int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
+  CheckOnCodecThread();
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+  ALOGD << "InitDecodeOnCodecThread Type: " << (int)codecType_ << ". "
+      << codec_.width << " x " << codec_.height << ". Fps: " <<
+      (int)codec_.maxFramerate;
+
+  // Release previous codec first if it was allocated before.
+  int ret_val = ReleaseOnCodecThread();
+  if (ret_val < 0) {
+    ALOGE << "Release failure: " << ret_val << " - fallback to SW codec";
+    sw_fallback_required_ = true;
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  // Always start with a complete key frame.
+  key_frame_required_ = true;
+  frames_received_ = 0;
+  frames_decoded_ = 0;
+  frames_decoded_logged_ = kMaxDecodedLogFrames;
+
+  jobject java_surface_texture_helper = nullptr;
+  if (use_surface_) {
+    java_surface_texture_helper = jni->CallStaticObjectMethod(
+        FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
+        GetStaticMethodID(jni,
+                          FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
+                          "create",
+                          "(Lorg/webrtc/EglBase$Context;)"
+                          "Lorg/webrtc/SurfaceTextureHelper;"),
+        render_egl_context_);
+    RTC_CHECK(java_surface_texture_helper != nullptr);
+    surface_texture_helper_ = new rtc::RefCountedObject<SurfaceTextureHelper>(
+        jni, java_surface_texture_helper);
+  }
+
+  jobject j_video_codec_enum = JavaEnumFromIndexAndClassName(
+      jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_);
+  bool success = jni->CallBooleanMethod(
+      *j_media_codec_video_decoder_,
+      j_init_decode_method_,
+      j_video_codec_enum,
+      codec_.width,
+      codec_.height,
+      java_surface_texture_helper);
+  if (CheckException(jni) || !success) {
+    ALOGE << "Codec initialization error - fallback to SW codec.";
+    sw_fallback_required_ = true;
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  inited_ = true;
+
+  switch (codecType_) {
+    case kVideoCodecVP8:
+      max_pending_frames_ = kMaxPendingFramesVp8;
+      break;
+    case kVideoCodecVP9:
+      max_pending_frames_ = kMaxPendingFramesVp9;
+      break;
+    case kVideoCodecH264:
+      max_pending_frames_ = kMaxPendingFramesH264;
+      break;
+    default:
+      max_pending_frames_ = 0;
+  }
+  start_time_ms_ = GetCurrentTimeMs();
+  current_frames_ = 0;
+  current_bytes_ = 0;
+  current_decoding_time_ms_ = 0;
+  current_delay_time_ms_ = 0;
+
+  jobjectArray input_buffers = (jobjectArray)GetObjectField(
+      jni, *j_media_codec_video_decoder_, j_input_buffers_field_);
+  size_t num_input_buffers = jni->GetArrayLength(input_buffers);
+  ALOGD << "Maximum amount of pending frames: " << max_pending_frames_;
+  input_buffers_.resize(num_input_buffers);
+  for (size_t i = 0; i < num_input_buffers; ++i) {
+    input_buffers_[i] =
+        jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
+    if (CheckException(jni)) {
+      ALOGE << "NewGlobalRef error - fallback to SW codec.";
+      sw_fallback_required_ = true;
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+  }
+
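+  // Kick off the output polling loop: OnMessage() drains pending outputs and
+  // re-posts itself every kMediaCodecPollMs while the decoder stays inited.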
+  codec_thread_->PostDelayed(kMediaCodecPollMs, this);
+
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoDecoder::Release() {
+  ALOGD << "DecoderRelease request";
+  return codec_thread_->Invoke<int32_t>(
+        Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this));
+}
+
+int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
+  if (!inited_) {
+    return WEBRTC_VIDEO_CODEC_OK;
+  }
+  CheckOnCodecThread();
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ALOGD << "DecoderReleaseOnCodecThread: Frames received: " <<
+      frames_received_ << ". Frames decoded: " << frames_decoded_;
+  ScopedLocalRefFrame local_ref_frame(jni);
+  for (size_t i = 0; i < input_buffers_.size(); i++) {
+    jni->DeleteGlobalRef(input_buffers_[i]);
+  }
+  input_buffers_.clear();
+  jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
+  surface_texture_helper_ = nullptr;
+  inited_ = false;
+  rtc::MessageQueueManager::Clear(this);
+  if (CheckException(jni)) {
+    ALOGE << "Decoder release exception";
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  ALOGD << "DecoderReleaseOnCodecThread done";
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+void MediaCodecVideoDecoder::CheckOnCodecThread() {
+  RTC_CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
+      << "Running on wrong thread!";
+}
+
+void MediaCodecVideoDecoder::EnableFrameLogOnWarning() {
+  // Log next 2 output frames.
+  frames_decoded_logged_ = std::max(
+      frames_decoded_logged_, frames_decoded_ + kMaxWarningLogFrames);
+}
+
+int32_t MediaCodecVideoDecoder::ProcessHWErrorOnCodecThread() {
+  CheckOnCodecThread();
+  int ret_val = ReleaseOnCodecThread();
+  if (ret_val < 0) {
+    ALOGE << "ProcessHWError: Release failure";
+  }
+  if (codecType_ == kVideoCodecH264) {
+    // There is currently no SW H.264 decoder to fall back to, so try to
+    // restart the HW codec instead.
+    ret_val = InitDecodeOnCodecThread();
+    ALOGE << "Reset H.264 codec done. Status: " << ret_val;
+    if (ret_val == WEBRTC_VIDEO_CODEC_OK) {
+      // H.264 codec was successfully reset - return regular error code.
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    } else {
+      // Failed to restart the H.264 codec - return an error code that should
+      // stop the call.
+      return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+    }
+  } else {
+    sw_fallback_required_ = true;
+    ALOGE << "Return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE";
+    return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+  }
+}
+
+int32_t MediaCodecVideoDecoder::Decode(
+    const EncodedImage& inputImage,
+    bool missingFrames,
+    const RTPFragmentationHeader* fragmentation,
+    const CodecSpecificInfo* codecSpecificInfo,
+    int64_t renderTimeMs) {
+  if (sw_fallback_required_) {
+    ALOGE << "Decode() - fallback to SW codec";
+    return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+  }
+  if (callback_ == NULL) {
+    ALOGE << "Decode() - callback_ is NULL";
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  if (inputImage._buffer == NULL && inputImage._length > 0) {
+    ALOGE << "Decode() - inputImage is incorrect";
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  if (!inited_) {
+    ALOGE << "Decode() - decoder is not initialized";
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+
+  // Check if encoded frame dimension has changed.
+  if ((inputImage._encodedWidth * inputImage._encodedHeight > 0) &&
+      (inputImage._encodedWidth != codec_.width ||
+      inputImage._encodedHeight != codec_.height)) {
+    codec_.width = inputImage._encodedWidth;
+    codec_.height = inputImage._encodedHeight;
+    int32_t ret = InitDecode(&codec_, 1);
+    if (ret < 0) {
+      ALOGE << "InitDecode failure: " << ret << " - fallback to SW codec";
+      sw_fallback_required_ = true;
+      return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+    }
+  }
+
+  // Always start with a complete key frame.
+  if (key_frame_required_) {
+    if (inputImage._frameType != webrtc::kVideoFrameKey) {
+      ALOGE << "Decode() - key frame is required";
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+    if (!inputImage._completeFrame) {
+      ALOGE << "Decode() - complete frame is required";
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+    key_frame_required_ = false;
+  }
+  if (inputImage._length == 0) {
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  return codec_thread_->Invoke<int32_t>(Bind(
+      &MediaCodecVideoDecoder::DecodeOnCodecThread, this, inputImage));
+}
+
+int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
+    const EncodedImage& inputImage) {
+  CheckOnCodecThread();
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+
+  // Try to drain the decoder and wait until output is not too
+  // much behind the input.
+  if (frames_received_ > frames_decoded_ + max_pending_frames_) {
+    ALOGW << "Decoder is too far behind. Try to drain. Received: " <<
+        frames_received_ << ". Decoded: " << frames_decoded_;
+    EnableFrameLogOnWarning();
+  }
+  const int64_t drain_start = GetCurrentTimeMs();
+  while ((frames_received_ > frames_decoded_ + max_pending_frames_) &&
+         (GetCurrentTimeMs() - drain_start) < kMediaCodecTimeoutMs) {
+    if (!DeliverPendingOutputs(jni, kMediaCodecPollMs)) {
+      ALOGE << "DeliverPendingOutputs error. Frames received: " <<
+          frames_received_ << ". Frames decoded: " << frames_decoded_;
+      return ProcessHWErrorOnCodecThread();
+    }
+  }
+  if (frames_received_ > frames_decoded_ + max_pending_frames_) {
+    ALOGE << "Output buffer dequeue timeout. Frames received: " <<
+        frames_received_ << ". Frames decoded: " << frames_decoded_;
+    return ProcessHWErrorOnCodecThread();
+  }
+
+  // Get input buffer.
+  int j_input_buffer_index = jni->CallIntMethod(
+      *j_media_codec_video_decoder_, j_dequeue_input_buffer_method_);
+  if (CheckException(jni) || j_input_buffer_index < 0) {
+    ALOGE << "dequeueInputBuffer error: " << j_input_buffer_index <<
+        ". Retry DeliverPendingOutputs.";
+    EnableFrameLogOnWarning();
+    // Try to drain the decoder.
+    if (!DeliverPendingOutputs(jni, kMediaCodecPollMs)) {
+      ALOGE << "DeliverPendingOutputs error. Frames received: " <<
+          frames_received_ << ". Frames decoded: " << frames_decoded_;
+      return ProcessHWErrorOnCodecThread();
+    }
+    // Try to dequeue the input buffer one last time.
+    j_input_buffer_index = jni->CallIntMethod(
+        *j_media_codec_video_decoder_, j_dequeue_input_buffer_method_);
+    if (CheckException(jni) || j_input_buffer_index < 0) {
+      ALOGE << "dequeueInputBuffer critical error: " << j_input_buffer_index;
+      return ProcessHWErrorOnCodecThread();
+    }
+  }
+
+  // Copy encoded data to Java ByteBuffer.
+  jobject j_input_buffer = input_buffers_[j_input_buffer_index];
+  uint8_t* buffer =
+      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
+  RTC_CHECK(buffer) << "Indirect buffer??";
+  int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer);
+  if (CheckException(jni) || buffer_capacity < inputImage._length) {
+    ALOGE << "Input frame size "<<  inputImage._length <<
+        " is bigger than buffer size " << buffer_capacity;
+    return ProcessHWErrorOnCodecThread();
+  }
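+  // Synthesize an increasing presentation timestamp from the frame count,
+  // assuming frames arrive at the codec's maximum frame rate.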
+  jlong presentation_timestamp_us = static_cast<jlong>(
+      static_cast<int64_t>(frames_received_) * 1000000 / codec_.maxFramerate);
+  memcpy(buffer, inputImage._buffer, inputImage._length);
+
+  if (frames_decoded_ < frames_decoded_logged_) {
+    ALOGD << "Decoder frame in # " << frames_received_ <<
+        ". Type: " << inputImage._frameType <<
+        ". Buffer # " << j_input_buffer_index <<
+        ". TS: " << presentation_timestamp_us / 1000 <<
+        ". Size: " << inputImage._length;
+  }
+
+  // Save input image timestamps for later output.
+  frames_received_++;
+  current_bytes_ += inputImage._length;
+
+  // Feed input to decoder.
+  bool success = jni->CallBooleanMethod(
+      *j_media_codec_video_decoder_,
+      j_queue_input_buffer_method_,
+      j_input_buffer_index,
+      inputImage._length,
+      presentation_timestamp_us,
+      static_cast<int64_t> (inputImage._timeStamp),
+      inputImage.ntp_time_ms_);
+  if (CheckException(jni) || !success) {
+    ALOGE << "queueInputBuffer error";
+    return ProcessHWErrorOnCodecThread();
+  }
+
+  // Try to drain the decoder.
+  if (!DeliverPendingOutputs(jni, 0)) {
+    ALOGE << "DeliverPendingOutputs error";
+    return ProcessHWErrorOnCodecThread();
+  }
+
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+bool MediaCodecVideoDecoder::DeliverPendingOutputs(
+    JNIEnv* jni, int dequeue_timeout_ms) {
+  if (frames_received_ <= frames_decoded_) {
+    // No need to query for output buffers - decoder is drained.
+    return true;
+  }
+  // Get decoder output.
+  jobject j_decoder_output_buffer =
+      jni->CallObjectMethod(*j_media_codec_video_decoder_,
+          use_surface_ ? j_dequeue_texture_buffer_method_
+                       : j_dequeue_byte_buffer_method_,
+          dequeue_timeout_ms);
+
+  if (CheckException(jni)) {
+    ALOGE << "dequeueOutputBuffer() error";
+    return false;
+  }
+  if (IsNull(jni, j_decoder_output_buffer)) {
+    // No decoded frame ready.
+    return true;
+  }
+
+  // Get decoded video frame properties.
+  int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
+      j_color_format_field_);
+  int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_);
+  int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_);
+  int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_);
+  int slice_height = GetIntField(jni, *j_media_codec_video_decoder_,
+      j_slice_height_field_);
+
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer;
+  int64_t presentation_timestamps_ms = 0;
+  int64_t output_timestamps_ms = 0;
+  int64_t output_ntp_timestamps_ms = 0;
+  int decode_time_ms = 0;
+  int64_t frame_delayed_ms = 0;
+  if (use_surface_) {
+    // Extract data from Java DecodedTextureBuffer.
+    presentation_timestamps_ms = GetLongField(
+        jni, j_decoder_output_buffer,
+        j_texture_presentation_timestamp_ms_field_);
+    output_timestamps_ms = GetLongField(
+        jni, j_decoder_output_buffer, j_texture_timestamp_ms_field_);
+    output_ntp_timestamps_ms = GetLongField(
+        jni, j_decoder_output_buffer, j_texture_ntp_timestamp_ms_field_);
+    decode_time_ms = GetLongField(
+        jni, j_decoder_output_buffer, j_texture_decode_time_ms_field_);
+
+    const int texture_id =
+        GetIntField(jni, j_decoder_output_buffer, j_texture_id_field_);
+    if (texture_id != 0) {  // |texture_id| == 0 represents a dropped frame.
+      const jfloatArray j_transform_matrix =
+          reinterpret_cast<jfloatArray>(GetObjectField(
+              jni, j_decoder_output_buffer, j_transform_matrix_field_));
+      frame_delayed_ms = GetLongField(
+          jni, j_decoder_output_buffer, j_texture_frame_delay_ms_field_);
+
+      // Create webrtc::VideoFrameBuffer with native texture handle.
+      frame_buffer = surface_texture_helper_->CreateTextureFrame(
+          width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix));
+    } else {
+      EnableFrameLogOnWarning();
+    }
+  } else {
+    // Extract data from Java ByteBuffer and create output yuv420 frame -
+    // for non surface decoding only.
+    const int output_buffer_index = GetIntField(
+        jni, j_decoder_output_buffer, j_info_index_field_);
+    const int output_buffer_offset = GetIntField(
+        jni, j_decoder_output_buffer, j_info_offset_field_);
+    const int output_buffer_size = GetIntField(
+        jni, j_decoder_output_buffer, j_info_size_field_);
+    presentation_timestamps_ms = GetLongField(
+        jni, j_decoder_output_buffer, j_presentation_timestamp_ms_field_);
+    output_timestamps_ms = GetLongField(
+        jni, j_decoder_output_buffer, j_timestamp_ms_field_);
+    output_ntp_timestamps_ms = GetLongField(
+        jni, j_decoder_output_buffer, j_ntp_timestamp_ms_field_);
+
+    decode_time_ms = GetLongField(jni, j_decoder_output_buffer,
+                                  j_byte_buffer_decode_time_ms_field_);
+
+    if (output_buffer_size < width * height * 3 / 2) {
+      ALOGE << "Insufficient output buffer size: " << output_buffer_size;
+      return false;
+    }
+    jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
+        jni, *j_media_codec_video_decoder_, j_output_buffers_field_));
+    jobject output_buffer =
+        jni->GetObjectArrayElement(output_buffers, output_buffer_index);
+    uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(
+        output_buffer));
+    if (CheckException(jni)) {
+      return false;
+    }
+    payload += output_buffer_offset;
+
+    // Create yuv420 frame.
+    frame_buffer = decoded_frame_pool_.CreateBuffer(width, height);
+    if (color_format == COLOR_FormatYUV420Planar) {
+      RTC_CHECK_EQ(0, stride % 2);
+      RTC_CHECK_EQ(0, slice_height % 2);
+      const int uv_stride = stride / 2;
+      const int u_slice_height = slice_height / 2;
+      const uint8_t* y_ptr = payload;
+      const uint8_t* u_ptr = y_ptr + stride * slice_height;
+      const uint8_t* v_ptr = u_ptr + uv_stride * u_slice_height;
+      libyuv::I420Copy(y_ptr, stride,
+                       u_ptr, uv_stride,
+                       v_ptr, uv_stride,
+                       frame_buffer->MutableData(webrtc::kYPlane),
+                       frame_buffer->stride(webrtc::kYPlane),
+                       frame_buffer->MutableData(webrtc::kUPlane),
+                       frame_buffer->stride(webrtc::kUPlane),
+                       frame_buffer->MutableData(webrtc::kVPlane),
+                       frame_buffer->stride(webrtc::kVPlane),
+                       width, height);
+    } else {
+      // All other supported formats are nv12.
+      const uint8_t* y_ptr = payload;
+      const uint8_t* uv_ptr = y_ptr + stride * slice_height;
+      libyuv::NV12ToI420(
+          y_ptr, stride,
+          uv_ptr, stride,
+          frame_buffer->MutableData(webrtc::kYPlane),
+          frame_buffer->stride(webrtc::kYPlane),
+          frame_buffer->MutableData(webrtc::kUPlane),
+          frame_buffer->stride(webrtc::kUPlane),
+          frame_buffer->MutableData(webrtc::kVPlane),
+          frame_buffer->stride(webrtc::kVPlane),
+          width, height);
+    }
+    // Return output byte buffer back to codec.
+    jni->CallVoidMethod(
+        *j_media_codec_video_decoder_,
+        j_return_decoded_byte_buffer_method_,
+        output_buffer_index);
+    if (CheckException(jni)) {
+      ALOGE << "returnDecodedOutputBuffer error";
+      return false;
+    }
+  }
+  VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0);
+  decoded_frame.set_timestamp(output_timestamps_ms);
+  decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms);
+
+  if (frames_decoded_ < frames_decoded_logged_) {
+    ALOGD << "Decoder frame out # " << frames_decoded_ <<
+        ". " << width << " x " << height <<
+        ". " << stride << " x " <<  slice_height <<
+        ". Color: " << color_format <<
+        ". TS: " << presentation_timestamps_ms <<
+        ". DecTime: " << (int)decode_time_ms <<
+        ". DelayTime: " << (int)frame_delayed_ms;
+  }
+
+  // Calculate and print decoding statistics - every 3 seconds.
+  frames_decoded_++;
+  current_frames_++;
+  current_decoding_time_ms_ += decode_time_ms;
+  current_delay_time_ms_ += frame_delayed_ms;
+  int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
+  if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
+      current_frames_ > 0) {
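+    // current_bytes_ * 8 / statistic_time_ms is bits per millisecond, i.e.
+    // kbps. Adding statistic_time_ms / 2 before the division rounds the fps
+    // estimate to the nearest integer instead of truncating.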
+    int current_bitrate = current_bytes_ * 8 / statistic_time_ms;
+    int current_fps =
+        (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms;
+    ALOGD << "Frames decoded: " << frames_decoded_ <<
+        ". Received: " <<  frames_received_ <<
+        ". Bitrate: " << current_bitrate << " kbps" <<
+        ". Fps: " << current_fps <<
+        ". DecTime: " << (current_decoding_time_ms_ / current_frames_) <<
+        ". DelayTime: " << (current_delay_time_ms_ / current_frames_) <<
+        " for last " << statistic_time_ms << " ms.";
+    start_time_ms_ = GetCurrentTimeMs();
+    current_frames_ = 0;
+    current_bytes_ = 0;
+    current_decoding_time_ms_ = 0;
+    current_delay_time_ms_ = 0;
+  }
+
+  // |IsZeroSize()| returns true when a frame has been dropped.
+  if (!decoded_frame.IsZeroSize()) {
+    // Callback - output decoded frame.
+    const int32_t callback_status =
+        callback_->Decoded(decoded_frame, decode_time_ms);
+    if (callback_status > 0) {
+      ALOGE << "callback error";
+    }
+  }
+  return true;
+}
+
+int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback(
+    DecodedImageCallback* callback) {
+  callback_ = callback;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
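+// OnMessage() implements the decoder's polling loop: it drains pending
+// outputs and re-posts itself every kMediaCodecPollMs so that output
+// delivery continues even when no new Decode() calls arrive; the loop ends
+// once |inited_| is false.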
+void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) {
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+  if (!inited_) {
+    return;
+  }
+  // We only ever send one message to |this| directly (not through a Bind()'d
+  // functor), so expect no ID/data.
+  RTC_CHECK(!msg->message_id) << "Unexpected message!";
+  RTC_CHECK(!msg->pdata) << "Unexpected message!";
+  CheckOnCodecThread();
+
+  if (!DeliverPendingOutputs(jni, 0)) {
+    ALOGE << "OnMessage: DeliverPendingOutputs error";
+    ProcessHWErrorOnCodecThread();
+    return;
+  }
+  codec_thread_->PostDelayed(kMediaCodecPollMs, this);
+}
+
+MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() {
+  ALOGD << "MediaCodecVideoDecoderFactory ctor";
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+  jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder");
+  supported_codec_types_.clear();
+
+  bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
+      j_decoder_class,
+      GetStaticMethodID(jni, j_decoder_class, "isVp8HwSupported", "()Z"));
+  if (CheckException(jni)) {
+    is_vp8_hw_supported = false;
+  }
+  if (is_vp8_hw_supported) {
+    ALOGD << "VP8 HW Decoder supported.";
+    supported_codec_types_.push_back(kVideoCodecVP8);
+  }
+
+  bool is_vp9_hw_supported = jni->CallStaticBooleanMethod(
+      j_decoder_class,
+      GetStaticMethodID(jni, j_decoder_class, "isVp9HwSupported", "()Z"));
+  if (CheckException(jni)) {
+    is_vp9_hw_supported = false;
+  }
+  if (is_vp9_hw_supported) {
+    ALOGD << "VP9 HW Decoder supported.";
+    supported_codec_types_.push_back(kVideoCodecVP9);
+  }
+
+  bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
+      j_decoder_class,
+      GetStaticMethodID(jni, j_decoder_class, "isH264HwSupported", "()Z"));
+  if (CheckException(jni)) {
+    is_h264_hw_supported = false;
+  }
+  if (is_h264_hw_supported) {
+    ALOGD << "H264 HW Decoder supported.";
+    supported_codec_types_.push_back(kVideoCodecH264);
+  }
+}
+
+MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() {
+  ALOGD << "MediaCodecVideoDecoderFactory dtor";
+}
+
+void MediaCodecVideoDecoderFactory::SetEGLContext(
+    JNIEnv* jni, jobject render_egl_context) {
+  ALOGD << "MediaCodecVideoDecoderFactory::SetEGLContext";
+  if (!egl_.CreateEglBase(jni, render_egl_context)) {
+    ALOGW << "Invalid EGL context - HW surface decoding is disabled.";
+  }
+}
+
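+// Typical factory usage (sketch): call SetEGLContext() first if HW surface
+// decoding is wanted, then CreateVideoDecoder() per negotiated codec type;
+// it returns nullptr for unsupported types, and every created decoder must
+// be released through DestroyVideoDecoder().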
+webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder(
+    VideoCodecType type) {
+  if (supported_codec_types_.empty()) {
+    ALOGW << "No HW video decoder for type " << (int)type;
+    return nullptr;
+  }
+  for (VideoCodecType codec_type : supported_codec_types_) {
+    if (codec_type == type) {
+      ALOGD << "Create HW video decoder for type " << (int)type;
+      return new MediaCodecVideoDecoder(AttachCurrentThreadIfNeeded(), type,
+                                        egl_.egl_base_context());
+    }
+  }
+  ALOGW << "Can not find HW video decoder for type " << (int)type;
+  return nullptr;
+}
+
+void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(
+    webrtc::VideoDecoder* decoder) {
+  ALOGD << "Destroy video decoder.";
+  delete decoder;
+}
+
+const char* MediaCodecVideoDecoder::ImplementationName() const {
+  return "MediaCodec";
+}
+
+}  // namespace webrtc_jni
+
diff --git a/webrtc/api/java/jni/androidmediadecoder_jni.h b/webrtc/api/java/jni/androidmediadecoder_jni.h
new file mode 100644
index 0000000..c79490e
--- /dev/null
+++ b/webrtc/api/java/jni/androidmediadecoder_jni.h
@@ -0,0 +1,59 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_ANDROIDMEDIADECODER_JNI_H_
+#define WEBRTC_API_JAVA_JNI_ANDROIDMEDIADECODER_JNI_H_
+
+#include "webrtc/api/java/jni/eglbase_jni.h"
+#include "webrtc/media/webrtc/webrtcvideodecoderfactory.h"
+
+namespace webrtc_jni {
+
+// Implementation of Android MediaCodec based decoder factory.
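+// HW codec support (VP8, VP9, H.264) is probed once in the constructor via
+// org.webrtc.MediaCodecVideoDecoder's isVp8HwSupported(),
+// isVp9HwSupported() and isH264HwSupported() helpers; CreateVideoDecoder()
+// returns nullptr for any type not found to be supported.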
+class MediaCodecVideoDecoderFactory
+    : public cricket::WebRtcVideoDecoderFactory {
+ public:
+  MediaCodecVideoDecoderFactory();
+  virtual ~MediaCodecVideoDecoderFactory();
+
+  void SetEGLContext(JNIEnv* jni, jobject render_egl_context);
+
+  // WebRtcVideoDecoderFactory implementation.
+  webrtc::VideoDecoder* CreateVideoDecoder(webrtc::VideoCodecType type)
+      override;
+
+  void DestroyVideoDecoder(webrtc::VideoDecoder* decoder) override;
+
+ private:
+  EglBase egl_;
+  std::vector<webrtc::VideoCodecType> supported_codec_types_;
+};
+
+}  // namespace webrtc_jni
+
+#endif  // WEBRTC_API_JAVA_JNI_ANDROIDMEDIADECODER_JNI_H_
diff --git a/webrtc/api/java/jni/androidmediaencoder_jni.cc b/webrtc/api/java/jni/androidmediaencoder_jni.cc
new file mode 100644
index 0000000..a06b026
--- /dev/null
+++ b/webrtc/api/java/jni/androidmediaencoder_jni.cc
@@ -0,0 +1,1265 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// NOTICE: androidmediaencoder_jni.h must be included before
+// androidmediacodeccommon.h to avoid build errors.
+#include "webrtc/api/java/jni/androidmediaencoder_jni.h"
+
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/convert_from.h"
+#include "third_party/libyuv/include/libyuv/video_common.h"
+#include "webrtc/api/java/jni/androidmediacodeccommon.h"
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/api/java/jni/native_handle_impl.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/thread_checker.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
+#include "webrtc/modules/video_coding/utility/quality_scaler.h"
+#include "webrtc/modules/video_coding/utility/vp8_header_parser.h"
+#include "webrtc/system_wrappers/include/field_trial.h"
+#include "webrtc/system_wrappers/include/logcat_trace_context.h"
+
+using rtc::Bind;
+using rtc::Thread;
+using rtc::ThreadManager;
+using rtc::scoped_ptr;
+
+using webrtc::CodecSpecificInfo;
+using webrtc::EncodedImage;
+using webrtc::VideoFrame;
+using webrtc::RTPFragmentationHeader;
+using webrtc::VideoCodec;
+using webrtc::VideoCodecType;
+using webrtc::kVideoCodecH264;
+using webrtc::kVideoCodecVP8;
+using webrtc::kVideoCodecVP9;
+
+namespace webrtc_jni {
+
+// H.264 start code length.
+#define H264_SC_LENGTH 4
+// Maximum allowed NALUs in one output frame.
+#define MAX_NALUS_PERFRAME 32
+// Maximum supported HW video encoder resolution.
+#define MAX_VIDEO_WIDTH 1280
+#define MAX_VIDEO_HEIGHT 1280
+// Maximum supported HW video encoder fps.
+#define MAX_VIDEO_FPS 30
+// Maximum allowed fps value in SetRates() call.
+#define MAX_ALLOWED_VIDEO_FPS 60
+// Maximum allowed frames in encoder input queue.
+#define MAX_ENCODER_Q_SIZE 2
+// Maximum allowed latency in ms.
+#define MAX_ENCODER_LATENCY_MS 70
+// Maximum number of dropped frames caused by a full encoder queue - exceeding
+// this threshold means the encoder probably got stuck and needs to be reset.
+#define ENCODER_STALL_FRAMEDROP_THRESHOLD 60
+
+// Logging macros.
+#define TAG_ENCODER "MediaCodecVideoEncoder"
+#ifdef TRACK_BUFFER_TIMING
+#define ALOGV(...) \
+  __android_log_print(ANDROID_LOG_VERBOSE, TAG_ENCODER, __VA_ARGS__)
+#else
+#define ALOGV(...)
+#endif
+#define ALOGD LOG_TAG(rtc::LS_INFO, TAG_ENCODER)
+#define ALOGW LOG_TAG(rtc::LS_WARNING, TAG_ENCODER)
+#define ALOGE LOG_TAG(rtc::LS_ERROR, TAG_ENCODER)
+
+namespace {
+// Maximum time limit between incoming frames before requesting a key frame.
+const int64_t kFrameDiffThresholdMs = 1100;
+const int kMinKeyFrameInterval = 2;
+}  // namespace
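+
+// Worked example for the key frame heuristic in EncodeOnCodecThread(): at
+// 30 fps frames arrive ~33 ms apart, well under kFrameDiffThresholdMs, so no
+// extra key frames are requested. A gap longer than 1.1 seconds requests one,
+// but only if more than kMinKeyFrameInterval frames were received since the
+// last such request, so a steady very-low-fps stream does not request a key
+// frame for every frame.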
+
+// MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses
+// Android's MediaCodec SDK API behind the scenes to implement (hopefully)
+// HW-backed video encode.  This C++ class is implemented as a very thin shim,
+// delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder.
+// MediaCodecVideoEncoder is created, operated, and destroyed on a single
+// thread, currently the libjingle Worker thread.
+class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
+                               public rtc::MessageHandler {
+ public:
+  virtual ~MediaCodecVideoEncoder();
+  MediaCodecVideoEncoder(JNIEnv* jni,
+                         VideoCodecType codecType,
+                         jobject egl_context);
+
+  // webrtc::VideoEncoder implementation.  Everything trampolines to
+  // |codec_thread_| for execution.
+  int32_t InitEncode(const webrtc::VideoCodec* codec_settings,
+                     int32_t /* number_of_cores */,
+                     size_t /* max_payload_size */) override;
+  int32_t Encode(const webrtc::VideoFrame& input_image,
+                 const webrtc::CodecSpecificInfo* /* codec_specific_info */,
+                 const std::vector<webrtc::FrameType>* frame_types) override;
+  int32_t RegisterEncodeCompleteCallback(
+      webrtc::EncodedImageCallback* callback) override;
+  int32_t Release() override;
+  int32_t SetChannelParameters(uint32_t /* packet_loss */,
+                               int64_t /* rtt */) override;
+  int32_t SetRates(uint32_t new_bit_rate, uint32_t frame_rate) override;
+
+  // rtc::MessageHandler implementation.
+  void OnMessage(rtc::Message* msg) override;
+
+  void OnDroppedFrame() override;
+
+  int GetTargetFramerate() override;
+
+  bool SupportsNativeHandle() const override { return egl_context_ != nullptr; }
+  const char* ImplementationName() const override;
+
+ private:
+  // CHECK-fail if not running on |codec_thread_|.
+  void CheckOnCodecThread();
+
+  // ResetCodecOnCodecThread() calls ReleaseOnCodecThread() and
+  // InitEncodeOnCodecThread() in an attempt to restore the codec to an
+  // operable state.  Necessary after all manner of OMX-layer errors.
+  bool ResetCodecOnCodecThread();
+
+  // Implementation of webrtc::VideoEncoder methods above, all running on the
+  // codec thread exclusively.
+  //
+  // If width==0 then this is assumed to be a re-initialization and the
+  // previously-current values are reused instead of the passed parameters
+  // (makes it easier to reason about thread-safety).
+  int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps,
+      bool use_surface);
+  // Reconfigures the encoder to match |frame| in width and height. Also
+  // reconfigures it if |frame|'s input type (texture vs. byte buffer) differs
+  // from what the encoder is currently initialized for. Returns false if
+  // reconfiguring fails.
+  bool MaybeReconfigureEncoderOnCodecThread(const webrtc::VideoFrame& frame);
+  int32_t EncodeOnCodecThread(
+      const webrtc::VideoFrame& input_image,
+      const std::vector<webrtc::FrameType>* frame_types);
+  bool EncodeByteBufferOnCodecThread(JNIEnv* jni,
+      bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index);
+  bool EncodeTextureOnCodecThread(JNIEnv* jni,
+      bool key_frame, const webrtc::VideoFrame& frame);
+
+  int32_t RegisterEncodeCompleteCallbackOnCodecThread(
+      webrtc::EncodedImageCallback* callback);
+  int32_t ReleaseOnCodecThread();
+  int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate);
+
+  // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members.
+  int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info);
+  jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info);
+  bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info);
+  jlong GetOutputBufferInfoPresentationTimestampUs(
+      JNIEnv* jni, jobject j_output_buffer_info);
+
+  // Deliver any outputs pending in the MediaCodec to our |callback_| and return
+  // true on success.
+  bool DeliverPendingOutputs(JNIEnv* jni);
+
+  // Search for H.264 start codes.
+  int32_t NextNaluPosition(uint8_t *buffer, size_t buffer_size);
+
+  // Displays encoder statistics.
+  void LogStatistics(bool force_log);
+
+  // Type of video codec.
+  VideoCodecType codecType_;
+
+  // Valid at all times, since RegisterEncodeCompleteCallback() Invoke()s
+  // synchronously on |codec_thread_|.
+  webrtc::EncodedImageCallback* callback_;
+
+  // State that is constant for the lifetime of this object once the ctor
+  // returns.
+  scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec.
+  rtc::ThreadChecker codec_thread_checker_;
+  ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_;
+  ScopedGlobalRef<jobject> j_media_codec_video_encoder_;
+  jmethodID j_init_encode_method_;
+  jmethodID j_get_input_buffers_method_;
+  jmethodID j_dequeue_input_buffer_method_;
+  jmethodID j_encode_buffer_method_;
+  jmethodID j_encode_texture_method_;
+  jmethodID j_release_method_;
+  jmethodID j_set_rates_method_;
+  jmethodID j_dequeue_output_buffer_method_;
+  jmethodID j_release_output_buffer_method_;
+  jfieldID j_color_format_field_;
+  jfieldID j_info_index_field_;
+  jfieldID j_info_buffer_field_;
+  jfieldID j_info_is_key_frame_field_;
+  jfieldID j_info_presentation_timestamp_us_field_;
+
+  // State that is valid only between InitEncode() and the next Release().
+  // Touched only on codec_thread_ so no explicit synchronization necessary.
+  int width_;   // Frame width in pixels.
+  int height_;  // Frame height in pixels.
+  bool inited_;
+  bool use_surface_;
+  uint16_t picture_id_;
+  enum libyuv::FourCC encoder_fourcc_;  // Encoder color space format.
+  int last_set_bitrate_kbps_;  // Last-requested bitrate in kbps.
+  int last_set_fps_;  // Last-requested frame rate.
+  int64_t current_timestamp_us_;  // Current frame timestamps in us.
+  int frames_received_;  // Number of frames received by encoder.
+  int frames_encoded_;  // Number of frames encoded by encoder.
+  int frames_dropped_media_encoder_;  // Number of frames dropped by encoder.
+  // Number of dropped frames caused by full queue.
+  int consecutive_full_queue_frame_drops_;
+  int frames_in_queue_;  // Number of frames in encoder queue.
+  int64_t stat_start_time_ms_;  // Start time for statistics.
+  int current_frames_;  // Number of frames in the current statistics interval.
+  int current_bytes_;  // Encoded bytes in the current statistics interval.
+  int current_acc_qp_; // Accumulated QP in the current statistics interval.
+  // Overall encoding time in the current statistics interval.
+  int current_encoding_time_ms_;
+  int64_t last_input_timestamp_ms_;  // Timestamp of last received yuv frame.
+  int64_t last_output_timestamp_ms_;  // Timestamp of last encoded frame.
+  std::vector<int32_t> timestamps_;  // Video frames timestamp queue.
+  std::vector<int64_t> render_times_ms_;  // Video frames render time queue.
+  std::vector<int64_t> frame_rtc_times_ms_;  // Time when video frame is sent to
+                                             // encoder input.
+  int32_t output_timestamp_;  // Last output frame timestamp from timestamps_ Q.
+  int64_t output_render_time_ms_; // Last output frame render time from
+                                  // render_times_ms_ queue.
+  // Frame size in bytes fed to MediaCodec.
+  int yuv_size_;
+  // Set when a callback_->Encoded() call returns a positive value; the next
+  // Encode() call then drops its input frame and clears this flag.
+  bool drop_next_input_frame_;
+  // Global references; must be deleted in Release().
+  std::vector<jobject> input_buffers_;
+  webrtc::QualityScaler quality_scaler_;
+  // Dynamic resolution change, off by default.
+  bool scale_;
+
+  // H264 bitstream parser, used to extract QP from encoded bitstreams.
+  webrtc::H264BitstreamParser h264_bitstream_parser_;
+
+  // VP9 variables to populate codec specific structure.
+  webrtc::GofInfoVP9 gof_; // Contains each frame's temporal information for
+                           // non-flexible VP9 mode.
+  uint8_t tl0_pic_idx_;
+  size_t gof_idx_;
+
+  // EGL context - owned by factory, should not be allocated/destroyed
+  // by MediaCodecVideoEncoder.
+  jobject egl_context_;
+
+  // Temporary fix for VP8.
+  // Sends a key frame if frames are widely spaced apart (possibly
+  // corresponding to a large image change).
+  int64_t last_frame_received_ms_;
+  int frames_received_since_last_key_;
+  webrtc::VideoCodecMode codec_mode_;
+};
+
+MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
+  // Call Release() to ensure no more callbacks to us after we are deleted.
+  Release();
+}
+
+MediaCodecVideoEncoder::MediaCodecVideoEncoder(
+    JNIEnv* jni, VideoCodecType codecType, jobject egl_context) :
+    codecType_(codecType),
+    callback_(NULL),
+    codec_thread_(new Thread()),
+    j_media_codec_video_encoder_class_(
+        jni,
+        FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")),
+    j_media_codec_video_encoder_(
+        jni,
+        jni->NewObject(*j_media_codec_video_encoder_class_,
+                       GetMethodID(jni,
+                                   *j_media_codec_video_encoder_class_,
+                                   "<init>",
+                                   "()V"))),
+    inited_(false),
+    use_surface_(false),
+    picture_id_(0),
+    egl_context_(egl_context) {
+  ScopedLocalRefFrame local_ref_frame(jni);
+  // It would be nice to avoid spinning up a new thread per MediaCodec, and
+  // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug
+  // 2732 means that deadlocks abound.  This class synchronously trampolines
+  // to |codec_thread_|, so if anything else can be coming to _us_ from
+  // |codec_thread_|, or from any thread holding the |_sendCritSect| described
+  // in the bug, we have a problem.  For now work around that with a dedicated
+  // thread.
+  codec_thread_->SetName("MediaCodecVideoEncoder", NULL);
+  RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder";
+  codec_thread_checker_.DetachFromThread();
+  jclass j_output_buffer_info_class =
+      FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
+  j_init_encode_method_ = GetMethodID(
+      jni,
+      *j_media_codec_video_encoder_class_,
+      "initEncode",
+      "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;"
+      "IIIILorg/webrtc/EglBase14$Context;)Z");
+  j_get_input_buffers_method_ = GetMethodID(
+      jni,
+      *j_media_codec_video_encoder_class_,
+      "getInputBuffers",
+      "()[Ljava/nio/ByteBuffer;");
+  j_dequeue_input_buffer_method_ = GetMethodID(
+      jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I");
+  j_encode_buffer_method_ = GetMethodID(
+      jni, *j_media_codec_video_encoder_class_, "encodeBuffer", "(ZIIJ)Z");
+  j_encode_texture_method_ = GetMethodID(
+        jni, *j_media_codec_video_encoder_class_, "encodeTexture",
+        "(ZI[FJ)Z");
+  j_release_method_ =
+      GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V");
+  j_set_rates_method_ = GetMethodID(
+      jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z");
+  j_dequeue_output_buffer_method_ = GetMethodID(
+      jni,
+      *j_media_codec_video_encoder_class_,
+      "dequeueOutputBuffer",
+      "()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;");
+  j_release_output_buffer_method_ = GetMethodID(
+      jni, *j_media_codec_video_encoder_class_, "releaseOutputBuffer", "(I)Z");
+
+  j_color_format_field_ =
+      GetFieldID(jni, *j_media_codec_video_encoder_class_, "colorFormat", "I");
+  j_info_index_field_ =
+      GetFieldID(jni, j_output_buffer_info_class, "index", "I");
+  j_info_buffer_field_ = GetFieldID(
+      jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;");
+  j_info_is_key_frame_field_ =
+      GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z");
+  j_info_presentation_timestamp_us_field_ = GetFieldID(
+      jni, j_output_buffer_info_class, "presentationTimestampUs", "J");
+  CHECK_EXCEPTION(jni) << "MediaCodecVideoEncoder ctor failed";
+  srand(time(NULL));
+  AllowBlockingCalls();
+}
+
+int32_t MediaCodecVideoEncoder::InitEncode(
+    const webrtc::VideoCodec* codec_settings,
+    int32_t /* number_of_cores */,
+    size_t /* max_payload_size */) {
+  const int kMinWidth = 320;
+  const int kMinHeight = 180;
+  const int kLowQpThresholdDenominator = 3;
+  if (codec_settings == NULL) {
+    ALOGE << "NULL VideoCodec instance";
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  // Factory should guard against other codecs being used with us.
+  RTC_CHECK(codec_settings->codecType == codecType_)
+      << "Unsupported codec " << codec_settings->codecType << " for "
+      << codecType_;
+
+  ALOGD << "InitEncode request";
+  codec_mode_ = codec_settings->mode;
+  scale_ = (codecType_ != kVideoCodecVP9) && (webrtc::field_trial::FindFullName(
+        "WebRTC-MediaCodecVideoEncoder-AutomaticResize") == "Enabled");
+  ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled");
+  if (scale_) {
+    if (codecType_ == kVideoCodecVP8) {
+      // QP is obtained from the VP8 bitstream for HW, so the QP corresponds
+      // to the internal range [0, 127]. QP_max cannot be changed in HW, so it
+      // is always 127. Note that in SW, QP uses the user-level range [0, 63].
+      const int kMaxQp = 127;
+      const int kBadQpThreshold = 95;
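+      // With kLowQpThresholdDenominator = 3 the low-QP threshold passed to
+      // the scaler is 127 / 3 = 42; sustained QP above kBadQpThreshold
+      // suggests downscaling, sustained QP below the low threshold allows
+      // scaling back up (the exact policy lives in webrtc::QualityScaler).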
+      quality_scaler_.Init(
+          kMaxQp / kLowQpThresholdDenominator, kBadQpThreshold, false);
+    } else if (codecType_ == kVideoCodecH264) {
+      // H264 QP is in the range [0, 51].
+      const int kMaxQp = 51;
+      const int kBadQpThreshold = 40;
+      quality_scaler_.Init(
+          kMaxQp / kLowQpThresholdDenominator, kBadQpThreshold, false);
+    } else {
+      // When adding codec support to additional hardware codecs, also configure
+      // their QP thresholds for scaling.
+      RTC_NOTREACHED() << "Unsupported codec without configured QP thresholds.";
+    }
+    quality_scaler_.SetMinResolution(kMinWidth, kMinHeight);
+    quality_scaler_.ReportFramerate(codec_settings->maxFramerate);
+  }
+  return codec_thread_->Invoke<int32_t>(
+      Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread,
+           this,
+           codec_settings->width,
+           codec_settings->height,
+           codec_settings->startBitrate,
+           codec_settings->maxFramerate,
+           false /* use_surface */));
+}
+
+int32_t MediaCodecVideoEncoder::Encode(
+    const webrtc::VideoFrame& frame,
+    const webrtc::CodecSpecificInfo* /* codec_specific_info */,
+    const std::vector<webrtc::FrameType>* frame_types) {
+  return codec_thread_->Invoke<int32_t>(Bind(
+      &MediaCodecVideoEncoder::EncodeOnCodecThread, this, frame, frame_types));
+}
+
+int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback(
+    webrtc::EncodedImageCallback* callback) {
+  return codec_thread_->Invoke<int32_t>(
+      Bind(&MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread,
+           this,
+           callback));
+}
+
+int32_t MediaCodecVideoEncoder::Release() {
+  ALOGD << "EncoderRelease request";
+  return codec_thread_->Invoke<int32_t>(
+      Bind(&MediaCodecVideoEncoder::ReleaseOnCodecThread, this));
+}
+
+int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */,
+                                                     int64_t /* rtt */) {
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoEncoder::SetRates(uint32_t new_bit_rate,
+                                         uint32_t frame_rate) {
+  return codec_thread_->Invoke<int32_t>(
+      Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread,
+           this,
+           new_bit_rate,
+           frame_rate));
+}
+
+void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+
+  // We only ever send one message to |this| directly (not through a Bind()'d
+  // functor), so expect no ID/data.
+  RTC_CHECK(!msg->message_id) << "Unexpected message!";
+  RTC_CHECK(!msg->pdata) << "Unexpected message!";
+  if (!inited_) {
+    return;
+  }
+
+  // It would be nice to recover from a failure here if one happened, but it's
+  // unclear how to signal such a failure to the app, so instead we stay silent
+  // about it and let the next app-called API method reveal the borkedness.
+  DeliverPendingOutputs(jni);
+  codec_thread_->PostDelayed(kMediaCodecPollMs, this);
+}
+
+bool MediaCodecVideoEncoder::ResetCodecOnCodecThread() {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  ALOGE << "ResetOnCodecThread";
+  if (ReleaseOnCodecThread() != WEBRTC_VIDEO_CODEC_OK ||
+      InitEncodeOnCodecThread(width_, height_, 0, 0, false) !=
+          WEBRTC_VIDEO_CODEC_OK) {
+    // TODO(fischman): wouldn't it be nice if there was a way to gracefully
+    // degrade to a SW encoder at this point?  There isn't one AFAICT :(
+    // https://code.google.com/p/webrtc/issues/detail?id=2920
+    return false;
+  }
+  return true;
+}
+
+int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
+    int width, int height, int kbps, int fps, bool use_surface) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  RTC_CHECK(!use_surface || egl_context_ != nullptr) << "EGL context not set.";
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+
+  ALOGD << "InitEncodeOnCodecThread Type: " <<  (int)codecType_ << ", " <<
+      width << " x " << height << ". Bitrate: " << kbps <<
+      " kbps. Fps: " << fps;
+  if (kbps == 0) {
+    kbps = last_set_bitrate_kbps_;
+  }
+  if (fps == 0) {
+    fps = MAX_VIDEO_FPS;
+  }
+
+  width_ = width;
+  height_ = height;
+  last_set_bitrate_kbps_ = kbps;
+  last_set_fps_ = (fps < MAX_VIDEO_FPS) ? fps : MAX_VIDEO_FPS;
+  yuv_size_ = width_ * height_ * 3 / 2;
+  frames_received_ = 0;
+  frames_encoded_ = 0;
+  frames_dropped_media_encoder_ = 0;
+  consecutive_full_queue_frame_drops_ = 0;
+  frames_in_queue_ = 0;
+  current_timestamp_us_ = 0;
+  stat_start_time_ms_ = GetCurrentTimeMs();
+  current_frames_ = 0;
+  current_bytes_ = 0;
+  current_acc_qp_ = 0;
+  current_encoding_time_ms_ = 0;
+  last_input_timestamp_ms_ = -1;
+  last_output_timestamp_ms_ = -1;
+  output_timestamp_ = 0;
+  output_render_time_ms_ = 0;
+  timestamps_.clear();
+  render_times_ms_.clear();
+  frame_rtc_times_ms_.clear();
+  drop_next_input_frame_ = false;
+  use_surface_ = use_surface;
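+  // VP8/VP9 RTP payloads carry a 15-bit picture ID, hence the 0x7FFF mask
+  // here and when incrementing in DeliverPendingOutputs(); starting from a
+  // random value avoids predictable IDs across encoder restarts.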
+  picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;
+  gof_.SetGofInfoVP9(webrtc::TemporalStructureMode::kTemporalStructureMode1);
+  tl0_pic_idx_ = static_cast<uint8_t>(rand());
+  gof_idx_ = 0;
+  last_frame_received_ms_ = -1;
+  frames_received_since_last_key_ = kMinKeyFrameInterval;
+
+  // We enforce no extra stride/padding in the format creation step.
+  jobject j_video_codec_enum = JavaEnumFromIndexAndClassName(
+      jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_);
+  const bool encode_status = jni->CallBooleanMethod(
+      *j_media_codec_video_encoder_, j_init_encode_method_,
+      j_video_codec_enum, width, height, kbps, fps,
+      (use_surface ? egl_context_ : nullptr));
+  if (!encode_status) {
+    ALOGE << "Failed to configure encoder.";
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  CHECK_EXCEPTION(jni);
+
+  if (!use_surface) {
+    jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
+        jni->CallObjectMethod(*j_media_codec_video_encoder_,
+            j_get_input_buffers_method_));
+    CHECK_EXCEPTION(jni);
+    if (IsNull(jni, input_buffers)) {
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+
+    switch (GetIntField(jni, *j_media_codec_video_encoder_,
+        j_color_format_field_)) {
+      case COLOR_FormatYUV420Planar:
+        encoder_fourcc_ = libyuv::FOURCC_YU12;
+        break;
+      case COLOR_FormatYUV420SemiPlanar:
+      case COLOR_QCOM_FormatYUV420SemiPlanar:
+      case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
+        encoder_fourcc_ = libyuv::FOURCC_NV12;
+        break;
+      default:
+        LOG(LS_ERROR) << "Wrong color format.";
+        return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+    size_t num_input_buffers = jni->GetArrayLength(input_buffers);
+    RTC_CHECK(input_buffers_.empty())
+        << "Unexpected double InitEncode without Release";
+    input_buffers_.resize(num_input_buffers);
+    for (size_t i = 0; i < num_input_buffers; ++i) {
+      input_buffers_[i] =
+          jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
+      int64_t yuv_buffer_capacity =
+          jni->GetDirectBufferCapacity(input_buffers_[i]);
+      CHECK_EXCEPTION(jni);
+      RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity";
+    }
+  }
+
+  inited_ = true;
+  codec_thread_->PostDelayed(kMediaCodecPollMs, this);
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
+    const webrtc::VideoFrame& frame,
+    const std::vector<webrtc::FrameType>* frame_types) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+
+  if (!inited_) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+
+  bool send_key_frame = false;
+  if (codec_mode_ == webrtc::kRealtimeVideo) {
+    ++frames_received_since_last_key_;
+    int64_t now_ms = GetCurrentTimeMs();
+    if (last_frame_received_ms_ != -1 &&
+        (now_ms - last_frame_received_ms_) > kFrameDiffThresholdMs) {
+      // Limit how often this triggers so that very low framerates (where
+      // every frame diff exceeds kFrameDiffThresholdMs) do not request a key
+      // frame for every frame.
+      if (frames_received_since_last_key_ > kMinKeyFrameInterval) {
+        ALOGD << "Send key, frame diff: " << (now_ms - last_frame_received_ms_);
+        send_key_frame = true;
+      }
+      frames_received_since_last_key_ = 0;
+    }
+    last_frame_received_ms_ = now_ms;
+  }
+
+  frames_received_++;
+  if (!DeliverPendingOutputs(jni)) {
+    if (!ResetCodecOnCodecThread())
+      return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  if (frames_encoded_ < kMaxEncodedLogFrames) {
+    ALOGD << "Encoder frame in # " << (frames_received_ - 1) <<
+        ". TS: " << (int)(current_timestamp_us_ / 1000) <<
+        ". Q: " << frames_in_queue_ <<
+        ". Fps: " << last_set_fps_ <<
+        ". Kbps: " << last_set_bitrate_kbps_;
+  }
+
+  if (drop_next_input_frame_) {
+    ALOGW << "Encoder drop frame - failed callback.";
+    drop_next_input_frame_ = false;
+    current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
+    frames_dropped_media_encoder_++;
+    OnDroppedFrame();
+    return WEBRTC_VIDEO_CODEC_OK;
+  }
+
+  RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count";
+
+  // Drop the frame if too many frames have accumulated in the encoder input
+  // buffers or if encoder latency exceeds MAX_ENCODER_LATENCY_MS.
+  if (frames_in_queue_ > 0 && last_input_timestamp_ms_ >= 0) {
+    int encoder_latency_ms = last_input_timestamp_ms_ -
+        last_output_timestamp_ms_;
+    if (frames_in_queue_ > MAX_ENCODER_Q_SIZE ||
+        encoder_latency_ms > MAX_ENCODER_LATENCY_MS) {
+      ALOGD << "Drop frame - encoder is behind by " << encoder_latency_ms <<
+          " ms. Q size: " << frames_in_queue_ << ". TS: " <<
+          (int)(current_timestamp_us_ / 1000) <<  ". Fps: " << last_set_fps_ <<
+          ". Consecutive drops: " << consecutive_full_queue_frame_drops_ ;
+      current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
+      consecutive_full_queue_frame_drops_++;
+      if (consecutive_full_queue_frame_drops_ >=
+          ENCODER_STALL_FRAMEDROP_THRESHOLD) {
+        ALOGE << "Encoder got stuck. Reset.";
+        ResetCodecOnCodecThread();
+        return WEBRTC_VIDEO_CODEC_ERROR;
+      }
+      frames_dropped_media_encoder_++;
+      OnDroppedFrame();
+      return WEBRTC_VIDEO_CODEC_OK;
+    }
+  }
+  consecutive_full_queue_frame_drops_ = 0;
+
+  VideoFrame input_frame = frame;
+  if (scale_) {
+    // Check framerate before spatial resolution change.
+    quality_scaler_.OnEncodeFrame(frame);
+    const webrtc::QualityScaler::Resolution scaled_resolution =
+        quality_scaler_.GetScaledResolution();
+    if (scaled_resolution.width != frame.width() ||
+        scaled_resolution.height != frame.height()) {
+      if (frame.native_handle() != nullptr) {
+        rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer(
+            static_cast<AndroidTextureBuffer*>(
+                frame.video_frame_buffer().get())->ScaleAndRotate(
+                    scaled_resolution.width,
+                    scaled_resolution.height,
+                    webrtc::kVideoRotation_0));
+        input_frame.set_video_frame_buffer(scaled_buffer);
+      } else {
+        input_frame = quality_scaler_.GetScaledFrame(frame);
+      }
+    }
+  }
+
+  if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) {
+    ALOGE << "Failed to reconfigure encoder.";
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  // Save time when input frame is sent to the encoder input.
+  frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
+
+  const bool key_frame =
+      frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame;
+  bool encode_status = true;
+  if (!input_frame.native_handle()) {
+    int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
+        j_dequeue_input_buffer_method_);
+    CHECK_EXCEPTION(jni);
+    if (j_input_buffer_index == -1) {
+      // Video codec falls behind - no input buffer available.
+      ALOGW << "Encoder drop frame - no input buffers available";
+      frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
+      current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
+      frames_dropped_media_encoder_++;
+      OnDroppedFrame();
+      return WEBRTC_VIDEO_CODEC_OK;  // TODO(fischman): see webrtc bug 2887.
+    }
+    if (j_input_buffer_index == -2) {
+      ResetCodecOnCodecThread();
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+    encode_status = EncodeByteBufferOnCodecThread(jni, key_frame, input_frame,
+        j_input_buffer_index);
+  } else {
+    encode_status = EncodeTextureOnCodecThread(jni, key_frame, input_frame);
+  }
+
+  if (!encode_status) {
+    ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp();
+    ResetCodecOnCodecThread();
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  last_input_timestamp_ms_ =
+      current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec;
+  frames_in_queue_++;
+
+  // Save input image timestamps for later output.
+  timestamps_.push_back(input_frame.timestamp());
+  render_times_ms_.push_back(input_frame.render_time_ms());
+  current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
+
+  if (!DeliverPendingOutputs(jni)) {
+    ALOGE << "Failed deliver pending outputs.";
+    ResetCodecOnCodecThread();
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread(
+    const webrtc::VideoFrame& frame) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+
+  const bool is_texture_frame = frame.native_handle() != nullptr;
+  const bool reconfigure_due_to_format = is_texture_frame != use_surface_;
+  const bool reconfigure_due_to_size =
+      frame.width() != width_ || frame.height() != height_;
+
+  if (reconfigure_due_to_format) {
+      ALOGD << "Reconfigure encoder due to format change. "
+            << (use_surface_ ?
+                "Reconfiguring to encode from byte buffer." :
+                "Reconfiguring to encode from texture.");
+      LogStatistics(true);
+  }
+  if (reconfigure_due_to_size) {
+    ALOGW << "Reconfigure encoder due to frame resolution change from "
+        << width_ << " x " << height_ << " to " << frame.width() << " x "
+        << frame.height();
+    LogStatistics(true);
+    width_ = frame.width();
+    height_ = frame.height();
+  }
+
+  if (!reconfigure_due_to_format && !reconfigure_due_to_size)
+    return true;
+
+  ReleaseOnCodecThread();
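+  // MediaCodec cannot be reconfigured in place, so tear the codec down and
+  // re-create it; a kbps of 0 makes InitEncodeOnCodecThread() reuse the
+  // last-set bitrate, and an fps of 0 falls back to MAX_VIDEO_FPS.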
+
+  return InitEncodeOnCodecThread(width_, height_, 0, 0, is_texture_frame) ==
+      WEBRTC_VIDEO_CODEC_OK;
+}
+
+bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni,
+    bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  RTC_CHECK(!use_surface_);
+
+  jobject j_input_buffer = input_buffers_[input_buffer_index];
+  uint8_t* yuv_buffer =
+      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
+  CHECK_EXCEPTION(jni);
+  RTC_CHECK(yuv_buffer) << "Indirect buffer??";
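+  // Convert the I420 input frame into the color format the codec advertised
+  // (FOURCC_YU12 or FOURCC_NV12, selected in InitEncodeOnCodecThread()),
+  // writing directly into the MediaCodec input ByteBuffer.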
+  RTC_CHECK(!libyuv::ConvertFromI420(
+      frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane),
+      frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane),
+      frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane),
+      yuv_buffer, width_, width_, height_, encoder_fourcc_))
+      << "ConvertFromI420 failed";
+
+  bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
+                                              j_encode_buffer_method_,
+                                              key_frame,
+                                              input_buffer_index,
+                                              yuv_size_,
+                                              current_timestamp_us_);
+  CHECK_EXCEPTION(jni);
+  return encode_status;
+}
+
+bool MediaCodecVideoEncoder::EncodeTextureOnCodecThread(JNIEnv* jni,
+    bool key_frame, const webrtc::VideoFrame& frame) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  RTC_CHECK(use_surface_);
+  NativeHandleImpl* handle =
+      static_cast<NativeHandleImpl*>(frame.native_handle());
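+  // A texture frame is described by an OES texture id plus a 4x4 transform
+  // matrix; copy the matrix into a Java float[16] for the encodeTexture()
+  // call.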
+  jfloatArray sampling_matrix = jni->NewFloatArray(16);
+  jni->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix);
+
+  bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
+                                              j_encode_texture_method_,
+                                              key_frame,
+                                              handle->oes_texture_id,
+                                              sampling_matrix,
+                                              current_timestamp_us_);
+  CHECK_EXCEPTION(jni);
+  return encode_status;
+}
+
+int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
+    webrtc::EncodedImageCallback* callback) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+  callback_ = callback;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  if (!inited_) {
+    return WEBRTC_VIDEO_CODEC_OK;
+  }
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ALOGD << "EncoderReleaseOnCodecThread: Frames received: " <<
+      frames_received_ << ". Encoded: " << frames_encoded_ <<
+      ". Dropped: " << frames_dropped_media_encoder_;
+  ScopedLocalRefFrame local_ref_frame(jni);
+  for (size_t i = 0; i < input_buffers_.size(); ++i)
+    jni->DeleteGlobalRef(input_buffers_[i]);
+  input_buffers_.clear();
+  jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_);
+  CHECK_EXCEPTION(jni);
+  rtc::MessageQueueManager::Clear(this);
+  inited_ = false;
+  use_surface_ = false;
+  ALOGD << "EncoderReleaseOnCodecThread done.";
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
+                                                      uint32_t frame_rate) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ?
+      frame_rate : MAX_ALLOWED_VIDEO_FPS;
+  if (last_set_bitrate_kbps_ == new_bit_rate &&
+      last_set_fps_ == frame_rate) {
+    return WEBRTC_VIDEO_CODEC_OK;
+  }
+  if (scale_) {
+    quality_scaler_.ReportFramerate(frame_rate);
+  }
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+  if (new_bit_rate > 0) {
+    last_set_bitrate_kbps_ = new_bit_rate;
+  }
+  if (frame_rate > 0) {
+    last_set_fps_ = frame_rate;
+  }
+  bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
+                                    j_set_rates_method_,
+                                    last_set_bitrate_kbps_,
+                                    last_set_fps_);
+  CHECK_EXCEPTION(jni);
+  if (!ret) {
+    ResetCodecOnCodecThread();
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int MediaCodecVideoEncoder::GetOutputBufferInfoIndex(
+    JNIEnv* jni,
+    jobject j_output_buffer_info) {
+  return GetIntField(jni, j_output_buffer_info, j_info_index_field_);
+}
+
+jobject MediaCodecVideoEncoder::GetOutputBufferInfoBuffer(
+    JNIEnv* jni,
+    jobject j_output_buffer_info) {
+  return GetObjectField(jni, j_output_buffer_info, j_info_buffer_field_);
+}
+
+bool MediaCodecVideoEncoder::GetOutputBufferInfoIsKeyFrame(
+    JNIEnv* jni,
+    jobject j_output_buffer_info) {
+  return GetBooleanField(jni, j_output_buffer_info, j_info_is_key_frame_field_);
+}
+
+jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs(
+    JNIEnv* jni,
+    jobject j_output_buffer_info) {
+  return GetLongField(
+      jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_);
+}
+
+bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  while (true) {
+    jobject j_output_buffer_info = jni->CallObjectMethod(
+        *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_);
+    CHECK_EXCEPTION(jni);
+    if (IsNull(jni, j_output_buffer_info)) {
+      break;
+    }
+
+    int output_buffer_index =
+        GetOutputBufferInfoIndex(jni, j_output_buffer_info);
+    if (output_buffer_index == -1) {
+      ResetCodecOnCodecThread();
+      return false;
+    }
+
+    // Get the output buffer and the key frame flag.
+    jobject j_output_buffer =
+        GetOutputBufferInfoBuffer(jni, j_output_buffer_info);
+    bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info);
+
+    // Get frame timestamps from the queue - for non-config frames only.
+    int64_t frame_encoding_time_ms = 0;
+    last_output_timestamp_ms_ =
+        GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) /
+        1000;
+    if (frames_in_queue_ > 0) {
+      output_timestamp_ = timestamps_.front();
+      timestamps_.erase(timestamps_.begin());
+      output_render_time_ms_ = render_times_ms_.front();
+      render_times_ms_.erase(render_times_ms_.begin());
+      frame_encoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front();
+      frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
+      frames_in_queue_--;
+    }
+
+    // Extract payload.
+    size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer);
+    uint8_t* payload = reinterpret_cast<uint8_t*>(
+        jni->GetDirectBufferAddress(j_output_buffer));
+    CHECK_EXCEPTION(jni);
+
+    if (frames_encoded_ < kMaxEncodedLogFrames) {
+      int current_latency =
+          (int)(last_input_timestamp_ms_ - last_output_timestamp_ms_);
+      ALOGD << "Encoder frame out # " << frames_encoded_ <<
+          ". Key: " << key_frame <<
+          ". Size: " << payload_size <<
+          ". TS: " << (int)last_output_timestamp_ms_ <<
+          ". Latency: " << current_latency <<
+          ". EncTime: " << frame_encoding_time_ms;
+    }
+
+    // Callback - return encoded frame.
+    int32_t callback_status = 0;
+    if (callback_) {
+      scoped_ptr<webrtc::EncodedImage> image(
+          new webrtc::EncodedImage(payload, payload_size, payload_size));
+      image->_encodedWidth = width_;
+      image->_encodedHeight = height_;
+      image->_timeStamp = output_timestamp_;
+      image->capture_time_ms_ = output_render_time_ms_;
+      image->_frameType =
+          (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
+      image->_completeFrame = true;
+      image->adapt_reason_.quality_resolution_downscales =
+          scale_ ? quality_scaler_.downscale_shift() : -1;
+
+      webrtc::CodecSpecificInfo info;
+      memset(&info, 0, sizeof(info));
+      info.codecType = codecType_;
+      if (codecType_ == kVideoCodecVP8) {
+        info.codecSpecific.VP8.pictureId = picture_id_;
+        info.codecSpecific.VP8.nonReference = false;
+        info.codecSpecific.VP8.simulcastIdx = 0;
+        info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx;
+        info.codecSpecific.VP8.layerSync = false;
+        info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx;
+        info.codecSpecific.VP8.keyIdx = webrtc::kNoKeyIdx;
+      } else if (codecType_ == kVideoCodecVP9) {
+        if (key_frame) {
+          gof_idx_ = 0;
+        }
+        info.codecSpecific.VP9.picture_id = picture_id_;
+        info.codecSpecific.VP9.inter_pic_predicted = !key_frame;
+        info.codecSpecific.VP9.flexible_mode = false;
+        info.codecSpecific.VP9.ss_data_available = key_frame;
+        info.codecSpecific.VP9.tl0_pic_idx = tl0_pic_idx_++;
+        info.codecSpecific.VP9.temporal_idx = webrtc::kNoTemporalIdx;
+        info.codecSpecific.VP9.spatial_idx = webrtc::kNoSpatialIdx;
+        info.codecSpecific.VP9.temporal_up_switch = true;
+        info.codecSpecific.VP9.inter_layer_predicted = false;
+        info.codecSpecific.VP9.gof_idx =
+            static_cast<uint8_t>(gof_idx_++ % gof_.num_frames_in_gof);
+        info.codecSpecific.VP9.num_spatial_layers = 1;
+        info.codecSpecific.VP9.spatial_layer_resolution_present = false;
+        if (info.codecSpecific.VP9.ss_data_available) {
+          info.codecSpecific.VP9.spatial_layer_resolution_present = true;
+          info.codecSpecific.VP9.width[0] = width_;
+          info.codecSpecific.VP9.height[0] = height_;
+          info.codecSpecific.VP9.gof.CopyGofInfoVP9(gof_);
+        }
+      }
+      picture_id_ = (picture_id_ + 1) & 0x7FFF;
+
+      // Generate a header describing a single fragment.
+      webrtc::RTPFragmentationHeader header;
+      memset(&header, 0, sizeof(header));
+      if (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecVP9) {
+        header.VerifyAndAllocateFragmentationHeader(1);
+        header.fragmentationOffset[0] = 0;
+        header.fragmentationLength[0] = image->_length;
+        header.fragmentationPlType[0] = 0;
+        header.fragmentationTimeDiff[0] = 0;
+        if (codecType_ == kVideoCodecVP8 && scale_) {
+          int qp;
+          if (webrtc::vp8::GetQp(payload, payload_size, &qp)) {
+            current_acc_qp_ += qp;
+            quality_scaler_.ReportQP(qp);
+          }
+        }
+      } else if (codecType_ == kVideoCodecH264) {
+        if (scale_) {
+          h264_bitstream_parser_.ParseBitstream(payload, payload_size);
+          int qp;
+          if (h264_bitstream_parser_.GetLastSliceQp(&qp)) {
+            current_acc_qp_ += qp;
+            quality_scaler_.ReportQP(qp);
+          }
+        }
+        // For H.264, search for start codes.
+        int32_t scPositions[MAX_NALUS_PERFRAME + 1] = {};
+        int32_t scPositionsLength = 0;
+        int32_t scPosition = 0;
+        while (scPositionsLength < MAX_NALUS_PERFRAME) {
+          int32_t naluPosition = NextNaluPosition(
+              payload + scPosition, payload_size - scPosition);
+          if (naluPosition < 0) {
+            break;
+          }
+          scPosition += naluPosition;
+          scPositions[scPositionsLength++] = scPosition;
+          scPosition += H264_SC_LENGTH;
+        }
+        if (scPositionsLength == 0) {
+          ALOGE << "Start code is not found!";
+          ALOGE << "Data:" <<  image->_buffer[0] << " " << image->_buffer[1]
+              << " " << image->_buffer[2] << " " << image->_buffer[3]
+              << " " << image->_buffer[4] << " " << image->_buffer[5];
+          ResetCodecOnCodecThread();
+          return false;
+        }
+        scPositions[scPositionsLength] = payload_size;
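+        // The sentinel entry at payload_size lets the loop below compute
+        // each fragment length as the distance to the next start code.
+        // Example: start codes at offsets 0 and 100 with a payload_size of
+        // 250 produce fragments [4, 100) and [104, 250); the offsets skip
+        // the 4-byte start code itself.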
+        header.VerifyAndAllocateFragmentationHeader(scPositionsLength);
+        for (int32_t i = 0; i < scPositionsLength; i++) {
+          header.fragmentationOffset[i] = scPositions[i] + H264_SC_LENGTH;
+          header.fragmentationLength[i] =
+              scPositions[i + 1] - header.fragmentationOffset[i];
+          header.fragmentationPlType[i] = 0;
+          header.fragmentationTimeDiff[i] = 0;
+        }
+      }
+
+      callback_status = callback_->Encoded(*image, &info, &header);
+    }
+
+    // Return output buffer back to the encoder.
+    bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
+                                          j_release_output_buffer_method_,
+                                          output_buffer_index);
+    CHECK_EXCEPTION(jni);
+    if (!success) {
+      ResetCodecOnCodecThread();
+      return false;
+    }
+
+    // Calculate and print encoding statistics - every 3 seconds.
+    frames_encoded_++;
+    current_frames_++;
+    current_bytes_ += payload_size;
+    current_encoding_time_ms_ += frame_encoding_time_ms;
+    LogStatistics(false);
+
+    if (callback_status > 0) {
+      drop_next_input_frame_ = true;
+      // Theoretically could handle callback_status<0 here, but unclear what
+      // that would mean for us.
+    }
+  }
+  return true;
+}
+
+void MediaCodecVideoEncoder::LogStatistics(bool force_log) {
+  int statistic_time_ms = GetCurrentTimeMs() - stat_start_time_ms_;
+  if ((statistic_time_ms >= kMediaCodecStatisticsIntervalMs || force_log) &&
+      current_frames_ > 0 && statistic_time_ms > 0) {
+    int current_bitrate = current_bytes_ * 8 / statistic_time_ms;
+    int current_fps =
+        (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms;
+    ALOGD << "Encoded frames: " << frames_encoded_ <<
+        ". Bitrate: " << current_bitrate <<
+        ", target: " << last_set_bitrate_kbps_ << " kbps" <<
+        ", fps: " << current_fps <<
+        ", encTime: " << (current_encoding_time_ms_ / current_frames_) <<
+        ". QP: " << (current_acc_qp_ / current_frames_) <<
+        " for last " << statistic_time_ms << " ms.";
+    stat_start_time_ms_ = GetCurrentTimeMs();
+    current_frames_ = 0;
+    current_bytes_ = 0;
+    current_acc_qp_ = 0;
+    current_encoding_time_ms_ = 0;
+  }
+}
+
+int32_t MediaCodecVideoEncoder::NextNaluPosition(
+    uint8_t *buffer, size_t buffer_size) {
+  if (buffer_size < H264_SC_LENGTH) {
+    return -1;
+  }
+  uint8_t *head = buffer;
+  // Set end buffer pointer to 4 bytes before actual buffer end so we can
+  // access head[1], head[2] and head[3] in a loop without buffer overrun.
+  uint8_t *end = buffer + buffer_size - H264_SC_LENGTH;
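+  // Example: for a payload beginning 00 00 00 01 67 ..., the scan matches
+  // the four-byte start code at offset 0 and returns 0. Three-byte start
+  // codes (00 00 01) are not matched, since only H264_SC_LENGTH-byte start
+  // codes are searched for.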
+
+  while (head < end) {
+    if (head[0]) {
+      head++;
+      continue;
+    }
+    if (head[1]) { // got 00xx
+      head += 2;
+      continue;
+    }
+    if (head[2]) { // got 0000xx
+      head += 3;
+      continue;
+    }
+    if (head[3] != 0x01) { // got 000000xx
+      head++; // xx != 1, continue searching.
+      continue;
+    }
+    return static_cast<int32_t>(head - buffer);
+  }
+  return -1;
+}
+
+void MediaCodecVideoEncoder::OnDroppedFrame() {
+  // Report dropped frame to quality_scaler_.
+  if (scale_)
+    quality_scaler_.ReportDroppedFrame();
+}
+
+int MediaCodecVideoEncoder::GetTargetFramerate() {
+  return scale_ ? quality_scaler_.GetTargetFramerate() : -1;
+}
+
+const char* MediaCodecVideoEncoder::ImplementationName() const {
+  return "MediaCodec";
+}
+
+MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+  jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder");
+  supported_codecs_.clear();
+
+  bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
+      j_encoder_class,
+      GetStaticMethodID(jni, j_encoder_class, "isVp8HwSupported", "()Z"));
+  CHECK_EXCEPTION(jni);
+  if (is_vp8_hw_supported) {
+    ALOGD << "VP8 HW Encoder supported.";
+    supported_codecs_.push_back(VideoCodec(kVideoCodecVP8, "VP8",
+        MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
+  }
+
+  bool is_vp9_hw_supported = jni->CallStaticBooleanMethod(
+      j_encoder_class,
+      GetStaticMethodID(jni, j_encoder_class, "isVp9HwSupported", "()Z"));
+  CHECK_EXCEPTION(jni);
+  if (is_vp9_hw_supported) {
+    ALOGD << "VP9 HW Encoder supported.";
+    supported_codecs_.push_back(VideoCodec(kVideoCodecVP9, "VP9",
+        MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
+  }
+
+  bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
+      j_encoder_class,
+      GetStaticMethodID(jni, j_encoder_class, "isH264HwSupported", "()Z"));
+  CHECK_EXCEPTION(jni);
+  if (is_h264_hw_supported) {
+    ALOGD << "H.264 HW Encoder supported.";
+    supported_codecs_.push_back(VideoCodec(kVideoCodecH264, "H264",
+        MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
+  }
+}
+
+MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {
+  ALOGD << "MediaCodecVideoEncoderFactory dtor";
+}
+
+void MediaCodecVideoEncoderFactory::SetEGLContext(
+    JNIEnv* jni, jobject render_egl_context) {
+  ALOGD << "MediaCodecVideoEncoderFactory::SetEGLContext";
+  if (!egl_base_.CreateEglBase(jni, render_egl_context)) {
+    ALOGW << "Invalid EGL context - HW surface encoding is disabled.";
+  }
+}
+
+webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
+    VideoCodecType type) {
+  if (supported_codecs_.empty()) {
+    ALOGW << "No HW video encoder for type " << (int)type;
+    return nullptr;
+  }
+  for (const VideoCodec& codec : supported_codecs_) {
+    if (codec.type == type) {
+      ALOGD << "Create HW video encoder for type " << static_cast<int>(type)
+          << " (" << codec.name << ").";
+      return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded(), type,
+          egl_base_.egl_base_context());
+    }
+  }
+  ALOGW << "Cannot find HW video encoder for type " << static_cast<int>(type);
+  return nullptr;
+}
+
+const std::vector<MediaCodecVideoEncoderFactory::VideoCodec>&
+MediaCodecVideoEncoderFactory::codecs() const {
+  return supported_codecs_;
+}
+
+void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
+    webrtc::VideoEncoder* encoder) {
+  ALOGD << "Destroy video encoder.";
+  delete encoder;
+}
+
+}  // namespace webrtc_jni
+
diff --git a/webrtc/api/java/jni/androidmediaencoder_jni.h b/webrtc/api/java/jni/androidmediaencoder_jni.h
new file mode 100644
index 0000000..e96a489
--- /dev/null
+++ b/webrtc/api/java/jni/androidmediaencoder_jni.h
@@ -0,0 +1,63 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_ANDROIDMEDIAENCODER_JNI_H_
+#define WEBRTC_API_JAVA_JNI_ANDROIDMEDIAENCODER_JNI_H_
+
+#include <vector>
+
+#include "webrtc/api/java/jni/eglbase_jni.h"
+#include "webrtc/media/webrtc/webrtcvideoencoderfactory.h"
+
+namespace webrtc_jni {
+
+// Implementation of Android MediaCodec based encoder factory.
+class MediaCodecVideoEncoderFactory
+    : public cricket::WebRtcVideoEncoderFactory {
+ public:
+  MediaCodecVideoEncoderFactory();
+  virtual ~MediaCodecVideoEncoderFactory();
+
+  void SetEGLContext(JNIEnv* jni, jobject render_egl_context);
+
+  // WebRtcVideoEncoderFactory implementation.
+  webrtc::VideoEncoder* CreateVideoEncoder(webrtc::VideoCodecType type)
+      override;
+  const std::vector<VideoCodec>& codecs() const override;
+  void DestroyVideoEncoder(webrtc::VideoEncoder* encoder) override;
+
+ private:
+  EglBase egl_base_;
+
+  // Empty if platform support is lacking, const after ctor returns.
+  std::vector<VideoCodec> supported_codecs_;
+};
+
+}  // namespace webrtc_jni
+
+#endif  // WEBRTC_API_JAVA_JNI_ANDROIDMEDIAENCODER_JNI_H_
diff --git a/webrtc/api/java/jni/androidnetworkmonitor_jni.cc b/webrtc/api/java/jni/androidnetworkmonitor_jni.cc
new file mode 100644
index 0000000..a38fa11
--- /dev/null
+++ b/webrtc/api/java/jni/androidnetworkmonitor_jni.cc
@@ -0,0 +1,384 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/java/jni/androidnetworkmonitor_jni.h"
+
+#include <dlfcn.h>
+
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/api/java/jni/jni_helpers.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/ipaddress.h"
+
+namespace webrtc_jni {
+
+jobject AndroidNetworkMonitor::application_context_ = nullptr;
+
+static NetworkType GetNetworkTypeFromJava(JNIEnv* jni, jobject j_network_type) {
+  std::string enum_name =
+      GetJavaEnumName(jni, "org/webrtc/NetworkMonitorAutoDetect$ConnectionType",
+                      j_network_type);
+  if (enum_name == "CONNECTION_UNKNOWN") {
+    return NetworkType::NETWORK_UNKNOWN;
+  }
+  if (enum_name == "CONNECTION_ETHERNET") {
+    return NetworkType::NETWORK_ETHERNET;
+  }
+  if (enum_name == "CONNECTION_WIFI") {
+    return NetworkType::NETWORK_WIFI;
+  }
+  if (enum_name == "CONNECTION_4G") {
+    return NetworkType::NETWORK_4G;
+  }
+  if (enum_name == "CONNECTION_3G") {
+    return NetworkType::NETWORK_3G;
+  }
+  if (enum_name == "CONNECTION_2G") {
+    return NetworkType::NETWORK_2G;
+  }
+  if (enum_name == "CONNECTION_BLUETOOTH") {
+    return NetworkType::NETWORK_BLUETOOTH;
+  }
+  if (enum_name == "CONNECTION_NONE") {
+    return NetworkType::NETWORK_NONE;
+  }
+  RTC_DCHECK(false) << "Unknown connection type: " << enum_name;
+  return NetworkType::NETWORK_UNKNOWN;
+}
+
+static rtc::AdapterType AdapterTypeFromNetworkType(NetworkType network_type) {
+  switch (network_type) {
+    case NETWORK_UNKNOWN:
+      RTC_DCHECK(false) << "Unknown network type";
+      return rtc::ADAPTER_TYPE_UNKNOWN;
+    case NETWORK_ETHERNET:
+      return rtc::ADAPTER_TYPE_ETHERNET;
+    case NETWORK_WIFI:
+      return rtc::ADAPTER_TYPE_WIFI;
+    case NETWORK_4G:
+    case NETWORK_3G:
+    case NETWORK_2G:
+      return rtc::ADAPTER_TYPE_CELLULAR;
+    case NETWORK_BLUETOOTH:
+      // There is no corresponding mapping for bluetooth networks.
+      // Map it to VPN for now.
+      return rtc::ADAPTER_TYPE_VPN;
+    default:
+      RTC_DCHECK(false) << "Invalid network type " << network_type;
+      return rtc::ADAPTER_TYPE_UNKNOWN;
+  }
+}
+
+static rtc::IPAddress GetIPAddressFromJava(JNIEnv* jni, jobject j_ip_address) {
+  jclass j_ip_address_class = GetObjectClass(jni, j_ip_address);
+  jfieldID j_address_id = GetFieldID(jni, j_ip_address_class, "address", "[B");
+  jbyteArray j_addresses =
+      static_cast<jbyteArray>(GetObjectField(jni, j_ip_address, j_address_id));
+  size_t address_length = jni->GetArrayLength(j_addresses);
+  jbyte* addr_array = jni->GetByteArrayElements(j_addresses, nullptr);
+  CHECK_EXCEPTION(jni) << "Error during GetIPAddressFromJava";
+  if (address_length == 4) {
+    // IP4
+    struct in_addr ip4_addr;
+    memcpy(&ip4_addr.s_addr, addr_array, 4);
+    jni->ReleaseByteArrayElements(j_addresses, addr_array, JNI_ABORT);
+    return rtc::IPAddress(ip4_addr);
+  }
+  // IP6
+  RTC_CHECK(address_length == 16);
+  struct in6_addr ip6_addr;
+  memcpy(ip6_addr.s6_addr, addr_array, address_length);
+  jni->ReleaseByteArrayElements(j_addresses, addr_array, JNI_ABORT);
+  return rtc::IPAddress(ip6_addr);
+}
+
+static void GetIPAddressesFromJava(JNIEnv* jni,
+                                   jobjectArray j_ip_addresses,
+                                   std::vector<rtc::IPAddress>* ip_addresses) {
+  ip_addresses->clear();
+  size_t num_addresses = jni->GetArrayLength(j_ip_addresses);
+  CHECK_EXCEPTION(jni) << "Error during GetArrayLength";
+  for (size_t i = 0; i < num_addresses; ++i) {
+    jobject j_ip_address = jni->GetObjectArrayElement(j_ip_addresses, i);
+    CHECK_EXCEPTION(jni) << "Error during GetObjectArrayElement";
+    rtc::IPAddress ip = GetIPAddressFromJava(jni, j_ip_address);
+    ip_addresses->push_back(ip);
+  }
+}
+
+static NetworkInformation GetNetworkInformationFromJava(
+    JNIEnv* jni,
+    jobject j_network_info) {
+  jclass j_network_info_class = GetObjectClass(jni, j_network_info);
+  jfieldID j_interface_name_id =
+      GetFieldID(jni, j_network_info_class, "name", "Ljava/lang/String;");
+  jfieldID j_handle_id = GetFieldID(jni, j_network_info_class, "handle", "I");
+  jfieldID j_type_id =
+      GetFieldID(jni, j_network_info_class, "type",
+                 "Lorg/webrtc/NetworkMonitorAutoDetect$ConnectionType;");
+  jfieldID j_ip_addresses_id =
+      GetFieldID(jni, j_network_info_class, "ipAddresses",
+                 "[Lorg/webrtc/NetworkMonitorAutoDetect$IPAddress;");
+
+  NetworkInformation network_info;
+  network_info.interface_name = JavaToStdString(
+      jni, GetStringField(jni, j_network_info, j_interface_name_id));
+  network_info.handle =
+      static_cast<NetworkHandle>(GetIntField(jni, j_network_info, j_handle_id));
+  network_info.type = GetNetworkTypeFromJava(
+      jni, GetObjectField(jni, j_network_info, j_type_id));
+  jobjectArray j_ip_addresses = static_cast<jobjectArray>(
+      GetObjectField(jni, j_network_info, j_ip_addresses_id));
+  GetIPAddressesFromJava(jni, j_ip_addresses, &network_info.ip_addresses);
+  return network_info;
+}
+
+std::string NetworkInformation::ToString() const {
+  std::stringstream ss;
+  ss << "NetInfo[name " << interface_name << "; handle " << handle << "; type "
+     << type << "; address";
+  for (const rtc::IPAddress& address : ip_addresses) {
+    ss << " " << address.ToString();
+  }
+  ss << "]";
+  return ss.str();
+}
+
+// static
+void AndroidNetworkMonitor::SetAndroidContext(JNIEnv* jni, jobject context) {
+  if (application_context_) {
+    jni->DeleteGlobalRef(application_context_);
+  }
+  application_context_ = NewGlobalRef(jni, context);
+}
+
+AndroidNetworkMonitor::AndroidNetworkMonitor()
+    : j_network_monitor_class_(jni(),
+                               FindClass(jni(), "org/webrtc/NetworkMonitor")),
+      j_network_monitor_(
+          jni(),
+          jni()->CallStaticObjectMethod(
+              *j_network_monitor_class_,
+              GetStaticMethodID(
+                  jni(),
+                  *j_network_monitor_class_,
+                  "init",
+                  "(Landroid/content/Context;)Lorg/webrtc/NetworkMonitor;"),
+              application_context_)) {
+  ASSERT(application_context_ != nullptr);
+  CHECK_EXCEPTION(jni()) << "Error during NetworkMonitor.init";
+}
+
+void AndroidNetworkMonitor::Start() {
+  RTC_CHECK(thread_checker_.CalledOnValidThread());
+  if (started_) {
+    return;
+  }
+  started_ = true;
+
+  // This is somewhat magic behavior: registering |this| as a NetworkBinder
+  // allows the SocketServer to bind the sockets it creates to a particular
+  // network.
+  worker_thread()->socketserver()->set_network_binder(this);
+
+  jmethodID m =
+      GetMethodID(jni(), *j_network_monitor_class_, "startMonitoring", "(J)V");
+  jni()->CallVoidMethod(*j_network_monitor_, m, jlongFromPointer(this));
+  CHECK_EXCEPTION(jni()) << "Error during CallVoidMethod";
+}
+
+void AndroidNetworkMonitor::Stop() {
+  RTC_CHECK(thread_checker_.CalledOnValidThread());
+  if (!started_) {
+    return;
+  }
+  started_ = false;
+
+  // Once the network monitor stops it clears all network information, so
+  // binding would fail to find a network handle anyway.
+  if (worker_thread()->socketserver()->network_binder() == this) {
+    worker_thread()->socketserver()->set_network_binder(nullptr);
+  }
+
+  jmethodID m =
+      GetMethodID(jni(), *j_network_monitor_class_, "stopMonitoring", "(J)V");
+  jni()->CallVoidMethod(*j_network_monitor_, m, jlongFromPointer(this));
+  CHECK_EXCEPTION(jni()) << "Error during NetworkMonitor.stopMonitoring";
+
+  network_handle_by_address_.clear();
+  network_info_by_handle_.clear();
+}
+
+int AndroidNetworkMonitor::BindSocketToNetwork(int socket_fd,
+                                               const rtc::IPAddress& address) {
+  RTC_CHECK(thread_checker_.CalledOnValidThread());
+  // Android prior to Lollipop didn't support binding sockets to networks.
+  // However, execution should never reach this point in that case, because
+  // |network_handle_by_address_| is only populated on Android Lollipop and
+  // above.
+  // TODO(honghaiz): Add a check for Android version here so that it won't try
+  // to look for handle if the Android version is before Lollipop.
+  auto iter = network_handle_by_address_.find(address);
+  if (iter == network_handle_by_address_.end()) {
+    return rtc::NETWORK_BIND_ADDRESS_NOT_FOUND;
+  }
+  NetworkHandle network_handle = iter->second;
+
+  // NOTE: This does rely on Android implementation details, but
+  // these details are unlikely to change.
+  typedef int (*SetNetworkForSocket)(unsigned netId, int socketFd);
+  static SetNetworkForSocket setNetworkForSocket;
+  // This is not threadsafe, but we are running this only on the worker thread.
+  if (setNetworkForSocket == nullptr) {
+    // Android's netd client library should always be loaded in our address
+    // space as it shims libc functions like connect().
+    const std::string net_library_path = "libnetd_client.so";
+    void* lib = dlopen(net_library_path.c_str(), RTLD_LAZY);
+    if (lib == nullptr) {
+      LOG(LS_ERROR) << "Library " << net_library_path << " not found!";
+      return rtc::NETWORK_BIND_NOT_IMPLEMENTED;
+    }
+    setNetworkForSocket = reinterpret_cast<SetNetworkForSocket>(
+        dlsym(lib, "setNetworkForSocket"));
+  }
+  if (setNetworkForSocket == nullptr) {
+    LOG(LS_ERROR) << "Symbol setNetworkForSocket not found ";
+    return rtc::NETWORK_BIND_NOT_IMPLEMENTED;
+  }
+  int rv = setNetworkForSocket(network_handle, socket_fd);
+  // If the network has since disconnected, |rv| will be ENONET. Surface this
+  // as NETWORK_BIND_NETWORK_CHANGED rather than the less descriptive
+  // NETWORK_BIND_FAILURE.
+  if (rv == 0) {
+    return rtc::NETWORK_BIND_SUCCESS;
+  }
+  if (rv == ENONET) {
+    return rtc::NETWORK_BIND_NETWORK_CHANGED;
+  }
+  return rtc::NETWORK_BIND_FAILURE;
+}
+
+void AndroidNetworkMonitor::OnNetworkConnected(
+    const NetworkInformation& network_info) {
+  LOG(LS_INFO) << "Network connected: " << network_info.ToString();
+  worker_thread()->Invoke<void>(rtc::Bind(
+      &AndroidNetworkMonitor::OnNetworkConnected_w, this, network_info));
+}
+
+void AndroidNetworkMonitor::OnNetworkConnected_w(
+    const NetworkInformation& network_info) {
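+  // Record the adapter type and network info for this network, and map each
+  // of its IP addresses to the network handle so sockets using one of these
+  // addresses can later be bound to the network.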
+  adapter_type_by_name_[network_info.interface_name] =
+      AdapterTypeFromNetworkType(network_info.type);
+  network_info_by_handle_[network_info.handle] = network_info;
+  for (const rtc::IPAddress& address : network_info.ip_addresses) {
+    network_handle_by_address_[address] = network_info.handle;
+  }
+}
+
+void AndroidNetworkMonitor::OnNetworkDisconnected(NetworkHandle handle) {
+  LOG(LS_INFO) << "Network disconnected for handle " << handle;
+  worker_thread()->Invoke<void>(
+      rtc::Bind(&AndroidNetworkMonitor::OnNetworkDisconnected_w, this, handle));
+}
+
+void AndroidNetworkMonitor::OnNetworkDisconnected_w(NetworkHandle handle) {
+  auto iter = network_info_by_handle_.find(handle);
+  if (iter != network_info_by_handle_.end()) {
+    for (const rtc::IPAddress& address : iter->second.ip_addresses) {
+      network_handle_by_address_.erase(address);
+    }
+    network_info_by_handle_.erase(iter);
+  }
+}
+
+void AndroidNetworkMonitor::SetNetworkInfos(
+    const std::vector<NetworkInformation>& network_infos) {
+  RTC_CHECK(thread_checker_.CalledOnValidThread());
+  network_handle_by_address_.clear();
+  network_info_by_handle_.clear();
+  for (const NetworkInformation& network : network_infos) {
+    OnNetworkConnected_w(network);
+  }
+}
+
+rtc::AdapterType AndroidNetworkMonitor::GetAdapterType(
+    const std::string& if_name) {
+  auto iter = adapter_type_by_name_.find(if_name);
+  if (iter == adapter_type_by_name_.end()) {
+    return rtc::ADAPTER_TYPE_UNKNOWN;
+  }
+  return iter->second;
+}
+
+rtc::NetworkMonitorInterface*
+AndroidNetworkMonitorFactory::CreateNetworkMonitor() {
+  return new AndroidNetworkMonitor();
+}
+
+JOW(void, NetworkMonitor_nativeNotifyConnectionTypeChanged)(
+    JNIEnv* jni, jobject j_monitor, jlong j_native_monitor) {
+  rtc::NetworkMonitorInterface* network_monitor =
+      reinterpret_cast<rtc::NetworkMonitorInterface*>(j_native_monitor);
+  network_monitor->OnNetworksChanged();
+}
+
+JOW(void, NetworkMonitor_nativeNotifyOfActiveNetworkList)(
+    JNIEnv* jni, jobject j_monitor, jlong j_native_monitor,
+    jobjectArray j_network_infos) {
+  AndroidNetworkMonitor* network_monitor =
+      reinterpret_cast<AndroidNetworkMonitor*>(j_native_monitor);
+  std::vector<NetworkInformation> network_infos;
+  size_t num_networks = jni->GetArrayLength(j_network_infos);
+  for (size_t i = 0; i < num_networks; ++i) {
+    jobject j_network_info = jni->GetObjectArrayElement(j_network_infos, i);
+    CHECK_EXCEPTION(jni) << "Error during GetObjectArrayElement";
+    network_infos.push_back(GetNetworkInformationFromJava(jni, j_network_info));
+  }
+  network_monitor->SetNetworkInfos(network_infos);
+}
+
+JOW(void, NetworkMonitor_nativeNotifyOfNetworkConnect)(
+    JNIEnv* jni, jobject j_monitor, jlong j_native_monitor,
+    jobject j_network_info) {
+  AndroidNetworkMonitor* network_monitor =
+      reinterpret_cast<AndroidNetworkMonitor*>(j_native_monitor);
+  NetworkInformation network_info =
+      GetNetworkInformationFromJava(jni, j_network_info);
+  network_monitor->OnNetworkConnected(network_info);
+}
+
+JOW(void, NetworkMonitor_nativeNotifyOfNetworkDisconnect)(
+    JNIEnv* jni, jobject j_monitor, jlong j_native_monitor,
+    jint network_handle) {
+  AndroidNetworkMonitor* network_monitor =
+      reinterpret_cast<AndroidNetworkMonitor*>(j_native_monitor);
+  network_monitor->OnNetworkDisconnected(
+      static_cast<NetworkHandle>(network_handle));
+}
+
+}  // namespace webrtc_jni
diff --git a/webrtc/api/java/jni/androidnetworkmonitor_jni.h b/webrtc/api/java/jni/androidnetworkmonitor_jni.h
new file mode 100644
index 0000000..220a5bc
--- /dev/null
+++ b/webrtc/api/java/jni/androidnetworkmonitor_jni.h
@@ -0,0 +1,108 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_ANDROIDNETWORKMONITOR_JNI_H_
+#define WEBRTC_API_JAVA_JNI_ANDROIDNETWORKMONITOR_JNI_H_
+
+#include "webrtc/base/networkmonitor.h"
+
+#include <map>
+
+#include "webrtc/api/java/jni/jni_helpers.h"
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/thread_checker.h"
+
+namespace webrtc_jni {
+
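+// An Android network handle: the netId that identifies a network to the
+// Android connectivity service, and that BindSocketToNetwork() passes to
+// setNetworkForSocket().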
+typedef uint32_t NetworkHandle;
+
+// C++ equivalent of the Java enum NetworkMonitorAutoDetect.ConnectionType.
+enum NetworkType {
+  NETWORK_UNKNOWN,
+  NETWORK_ETHERNET,
+  NETWORK_WIFI,
+  NETWORK_4G,
+  NETWORK_3G,
+  NETWORK_2G,
+  NETWORK_BLUETOOTH,
+  NETWORK_NONE
+};
+
+// This information is collected from the Android OS so that the native code
+// can get the network type and handle (Android network ID) for each
+// interface.
+struct NetworkInformation {
+  std::string interface_name;
+  NetworkHandle handle;
+  NetworkType type;
+  std::vector<rtc::IPAddress> ip_addresses;
+
+  std::string ToString() const;
+};
+
+class AndroidNetworkMonitor : public rtc::NetworkMonitorBase,
+                              public rtc::NetworkBinderInterface {
+ public:
+  AndroidNetworkMonitor();
+
+  static void SetAndroidContext(JNIEnv* jni, jobject context);
+
+  void Start() override;
+  void Stop() override;
+
+  int BindSocketToNetwork(int socket_fd,
+                          const rtc::IPAddress& address) override;
+  rtc::AdapterType GetAdapterType(const std::string& if_name) override;
+  void OnNetworkConnected(const NetworkInformation& network_info);
+  void OnNetworkDisconnected(NetworkHandle network_handle);
+  void SetNetworkInfos(const std::vector<NetworkInformation>& network_infos);
+
+ private:
+  JNIEnv* jni() { return AttachCurrentThreadIfNeeded(); }
+
+  void OnNetworkConnected_w(const NetworkInformation& network_info);
+  void OnNetworkDisconnected_w(NetworkHandle network_handle);
+
+  ScopedGlobalRef<jclass> j_network_monitor_class_;
+  ScopedGlobalRef<jobject> j_network_monitor_;
+  rtc::ThreadChecker thread_checker_;
+  static jobject application_context_;
+  bool started_ = false;
+  std::map<std::string, rtc::AdapterType> adapter_type_by_name_;
+  std::map<rtc::IPAddress, NetworkHandle> network_handle_by_address_;
+  std::map<NetworkHandle, NetworkInformation> network_info_by_handle_;
+};
+
+class AndroidNetworkMonitorFactory : public rtc::NetworkMonitorFactory {
+ public:
+  AndroidNetworkMonitorFactory() {}
+
+  rtc::NetworkMonitorInterface* CreateNetworkMonitor() override;
+};
+
+}  // namespace webrtc_jni
+
+#endif  // WEBRTC_API_JAVA_JNI_ANDROIDNETWORKMONITOR_JNI_H_
diff --git a/webrtc/api/java/jni/androidvideocapturer_jni.cc b/webrtc/api/java/jni/androidvideocapturer_jni.cc
new file mode 100644
index 0000000..a636d62
--- /dev/null
+++ b/webrtc/api/java/jni/androidvideocapturer_jni.cc
@@ -0,0 +1,246 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include "webrtc/api/java/jni/androidvideocapturer_jni.h"
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/api/java/jni/native_handle_impl.h"
+#include "webrtc/api/java/jni/surfacetexturehelper_jni.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "webrtc/base/bind.h"
+
+namespace webrtc_jni {
+
+jobject AndroidVideoCapturerJni::application_context_ = nullptr;
+
+// static
+int AndroidVideoCapturerJni::SetAndroidObjects(JNIEnv* jni,
+                                               jobject application_context) {
+  if (application_context_) {
+    jni->DeleteGlobalRef(application_context_);
+  }
+  application_context_ = NewGlobalRef(jni, application_context);
+
+  return 0;
+}
+
+AndroidVideoCapturerJni::AndroidVideoCapturerJni(
+    JNIEnv* jni,
+    jobject j_video_capturer,
+    jobject j_surface_texture_helper)
+    : j_video_capturer_(jni, j_video_capturer),
+      j_video_capturer_class_(
+          jni, FindClass(jni, "org/webrtc/VideoCapturerAndroid")),
+      j_observer_class_(
+          jni,
+          FindClass(jni,
+                    "org/webrtc/VideoCapturerAndroid$NativeObserver")),
+      surface_texture_helper_(new rtc::RefCountedObject<SurfaceTextureHelper>(
+          jni, j_surface_texture_helper)),
+      capturer_(nullptr) {
+  LOG(LS_INFO) << "AndroidVideoCapturerJni ctor";
+  thread_checker_.DetachFromThread();
+}
+
+AndroidVideoCapturerJni::~AndroidVideoCapturerJni() {
+  LOG(LS_INFO) << "AndroidVideoCapturerJni dtor";
+  jni()->CallVoidMethod(
+      *j_video_capturer_,
+      GetMethodID(jni(), *j_video_capturer_class_, "release", "()V"));
+  CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.release()";
+}
+
+void AndroidVideoCapturerJni::Start(int width, int height, int framerate,
+                                    webrtc::AndroidVideoCapturer* capturer) {
+  LOG(LS_INFO) << "AndroidVideoCapturerJni start";
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  {
+    rtc::CritScope cs(&capturer_lock_);
+    RTC_CHECK(capturer_ == nullptr);
+    RTC_CHECK(invoker_.get() == nullptr);
+    capturer_ = capturer;
+    invoker_.reset(new rtc::GuardedAsyncInvoker());
+  }
+  jobject j_frame_observer =
+      jni()->NewObject(*j_observer_class_,
+                       GetMethodID(jni(), *j_observer_class_, "<init>", "(J)V"),
+                       jlongFromPointer(this));
+  CHECK_EXCEPTION(jni()) << "error during NewObject";
+
+  jmethodID m = GetMethodID(
+      jni(), *j_video_capturer_class_, "startCapture",
+      "(IIILandroid/content/Context;"
+      "Lorg/webrtc/VideoCapturerAndroid$CapturerObserver;)V");
+  jni()->CallVoidMethod(*j_video_capturer_,
+                        m, width, height,
+                        framerate,
+                        application_context_,
+                        j_frame_observer);
+  CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.startCapture";
+}
+
+void AndroidVideoCapturerJni::Stop() {
+  LOG(LS_INFO) << "AndroidVideoCapturerJni stop";
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  {
+    rtc::CritScope cs(&capturer_lock_);
+    // Destroying |invoker_| will cancel all pending calls to |capturer_|.
+    invoker_ = nullptr;
+    capturer_ = nullptr;
+  }
+  jmethodID m = GetMethodID(jni(), *j_video_capturer_class_,
+                            "stopCapture", "()V");
+  jni()->CallVoidMethod(*j_video_capturer_, m);
+  CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.stopCapture";
+  LOG(LS_INFO) << "AndroidVideoCapturerJni stop done";
+}
+
+template <typename... Args>
+void AndroidVideoCapturerJni::AsyncCapturerInvoke(
+    const char* method_name,
+    void (webrtc::AndroidVideoCapturer::*method)(Args...),
+    typename Identity<Args>::type... args) {
+  rtc::CritScope cs(&capturer_lock_);
+  if (!invoker_) {
+    LOG(LS_WARNING) << method_name << "() called for closed capturer.";
+    return;
+  }
+  invoker_->AsyncInvoke<void>(rtc::Bind(method, capturer_, args...));
+}
+
+std::string AndroidVideoCapturerJni::GetSupportedFormats() {
+  jmethodID m =
+      GetMethodID(jni(), *j_video_capturer_class_,
+                  "getSupportedFormatsAsJson", "()Ljava/lang/String;");
+  jstring j_json_caps = static_cast<jstring>(
+      jni()->CallObjectMethod(*j_video_capturer_, m));
+  CHECK_EXCEPTION(jni()) << "error during getSupportedFormatsAsJson";
+  return JavaToStdString(jni(), j_json_caps);
+}
+
+void AndroidVideoCapturerJni::OnCapturerStarted(bool success) {
+  LOG(LS_INFO) << "AndroidVideoCapturerJni capture started: " << success;
+  AsyncCapturerInvoke("OnCapturerStarted",
+                      &webrtc::AndroidVideoCapturer::OnCapturerStarted,
+                      success);
+}
+
+void AndroidVideoCapturerJni::OnMemoryBufferFrame(void* video_frame,
+                                                  int length,
+                                                  int width,
+                                                  int height,
+                                                  int rotation,
+                                                  int64_t timestamp_ns) {
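+  // The incoming buffer is NV21: a full-resolution Y plane followed by a
+  // single interleaved, half-resolution VU plane.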
+  const uint8_t* y_plane = static_cast<uint8_t*>(video_frame);
+  const uint8_t* vu_plane = y_plane + width * height;
+
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
+      buffer_pool_.CreateBuffer(width, height);
+  libyuv::NV21ToI420(
+      y_plane, width,
+      vu_plane, width,
+      buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
+      buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
+      buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
+      width, height);
+  AsyncCapturerInvoke("OnIncomingFrame",
+                      &webrtc::AndroidVideoCapturer::OnIncomingFrame,
+                      buffer, rotation, timestamp_ns);
+}
+
+void AndroidVideoCapturerJni::OnTextureFrame(int width,
+                                             int height,
+                                             int rotation,
+                                             int64_t timestamp_ns,
+                                             const NativeHandleImpl& handle) {
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
+      surface_texture_helper_->CreateTextureFrame(width, height, handle));
+
+  AsyncCapturerInvoke("OnIncomingFrame",
+                      &webrtc::AndroidVideoCapturer::OnIncomingFrame,
+                      buffer, rotation, timestamp_ns);
+}
+
+void AndroidVideoCapturerJni::OnOutputFormatRequest(int width,
+                                                    int height,
+                                                    int fps) {
+  AsyncCapturerInvoke("OnOutputFormatRequest",
+                      &webrtc::AndroidVideoCapturer::OnOutputFormatRequest,
+                      width, height, fps);
+}
+
+JNIEnv* AndroidVideoCapturerJni::jni() { return AttachCurrentThreadIfNeeded(); }
+
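+// In the JNI-mangled names below, "_00024" is the escape sequence for the
+// '$' separating a Java outer class from a nested class, e.g.
+// VideoCapturerAndroid$NativeObserver.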
+JOW(void,
+    VideoCapturerAndroid_00024NativeObserver_nativeOnByteBufferFrameCaptured)
+    (JNIEnv* jni, jclass, jlong j_capturer, jbyteArray j_frame, jint length,
+        jint width, jint height, jint rotation, jlong timestamp) {
+  jboolean is_copy = true;
+  jbyte* bytes = jni->GetByteArrayElements(j_frame, &is_copy);
+  reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
+      ->OnMemoryBufferFrame(bytes, length, width, height, rotation, timestamp);
+  jni->ReleaseByteArrayElements(j_frame, bytes, JNI_ABORT);
+}
+
+JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnTextureFrameCaptured)
+    (JNIEnv* jni, jclass, jlong j_capturer, jint j_width, jint j_height,
+        jint j_oes_texture_id, jfloatArray j_transform_matrix,
+        jint j_rotation, jlong j_timestamp) {
+  reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
+      ->OnTextureFrame(j_width, j_height, j_rotation, j_timestamp,
+                       NativeHandleImpl(jni, j_oes_texture_id,
+                                        j_transform_matrix));
+}
+
+JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeCapturerStarted)
+    (JNIEnv* jni, jclass, jlong j_capturer, jboolean j_success) {
+  LOG(LS_INFO) << "NativeObserver_nativeCapturerStarted";
+  reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)->OnCapturerStarted(
+      j_success);
+}
+
+JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnOutputFormatRequest)
+    (JNIEnv* jni, jclass, jlong j_capturer, jint j_width, jint j_height,
+        jint j_fps) {
+  LOG(LS_INFO) << "NativeObserver_nativeOnOutputFormatRequest";
+  reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)->OnOutputFormatRequest(
+      j_width, j_height, j_fps);
+}
+
+JOW(jlong, VideoCapturerAndroid_nativeCreateVideoCapturer)
+    (JNIEnv* jni, jclass,
+     jobject j_video_capturer, jobject j_surface_texture_helper) {
+  rtc::scoped_refptr<webrtc::AndroidVideoCapturerDelegate> delegate =
+      new rtc::RefCountedObject<AndroidVideoCapturerJni>(
+          jni, j_video_capturer, j_surface_texture_helper);
+  rtc::scoped_ptr<cricket::VideoCapturer> capturer(
+      new webrtc::AndroidVideoCapturer(delegate));
+  // Caller takes ownership of the cricket::VideoCapturer* pointer.
+  return jlongFromPointer(capturer.release());
+}
+
+}  // namespace webrtc_jni
diff --git a/webrtc/api/java/jni/androidvideocapturer_jni.h b/webrtc/api/java/jni/androidvideocapturer_jni.h
new file mode 100644
index 0000000..bf611f5
--- /dev/null
+++ b/webrtc/api/java/jni/androidvideocapturer_jni.h
@@ -0,0 +1,116 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_ANDROIDVIDEOCAPTURER_JNI_H_
+#define WEBRTC_API_JAVA_JNI_ANDROIDVIDEOCAPTURER_JNI_H_
+
+#include <string>
+
+#include "webrtc/api/androidvideocapturer.h"
+#include "webrtc/api/java/jni/jni_helpers.h"
+#include "webrtc/base/asyncinvoker.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/thread_checker.h"
+#include "webrtc/common_video/include/i420_buffer_pool.h"
+
+namespace webrtc_jni {
+
+struct NativeHandleImpl;
+class SurfaceTextureHelper;
+
+// AndroidVideoCapturerJni implements AndroidVideoCapturerDelegate.
+// The purpose of the delegate is to hide the JNI specifics from the C++ only
+// AndroidVideoCapturer.
+class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
+ public:
+  static int SetAndroidObjects(JNIEnv* jni, jobject application_context);
+
+  AndroidVideoCapturerJni(JNIEnv* jni,
+                          jobject j_video_capturer,
+                          jobject j_surface_texture_helper);
+
+  void Start(int width, int height, int framerate,
+             webrtc::AndroidVideoCapturer* capturer) override;
+  void Stop() override;
+
+  std::string GetSupportedFormats() override;
+
+  // Called from VideoCapturerAndroid::NativeObserver on a Java thread.
+  void OnCapturerStarted(bool success);
+  void OnMemoryBufferFrame(void* video_frame, int length, int width,
+                           int height, int rotation, int64_t timestamp_ns);
+  void OnTextureFrame(int width, int height, int rotation, int64_t timestamp_ns,
+                      const NativeHandleImpl& handle);
+  void OnOutputFormatRequest(int width, int height, int fps);
+
+ protected:
+  ~AndroidVideoCapturerJni();
+
+ private:
+  JNIEnv* jni();
+
+  // To avoid deducing Args from the 3rd parameter of AsyncCapturerInvoke.
+  template <typename T>
+  struct Identity {
+    typedef T type;
+  };
+
+  // Helper function to make safe asynchronous calls to |capturer_|. The calls
+  // are not guaranteed to be delivered.
+  template <typename... Args>
+  void AsyncCapturerInvoke(
+      const char* method_name,
+      void (webrtc::AndroidVideoCapturer::*method)(Args...),
+      typename Identity<Args>::type... args);
+
+  const ScopedGlobalRef<jobject> j_video_capturer_;
+  const ScopedGlobalRef<jclass> j_video_capturer_class_;
+  const ScopedGlobalRef<jclass> j_observer_class_;
+
+  // Used on the Java thread running the camera.
+  webrtc::I420BufferPool buffer_pool_;
+  rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
+  rtc::ThreadChecker thread_checker_;
+
+  // |capturer_| is guaranteed to be a valid pointer from a call to
+  // AndroidVideoCapturerDelegate::Start() until
+  // AndroidVideoCapturerDelegate::Stop() is called.
+  rtc::CriticalSection capturer_lock_;
+  webrtc::AndroidVideoCapturer* capturer_ GUARDED_BY(capturer_lock_);
+  // |invoker_| is used to communicate with |capturer_| on the thread Start() is
+  // called on.
+  rtc::scoped_ptr<rtc::GuardedAsyncInvoker> invoker_ GUARDED_BY(capturer_lock_);
+
+  static jobject application_context_;
+
+  RTC_DISALLOW_COPY_AND_ASSIGN(AndroidVideoCapturerJni);
+};
+
+}  // namespace webrtc_jni
+
+#endif  // WEBRTC_API_JAVA_JNI_ANDROIDVIDEOCAPTURER_JNI_H_
diff --git a/webrtc/api/java/jni/classreferenceholder.cc b/webrtc/api/java/jni/classreferenceholder.cc
new file mode 100644
index 0000000..0625cc2
--- /dev/null
+++ b/webrtc/api/java/jni/classreferenceholder.cc
@@ -0,0 +1,152 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+#include "webrtc/api/java/jni/classreferenceholder.h"
+
+#include "webrtc/api/java/jni/jni_helpers.h"
+
+namespace webrtc_jni {
+
+// ClassReferenceHolder holds global references to the Java classes used by
+// webrtc/api.
+class ClassReferenceHolder {
+ public:
+  explicit ClassReferenceHolder(JNIEnv* jni);
+  ~ClassReferenceHolder();
+
+  void FreeReferences(JNIEnv* jni);
+  jclass GetClass(const std::string& name);
+
+ private:
+  void LoadClass(JNIEnv* jni, const std::string& name);
+
+  std::map<std::string, jclass> classes_;
+};
+
+// Allocated in LoadGlobalClassReferenceHolder(),
+// freed in FreeGlobalClassReferenceHolder().
+static ClassReferenceHolder* g_class_reference_holder = nullptr;
+
+void LoadGlobalClassReferenceHolder() {
+  RTC_CHECK(g_class_reference_holder == nullptr);
+  g_class_reference_holder = new ClassReferenceHolder(GetEnv());
+}
+
+void FreeGlobalClassReferenceHolder() {
+  g_class_reference_holder->FreeReferences(AttachCurrentThreadIfNeeded());
+  delete g_class_reference_holder;
+  g_class_reference_holder = nullptr;
+}
+
+ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
+  LoadClass(jni, "android/graphics/SurfaceTexture");
+  LoadClass(jni, "java/nio/ByteBuffer");
+  LoadClass(jni, "java/util/ArrayList");
+  LoadClass(jni, "org/webrtc/AudioTrack");
+  LoadClass(jni, "org/webrtc/CameraEnumerator");
+  LoadClass(jni, "org/webrtc/Camera2Enumerator");
+  LoadClass(jni, "org/webrtc/CameraEnumerationAndroid");
+  LoadClass(jni, "org/webrtc/DataChannel");
+  LoadClass(jni, "org/webrtc/DataChannel$Buffer");
+  LoadClass(jni, "org/webrtc/DataChannel$Init");
+  LoadClass(jni, "org/webrtc/DataChannel$State");
+  LoadClass(jni, "org/webrtc/EglBase");
+  LoadClass(jni, "org/webrtc/EglBase$Context");
+  LoadClass(jni, "org/webrtc/EglBase14$Context");
+  LoadClass(jni, "org/webrtc/IceCandidate");
+  LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder");
+  LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
+  LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$VideoCodecType");
+  LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder");
+  LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
+  LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer");
+  LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$VideoCodecType");
+  LoadClass(jni, "org/webrtc/MediaSource$State");
+  LoadClass(jni, "org/webrtc/MediaStream");
+  LoadClass(jni, "org/webrtc/MediaStreamTrack$State");
+  LoadClass(jni, "org/webrtc/NetworkMonitor");
+  LoadClass(jni, "org/webrtc/NetworkMonitorAutoDetect$ConnectionType");
+  LoadClass(jni, "org/webrtc/NetworkMonitorAutoDetect$IPAddress");
+  LoadClass(jni, "org/webrtc/NetworkMonitorAutoDetect$NetworkInformation");
+  LoadClass(jni, "org/webrtc/PeerConnectionFactory");
+  LoadClass(jni, "org/webrtc/PeerConnection$BundlePolicy");
+  LoadClass(jni, "org/webrtc/PeerConnection$ContinualGatheringPolicy");
+  LoadClass(jni, "org/webrtc/PeerConnection$RtcpMuxPolicy");
+  LoadClass(jni, "org/webrtc/PeerConnection$IceConnectionState");
+  LoadClass(jni, "org/webrtc/PeerConnection$IceGatheringState");
+  LoadClass(jni, "org/webrtc/PeerConnection$IceTransportsType");
+  LoadClass(jni, "org/webrtc/PeerConnection$TcpCandidatePolicy");
+  LoadClass(jni, "org/webrtc/PeerConnection$KeyType");
+  LoadClass(jni, "org/webrtc/PeerConnection$SignalingState");
+  LoadClass(jni, "org/webrtc/RtpReceiver");
+  LoadClass(jni, "org/webrtc/RtpSender");
+  LoadClass(jni, "org/webrtc/SessionDescription");
+  LoadClass(jni, "org/webrtc/SessionDescription$Type");
+  LoadClass(jni, "org/webrtc/StatsReport");
+  LoadClass(jni, "org/webrtc/StatsReport$Value");
+  LoadClass(jni, "org/webrtc/SurfaceTextureHelper");
+  LoadClass(jni, "org/webrtc/VideoCapturerAndroid");
+  LoadClass(jni, "org/webrtc/VideoCapturerAndroid$NativeObserver");
+  LoadClass(jni, "org/webrtc/VideoRenderer$I420Frame");
+  LoadClass(jni, "org/webrtc/VideoTrack");
+}
+
+ClassReferenceHolder::~ClassReferenceHolder() {
+  RTC_CHECK(classes_.empty()) << "Must call FreeReferences() before dtor!";
+}
+
+void ClassReferenceHolder::FreeReferences(JNIEnv* jni) {
+  for (const auto& entry : classes_) {
+    jni->DeleteGlobalRef(entry.second);
+  }
+  classes_.clear();
+}
+
+jclass ClassReferenceHolder::GetClass(const std::string& name) {
+  std::map<std::string, jclass>::iterator it = classes_.find(name);
+  RTC_CHECK(it != classes_.end()) << "Unexpected GetClass() call for: " << name;
+  return it->second;
+}
+
+void ClassReferenceHolder::LoadClass(JNIEnv* jni, const std::string& name) {
+  jclass localRef = jni->FindClass(name.c_str());
+  CHECK_EXCEPTION(jni) << "error during FindClass: " << name;
+  RTC_CHECK(localRef) << name;
+  jclass globalRef = reinterpret_cast<jclass>(jni->NewGlobalRef(localRef));
+  CHECK_EXCEPTION(jni) << "error during NewGlobalRef: " << name;
+  RTC_CHECK(globalRef) << name;
+  bool inserted = classes_.insert(std::make_pair(name, globalRef)).second;
+  RTC_CHECK(inserted) << "Duplicate class name: " << name;
+}
+
+// Returns a global reference guaranteed to be valid for the lifetime of the
+// process.
+jclass FindClass(JNIEnv* jni, const char* name) {
+  return g_class_reference_holder->GetClass(name);
+}
+
+}  // namespace webrtc_jni
diff --git a/webrtc/api/java/jni/classreferenceholder.h b/webrtc/api/java/jni/classreferenceholder.h
new file mode 100644
index 0000000..5edf614
--- /dev/null
+++ b/webrtc/api/java/jni/classreferenceholder.h
@@ -0,0 +1,59 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// Android's FindClass() is trickier than usual because the app-specific
+// ClassLoader is not consulted when there is no app-specific frame on the
+// stack.  Consequently, we look up all classes once, up front, in webrtc/api.
+// http://developer.android.com/training/articles/perf-jni.html#faq_FindClass
+
+#ifndef WEBRTC_API_JAVA_JNI_CLASSREFERENCEHOLDER_H_
+#define WEBRTC_API_JAVA_JNI_CLASSREFERENCEHOLDER_H_
+
+#include <jni.h>
+#include <map>
+#include <string>
+
+namespace webrtc_jni {
+
+// LoadGlobalClassReferenceHolder must be called in JNI_OnLoad.
+void LoadGlobalClassReferenceHolder();
+// FreeGlobalClassReferenceHolder must be called in JNI_UnLoad.
+void FreeGlobalClassReferenceHolder();
+
+// Returns a global reference guaranteed to be valid for the lifetime of the
+// process.
+jclass FindClass(JNIEnv* jni, const char* name);
+
+// Convenience macro defining JNI-accessible methods in the org.webrtc package.
+// Eliminates unnecessary boilerplate and line-wraps, reducing visual clutter.
+#define JOW(rettype, name) extern "C" rettype JNIEXPORT JNICALL \
+  Java_org_webrtc_##name
+
+}  // namespace webrtc_jni
+
+#endif  // WEBRTC_API_JAVA_JNI_CLASSREFERENCEHOLDER_H_
diff --git a/webrtc/api/java/jni/eglbase_jni.cc b/webrtc/api/java/jni/eglbase_jni.cc
new file mode 100644
index 0000000..26eeeb3
--- /dev/null
+++ b/webrtc/api/java/jni/eglbase_jni.cc
@@ -0,0 +1,90 @@
+/*
+ * libjingle
+ * Copyright 2016 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include "webrtc/api/java/jni/eglbase_jni.h"
+
+#include "webrtc/api/java/jni/androidmediacodeccommon.h"
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/api/java/jni/jni_helpers.h"
+
+namespace webrtc_jni {
+
+EglBase::EglBase() {
+}
+
+EglBase::~EglBase() {
+  if (egl_base_) {
+    JNIEnv* jni = AttachCurrentThreadIfNeeded();
+    jni->DeleteGlobalRef(egl_base_context_);
+    egl_base_context_ = nullptr;
+    jni->CallVoidMethod(egl_base_,
+                        GetMethodID(jni,
+                                    FindClass(jni, "org/webrtc/EglBase"),
+                                    "release", "()V"));
+    jni->DeleteGlobalRef(egl_base_);
+  }
+}
+
+bool EglBase::CreateEglBase(JNIEnv* jni, jobject egl_context) {
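+  // Release any previously created Java EglBase and its context before
+  // creating a new one.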
+  if (egl_base_) {
+    jni->DeleteGlobalRef(egl_base_context_);
+    egl_base_context_ = nullptr;
+    jni->CallVoidMethod(egl_base_,
+                        GetMethodID(jni,
+                                    FindClass(jni, "org/webrtc/EglBase"),
+                                    "release", "()V"));
+    jni->DeleteGlobalRef(egl_base_);
+    egl_base_ = nullptr;
+  }
+
+  if (IsNull(jni, egl_context))
+    return false;
+
+  jobject egl_base = jni->CallStaticObjectMethod(
+      FindClass(jni, "org/webrtc/EglBase"),
+      GetStaticMethodID(jni,
+                        FindClass(jni, "org/webrtc/EglBase"),
+                        "create",
+                        "(Lorg/webrtc/EglBase$Context;)Lorg/webrtc/EglBase;"),
+                        egl_context);
+  if (CheckException(jni))
+    return false;
+
+  egl_base_ = jni->NewGlobalRef(egl_base);
+  egl_base_context_ = jni->NewGlobalRef(
+      jni->CallObjectMethod(
+          egl_base_,
+          GetMethodID(jni,
+                      FindClass(jni, "org/webrtc/EglBase"),
+                      "getEglBaseContext",
+                      "()Lorg/webrtc/EglBase$Context;")));
+  RTC_CHECK(egl_base_context_);
+  return true;
+}
+
+}  // namespace webrtc_jni
diff --git a/webrtc/api/java/jni/eglbase_jni.h b/webrtc/api/java/jni/eglbase_jni.h
new file mode 100644
index 0000000..de7e39e
--- /dev/null
+++ b/webrtc/api/java/jni/eglbase_jni.h
@@ -0,0 +1,60 @@
+/*
+ * libjingle
+ * Copyright 2016 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_EGLBASE_JNI_H_
+#define WEBRTC_API_JAVA_JNI_EGLBASE_JNI_H_
+
+#include <jni.h>
+
+#include "webrtc/base/constructormagic.h"
+
+namespace webrtc_jni {
+
+// Helper class used for creating a Java instance of org/webrtc/EglBase.
+class EglBase {
+ public:
+  EglBase();
+  ~EglBase();
+
+  // Creates a new Java EglBase instance. |egl_base_context| must be a valid
+  // EglBase$Context.
+  // Returns false if |egl_base_context| is a null Java object or if an
+  // exception occurs in Java.
+  bool CreateEglBase(JNIEnv* jni, jobject egl_base_context);
+  jobject egl_base_context() const { return egl_base_context_; }
+
+ private:
+  jobject egl_base_ = nullptr;  // instance of org/webrtc/EglBase
+  jobject egl_base_context_ = nullptr;  // instance of EglBase$Context
+
+  RTC_DISALLOW_COPY_AND_ASSIGN(EglBase);
+};
+
+}  // namespace webrtc_jni
+
+#endif  // WEBRTC_API_JAVA_JNI_EGLBASE_JNI_H_
diff --git a/webrtc/api/java/jni/jni_helpers.cc b/webrtc/api/java/jni/jni_helpers.cc
new file mode 100644
index 0000000..b07a9c5
--- /dev/null
+++ b/webrtc/api/java/jni/jni_helpers.cc
@@ -0,0 +1,296 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+#include "webrtc/api/java/jni/jni_helpers.h"
+
+#include "webrtc/api/java/jni/classreferenceholder.h"
+
+#include <asm/unistd.h>
+#include <sys/prctl.h>
+#include <sys/syscall.h>
+#include <unistd.h>
+
+namespace webrtc_jni {
+
+static JavaVM* g_jvm = nullptr;
+
+static pthread_once_t g_jni_ptr_once = PTHREAD_ONCE_INIT;
+
+// Key for per-thread JNIEnv* data.  Non-NULL in threads attached to |g_jvm| by
+// AttachCurrentThreadIfNeeded(), NULL in unattached threads and threads that
+// were attached by the JVM because of a Java->native call.
+static pthread_key_t g_jni_ptr;
+
+JavaVM *GetJVM() {
+  RTC_CHECK(g_jvm) << "JNI_OnLoad failed to run?";
+  return g_jvm;
+}
+
+// Return a |JNIEnv*| usable on this thread or NULL if this thread is detached.
+JNIEnv* GetEnv() {
+  void* env = NULL;
+  jint status = g_jvm->GetEnv(&env, JNI_VERSION_1_6);
+  RTC_CHECK(((env != NULL) && (status == JNI_OK)) ||
+            ((env == NULL) && (status == JNI_EDETACHED)))
+      << "Unexpected GetEnv return: " << status << ":" << env;
+  return reinterpret_cast<JNIEnv*>(env);
+}
+
+static void ThreadDestructor(void* prev_jni_ptr) {
+  // This function only runs on threads where |g_jni_ptr| is non-NULL, meaning
+  // we were responsible for originally attaching the thread, so are responsible
+  // for detaching it now.  However, because some JVM implementations (notably
+  // Oracle's http://goo.gl/eHApYT) also use the pthread_key_create mechanism,
+  // the JVM's accounting info for this thread may already be wiped out by the
+  // time this is called. Thus it may appear we are already detached even though
+  // it was our responsibility to detach!  Oh well.
+  if (!GetEnv())
+    return;
+
+  RTC_CHECK(GetEnv() == prev_jni_ptr)
+      << "Detaching from another thread: " << prev_jni_ptr << ":" << GetEnv();
+  jint status = g_jvm->DetachCurrentThread();
+  RTC_CHECK(status == JNI_OK) << "Failed to detach thread: " << status;
+  RTC_CHECK(!GetEnv()) << "Detaching was a successful no-op???";
+}
+
+static void CreateJNIPtrKey() {
+  RTC_CHECK(!pthread_key_create(&g_jni_ptr, &ThreadDestructor))
+      << "pthread_key_create";
+}
+
+jint InitGlobalJniVariables(JavaVM *jvm) {
+  RTC_CHECK(!g_jvm) << "InitGlobalJniVariables!";
+  g_jvm = jvm;
+  RTC_CHECK(g_jvm) << "InitGlobalJniVariables handed NULL?";
+
+  RTC_CHECK(!pthread_once(&g_jni_ptr_once, &CreateJNIPtrKey)) << "pthread_once";
+
+  JNIEnv* jni = nullptr;
+  if (jvm->GetEnv(reinterpret_cast<void**>(&jni), JNI_VERSION_1_6) != JNI_OK)
+    return -1;
+
+  return JNI_VERSION_1_6;
+}
+
+// Return thread ID as a string.
+static std::string GetThreadId() {
+  char buf[21];  // Big enough to hold a kuint64max plus terminating NULL.
+  RTC_CHECK_LT(snprintf(buf, sizeof(buf), "%ld",
+                        static_cast<long>(syscall(__NR_gettid))),
+               sizeof(buf))
+      << "Thread id is bigger than uint64??";
+  return std::string(buf);
+}
+
+// Return the current thread's name.
+static std::string GetThreadName() {
+  char name[17] = {0};
+  if (prctl(PR_GET_NAME, name) != 0)
+    return std::string("<noname>");
+  return std::string(name);
+}
+
+// Return a |JNIEnv*| usable on this thread.  Attaches to |g_jvm| if necessary.
+JNIEnv* AttachCurrentThreadIfNeeded() {
+  JNIEnv* jni = GetEnv();
+  if (jni)
+    return jni;
+  RTC_CHECK(!pthread_getspecific(g_jni_ptr))
+      << "TLS has a JNIEnv* but not attached?";
+
+  std::string name(GetThreadName() + " - " + GetThreadId());
+  JavaVMAttachArgs args;
+  args.version = JNI_VERSION_1_6;
+  args.name = &name[0];
+  args.group = NULL;
+  // Deal with difference in signatures between Oracle's jni.h and Android's.
+#ifdef _JAVASOFT_JNI_H_  // Oracle's jni.h violates the JNI spec!
+  void* env = NULL;
+#else
+  JNIEnv* env = NULL;
+#endif
+  RTC_CHECK(!g_jvm->AttachCurrentThread(&env, &args))
+      << "Failed to attach thread";
+  RTC_CHECK(env) << "AttachCurrentThread handed back NULL!";
+  jni = reinterpret_cast<JNIEnv*>(env);
+  RTC_CHECK(!pthread_setspecific(g_jni_ptr, jni)) << "pthread_setspecific";
+  return jni;
+}
+
+// Return a |jlong| that will correctly convert back to |ptr|.  This is needed
+// because the alternative (of silently passing a 32-bit pointer to a vararg
+// function expecting a 64-bit param) picks up garbage in the high 32 bits.
+jlong jlongFromPointer(void* ptr) {
+  static_assert(sizeof(intptr_t) <= sizeof(jlong),
+                "Time to rethink the use of jlongs");
+  // Going through intptr_t to be obvious about the definedness of the
+  // conversion from pointer to integral type.  intptr_t to jlong is a standard
+  // widening by the static_assert above.
+  jlong ret = reinterpret_cast<intptr_t>(ptr);
+  RTC_DCHECK(reinterpret_cast<void*>(ret) == ptr);
+  return ret;
+}
+
+// JNIEnv-helper methods that RTC_CHECK success: no Java exception thrown and
+// found object/class/method/field is non-null.
+jmethodID GetMethodID(
+    JNIEnv* jni, jclass c, const std::string& name, const char* signature) {
+  jmethodID m = jni->GetMethodID(c, name.c_str(), signature);
+  CHECK_EXCEPTION(jni) << "error during GetMethodID: " << name << ", "
+                       << signature;
+  RTC_CHECK(m) << name << ", " << signature;
+  return m;
+}
+
+jmethodID GetStaticMethodID(
+    JNIEnv* jni, jclass c, const char* name, const char* signature) {
+  jmethodID m = jni->GetStaticMethodID(c, name, signature);
+  CHECK_EXCEPTION(jni) << "error during GetStaticMethodID: " << name << ", "
+                       << signature;
+  RTC_CHECK(m) << name << ", " << signature;
+  return m;
+}
+
+jfieldID GetFieldID(
+    JNIEnv* jni, jclass c, const char* name, const char* signature) {
+  jfieldID f = jni->GetFieldID(c, name, signature);
+  CHECK_EXCEPTION(jni) << "error during GetFieldID";
+  RTC_CHECK(f) << name << ", " << signature;
+  return f;
+}
+
+jclass GetObjectClass(JNIEnv* jni, jobject object) {
+  jclass c = jni->GetObjectClass(object);
+  CHECK_EXCEPTION(jni) << "error during GetObjectClass";
+  RTC_CHECK(c) << "GetObjectClass returned NULL";
+  return c;
+}
+
+jobject GetObjectField(JNIEnv* jni, jobject object, jfieldID id) {
+  jobject o = jni->GetObjectField(object, id);
+  CHECK_EXCEPTION(jni) << "error during GetObjectField";
+  RTC_CHECK(o) << "GetObjectField returned NULL";
+  return o;
+}
+
+jstring GetStringField(JNIEnv* jni, jobject object, jfieldID id) {
+  return static_cast<jstring>(GetObjectField(jni, object, id));
+}
+
+jlong GetLongField(JNIEnv* jni, jobject object, jfieldID id) {
+  jlong l = jni->GetLongField(object, id);
+  CHECK_EXCEPTION(jni) << "error during GetLongField";
+  return l;
+}
+
+jint GetIntField(JNIEnv* jni, jobject object, jfieldID id) {
+  jint i = jni->GetIntField(object, id);
+  CHECK_EXCEPTION(jni) << "error during GetIntField";
+  return i;
+}
+
+bool GetBooleanField(JNIEnv* jni, jobject object, jfieldID id) {
+  jboolean b = jni->GetBooleanField(object, id);
+  CHECK_EXCEPTION(jni) << "error during GetBooleanField";
+  return b;
+}
+
+// Java references to "null" can only be distinguished as such in C++ by
+// creating a local reference, so this helper wraps that logic.
+bool IsNull(JNIEnv* jni, jobject obj) {
+  ScopedLocalRefFrame local_ref_frame(jni);
+  return jni->NewLocalRef(obj) == NULL;
+}
+
+// Given a UTF-8 encoded |native| string return a new (UTF-16) jstring.
+jstring JavaStringFromStdString(JNIEnv* jni, const std::string& native) {
+  jstring jstr = jni->NewStringUTF(native.c_str());
+  CHECK_EXCEPTION(jni) << "error during NewStringUTF";
+  return jstr;
+}
+
+// Given a (UTF-16) jstring return a new UTF-8 native string.
+std::string JavaToStdString(JNIEnv* jni, const jstring& j_string) {
+  const char* chars = jni->GetStringUTFChars(j_string, NULL);
+  CHECK_EXCEPTION(jni) << "Error during GetStringUTFChars";
+  std::string str(chars, jni->GetStringUTFLength(j_string));
+  CHECK_EXCEPTION(jni) << "Error during GetStringUTFLength";
+  jni->ReleaseStringUTFChars(j_string, chars);
+  CHECK_EXCEPTION(jni) << "Error during ReleaseStringUTFChars";
+  return str;
+}
+
+// Return the (singleton) Java Enum object corresponding to |index|.
+jobject JavaEnumFromIndex(JNIEnv* jni, jclass state_class,
+                          const std::string& state_class_name, int index) {
+  jmethodID state_values_id = GetStaticMethodID(
+      jni, state_class, "values", ("()[L" + state_class_name  + ";").c_str());
+  jobjectArray state_values = static_cast<jobjectArray>(
+      jni->CallStaticObjectMethod(state_class, state_values_id));
+  CHECK_EXCEPTION(jni) << "error during CallStaticObjectMethod";
+  jobject ret = jni->GetObjectArrayElement(state_values, index);
+  CHECK_EXCEPTION(jni) << "error during GetObjectArrayElement";
+  return ret;
+}
+
+std::string GetJavaEnumName(JNIEnv* jni,
+                            const std::string& className,
+                            jobject j_enum) {
+  jclass enumClass = FindClass(jni, className.c_str());
+  jmethodID nameMethod =
+      GetMethodID(jni, enumClass, "name", "()Ljava/lang/String;");
+  jstring name =
+      reinterpret_cast<jstring>(jni->CallObjectMethod(j_enum, nameMethod));
+  CHECK_EXCEPTION(jni) << "error during CallObjectMethod for " << className
+                       << ".name";
+  return JavaToStdString(jni, name);
+}
+
+jobject NewGlobalRef(JNIEnv* jni, jobject o) {
+  jobject ret = jni->NewGlobalRef(o);
+  CHECK_EXCEPTION(jni) << "error during NewGlobalRef";
+  RTC_CHECK(ret);
+  return ret;
+}
+
+void DeleteGlobalRef(JNIEnv* jni, jobject o) {
+  jni->DeleteGlobalRef(o);
+  CHECK_EXCEPTION(jni) << "error during DeleteGlobalRef";
+}
+
+// Scope Java local references to the lifetime of this object.  Use in all C++
+// callbacks (i.e. entry points that don't originate in a Java callstack
+// through a "native" method call).
+ScopedLocalRefFrame::ScopedLocalRefFrame(JNIEnv* jni) : jni_(jni) {
+  RTC_CHECK(!jni_->PushLocalFrame(0)) << "Failed to PushLocalFrame";
+}
+ScopedLocalRefFrame::~ScopedLocalRefFrame() {
+  jni_->PopLocalFrame(NULL);
+}
+
+}  // namespace webrtc_jni
diff --git a/webrtc/api/java/jni/jni_helpers.h b/webrtc/api/java/jni/jni_helpers.h
new file mode 100644
index 0000000..5498158
--- /dev/null
+++ b/webrtc/api/java/jni/jni_helpers.h
@@ -0,0 +1,146 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// This file contains convenience functions and classes for JNI.
+// Before using any of the methods, InitGlobalJniVariables must be called.
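+// (jni_onload.cc in this directory calls it from JNI_OnLoad.)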
+
+#ifndef WEBRTC_API_JAVA_JNI_JNI_HELPERS_H_
+#define WEBRTC_API_JAVA_JNI_JNI_HELPERS_H_
+
+#include <jni.h>
+#include <string>
+
+#include "webrtc/base/checks.h"
+
+// Abort the process if |jni| has a Java exception pending.
+// This macro uses the comma operator to execute ExceptionDescribe
+// and ExceptionClear, ignoring their return values and sending ""
+// to the error stream.
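+// Typical use (mirroring how it appears throughout these files):
+//   jni->CallVoidMethod(j_observer, m);
+//   CHECK_EXCEPTION(jni) << "error during CallVoidMethod";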
+#define CHECK_EXCEPTION(jni)        \
+  RTC_CHECK(!jni->ExceptionCheck()) \
+      << (jni->ExceptionDescribe(), jni->ExceptionClear(), "")
+
+// Helper that calls ptr->Release() and aborts the process with a useful
+// message if that didn't actually delete *ptr because of extra refcounts.
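+// Typical use when a Java dispose() call releases a wrapped native object
+// (|stream| below is an illustrative MediaStreamInterface* name):
+//   CHECK_RELEASE(stream);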
+#define CHECK_RELEASE(ptr) \
+  RTC_CHECK_EQ(0, (ptr)->Release()) << "Unexpected refcount."
+
+namespace webrtc_jni {
+
+jint InitGlobalJniVariables(JavaVM *jvm);
+
+// Return a |JNIEnv*| usable on this thread or NULL if this thread is detached.
+JNIEnv* GetEnv();
+
+JavaVM *GetJVM();
+
+// Return a |JNIEnv*| usable on this thread.  Attaches to |g_jvm| if necessary.
+JNIEnv* AttachCurrentThreadIfNeeded();
+
+// Return a |jlong| that will correctly convert back to |ptr|.  This is needed
+// because the alternative (of silently passing a 32-bit pointer to a vararg
+// function expecting a 64-bit param) picks up garbage in the high 32 bits.
+jlong jlongFromPointer(void* ptr);
+
+// JNIEnv-helper methods that RTC_CHECK success: no Java exception thrown and
+// found object/class/method/field is non-null.
+jmethodID GetMethodID(
+    JNIEnv* jni, jclass c, const std::string& name, const char* signature);
+
+jmethodID GetStaticMethodID(
+    JNIEnv* jni, jclass c, const char* name, const char* signature);
+
+jfieldID GetFieldID(JNIEnv* jni, jclass c, const char* name,
+                    const char* signature);
+
+jclass GetObjectClass(JNIEnv* jni, jobject object);
+
+jobject GetObjectField(JNIEnv* jni, jobject object, jfieldID id);
+
+jstring GetStringField(JNIEnv* jni, jobject object, jfieldID id);
+
+jlong GetLongField(JNIEnv* jni, jobject object, jfieldID id);
+
+jint GetIntField(JNIEnv* jni, jobject object, jfieldID id);
+
+bool GetBooleanField(JNIEnv* jni, jobject object, jfieldID id);
+
+// Java references to "null" can only be distinguished as such in C++ by
+// creating a local reference, so this helper wraps that logic.
+bool IsNull(JNIEnv* jni, jobject obj);
+
+// Given a UTF-8 encoded |native| string return a new (UTF-16) jstring.
+jstring JavaStringFromStdString(JNIEnv* jni, const std::string& native);
+
+// Given a (UTF-16) jstring return a new UTF-8 native string.
+std::string JavaToStdString(JNIEnv* jni, const jstring& j_string);
+
+// Return the (singleton) Java Enum object corresponding to |index|.
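+// For example, passing "org/webrtc/MediaSource$State" as |state_class_name|
+// and 1 as |index| returns MediaSource.State.values()[1].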
+jobject JavaEnumFromIndex(JNIEnv* jni, jclass state_class,
+                          const std::string& state_class_name, int index);
+
+// Returns the name of a Java enum.
+std::string GetJavaEnumName(JNIEnv* jni,
+                            const std::string& className,
+                            jobject j_enum);
+
+jobject NewGlobalRef(JNIEnv* jni, jobject o);
+
+void DeleteGlobalRef(JNIEnv* jni, jobject o);
+
+// Scope Java local references to the lifetime of this object.  Use in all C++
+// callbacks (i.e. entry points that don't originate in a Java callstack
+// through a "native" method call).
+class ScopedLocalRefFrame {
+ public:
+  explicit ScopedLocalRefFrame(JNIEnv* jni);
+  ~ScopedLocalRefFrame();
+
+ private:
+  JNIEnv* jni_;
+};
+
+// Scoped holder for global Java refs.
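+// Dereference with operator* to pass the underlying ref to JNI calls, e.g.:
+//   ScopedGlobalRef<jclass> j_class(
+//       jni, FindClass(jni, "org/webrtc/DataChannel"));
+//   jmethodID ctor = GetMethodID(jni, *j_class, "<init>", "(J)V");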
+template<class T>  // T is jclass, jobject, jintArray, etc.
+class ScopedGlobalRef {
+ public:
+  ScopedGlobalRef(JNIEnv* jni, T obj)
+      : obj_(static_cast<T>(jni->NewGlobalRef(obj))) {}
+  ~ScopedGlobalRef() {
+    DeleteGlobalRef(AttachCurrentThreadIfNeeded(), obj_);
+  }
+  T operator*() const {
+    return obj_;
+  }
+ private:
+  T obj_;
+};
+
+}  // namespace webrtc_jni
+
+#endif  // WEBRTC_API_JAVA_JNI_JNI_HELPERS_H_
diff --git a/webrtc/api/java/jni/jni_onload.cc b/webrtc/api/java/jni/jni_onload.cc
new file mode 100644
index 0000000..af2804d
--- /dev/null
+++ b/webrtc/api/java/jni/jni_onload.cc
@@ -0,0 +1,55 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <jni.h>
+#undef JNIEXPORT
+#define JNIEXPORT __attribute__((visibility("default")))
+
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/api/java/jni/jni_helpers.h"
+#include "webrtc/base/ssladapter.h"
+
+namespace webrtc_jni {
+
+extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM *jvm, void *reserved) {
+  jint ret = InitGlobalJniVariables(jvm);
+  RTC_DCHECK_GE(ret, 0);
+  if (ret < 0)
+    return -1;
+
+  RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()";
+  LoadGlobalClassReferenceHolder();
+
+  return ret;
+}
+
+extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM *jvm, void *reserved) {
+  FreeGlobalClassReferenceHolder();
+  RTC_CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()";
+}
+
+}  // namespace webrtc_jni
diff --git a/webrtc/api/java/jni/native_handle_impl.cc b/webrtc/api/java/jni/native_handle_impl.cc
new file mode 100644
index 0000000..8ec549c
--- /dev/null
+++ b/webrtc/api/java/jni/native_handle_impl.cc
@@ -0,0 +1,186 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/java/jni/native_handle_impl.h"
+
+#include "webrtc/api/java/jni/jni_helpers.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/keep_ref_until_done.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+
+using webrtc::NativeHandleBuffer;
+
+namespace {
+
+void RotateMatrix(float a[16], webrtc::VideoRotation rotation) {
+  // Texture coordinates are in the range 0 to 1. The transformation of the last
+  // row in each rotation matrix is needed for proper translation, e.g., to
+  // mirror x, we don't replace x by -x, but by 1-x.
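+  // For example, with the usual column-major OpenGL layout, applying the
+  // 90-degree case below to the identity matrix yields a transform that
+  // maps (x, y) to (1 - y, x).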
+  switch (rotation) {
+    case webrtc::kVideoRotation_0:
+      break;
+    case webrtc::kVideoRotation_90: {
+      const float ROTATE_90[16] =
+          { a[4], a[5], a[6], a[7],
+            -a[0], -a[1], -a[2], -a[3],
+            a[8], a[9], a[10], a[11],
+            a[0] + a[12], a[1] + a[13], a[2] + a[14], a[3] + a[15]};
+      memcpy(a, ROTATE_90, sizeof(ROTATE_90));
+    } break;
+    case webrtc::kVideoRotation_180: {
+      const float ROTATE_180[16] =
+          { -a[0], -a[1], -a[2], -a[3],
+            -a[4], -a[5], -a[6], -a[7],
+            a[8], a[9], a[10], a[11],
+            a[0] + a[4] + a[12], a[1] + a[5] + a[13], a[2] + a[6] + a[14],
+            a[3] + a[7] + a[15]};
+      memcpy(a, ROTATE_180, sizeof(ROTATE_180));
+    } break;
+    case webrtc::kVideoRotation_270: {
+      const float ROTATE_270[16] =
+          { -a[4], -a[5], -a[6], -a[7],
+            a[0], a[1], a[2], a[3],
+            a[8], a[9], a[10], a[11],
+            a[4] + a[12], a[5] + a[13], a[6] + a[14], a[7] + a[15]};
+      memcpy(a, ROTATE_270, sizeof(ROTATE_270));
+    } break;
+  }
+}
+
+}  // anonymous namespace
+
+namespace webrtc_jni {
+
+// Align the buffer to 64 bytes for improved performance, e.g. for SIMD use.
+static const int kBufferAlignment = 64;
+
+NativeHandleImpl::NativeHandleImpl(JNIEnv* jni,
+                                   jint j_oes_texture_id,
+                                   jfloatArray j_transform_matrix)
+  : oes_texture_id(j_oes_texture_id) {
+  RTC_CHECK_EQ(16, jni->GetArrayLength(j_transform_matrix));
+  jfloat* transform_matrix_ptr =
+      jni->GetFloatArrayElements(j_transform_matrix, nullptr);
+  for (int i = 0; i < 16; ++i) {
+    sampling_matrix[i] = transform_matrix_ptr[i];
+  }
+  jni->ReleaseFloatArrayElements(j_transform_matrix, transform_matrix_ptr, 0);
+}
+
+AndroidTextureBuffer::AndroidTextureBuffer(
+    int width,
+    int height,
+    const NativeHandleImpl& native_handle,
+    jobject surface_texture_helper,
+    const rtc::Callback0<void>& no_longer_used)
+    : webrtc::NativeHandleBuffer(&native_handle_, width, height),
+      native_handle_(native_handle),
+      surface_texture_helper_(surface_texture_helper),
+      no_longer_used_cb_(no_longer_used) {}
+
+AndroidTextureBuffer::~AndroidTextureBuffer() {
+  no_longer_used_cb_();
+}
+
+rtc::scoped_refptr<webrtc::VideoFrameBuffer>
+AndroidTextureBuffer::NativeToI420Buffer() {
+  int uv_width = (width() + 7) / 8;
+  int stride = 8 * uv_width;
+  int uv_height = (height() + 1) / 2;
+  size_t size = stride * (height() + uv_height);
+  // The data is owned by the frame, and the normal case is that the
+  // data is deleted by the frame's destructor callback.
+  //
+  // TODO(nisse): Use an I420BufferPool. We then need to extend that
+  // class, and I420Buffer, to support our memory layout.
+  rtc::scoped_ptr<uint8_t, webrtc::AlignedFreeDeleter> yuv_data(
+      static_cast<uint8_t*>(webrtc::AlignedMalloc(size, kBufferAlignment)));
+  // See SurfaceTextureHelper.java for the required layout.
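+  // The layout implied by the pointers below: height() rows of Y, each
+  // |stride| bytes, followed by uv_height rows in which the first stride/2
+  // bytes hold U and the remaining stride/2 bytes hold V.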
+  uint8_t* y_data = yuv_data.get();
+  uint8_t* u_data = y_data + height() * stride;
+  uint8_t* v_data = u_data + stride/2;
+
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> copy =
+    new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
+        width(), height(),
+        y_data, stride,
+        u_data, stride,
+        v_data, stride,
+        rtc::Bind(&webrtc::AlignedFree, yuv_data.release()));
+
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+
+  jmethodID transform_mid = GetMethodID(
+      jni,
+      GetObjectClass(jni, surface_texture_helper_),
+      "textureToYUV",
+      "(Ljava/nio/ByteBuffer;IIII[F)V");
+
+  jobject byte_buffer = jni->NewDirectByteBuffer(y_data, size);
+
+  // TODO(nisse): Keep java transform matrix around.
+  jfloatArray sampling_matrix = jni->NewFloatArray(16);
+  jni->SetFloatArrayRegion(sampling_matrix, 0, 16,
+                           native_handle_.sampling_matrix);
+
+  jni->CallVoidMethod(surface_texture_helper_,
+                      transform_mid,
+                      byte_buffer, width(), height(), stride,
+                      native_handle_.oes_texture_id, sampling_matrix);
+  CHECK_EXCEPTION(jni) << "textureToYUV throwed an exception";
+
+  return copy;
+}
+
+rtc::scoped_refptr<AndroidTextureBuffer>
+AndroidTextureBuffer::ScaleAndRotate(int dst_width,
+                                     int dst_height,
+                                     webrtc::VideoRotation rotation) {
+  if (width() == dst_width && height() == dst_height &&
+      rotation == webrtc::kVideoRotation_0) {
+    return this;
+  }
+  int rotated_width = (rotation % 180 == 0) ? dst_width : dst_height;
+  int rotated_height = (rotation % 180 == 0) ? dst_height : dst_width;
+
+  // Here we use Bind magic to keep a reference to |this| until the newly
+  // created AndroidTextureBuffer is destroyed.
+  rtc::scoped_refptr<AndroidTextureBuffer> buffer(
+      new rtc::RefCountedObject<AndroidTextureBuffer>(
+          rotated_width, rotated_height, native_handle_,
+          surface_texture_helper_, rtc::KeepRefUntilDone(this)));
+
+  RotateMatrix(buffer->native_handle_.sampling_matrix, rotation);
+  return buffer;
+}
+
+}  // namespace webrtc_jni
diff --git a/webrtc/api/java/jni/native_handle_impl.h b/webrtc/api/java/jni/native_handle_impl.h
new file mode 100644
index 0000000..4203bdf
--- /dev/null
+++ b/webrtc/api/java/jni/native_handle_impl.h
@@ -0,0 +1,77 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_NATIVE_HANDLE_IMPL_H_
+#define WEBRTC_API_JAVA_JNI_NATIVE_HANDLE_IMPL_H_
+
+#include <jni.h>
+
+#include "webrtc/common_video/include/video_frame_buffer.h"
+#include "webrtc/common_video/rotation.h"
+
+namespace webrtc_jni {
+
+// Wrapper for texture object.
+struct NativeHandleImpl {
+  NativeHandleImpl(JNIEnv* jni,
+                   jint j_oes_texture_id,
+                   jfloatArray j_transform_matrix);
+
+  const int oes_texture_id;
+  float sampling_matrix[16];
+};
+
+class AndroidTextureBuffer : public webrtc::NativeHandleBuffer {
+ public:
+  AndroidTextureBuffer(int width,
+                       int height,
+                       const NativeHandleImpl& native_handle,
+                       jobject surface_texture_helper,
+                       const rtc::Callback0<void>& no_longer_used);
+  ~AndroidTextureBuffer();
+  rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
+
+  rtc::scoped_refptr<AndroidTextureBuffer> ScaleAndRotate(
+      int dst_width,
+      int dst_height,
+      webrtc::VideoRotation rotation);
+
+ private:
+  NativeHandleImpl native_handle_;
+  // Raw object pointer, relying on the caller, i.e.,
+  // AndroidVideoCapturerJni or the C++ SurfaceTextureHelper, to keep
+  // a global reference. TODO(nisse): Make this a reference to the C++
+  // SurfaceTextureHelper instead, but that requires some refactoring
+  // of AndroidVideoCapturerJni.
+  jobject surface_texture_helper_;
+  rtc::Callback0<void> no_longer_used_cb_;
+};
+
+}  // namespace webrtc_jni
+
+#endif  // WEBRTC_API_JAVA_JNI_NATIVE_HANDLE_IMPL_H_
diff --git a/webrtc/api/java/jni/peerconnection_jni.cc b/webrtc/api/java/jni/peerconnection_jni.cc
new file mode 100644
index 0000000..1160b2b
--- /dev/null
+++ b/webrtc/api/java/jni/peerconnection_jni.cc
@@ -0,0 +1,2058 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// Hints for future visitors:
+// This entire file is an implementation detail of the org.webrtc Java package,
+// the most interesting bits of which are org.webrtc.PeerConnection{,Factory}.
+// The layout of this file is roughly:
+// - various helper C++ functions & classes that wrap Java counterparts and
+//   expose a C++ interface that can be passed to the C++ PeerConnection APIs
+// - implementations of methods declared "static" in the Java package (named
+//   things like Java_org_webrtc_OMG_Can_This_Name_Be_Any_Longer, prescribed by
+//   the JNI spec).
+//
+// Lifecycle notes: objects are owned where they will be called; in other words
+// FooObservers are owned by C++-land, and user-callable objects (e.g.
+// PeerConnection and VideoTrack) are owned by Java-land.
+// When this file allocates C++ RefCountInterfaces it AddRef()s an artificial
+// ref simulating the jlong held in Java-land, and then Release()s the ref in
+// the respective free call.  Sometimes this AddRef is implicit in the
+// construction of a scoped_refptr<> which is then .release()d.
+// Any persistent (non-local) references from C++ to Java must be global or weak
+// (in which case they must be checked before use)!
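+// (See OnAddStream() below for an example: each wrapped track is AddRef()ed
+// before the jlong is handed to the Java constructor.)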
+//
+// Exception notes: pretty much all JNI calls can throw Java exceptions, so each
+// call through a JNIEnv* pointer needs to be followed by an ExceptionCheck()
+// call.  In this file this is done in CHECK_EXCEPTION, making for much easier
+// debugging in case of failure (the alternative is to wait for control to
+// return to the Java frame that called code in this file, at which point it's
+// impossible to tell which JNI call broke).
+
+#include <jni.h>
+#undef JNIEXPORT
+#define JNIEXPORT __attribute__((visibility("default")))
+
+#include <limits>
+#include <utility>
+
+#include "webrtc/api/androidvideocapturer.h"
+#include "webrtc/api/dtlsidentitystore.h"
+#include "webrtc/api/java/jni/androidmediadecoder_jni.h"
+#include "webrtc/api/java/jni/androidmediaencoder_jni.h"
+#include "webrtc/api/java/jni/androidnetworkmonitor_jni.h"
+#include "webrtc/api/java/jni/androidvideocapturer_jni.h"
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/api/java/jni/jni_helpers.h"
+#include "webrtc/api/java/jni/native_handle_impl.h"
+#include "webrtc/api/mediaconstraintsinterface.h"
+#include "webrtc/api/peerconnectioninterface.h"
+#include "webrtc/api/rtpreceiverinterface.h"
+#include "webrtc/api/rtpsenderinterface.h"
+#include "webrtc/api/videosourceinterface.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/event_tracer.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/logsinks.h"
+#include "webrtc/base/messagequeue.h"
+#include "webrtc/base/networkmonitor.h"
+#include "webrtc/base/ssladapter.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/media/base/videocapturer.h"
+#include "webrtc/media/base/videorenderer.h"
+#include "webrtc/media/devices/videorendererfactory.h"
+#include "webrtc/media/webrtc/webrtcvideodecoderfactory.h"
+#include "webrtc/media/webrtc/webrtcvideoencoderfactory.h"
+#include "webrtc/modules/video_render/video_render_internal.h"
+#include "webrtc/system_wrappers/include/field_trial_default.h"
+#include "webrtc/system_wrappers/include/logcat_trace_context.h"
+#include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/voice_engine/include/voe_base.h"
+
+using cricket::WebRtcVideoDecoderFactory;
+using cricket::WebRtcVideoEncoderFactory;
+using rtc::Bind;
+using rtc::Thread;
+using rtc::ThreadManager;
+using rtc::scoped_ptr;
+using webrtc::AudioSourceInterface;
+using webrtc::AudioTrackInterface;
+using webrtc::AudioTrackVector;
+using webrtc::CreateSessionDescriptionObserver;
+using webrtc::DataBuffer;
+using webrtc::DataChannelInit;
+using webrtc::DataChannelInterface;
+using webrtc::DataChannelObserver;
+using webrtc::IceCandidateInterface;
+using webrtc::LogcatTraceContext;
+using webrtc::MediaConstraintsInterface;
+using webrtc::MediaSourceInterface;
+using webrtc::MediaStreamInterface;
+using webrtc::MediaStreamTrackInterface;
+using webrtc::PeerConnectionFactoryInterface;
+using webrtc::PeerConnectionInterface;
+using webrtc::PeerConnectionObserver;
+using webrtc::RtpReceiverInterface;
+using webrtc::RtpSenderInterface;
+using webrtc::SessionDescriptionInterface;
+using webrtc::SetSessionDescriptionObserver;
+using webrtc::StatsObserver;
+using webrtc::StatsReport;
+using webrtc::StatsReports;
+using webrtc::VideoRendererInterface;
+using webrtc::VideoSourceInterface;
+using webrtc::VideoTrackInterface;
+using webrtc::VideoTrackVector;
+using webrtc::kVideoCodecVP8;
+
+namespace webrtc_jni {
+
+// Field trials initialization string
+static char *field_trials_init_string = NULL;
+
+// Set in PeerConnectionFactory_initializeAndroidGlobals().
+static bool factory_static_initialized = false;
+static bool video_hw_acceleration_enabled = true;
+
+// Return the (singleton) Java Enum object corresponding to |index|.
+// |state_class_fragment| is something like "MediaSource$State".
+static jobject JavaEnumFromIndex(
+    JNIEnv* jni, const std::string& state_class_fragment, int index) {
+  const std::string state_class = "org/webrtc/" + state_class_fragment;
+  return JavaEnumFromIndex(jni, FindClass(jni, state_class.c_str()),
+                           state_class, index);
+}
+
+static DataChannelInit JavaDataChannelInitToNative(
+    JNIEnv* jni, jobject j_init) {
+  DataChannelInit init;
+
+  jclass j_init_class = FindClass(jni, "org/webrtc/DataChannel$Init");
+  jfieldID ordered_id = GetFieldID(jni, j_init_class, "ordered", "Z");
+  jfieldID max_retransmit_time_id =
+      GetFieldID(jni, j_init_class, "maxRetransmitTimeMs", "I");
+  jfieldID max_retransmits_id =
+      GetFieldID(jni, j_init_class, "maxRetransmits", "I");
+  jfieldID protocol_id =
+      GetFieldID(jni, j_init_class, "protocol", "Ljava/lang/String;");
+  jfieldID negotiated_id = GetFieldID(jni, j_init_class, "negotiated", "Z");
+  jfieldID id_id = GetFieldID(jni, j_init_class, "id", "I");
+
+  init.ordered = GetBooleanField(jni, j_init, ordered_id);
+  init.maxRetransmitTime = GetIntField(jni, j_init, max_retransmit_time_id);
+  init.maxRetransmits = GetIntField(jni, j_init, max_retransmits_id);
+  init.protocol = JavaToStdString(
+      jni, GetStringField(jni, j_init, protocol_id));
+  init.negotiated = GetBooleanField(jni, j_init, negotiated_id);
+  init.id = GetIntField(jni, j_init, id_id);
+
+  return init;
+}
+
+class ConstraintsWrapper;
+
+// Adapter between the C++ PeerConnectionObserver interface and the Java
+// PeerConnection.Observer interface.  Wraps an instance of the Java interface
+// and dispatches C++ callbacks to Java.
+class PCOJava : public PeerConnectionObserver {
+ public:
+  PCOJava(JNIEnv* jni, jobject j_observer)
+      : j_observer_global_(jni, j_observer),
+        j_observer_class_(jni, GetObjectClass(jni, *j_observer_global_)),
+        j_media_stream_class_(jni, FindClass(jni, "org/webrtc/MediaStream")),
+        j_media_stream_ctor_(GetMethodID(
+            jni, *j_media_stream_class_, "<init>", "(J)V")),
+        j_audio_track_class_(jni, FindClass(jni, "org/webrtc/AudioTrack")),
+        j_audio_track_ctor_(GetMethodID(
+            jni, *j_audio_track_class_, "<init>", "(J)V")),
+        j_video_track_class_(jni, FindClass(jni, "org/webrtc/VideoTrack")),
+        j_video_track_ctor_(GetMethodID(
+            jni, *j_video_track_class_, "<init>", "(J)V")),
+        j_data_channel_class_(jni, FindClass(jni, "org/webrtc/DataChannel")),
+        j_data_channel_ctor_(GetMethodID(
+            jni, *j_data_channel_class_, "<init>", "(J)V")) {
+  }
+
+  virtual ~PCOJava() {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    while (!remote_streams_.empty())
+      DisposeRemoteStream(remote_streams_.begin());
+  }
+
+  void OnIceCandidate(const IceCandidateInterface* candidate) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    std::string sdp;
+    RTC_CHECK(candidate->ToString(&sdp)) << "got so far: " << sdp;
+    jclass candidate_class = FindClass(jni(), "org/webrtc/IceCandidate");
+    jmethodID ctor = GetMethodID(jni(), candidate_class,
+        "<init>", "(Ljava/lang/String;ILjava/lang/String;)V");
+    jstring j_mid = JavaStringFromStdString(jni(), candidate->sdp_mid());
+    jstring j_sdp = JavaStringFromStdString(jni(), sdp);
+    jobject j_candidate = jni()->NewObject(
+        candidate_class, ctor, j_mid, candidate->sdp_mline_index(), j_sdp);
+    CHECK_EXCEPTION(jni()) << "error during NewObject";
+    jmethodID m = GetMethodID(jni(), *j_observer_class_,
+                              "onIceCandidate", "(Lorg/webrtc/IceCandidate;)V");
+    jni()->CallVoidMethod(*j_observer_global_, m, j_candidate);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  void OnSignalingChange(
+      PeerConnectionInterface::SignalingState new_state) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jmethodID m = GetMethodID(
+        jni(), *j_observer_class_, "onSignalingChange",
+        "(Lorg/webrtc/PeerConnection$SignalingState;)V");
+    jobject new_state_enum =
+        JavaEnumFromIndex(jni(), "PeerConnection$SignalingState", new_state);
+    jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  void OnIceConnectionChange(
+      PeerConnectionInterface::IceConnectionState new_state) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jmethodID m = GetMethodID(
+        jni(), *j_observer_class_, "onIceConnectionChange",
+        "(Lorg/webrtc/PeerConnection$IceConnectionState;)V");
+    jobject new_state_enum = JavaEnumFromIndex(
+        jni(), "PeerConnection$IceConnectionState", new_state);
+    jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  void OnIceConnectionReceivingChange(bool receiving) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jmethodID m = GetMethodID(
+        jni(), *j_observer_class_, "onIceConnectionReceivingChange", "(Z)V");
+    jni()->CallVoidMethod(*j_observer_global_, m, receiving);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  void OnIceGatheringChange(
+      PeerConnectionInterface::IceGatheringState new_state) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jmethodID m = GetMethodID(
+        jni(), *j_observer_class_, "onIceGatheringChange",
+        "(Lorg/webrtc/PeerConnection$IceGatheringState;)V");
+    jobject new_state_enum = JavaEnumFromIndex(
+        jni(), "PeerConnection$IceGatheringState", new_state);
+    jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  void OnAddStream(MediaStreamInterface* stream) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    // Java MediaStream holds one reference. Corresponding Release() is in
+    // MediaStream_free, triggered by MediaStream.dispose().
+    stream->AddRef();
+    jobject j_stream =
+        jni()->NewObject(*j_media_stream_class_, j_media_stream_ctor_,
+                         reinterpret_cast<jlong>(stream));
+    CHECK_EXCEPTION(jni()) << "error during NewObject";
+
+    for (const auto& track : stream->GetAudioTracks()) {
+      jstring id = JavaStringFromStdString(jni(), track->id());
+      // Java AudioTrack holds one reference. Corresponding Release() is in
+      // MediaStreamTrack_free, triggered by AudioTrack.dispose().
+      track->AddRef();
+      jobject j_track =
+          jni()->NewObject(*j_audio_track_class_, j_audio_track_ctor_,
+                           reinterpret_cast<jlong>(track.get()), id);
+      CHECK_EXCEPTION(jni()) << "error during NewObject";
+      jfieldID audio_tracks_id = GetFieldID(jni(),
+                                            *j_media_stream_class_,
+                                            "audioTracks",
+                                            "Ljava/util/LinkedList;");
+      jobject audio_tracks = GetObjectField(jni(), j_stream, audio_tracks_id);
+      jmethodID add = GetMethodID(jni(),
+                                  GetObjectClass(jni(), audio_tracks),
+                                  "add",
+                                  "(Ljava/lang/Object;)Z");
+      jboolean added = jni()->CallBooleanMethod(audio_tracks, add, j_track);
+      CHECK_EXCEPTION(jni()) << "error during CallBooleanMethod";
+      RTC_CHECK(added);
+    }
+
+    for (const auto& track : stream->GetVideoTracks()) {
+      jstring id = JavaStringFromStdString(jni(), track->id());
+      // Java VideoTrack holds one reference. Corresponding Release() is in
+      // MediaStreamTrack_free, triggered by VideoTrack.dispose().
+      track->AddRef();
+      jobject j_track =
+          jni()->NewObject(*j_video_track_class_, j_video_track_ctor_,
+                           reinterpret_cast<jlong>(track.get()), id);
+      CHECK_EXCEPTION(jni()) << "error during NewObject";
+      jfieldID video_tracks_id = GetFieldID(jni(),
+                                            *j_media_stream_class_,
+                                            "videoTracks",
+                                            "Ljava/util/LinkedList;");
+      jobject video_tracks = GetObjectField(jni(), j_stream, video_tracks_id);
+      jmethodID add = GetMethodID(jni(),
+                                  GetObjectClass(jni(), video_tracks),
+                                  "add",
+                                  "(Ljava/lang/Object;)Z");
+      jboolean added = jni()->CallBooleanMethod(video_tracks, add, j_track);
+      CHECK_EXCEPTION(jni()) << "error during CallBooleanMethod";
+      RTC_CHECK(added);
+    }
+    remote_streams_[stream] = NewGlobalRef(jni(), j_stream);
+
+    jmethodID m = GetMethodID(jni(), *j_observer_class_, "onAddStream",
+                              "(Lorg/webrtc/MediaStream;)V");
+    jni()->CallVoidMethod(*j_observer_global_, m, j_stream);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  void OnRemoveStream(MediaStreamInterface* stream) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    NativeToJavaStreamsMap::iterator it = remote_streams_.find(stream);
+    RTC_CHECK(it != remote_streams_.end()) << "unexpected stream: " << std::hex
+                                           << stream;
+    jobject j_stream = it->second;
+    jmethodID m = GetMethodID(jni(), *j_observer_class_, "onRemoveStream",
+                              "(Lorg/webrtc/MediaStream;)V");
+    jni()->CallVoidMethod(*j_observer_global_, m, j_stream);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+    DisposeRemoteStream(it);
+  }
+
+  void OnDataChannel(DataChannelInterface* channel) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jobject j_channel = jni()->NewObject(
+        *j_data_channel_class_, j_data_channel_ctor_,
+        reinterpret_cast<jlong>(channel));
+    CHECK_EXCEPTION(jni()) << "error during NewObject";
+
+    jmethodID m = GetMethodID(jni(), *j_observer_class_, "onDataChannel",
+                              "(Lorg/webrtc/DataChannel;)V");
+    jni()->CallVoidMethod(*j_observer_global_, m, j_channel);
+
+    // Channel is now owned by Java object, and will be freed from
+    // DataChannel.dispose().  Important that this be done _after_ the
+    // CallVoidMethod above as Java code might call back into native code and be
+    // surprised to see a refcount of 2.
+    int bumped_count = channel->AddRef();
+    RTC_CHECK(bumped_count == 2) << "Unexpected refcount OnDataChannel";
+
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  void OnRenegotiationNeeded() override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jmethodID m =
+        GetMethodID(jni(), *j_observer_class_, "onRenegotiationNeeded", "()V");
+    jni()->CallVoidMethod(*j_observer_global_, m);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  void SetConstraints(ConstraintsWrapper* constraints) {
+    RTC_CHECK(!constraints_.get()) << "constraints already set!";
+    constraints_.reset(constraints);
+  }
+
+  const ConstraintsWrapper* constraints() { return constraints_.get(); }
+
+ private:
+  typedef std::map<MediaStreamInterface*, jobject> NativeToJavaStreamsMap;
+
+  void DisposeRemoteStream(const NativeToJavaStreamsMap::iterator& it) {
+    jobject j_stream = it->second;
+    remote_streams_.erase(it);
+    jni()->CallVoidMethod(
+        j_stream, GetMethodID(jni(), *j_media_stream_class_, "dispose", "()V"));
+    CHECK_EXCEPTION(jni()) << "error during MediaStream.dispose()";
+    DeleteGlobalRef(jni(), j_stream);
+  }
+
+  JNIEnv* jni() {
+    return AttachCurrentThreadIfNeeded();
+  }
+
+  const ScopedGlobalRef<jobject> j_observer_global_;
+  const ScopedGlobalRef<jclass> j_observer_class_;
+  const ScopedGlobalRef<jclass> j_media_stream_class_;
+  const jmethodID j_media_stream_ctor_;
+  const ScopedGlobalRef<jclass> j_audio_track_class_;
+  const jmethodID j_audio_track_ctor_;
+  const ScopedGlobalRef<jclass> j_video_track_class_;
+  const jmethodID j_video_track_ctor_;
+  const ScopedGlobalRef<jclass> j_data_channel_class_;
+  const jmethodID j_data_channel_ctor_;
+  // C++ -> Java remote streams. The stored jobjects are global refs and must
+  // be manually deleted upon removal. Use DisposeRemoteStream().
+  NativeToJavaStreamsMap remote_streams_;
+  scoped_ptr<ConstraintsWrapper> constraints_;
+};
+
+// Wrapper for a Java MediaConstraints object.  Copies all needed data so when
+// the constructor returns the Java object is no longer needed.
+class ConstraintsWrapper : public MediaConstraintsInterface {
+ public:
+  ConstraintsWrapper(JNIEnv* jni, jobject j_constraints) {
+    PopulateConstraintsFromJavaPairList(
+        jni, j_constraints, "mandatory", &mandatory_);
+    PopulateConstraintsFromJavaPairList(
+        jni, j_constraints, "optional", &optional_);
+  }
+
+  virtual ~ConstraintsWrapper() {}
+
+  // MediaConstraintsInterface.
+  const Constraints& GetMandatory() const override { return mandatory_; }
+
+  const Constraints& GetOptional() const override { return optional_; }
+
+ private:
+  // Helper for translating a List<Pair<String, String>> to a Constraints.
+  static void PopulateConstraintsFromJavaPairList(
+      JNIEnv* jni, jobject j_constraints,
+      const char* field_name, Constraints* field) {
+    jfieldID j_id = GetFieldID(jni,
+        GetObjectClass(jni, j_constraints), field_name, "Ljava/util/List;");
+    jobject j_list = GetObjectField(jni, j_constraints, j_id);
+    jmethodID j_iterator_id = GetMethodID(jni,
+        GetObjectClass(jni, j_list), "iterator", "()Ljava/util/Iterator;");
+    jobject j_iterator = jni->CallObjectMethod(j_list, j_iterator_id);
+    CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+    jmethodID j_has_next = GetMethodID(jni,
+        GetObjectClass(jni, j_iterator), "hasNext", "()Z");
+    jmethodID j_next = GetMethodID(jni,
+        GetObjectClass(jni, j_iterator), "next", "()Ljava/lang/Object;");
+    while (jni->CallBooleanMethod(j_iterator, j_has_next)) {
+      CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
+      jobject entry = jni->CallObjectMethod(j_iterator, j_next);
+      CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+      jmethodID get_key = GetMethodID(jni,
+          GetObjectClass(jni, entry), "getKey", "()Ljava/lang/String;");
+      jstring j_key = reinterpret_cast<jstring>(
+          jni->CallObjectMethod(entry, get_key));
+      CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+      jmethodID get_value = GetMethodID(jni,
+          GetObjectClass(jni, entry), "getValue", "()Ljava/lang/String;");
+      jstring j_value = reinterpret_cast<jstring>(
+          jni->CallObjectMethod(entry, get_value));
+      CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+      field->push_back(Constraint(JavaToStdString(jni, j_key),
+                                  JavaToStdString(jni, j_value)));
+    }
+    CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
+  }
+
+  Constraints mandatory_;
+  Constraints optional_;
+};
+
+static jobject JavaSdpFromNativeSdp(
+    JNIEnv* jni, const SessionDescriptionInterface* desc) {
+  std::string sdp;
+  RTC_CHECK(desc->ToString(&sdp)) << "got so far: " << sdp;
+  jstring j_description = JavaStringFromStdString(jni, sdp);
+
+  jclass j_type_class = FindClass(
+      jni, "org/webrtc/SessionDescription$Type");
+  jmethodID j_type_from_canonical = GetStaticMethodID(
+      jni, j_type_class, "fromCanonicalForm",
+      "(Ljava/lang/String;)Lorg/webrtc/SessionDescription$Type;");
+  jstring j_type_string = JavaStringFromStdString(jni, desc->type());
+  jobject j_type = jni->CallStaticObjectMethod(
+      j_type_class, j_type_from_canonical, j_type_string);
+  CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+
+  jclass j_sdp_class = FindClass(jni, "org/webrtc/SessionDescription");
+  jmethodID j_sdp_ctor = GetMethodID(
+      jni, j_sdp_class, "<init>",
+      "(Lorg/webrtc/SessionDescription$Type;Ljava/lang/String;)V");
+  jobject j_sdp = jni->NewObject(
+      j_sdp_class, j_sdp_ctor, j_type, j_description);
+  CHECK_EXCEPTION(jni) << "error during NewObject";
+  return j_sdp;
+}
+
+template <class T>  // T is one of {Create,Set}SessionDescriptionObserver.
+class SdpObserverWrapper : public T {
+ public:
+  SdpObserverWrapper(JNIEnv* jni, jobject j_observer,
+                     ConstraintsWrapper* constraints)
+      : constraints_(constraints),
+        j_observer_global_(jni, j_observer),
+        j_observer_class_(jni, GetObjectClass(jni, j_observer)) {
+  }
+
+  virtual ~SdpObserverWrapper() {}
+
+  // Can't mark override because of templating.
+  virtual void OnSuccess() {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jmethodID m = GetMethodID(jni(), *j_observer_class_, "onSetSuccess", "()V");
+    jni()->CallVoidMethod(*j_observer_global_, m);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  // Can't mark override because of templating.
+  virtual void OnSuccess(SessionDescriptionInterface* desc) {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jmethodID m = GetMethodID(
+        jni(), *j_observer_class_, "onCreateSuccess",
+        "(Lorg/webrtc/SessionDescription;)V");
+    jobject j_sdp = JavaSdpFromNativeSdp(jni(), desc);
+    jni()->CallVoidMethod(*j_observer_global_, m, j_sdp);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+ protected:
+  // Common implementation for failure of Set & Create types, distinguished by
+  // |op| being "Set" or "Create".
+  void DoOnFailure(const std::string& op, const std::string& error) {
+    jmethodID m = GetMethodID(jni(), *j_observer_class_, "on" + op + "Failure",
+                              "(Ljava/lang/String;)V");
+    jstring j_error_string = JavaStringFromStdString(jni(), error);
+    jni()->CallVoidMethod(*j_observer_global_, m, j_error_string);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  JNIEnv* jni() {
+    return AttachCurrentThreadIfNeeded();
+  }
+
+ private:
+  scoped_ptr<ConstraintsWrapper> constraints_;
+  const ScopedGlobalRef<jobject> j_observer_global_;
+  const ScopedGlobalRef<jclass> j_observer_class_;
+};
+
+class CreateSdpObserverWrapper
+    : public SdpObserverWrapper<CreateSessionDescriptionObserver> {
+ public:
+  CreateSdpObserverWrapper(JNIEnv* jni, jobject j_observer,
+                           ConstraintsWrapper* constraints)
+      : SdpObserverWrapper(jni, j_observer, constraints) {}
+
+  void OnFailure(const std::string& error) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    SdpObserverWrapper::DoOnFailure(std::string("Create"), error);
+  }
+};
+
+class SetSdpObserverWrapper
+    : public SdpObserverWrapper<SetSessionDescriptionObserver> {
+ public:
+  SetSdpObserverWrapper(JNIEnv* jni, jobject j_observer,
+                        ConstraintsWrapper* constraints)
+      : SdpObserverWrapper(jni, j_observer, constraints) {}
+
+  void OnFailure(const std::string& error) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    SdpObserverWrapper::DoOnFailure(std::string("Set"), error);
+  }
+};
+
+// Adapter for a Java DataChannel$Observer presenting a C++ DataChannelObserver
+// and dispatching the callback from C++ back to Java.
+class DataChannelObserverWrapper : public DataChannelObserver {
+ public:
+  DataChannelObserverWrapper(JNIEnv* jni, jobject j_observer)
+      : j_observer_global_(jni, j_observer),
+        j_observer_class_(jni, GetObjectClass(jni, j_observer)),
+        j_buffer_class_(jni, FindClass(jni, "org/webrtc/DataChannel$Buffer")),
+        j_on_buffered_amount_change_mid_(GetMethodID(
+            jni, *j_observer_class_, "onBufferedAmountChange", "(J)V")),
+        j_on_state_change_mid_(
+            GetMethodID(jni, *j_observer_class_, "onStateChange", "()V")),
+        j_on_message_mid_(GetMethodID(jni, *j_observer_class_, "onMessage",
+                                      "(Lorg/webrtc/DataChannel$Buffer;)V")),
+        j_buffer_ctor_(GetMethodID(jni, *j_buffer_class_, "<init>",
+                                   "(Ljava/nio/ByteBuffer;Z)V")) {}
+
+  virtual ~DataChannelObserverWrapper() {}
+
+  void OnBufferedAmountChange(uint64_t previous_amount) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jni()->CallVoidMethod(*j_observer_global_, j_on_buffered_amount_change_mid_,
+                          previous_amount);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  void OnStateChange() override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jni()->CallVoidMethod(*j_observer_global_, j_on_state_change_mid_);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+  void OnMessage(const DataBuffer& buffer) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jobject byte_buffer = jni()->NewDirectByteBuffer(
+        const_cast<char*>(buffer.data.data<char>()), buffer.data.size());
+    jobject j_buffer = jni()->NewObject(*j_buffer_class_, j_buffer_ctor_,
+                                        byte_buffer, buffer.binary);
+    jni()->CallVoidMethod(*j_observer_global_, j_on_message_mid_, j_buffer);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+ private:
+  JNIEnv* jni() {
+    return AttachCurrentThreadIfNeeded();
+  }
+
+  const ScopedGlobalRef<jobject> j_observer_global_;
+  const ScopedGlobalRef<jclass> j_observer_class_;
+  const ScopedGlobalRef<jclass> j_buffer_class_;
+  const jmethodID j_on_buffered_amount_change_mid_;
+  const jmethodID j_on_state_change_mid_;
+  const jmethodID j_on_message_mid_;
+  const jmethodID j_buffer_ctor_;
+};
+
+// Adapter for a Java StatsObserver presenting a C++ StatsObserver and
+// dispatching the callback from C++ back to Java.
+class StatsObserverWrapper : public StatsObserver {
+ public:
+  StatsObserverWrapper(JNIEnv* jni, jobject j_observer)
+      : j_observer_global_(jni, j_observer),
+        j_observer_class_(jni, GetObjectClass(jni, j_observer)),
+        j_stats_report_class_(jni, FindClass(jni, "org/webrtc/StatsReport")),
+        j_stats_report_ctor_(GetMethodID(
+            jni, *j_stats_report_class_, "<init>",
+            "(Ljava/lang/String;Ljava/lang/String;D"
+            "[Lorg/webrtc/StatsReport$Value;)V")),
+        j_value_class_(jni, FindClass(
+            jni, "org/webrtc/StatsReport$Value")),
+        j_value_ctor_(GetMethodID(
+            jni, *j_value_class_, "<init>",
+            "(Ljava/lang/String;Ljava/lang/String;)V")) {
+  }
+
+  virtual ~StatsObserverWrapper() {}
+
+  void OnComplete(const StatsReports& reports) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jobjectArray j_reports = ReportsToJava(jni(), reports);
+    jmethodID m = GetMethodID(jni(), *j_observer_class_, "onComplete",
+                              "([Lorg/webrtc/StatsReport;)V");
+    jni()->CallVoidMethod(*j_observer_global_, m, j_reports);
+    CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+  }
+
+ private:
+  jobjectArray ReportsToJava(
+      JNIEnv* jni, const StatsReports& reports) {
+    jobjectArray reports_array = jni->NewObjectArray(
+        reports.size(), *j_stats_report_class_, NULL);
+    int i = 0;
+    for (const auto* report : reports) {
+      ScopedLocalRefFrame local_ref_frame(jni);
+      jstring j_id = JavaStringFromStdString(jni, report->id()->ToString());
+      jstring j_type = JavaStringFromStdString(jni, report->TypeToString());
+      jobjectArray j_values = ValuesToJava(jni, report->values());
+      jobject j_report = jni->NewObject(*j_stats_report_class_,
+                                        j_stats_report_ctor_,
+                                        j_id,
+                                        j_type,
+                                        report->timestamp(),
+                                        j_values);
+      jni->SetObjectArrayElement(reports_array, i++, j_report);
+    }
+    return reports_array;
+  }
+
+  jobjectArray ValuesToJava(JNIEnv* jni, const StatsReport::Values& values) {
+    jobjectArray j_values = jni->NewObjectArray(
+        values.size(), *j_value_class_, NULL);
+    int i = 0;
+    for (const auto& it : values) {
+      ScopedLocalRefFrame local_ref_frame(jni);
+      // Should we use the '.name' enum value here instead of converting the
+      // name to a string?
+      jstring j_name = JavaStringFromStdString(jni, it.second->display_name());
+      jstring j_value = JavaStringFromStdString(jni, it.second->ToString());
+      jobject j_element_value =
+          jni->NewObject(*j_value_class_, j_value_ctor_, j_name, j_value);
+      jni->SetObjectArrayElement(j_values, i++, j_element_value);
+    }
+    return j_values;
+  }
+
+  JNIEnv* jni() {
+    return AttachCurrentThreadIfNeeded();
+  }
+
+  const ScopedGlobalRef<jobject> j_observer_global_;
+  const ScopedGlobalRef<jclass> j_observer_class_;
+  const ScopedGlobalRef<jclass> j_stats_report_class_;
+  const jmethodID j_stats_report_ctor_;
+  const ScopedGlobalRef<jclass> j_value_class_;
+  const jmethodID j_value_ctor_;
+};
+
+// Adapter presenting a cricket::VideoRenderer as a
+// webrtc::VideoRendererInterface.
+class VideoRendererWrapper : public VideoRendererInterface {
+ public:
+  static VideoRendererWrapper* Create(cricket::VideoRenderer* renderer) {
+    if (renderer)
+      return new VideoRendererWrapper(renderer);
+    return NULL;
+  }
+
+  virtual ~VideoRendererWrapper() {}
+
+  void RenderFrame(const cricket::VideoFrame* video_frame) override {
+    ScopedLocalRefFrame local_ref_frame(AttachCurrentThreadIfNeeded());
+    renderer_->RenderFrame(video_frame->GetCopyWithRotationApplied());
+  }
+
+ private:
+  explicit VideoRendererWrapper(cricket::VideoRenderer* renderer)
+      : renderer_(renderer) {}
+  scoped_ptr<cricket::VideoRenderer> renderer_;
+};
+
+// Wrapper dispatching webrtc::VideoRendererInterface to a Java VideoRenderer
+// instance.
+class JavaVideoRendererWrapper : public VideoRendererInterface {
+ public:
+  JavaVideoRendererWrapper(JNIEnv* jni, jobject j_callbacks)
+      : j_callbacks_(jni, j_callbacks),
+        j_render_frame_id_(GetMethodID(
+            jni, GetObjectClass(jni, j_callbacks), "renderFrame",
+            "(Lorg/webrtc/VideoRenderer$I420Frame;)V")),
+        j_frame_class_(jni,
+                       FindClass(jni, "org/webrtc/VideoRenderer$I420Frame")),
+        j_i420_frame_ctor_id_(GetMethodID(
+            jni, *j_frame_class_, "<init>", "(III[I[Ljava/nio/ByteBuffer;J)V")),
+        j_texture_frame_ctor_id_(GetMethodID(
+            jni, *j_frame_class_, "<init>",
+            "(IIII[FJ)V")),
+        j_byte_buffer_class_(jni, FindClass(jni, "java/nio/ByteBuffer")) {
+    CHECK_EXCEPTION(jni);
+  }
+
+  virtual ~JavaVideoRendererWrapper() {}
+
+  void RenderFrame(const cricket::VideoFrame* video_frame) override {
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jobject j_frame = (video_frame->GetNativeHandle() != nullptr)
+                          ? CricketToJavaTextureFrame(video_frame)
+                          : CricketToJavaI420Frame(video_frame);
+    // |j_callbacks_| is responsible for releasing |j_frame| with
+    // VideoRenderer.renderFrameDone().
+    jni()->CallVoidMethod(*j_callbacks_, j_render_frame_id_, j_frame);
+    CHECK_EXCEPTION(jni());
+  }
+
+ private:
+  // Make a shallow copy of |frame| to be used from Java. Ownership passes to
+  // the Java side, which must release the frame with
+  // VideoRenderer.releaseNativeFrame().
+  static jlong javaShallowCopy(const cricket::VideoFrame* frame) {
+    return jlongFromPointer(frame->Copy());
+  }
+
+  // Return a VideoRenderer.I420Frame referring to the data in |frame|.
+  jobject CricketToJavaI420Frame(const cricket::VideoFrame* frame) {
+    jintArray strides = jni()->NewIntArray(3);
+    jint* strides_array = jni()->GetIntArrayElements(strides, NULL);
+    strides_array[0] = frame->GetYPitch();
+    strides_array[1] = frame->GetUPitch();
+    strides_array[2] = frame->GetVPitch();
+    jni()->ReleaseIntArrayElements(strides, strides_array, 0);
+    jobjectArray planes = jni()->NewObjectArray(3, *j_byte_buffer_class_, NULL);
+    jobject y_buffer =
+        jni()->NewDirectByteBuffer(const_cast<uint8_t*>(frame->GetYPlane()),
+                                   frame->GetYPitch() * frame->GetHeight());
+    jobject u_buffer = jni()->NewDirectByteBuffer(
+        const_cast<uint8_t*>(frame->GetUPlane()), frame->GetChromaSize());
+    jobject v_buffer = jni()->NewDirectByteBuffer(
+        const_cast<uint8_t*>(frame->GetVPlane()), frame->GetChromaSize());
+    jni()->SetObjectArrayElement(planes, 0, y_buffer);
+    jni()->SetObjectArrayElement(planes, 1, u_buffer);
+    jni()->SetObjectArrayElement(planes, 2, v_buffer);
+    return jni()->NewObject(
+        *j_frame_class_, j_i420_frame_ctor_id_,
+        frame->GetWidth(), frame->GetHeight(),
+        static_cast<int>(frame->GetVideoRotation()),
+        strides, planes, javaShallowCopy(frame));
+  }
+
+  // Return a VideoRenderer.I420Frame referring to the texture in |frame|.
+  jobject CricketToJavaTextureFrame(const cricket::VideoFrame* frame) {
+    NativeHandleImpl* handle =
+        reinterpret_cast<NativeHandleImpl*>(frame->GetNativeHandle());
+    jfloatArray sampling_matrix = jni()->NewFloatArray(16);
+    jni()->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix);
+    return jni()->NewObject(
+        *j_frame_class_, j_texture_frame_ctor_id_,
+        frame->GetWidth(), frame->GetHeight(),
+        static_cast<int>(frame->GetVideoRotation()),
+        handle->oes_texture_id, sampling_matrix, javaShallowCopy(frame));
+  }
+
+  JNIEnv* jni() {
+    return AttachCurrentThreadIfNeeded();
+  }
+
+  ScopedGlobalRef<jobject> j_callbacks_;
+  jmethodID j_render_frame_id_;
+  ScopedGlobalRef<jclass> j_frame_class_;
+  jmethodID j_i420_frame_ctor_id_;
+  jmethodID j_texture_frame_ctor_id_;
+  ScopedGlobalRef<jclass> j_byte_buffer_class_;
+};
+
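+// Recover the native DataChannelInterface* stashed in the Java object's
+// |nativeDataChannel| field.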
+static DataChannelInterface* ExtractNativeDC(JNIEnv* jni, jobject j_dc) {
+  jfieldID native_dc_id = GetFieldID(jni,
+      GetObjectClass(jni, j_dc), "nativeDataChannel", "J");
+  jlong j_d = GetLongField(jni, j_dc, native_dc_id);
+  return reinterpret_cast<DataChannelInterface*>(j_d);
+}
+
+JOW(jlong, DataChannel_registerObserverNative)(
+    JNIEnv* jni, jobject j_dc, jobject j_observer) {
+  scoped_ptr<DataChannelObserverWrapper> observer(
+      new DataChannelObserverWrapper(jni, j_observer));
+  ExtractNativeDC(jni, j_dc)->RegisterObserver(observer.get());
+  return jlongFromPointer(observer.release());
+}
+
+JOW(void, DataChannel_unregisterObserverNative)(
+    JNIEnv* jni, jobject j_dc, jlong native_observer) {
+  ExtractNativeDC(jni, j_dc)->UnregisterObserver();
+  delete reinterpret_cast<DataChannelObserverWrapper*>(native_observer);
+}
+
+JOW(jstring, DataChannel_label)(JNIEnv* jni, jobject j_dc) {
+  return JavaStringFromStdString(jni, ExtractNativeDC(jni, j_dc)->label());
+}
+
+JOW(jobject, DataChannel_state)(JNIEnv* jni, jobject j_dc) {
+  return JavaEnumFromIndex(
+      jni, "DataChannel$State", ExtractNativeDC(jni, j_dc)->state());
+}
+
+JOW(jlong, DataChannel_bufferedAmount)(JNIEnv* jni, jobject j_dc) {
+  uint64_t buffered_amount = ExtractNativeDC(jni, j_dc)->buffered_amount();
+  RTC_CHECK_LE(buffered_amount, std::numeric_limits<int64_t>::max())
+      << "buffered_amount overflowed jlong!";
+  return static_cast<jlong>(buffered_amount);
+}
+
+JOW(void, DataChannel_close)(JNIEnv* jni, jobject j_dc) {
+  ExtractNativeDC(jni, j_dc)->Close();
+}
+
+JOW(jboolean, DataChannel_sendNative)(JNIEnv* jni, jobject j_dc,
+                                      jbyteArray data, jboolean binary) {
+  jbyte* bytes = jni->GetByteArrayElements(data, NULL);
+  bool ret = ExtractNativeDC(jni, j_dc)->Send(DataBuffer(
+      rtc::Buffer(bytes, jni->GetArrayLength(data)),
+      binary));
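+  // JNI_ABORT: the bytes were only read, so don't copy them back.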
+  jni->ReleaseByteArrayElements(data, bytes, JNI_ABORT);
+  return ret;
+}
+
+JOW(void, DataChannel_dispose)(JNIEnv* jni, jobject j_dc) {
+  CHECK_RELEASE(ExtractNativeDC(jni, j_dc));
+}
+
+JOW(void, Logging_nativeEnableTracing)(
+    JNIEnv* jni, jclass, jstring j_path, jint nativeLevels,
+    jint nativeSeverity) {
+  std::string path = JavaToStdString(jni, j_path);
+  if (nativeLevels != webrtc::kTraceNone) {
+    webrtc::Trace::set_level_filter(nativeLevels);
+    if (path != "logcat:") {
+      RTC_CHECK_EQ(0, webrtc::Trace::SetTraceFile(path.c_str(), false))
+          << "SetTraceFile failed";
+    } else {
+      // Intentionally leak this to avoid needing to reason about its lifecycle.
+      // It keeps no state and functions only as a dispatch point.
+      static LogcatTraceContext* g_trace_callback = new LogcatTraceContext();
+    }
+  }
+  if (nativeSeverity >= rtc::LS_SENSITIVE && nativeSeverity <= rtc::LS_ERROR) {
+    rtc::LogMessage::LogToDebug(
+        static_cast<rtc::LoggingSeverity>(nativeSeverity));
+  }
+}
+
+JOW(void, Logging_nativeEnableLogThreads)(JNIEnv* jni, jclass) {
+  rtc::LogMessage::LogThreads(true);
+}
+
+JOW(void, Logging_nativeEnableLogTimeStamps)(JNIEnv* jni, jclass) {
+  rtc::LogMessage::LogTimestamps(true);
+}
+
+JOW(void, Logging_nativeLog)(
+    JNIEnv* jni, jclass, jint j_severity, jstring j_tag, jstring j_message) {
+  std::string message = JavaToStdString(jni, j_message);
+  std::string tag = JavaToStdString(jni, j_tag);
+  LOG_TAG(static_cast<rtc::LoggingSeverity>(j_severity), tag) << message;
+}
+
+JOW(void, PeerConnection_freePeerConnection)(JNIEnv*, jclass, jlong j_p) {
+  CHECK_RELEASE(reinterpret_cast<PeerConnectionInterface*>(j_p));
+}
+
+JOW(void, PeerConnection_freeObserver)(JNIEnv*, jclass, jlong j_p) {
+  PCOJava* p = reinterpret_cast<PCOJava*>(j_p);
+  delete p;
+}
+
+JOW(void, MediaSource_free)(JNIEnv*, jclass, jlong j_p) {
+  CHECK_RELEASE(reinterpret_cast<MediaSourceInterface*>(j_p));
+}
+
+JOW(void, VideoCapturer_free)(JNIEnv*, jclass, jlong j_p) {
+  delete reinterpret_cast<cricket::VideoCapturer*>(j_p);
+}
+
+JOW(void, VideoRenderer_freeWrappedVideoRenderer)(JNIEnv*, jclass, jlong j_p) {
+  delete reinterpret_cast<JavaVideoRendererWrapper*>(j_p);
+}
+
+JOW(void, VideoRenderer_releaseNativeFrame)(
+    JNIEnv* jni, jclass, jlong j_frame_ptr) {
+  delete reinterpret_cast<const cricket::VideoFrame*>(j_frame_ptr);
+}
+
+JOW(void, MediaStreamTrack_free)(JNIEnv*, jclass, jlong j_p) {
+  reinterpret_cast<MediaStreamTrackInterface*>(j_p)->Release();
+}
+
+JOW(jboolean, MediaStream_nativeAddAudioTrack)(
+    JNIEnv* jni, jclass, jlong pointer, jlong j_audio_track_pointer) {
+  return reinterpret_cast<MediaStreamInterface*>(pointer)->AddTrack(
+      reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer));
+}
+
+JOW(jboolean, MediaStream_nativeAddVideoTrack)(
+    JNIEnv* jni, jclass, jlong pointer, jlong j_video_track_pointer) {
+  return reinterpret_cast<MediaStreamInterface*>(pointer)
+      ->AddTrack(reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer));
+}
+
+JOW(jboolean, MediaStream_nativeRemoveAudioTrack)(
+    JNIEnv* jni, jclass, jlong pointer, jlong j_audio_track_pointer) {
+  return reinterpret_cast<MediaStreamInterface*>(pointer)->RemoveTrack(
+      reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer));
+}
+
+JOW(jboolean, MediaStream_nativeRemoveVideoTrack)(
+    JNIEnv* jni, jclass, jlong pointer, jlong j_video_track_pointer) {
+  return reinterpret_cast<MediaStreamInterface*>(pointer)->RemoveTrack(
+      reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer));
+}
+
+JOW(jstring, MediaStream_nativeLabel)(JNIEnv* jni, jclass, jlong j_p) {
+  return JavaStringFromStdString(
+      jni, reinterpret_cast<MediaStreamInterface*>(j_p)->label());
+}
+
+JOW(void, MediaStream_free)(JNIEnv*, jclass, jlong j_p) {
+  CHECK_RELEASE(reinterpret_cast<MediaStreamInterface*>(j_p));
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreateObserver)(
+    JNIEnv * jni, jclass, jobject j_observer) {
+  return (jlong)new PCOJava(jni, j_observer);
+}
+
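+// One-time initialization of Android-specific globals. Later calls only
+// refresh the Android context and the HW-acceleration flag; the setup behind
+// |factory_static_initialized| runs at most once.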
+JOW(jboolean, PeerConnectionFactory_initializeAndroidGlobals)(
+    JNIEnv* jni, jclass, jobject context,
+    jboolean initialize_audio, jboolean initialize_video,
+    jboolean video_hw_acceleration) {
+  bool failure = false;
+  video_hw_acceleration_enabled = video_hw_acceleration;
+  AndroidNetworkMonitor::SetAndroidContext(jni, context);
+  if (!factory_static_initialized) {
+    if (initialize_video) {
+      failure |= AndroidVideoCapturerJni::SetAndroidObjects(jni, context);
+    }
+    if (initialize_audio) {
+      failure |= webrtc::VoiceEngine::SetAndroidObjects(GetJVM(), context);
+    }
+    factory_static_initialized = true;
+  }
+  return !failure;
+}
+
+JOW(void, PeerConnectionFactory_initializeFieldTrials)(
+    JNIEnv* jni, jclass, jstring j_trials_init_string) {
+  field_trials_init_string = NULL;
+  if (j_trials_init_string != NULL) {
+    const char* init_string =
+        jni->GetStringUTFChars(j_trials_init_string, NULL);
+    int init_string_length = jni->GetStringUTFLength(j_trials_init_string);
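+    // InitFieldTrialsFromString keeps a pointer to the string, so copy it
+    // into a buffer that stays alive until nativeFreeFactory resets it.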
+    field_trials_init_string = new char[init_string_length + 1];
+    rtc::strcpyn(field_trials_init_string, init_string_length + 1, init_string);
+    jni->ReleaseStringUTFChars(j_trials_init_string, init_string);
+    LOG(LS_INFO) << "initializeFieldTrials: " << field_trials_init_string;
+  }
+  webrtc::field_trial::InitFieldTrialsFromString(field_trials_init_string);
+}
+
+JOW(void, PeerConnectionFactory_initializeInternalTracer)(JNIEnv* jni, jclass) {
+  rtc::tracing::SetupInternalTracer();
+}
+
+JOW(jboolean, PeerConnectionFactory_startInternalTracingCapture)(
+    JNIEnv* jni, jclass, jstring j_event_tracing_filename) {
+  if (!j_event_tracing_filename)
+    return false;
+
+  const char* init_string =
+      jni->GetStringUTFChars(j_event_tracing_filename, NULL);
+  LOG(LS_INFO) << "Starting internal tracing to: " << init_string;
+  bool ret = rtc::tracing::StartInternalCapture(init_string);
+  jni->ReleaseStringUTFChars(j_event_tracing_filename, init_string);
+  return ret;
+}
+
+JOW(void, PeerConnectionFactory_stopInternalTracingCapture)(
+    JNIEnv* jni, jclass) {
+  rtc::tracing::StopInternalCapture();
+}
+
+JOW(void, PeerConnectionFactory_shutdownInternalTracer)(JNIEnv* jni, jclass) {
+  rtc::tracing::ShutdownInternalTracer();
+}
+
+// Helper struct for working around the fact that CreatePeerConnectionFactory()
+// comes in two flavors: either entirely automagical (constructing its own
+// threads and deleting them on teardown, but no external codec factory support)
+// or entirely manual (requires caller to delete threads after factory
+// teardown).  This struct takes ownership of its ctor's arguments to present a
+// single thing for Java to hold and eventually free.
+class OwnedFactoryAndThreads {
+ public:
+  OwnedFactoryAndThreads(Thread* worker_thread,
+                         Thread* signaling_thread,
+                         WebRtcVideoEncoderFactory* encoder_factory,
+                         WebRtcVideoDecoderFactory* decoder_factory,
+                         rtc::NetworkMonitorFactory* network_monitor_factory,
+                         PeerConnectionFactoryInterface* factory)
+      : worker_thread_(worker_thread),
+        signaling_thread_(signaling_thread),
+        encoder_factory_(encoder_factory),
+        decoder_factory_(decoder_factory),
+        network_monitor_factory_(network_monitor_factory),
+        factory_(factory) {}
+
+  ~OwnedFactoryAndThreads() {
+    CHECK_RELEASE(factory_);
+    if (network_monitor_factory_ != nullptr) {
+      rtc::NetworkMonitorFactory::ReleaseFactory(network_monitor_factory_);
+    }
+  }
+
+  PeerConnectionFactoryInterface* factory() { return factory_; }
+  WebRtcVideoEncoderFactory* encoder_factory() { return encoder_factory_; }
+  WebRtcVideoDecoderFactory* decoder_factory() { return decoder_factory_; }
+  rtc::NetworkMonitorFactory* network_monitor_factory() {
+    return network_monitor_factory_;
+  }
+  void clear_network_monitor_factory() { network_monitor_factory_ = nullptr; }
+  void InvokeJavaCallbacksOnFactoryThreads();
+
+ private:
+  void JavaCallbackOnFactoryThreads();
+
+  const scoped_ptr<Thread> worker_thread_;
+  const scoped_ptr<Thread> signaling_thread_;
+  WebRtcVideoEncoderFactory* encoder_factory_;
+  WebRtcVideoDecoderFactory* decoder_factory_;
+  rtc::NetworkMonitorFactory* network_monitor_factory_;
+  PeerConnectionFactoryInterface* factory_;  // Const after ctor except dtor.
+};
+
+void OwnedFactoryAndThreads::JavaCallbackOnFactoryThreads() {
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+  jclass j_factory_class = FindClass(jni, "org/webrtc/PeerConnectionFactory");
+  jmethodID m = nullptr;
+  if (Thread::Current() == worker_thread_) {
+    LOG(LS_INFO) << "Worker thread JavaCallback";
+    m = GetStaticMethodID(jni, j_factory_class, "onWorkerThreadReady", "()V");
+  }
+  if (Thread::Current() == signaling_thread_) {
+    LOG(LS_INFO) << "Signaling thread JavaCallback";
+    m = GetStaticMethodID(
+        jni, j_factory_class, "onSignalingThreadReady", "()V");
+  }
+  if (m != nullptr) {
+    jni->CallStaticVoidMethod(j_factory_class, m);
+    CHECK_EXCEPTION(jni) << "error during JavaCallback::CallStaticVoidMethod";
+  }
+}
+
+void OwnedFactoryAndThreads::InvokeJavaCallbacksOnFactoryThreads() {
+  LOG(LS_INFO) << "InvokeJavaCallbacksOnFactoryThreads.";
+  worker_thread_->Invoke<void>(
+      Bind(&OwnedFactoryAndThreads::JavaCallbackOnFactoryThreads, this));
+  signaling_thread_->Invoke<void>(
+      Bind(&OwnedFactoryAndThreads::JavaCallbackOnFactoryThreads, this));
+}
+
+PeerConnectionFactoryInterface::Options ParseOptionsFromJava(JNIEnv* jni,
+                                                             jobject options) {
+  jclass options_class = jni->GetObjectClass(options);
+  jfieldID network_ignore_mask_field =
+      jni->GetFieldID(options_class, "networkIgnoreMask", "I");
+  int network_ignore_mask =
+      jni->GetIntField(options, network_ignore_mask_field);
+
+  jfieldID disable_encryption_field =
+      jni->GetFieldID(options_class, "disableEncryption", "Z");
+  bool disable_encryption =
+      jni->GetBooleanField(options, disable_encryption_field);
+
+  jfieldID disable_network_monitor_field =
+      jni->GetFieldID(options_class, "disableNetworkMonitor", "Z");
+  bool disable_network_monitor =
+      jni->GetBooleanField(options, disable_network_monitor_field);
+
+  PeerConnectionFactoryInterface::Options native_options;
+
+  // This doesn't necessarily match the C++ version of this struct; feel free
+  // to add more parameters as necessary.
+  native_options.network_ignore_mask = network_ignore_mask;
+  native_options.disable_encryption = disable_encryption;
+  native_options.disable_network_monitor = disable_network_monitor;
+  return native_options;
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnectionFactory)(
+    JNIEnv* jni, jclass, jobject joptions) {
+  // talk/ assumes pretty widely that the current Thread is ThreadManager'd, but
+  // ThreadManager only WrapCurrentThread()s the thread where it is first
+  // created.  Since the semantics around when auto-wrapping happens in
+  // webrtc/base/ are convoluted, we simply wrap here to avoid having to think
+  // about ramifications of auto-wrapping there.
+  rtc::ThreadManager::Instance()->WrapCurrentThread();
+  webrtc::Trace::CreateTrace();
+  Thread* worker_thread = new Thread();
+  worker_thread->SetName("worker_thread", NULL);
+  Thread* signaling_thread = new Thread();
+  signaling_thread->SetName("signaling_thread", NULL);
+  RTC_CHECK(worker_thread->Start() && signaling_thread->Start())
+      << "Failed to start threads";
+  WebRtcVideoEncoderFactory* encoder_factory = nullptr;
+  WebRtcVideoDecoderFactory* decoder_factory = nullptr;
+  rtc::NetworkMonitorFactory* network_monitor_factory = nullptr;
+
+  PeerConnectionFactoryInterface::Options options;
+  bool has_options = joptions != NULL;
+  if (has_options) {
+    options = ParseOptionsFromJava(jni, joptions);
+  }
+
+  if (video_hw_acceleration_enabled) {
+    encoder_factory = new MediaCodecVideoEncoderFactory();
+    decoder_factory = new MediaCodecVideoDecoderFactory();
+  }
+  // Create the network monitor factory unless the options are provided and
+  // disable_network_monitor therein is set to true.
+  if (!(has_options && options.disable_network_monitor)) {
+    network_monitor_factory = new AndroidNetworkMonitorFactory();
+    rtc::NetworkMonitorFactory::SetFactory(network_monitor_factory);
+  }
+
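+  // NULL here selects the default audio device module.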
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      webrtc::CreatePeerConnectionFactory(worker_thread,
+                                          signaling_thread,
+                                          NULL,
+                                          encoder_factory,
+                                          decoder_factory));
+  RTC_CHECK(factory) << "Failed to create the peer connection factory; "
+                     << "WebRTC/libjingle init likely failed on this device";
+  // TODO(honghaiz): Maybe put the options as the argument of
+  // CreatePeerConnectionFactory.
+  if (has_options) {
+    factory->SetOptions(options);
+  }
+  OwnedFactoryAndThreads* owned_factory = new OwnedFactoryAndThreads(
+      worker_thread, signaling_thread,
+      encoder_factory, decoder_factory,
+      network_monitor_factory, factory.release());
+  owned_factory->InvokeJavaCallbacksOnFactoryThreads();
+  return jlongFromPointer(owned_factory);
+}
+
+JOW(void, PeerConnectionFactory_nativeFreeFactory)(JNIEnv*, jclass, jlong j_p) {
+  delete reinterpret_cast<OwnedFactoryAndThreads*>(j_p);
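+  // Re-initialize the field trials with NULL so the global pointer no longer
+  // references the string we are about to free.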
+  if (field_trials_init_string) {
+    webrtc::field_trial::InitFieldTrialsFromString(NULL);
+    delete[] field_trials_init_string;
+    field_trials_init_string = NULL;
+  }
+  webrtc::Trace::ReturnTrace();
+}
+
+static PeerConnectionFactoryInterface* factoryFromJava(jlong j_p) {
+  return reinterpret_cast<OwnedFactoryAndThreads*>(j_p)->factory();
+}
+
+JOW(void, PeerConnectionFactory_nativeThreadsCallbacks)(
+    JNIEnv*, jclass, jlong j_p) {
+  OwnedFactoryAndThreads* factory =
+      reinterpret_cast<OwnedFactoryAndThreads*>(j_p);
+  factory->InvokeJavaCallbacksOnFactoryThreads();
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreateLocalMediaStream)(
+    JNIEnv* jni, jclass, jlong native_factory, jstring label) {
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      factoryFromJava(native_factory));
+  rtc::scoped_refptr<MediaStreamInterface> stream(
+      factory->CreateLocalMediaStream(JavaToStdString(jni, label)));
+  return (jlong)stream.release();
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreateVideoSource)(
+    JNIEnv* jni, jclass, jlong native_factory, jlong native_capturer,
+    jobject j_constraints) {
+  scoped_ptr<ConstraintsWrapper> constraints(
+      new ConstraintsWrapper(jni, j_constraints));
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      factoryFromJava(native_factory));
+  rtc::scoped_refptr<VideoSourceInterface> source(
+      factory->CreateVideoSource(
+          reinterpret_cast<cricket::VideoCapturer*>(native_capturer),
+          constraints.get()));
+  return (jlong)source.release();
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreateVideoTrack)(
+    JNIEnv* jni, jclass, jlong native_factory, jstring id,
+    jlong native_source) {
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      factoryFromJava(native_factory));
+  rtc::scoped_refptr<VideoTrackInterface> track(
+      factory->CreateVideoTrack(
+          JavaToStdString(jni, id),
+          reinterpret_cast<VideoSourceInterface*>(native_source)));
+  return (jlong)track.release();
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreateAudioSource)(
+    JNIEnv* jni, jclass, jlong native_factory, jobject j_constraints) {
+  scoped_ptr<ConstraintsWrapper> constraints(
+      new ConstraintsWrapper(jni, j_constraints));
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      factoryFromJava(native_factory));
+  rtc::scoped_refptr<AudioSourceInterface> source(
+      factory->CreateAudioSource(constraints.get()));
+  return (jlong)source.release();
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreateAudioTrack)(
+    JNIEnv* jni, jclass, jlong native_factory, jstring id,
+    jlong native_source) {
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      factoryFromJava(native_factory));
+  rtc::scoped_refptr<AudioTrackInterface> track(factory->CreateAudioTrack(
+      JavaToStdString(jni, id),
+      reinterpret_cast<AudioSourceInterface*>(native_source)));
+  return (jlong)track.release();
+}
+
+JOW(jboolean, PeerConnectionFactory_nativeStartAecDump)(
+    JNIEnv* jni, jclass, jlong native_factory, jint file,
+    jint filesize_limit_bytes) {
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      factoryFromJava(native_factory));
+  return factory->StartAecDump(file, filesize_limit_bytes);
+}
+
+JOW(void, PeerConnectionFactory_nativeStopAecDump)(
+    JNIEnv* jni, jclass, jlong native_factory) {
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      factoryFromJava(native_factory));
+  factory->StopAecDump();
+}
+
+JOW(jboolean, PeerConnectionFactory_nativeStartRtcEventLog)(
+    JNIEnv* jni, jclass, jlong native_factory, jint file) {
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      factoryFromJava(native_factory));
+  return factory->StartRtcEventLog(file);
+}
+
+JOW(void, PeerConnectionFactory_nativeStopRtcEventLog)(
+    JNIEnv* jni, jclass, jlong native_factory) {
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      factoryFromJava(native_factory));
+  factory->StopRtcEventLog();
+}
+
+JOW(void, PeerConnectionFactory_nativeSetOptions)(
+    JNIEnv* jni, jclass, jlong native_factory, jobject options) {
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      factoryFromJava(native_factory));
+  PeerConnectionFactoryInterface::Options options_to_set =
+      ParseOptionsFromJava(jni, options);
+  factory->SetOptions(options_to_set);
+
+  if (options_to_set.disable_network_monitor) {
+    OwnedFactoryAndThreads* owner =
+        reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
+    if (owner->network_monitor_factory()) {
+      rtc::NetworkMonitorFactory::ReleaseFactory(
+          owner->network_monitor_factory());
+      owner->clear_network_monitor_factory();
+    }
+  }
+}
+
+JOW(void, PeerConnectionFactory_nativeSetVideoHwAccelerationOptions)(
+    JNIEnv* jni, jclass, jlong native_factory, jobject local_egl_context,
+    jobject remote_egl_context) {
+  OwnedFactoryAndThreads* owned_factory =
+      reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
+
+  jclass j_eglbase14_context_class =
+      FindClass(jni, "org/webrtc/EglBase14$Context");
+
+  MediaCodecVideoEncoderFactory* encoder_factory =
+      static_cast<MediaCodecVideoEncoderFactory*>
+          (owned_factory->encoder_factory());
+  if (encoder_factory &&
+      jni->IsInstanceOf(local_egl_context, j_eglbase14_context_class)) {
+    LOG(LS_INFO) << "Set EGL context for HW encoding.";
+    encoder_factory->SetEGLContext(jni, local_egl_context);
+  }
+
+  MediaCodecVideoDecoderFactory* decoder_factory =
+      static_cast<MediaCodecVideoDecoderFactory*>
+          (owned_factory->decoder_factory());
+  if (decoder_factory &&
+      jni->IsInstanceOf(remote_egl_context, j_eglbase14_context_class)) {
+    LOG(LS_INFO) << "Set EGL context for HW decoding.";
+    decoder_factory->SetEGLContext(jni, remote_egl_context);
+  }
+}
+
+static PeerConnectionInterface::IceTransportsType
+JavaIceTransportsTypeToNativeType(JNIEnv* jni, jobject j_ice_transports_type) {
+  std::string enum_name = GetJavaEnumName(
+      jni, "org/webrtc/PeerConnection$IceTransportsType",
+      j_ice_transports_type);
+
+  if (enum_name == "ALL")
+    return PeerConnectionInterface::kAll;
+
+  if (enum_name == "RELAY")
+    return PeerConnectionInterface::kRelay;
+
+  if (enum_name == "NOHOST")
+    return PeerConnectionInterface::kNoHost;
+
+  if (enum_name == "NONE")
+    return PeerConnectionInterface::kNone;
+
+  RTC_CHECK(false) << "Unexpected IceTransportsType enum_name " << enum_name;
+  return PeerConnectionInterface::kAll;
+}
+
+static PeerConnectionInterface::BundlePolicy
+JavaBundlePolicyToNativeType(JNIEnv* jni, jobject j_bundle_policy) {
+  std::string enum_name = GetJavaEnumName(
+      jni, "org/webrtc/PeerConnection$BundlePolicy",
+      j_bundle_policy);
+
+  if (enum_name == "BALANCED")
+    return PeerConnectionInterface::kBundlePolicyBalanced;
+
+  if (enum_name == "MAXBUNDLE")
+    return PeerConnectionInterface::kBundlePolicyMaxBundle;
+
+  if (enum_name == "MAXCOMPAT")
+    return PeerConnectionInterface::kBundlePolicyMaxCompat;
+
+  RTC_CHECK(false) << "Unexpected BundlePolicy enum_name " << enum_name;
+  return PeerConnectionInterface::kBundlePolicyBalanced;
+}
+
+static PeerConnectionInterface::RtcpMuxPolicy
+JavaRtcpMuxPolicyToNativeType(JNIEnv* jni, jobject j_rtcp_mux_policy) {
+  std::string enum_name = GetJavaEnumName(
+      jni, "org/webrtc/PeerConnection$RtcpMuxPolicy",
+      j_rtcp_mux_policy);
+
+  if (enum_name == "NEGOTIATE")
+    return PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
+
+  if (enum_name == "REQUIRE")
+    return PeerConnectionInterface::kRtcpMuxPolicyRequire;
+
+  RTC_CHECK(false) << "Unexpected RtcpMuxPolicy enum_name " << enum_name;
+  return PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
+}
+
+static PeerConnectionInterface::TcpCandidatePolicy
+JavaTcpCandidatePolicyToNativeType(
+    JNIEnv* jni, jobject j_tcp_candidate_policy) {
+  std::string enum_name = GetJavaEnumName(
+      jni, "org/webrtc/PeerConnection$TcpCandidatePolicy",
+      j_tcp_candidate_policy);
+
+  if (enum_name == "ENABLED")
+    return PeerConnectionInterface::kTcpCandidatePolicyEnabled;
+
+  if (enum_name == "DISABLED")
+    return PeerConnectionInterface::kTcpCandidatePolicyDisabled;
+
+  RTC_CHECK(false) << "Unexpected TcpCandidatePolicy enum_name " << enum_name;
+  return PeerConnectionInterface::kTcpCandidatePolicyEnabled;
+}
+
+static rtc::KeyType JavaKeyTypeToNativeType(JNIEnv* jni, jobject j_key_type) {
+  std::string enum_name = GetJavaEnumName(
+      jni, "org/webrtc/PeerConnection$KeyType", j_key_type);
+
+  if (enum_name == "RSA")
+    return rtc::KT_RSA;
+  if (enum_name == "ECDSA")
+    return rtc::KT_ECDSA;
+
+  RTC_CHECK(false) << "Unexpected KeyType enum_name " << enum_name;
+  return rtc::KT_ECDSA;
+}
+
+static PeerConnectionInterface::ContinualGatheringPolicy
+    JavaContinualGatheringPolicyToNativeType(
+        JNIEnv* jni, jobject j_gathering_policy) {
+  std::string enum_name = GetJavaEnumName(
+      jni, "org/webrtc/PeerConnection$ContinualGatheringPolicy",
+      j_gathering_policy);
+  if (enum_name == "GATHER_ONCE")
+    return PeerConnectionInterface::GATHER_ONCE;
+
+  if (enum_name == "GATHER_CONTINUALLY")
+    return PeerConnectionInterface::GATHER_CONTINUALLY;
+
+  RTC_CHECK(false) << "Unexpected ContinualGatheringPolicy enum name "
+                   << enum_name;
+  return PeerConnectionInterface::GATHER_ONCE;
+}
+
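+// Convert a Java List of PeerConnection.IceServer into native IceServers by
+// walking the list with its Iterator and reading each server's fields.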
+static void JavaIceServersToJsepIceServers(
+    JNIEnv* jni, jobject j_ice_servers,
+    PeerConnectionInterface::IceServers* ice_servers) {
+  jclass list_class = GetObjectClass(jni, j_ice_servers);
+  jmethodID iterator_id = GetMethodID(
+      jni, list_class, "iterator", "()Ljava/util/Iterator;");
+  jobject iterator = jni->CallObjectMethod(j_ice_servers, iterator_id);
+  CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+  jmethodID iterator_has_next = GetMethodID(
+      jni, GetObjectClass(jni, iterator), "hasNext", "()Z");
+  jmethodID iterator_next = GetMethodID(
+      jni, GetObjectClass(jni, iterator), "next", "()Ljava/lang/Object;");
+  while (jni->CallBooleanMethod(iterator, iterator_has_next)) {
+    CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
+    jobject j_ice_server = jni->CallObjectMethod(iterator, iterator_next);
+    CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+    jclass j_ice_server_class = GetObjectClass(jni, j_ice_server);
+    jfieldID j_ice_server_uri_id =
+        GetFieldID(jni, j_ice_server_class, "uri", "Ljava/lang/String;");
+    jfieldID j_ice_server_username_id =
+        GetFieldID(jni, j_ice_server_class, "username", "Ljava/lang/String;");
+    jfieldID j_ice_server_password_id =
+        GetFieldID(jni, j_ice_server_class, "password", "Ljava/lang/String;");
+    jstring uri = reinterpret_cast<jstring>(
+        GetObjectField(jni, j_ice_server, j_ice_server_uri_id));
+    jstring username = reinterpret_cast<jstring>(
+        GetObjectField(jni, j_ice_server, j_ice_server_username_id));
+    jstring password = reinterpret_cast<jstring>(
+        GetObjectField(jni, j_ice_server, j_ice_server_password_id));
+    PeerConnectionInterface::IceServer server;
+    server.uri = JavaToStdString(jni, uri);
+    server.username = JavaToStdString(jni, username);
+    server.password = JavaToStdString(jni, password);
+    ice_servers->push_back(server);
+  }
+  CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
+}
+
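+// Read the fields of a Java PeerConnection.RTCConfiguration into the native
+// RTCConfiguration struct.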
+static void JavaRTCConfigurationToJsepRTCConfiguration(
+    JNIEnv* jni,
+    jobject j_rtc_config,
+    PeerConnectionInterface::RTCConfiguration* rtc_config) {
+  jclass j_rtc_config_class = GetObjectClass(jni, j_rtc_config);
+
+  jfieldID j_ice_transports_type_id = GetFieldID(
+      jni, j_rtc_config_class, "iceTransportsType",
+      "Lorg/webrtc/PeerConnection$IceTransportsType;");
+  jobject j_ice_transports_type = GetObjectField(
+      jni, j_rtc_config, j_ice_transports_type_id);
+
+  jfieldID j_bundle_policy_id = GetFieldID(
+      jni, j_rtc_config_class, "bundlePolicy",
+      "Lorg/webrtc/PeerConnection$BundlePolicy;");
+  jobject j_bundle_policy = GetObjectField(
+      jni, j_rtc_config, j_bundle_policy_id);
+
+  jfieldID j_rtcp_mux_policy_id = GetFieldID(
+      jni, j_rtc_config_class, "rtcpMuxPolicy",
+      "Lorg/webrtc/PeerConnection$RtcpMuxPolicy;");
+  jobject j_rtcp_mux_policy = GetObjectField(
+      jni, j_rtc_config, j_rtcp_mux_policy_id);
+
+  jfieldID j_tcp_candidate_policy_id = GetFieldID(
+      jni, j_rtc_config_class, "tcpCandidatePolicy",
+      "Lorg/webrtc/PeerConnection$TcpCandidatePolicy;");
+  jobject j_tcp_candidate_policy = GetObjectField(
+      jni, j_rtc_config, j_tcp_candidate_policy_id);
+
+  jfieldID j_ice_servers_id = GetFieldID(
+      jni, j_rtc_config_class, "iceServers", "Ljava/util/List;");
+  jobject j_ice_servers = GetObjectField(jni, j_rtc_config, j_ice_servers_id);
+
+  jfieldID j_audio_jitter_buffer_max_packets_id =
+      GetFieldID(jni, j_rtc_config_class, "audioJitterBufferMaxPackets", "I");
+  jfieldID j_audio_jitter_buffer_fast_accelerate_id = GetFieldID(
+      jni, j_rtc_config_class, "audioJitterBufferFastAccelerate", "Z");
+
+  jfieldID j_ice_connection_receiving_timeout_id =
+      GetFieldID(jni, j_rtc_config_class, "iceConnectionReceivingTimeout", "I");
+
+  jfieldID j_ice_backup_candidate_pair_ping_interval_id = GetFieldID(
+      jni, j_rtc_config_class, "iceBackupCandidatePairPingInterval", "I");
+
+  jfieldID j_continual_gathering_policy_id =
+      GetFieldID(jni, j_rtc_config_class, "continualGatheringPolicy",
+                 "Lorg/webrtc/PeerConnection$ContinualGatheringPolicy;");
+  jobject j_continual_gathering_policy =
+      GetObjectField(jni, j_rtc_config, j_continual_gathering_policy_id);
+
+  rtc_config->type =
+      JavaIceTransportsTypeToNativeType(jni, j_ice_transports_type);
+  rtc_config->bundle_policy =
+      JavaBundlePolicyToNativeType(jni, j_bundle_policy);
+  rtc_config->rtcp_mux_policy =
+      JavaRtcpMuxPolicyToNativeType(jni, j_rtcp_mux_policy);
+  rtc_config->tcp_candidate_policy =
+      JavaTcpCandidatePolicyToNativeType(jni, j_tcp_candidate_policy);
+  JavaIceServersToJsepIceServers(jni, j_ice_servers, &rtc_config->servers);
+  rtc_config->audio_jitter_buffer_max_packets =
+      GetIntField(jni, j_rtc_config, j_audio_jitter_buffer_max_packets_id);
+  rtc_config->audio_jitter_buffer_fast_accelerate = GetBooleanField(
+      jni, j_rtc_config, j_audio_jitter_buffer_fast_accelerate_id);
+  rtc_config->ice_connection_receiving_timeout =
+      GetIntField(jni, j_rtc_config, j_ice_connection_receiving_timeout_id);
+  rtc_config->ice_backup_candidate_pair_ping_interval = GetIntField(
+      jni, j_rtc_config, j_ice_backup_candidate_pair_ping_interval_id);
+  rtc_config->continual_gathering_policy =
+      JavaContinualGatheringPolicyToNativeType(
+          jni, j_continual_gathering_policy);
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnection)(
+    JNIEnv* jni, jclass, jlong factory, jobject j_rtc_config,
+    jobject j_constraints, jlong observer_p) {
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> f(
+      reinterpret_cast<PeerConnectionFactoryInterface*>(
+          factoryFromJava(factory)));
+
+  PeerConnectionInterface::RTCConfiguration rtc_config;
+  JavaRTCConfigurationToJsepRTCConfiguration(jni, j_rtc_config, &rtc_config);
+
+  jclass j_rtc_config_class = GetObjectClass(jni, j_rtc_config);
+  jfieldID j_key_type_id = GetFieldID(jni, j_rtc_config_class, "keyType",
+                                      "Lorg/webrtc/PeerConnection$KeyType;");
+  jobject j_key_type = GetObjectField(jni, j_rtc_config, j_key_type_id);
+
+  // Create ECDSA certificate.
+  if (JavaKeyTypeToNativeType(jni, j_key_type) == rtc::KT_ECDSA) {
+    scoped_ptr<rtc::SSLIdentity> ssl_identity(
+        rtc::SSLIdentity::Generate(webrtc::kIdentityName, rtc::KT_ECDSA));
+    if (ssl_identity.get()) {
+      rtc_config.certificates.push_back(
+          rtc::RTCCertificate::Create(std::move(ssl_identity)));
+      LOG(LS_INFO) << "ECDSA certificate created.";
+    } else {
+      // Failing to create a certificate should not abort peer connection
+      // creation. The default encryption (currently RSA) will be used instead.
+      LOG(LS_WARNING) <<
+          "Failed to generate SSLIdentity. Default encryption will be used.";
+    }
+  }
+
+  PCOJava* observer = reinterpret_cast<PCOJava*>(observer_p);
+  observer->SetConstraints(new ConstraintsWrapper(jni, j_constraints));
+  rtc::scoped_refptr<PeerConnectionInterface> pc(f->CreatePeerConnection(
+      rtc_config, observer->constraints(), NULL, NULL, observer));
+  return (jlong)pc.release();
+}
+
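+// Recover the native PeerConnectionInterface from the Java object's
+// |nativePeerConnection| field; constructing the scoped_refptr takes a ref
+// that is dropped when the returned pointer goes out of scope.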
+static rtc::scoped_refptr<PeerConnectionInterface> ExtractNativePC(
+    JNIEnv* jni, jobject j_pc) {
+  jfieldID native_pc_id = GetFieldID(jni,
+      GetObjectClass(jni, j_pc), "nativePeerConnection", "J");
+  jlong j_p = GetLongField(jni, j_pc, native_pc_id);
+  return rtc::scoped_refptr<PeerConnectionInterface>(
+      reinterpret_cast<PeerConnectionInterface*>(j_p));
+}
+
+JOW(jobject, PeerConnection_getLocalDescription)(JNIEnv* jni, jobject j_pc) {
+  const SessionDescriptionInterface* sdp =
+      ExtractNativePC(jni, j_pc)->local_description();
+  return sdp ? JavaSdpFromNativeSdp(jni, sdp) : NULL;
+}
+
+JOW(jobject, PeerConnection_getRemoteDescription)(JNIEnv* jni, jobject j_pc) {
+  const SessionDescriptionInterface* sdp =
+      ExtractNativePC(jni, j_pc)->remote_description();
+  return sdp ? JavaSdpFromNativeSdp(jni, sdp) : NULL;
+}
+
+JOW(jobject, PeerConnection_createDataChannel)(
+    JNIEnv* jni, jobject j_pc, jstring j_label, jobject j_init) {
+  DataChannelInit init = JavaDataChannelInitToNative(jni, j_init);
+  rtc::scoped_refptr<DataChannelInterface> channel(
+      ExtractNativePC(jni, j_pc)->CreateDataChannel(
+          JavaToStdString(jni, j_label), &init));
+  // Don't pass channel.get() directly through NewObject: on 32-bit platforms
+  // the jlong vararg would read 64 bits, pulling in memory that doesn't
+  // belong to the 32-bit pointer. Widen it explicitly first.
+  jlong nativeChannelPtr = jlongFromPointer(channel.get());
+  RTC_CHECK(nativeChannelPtr) << "Failed to create DataChannel";
+  jclass j_data_channel_class = FindClass(jni, "org/webrtc/DataChannel");
+  jmethodID j_data_channel_ctor = GetMethodID(
+      jni, j_data_channel_class, "<init>", "(J)V");
+  jobject j_channel = jni->NewObject(
+      j_data_channel_class, j_data_channel_ctor, nativeChannelPtr);
+  CHECK_EXCEPTION(jni) << "error during NewObject";
+  // Channel is now owned by Java object, and will be freed from there.
+  int bumped_count = channel->AddRef();
+  RTC_CHECK(bumped_count == 2) << "Unexpected refcount";
+  return j_channel;
+}
+
+JOW(void, PeerConnection_createOffer)(
+    JNIEnv* jni, jobject j_pc, jobject j_observer, jobject j_constraints) {
+  ConstraintsWrapper* constraints =
+      new ConstraintsWrapper(jni, j_constraints);
+  rtc::scoped_refptr<CreateSdpObserverWrapper> observer(
+      new rtc::RefCountedObject<CreateSdpObserverWrapper>(
+          jni, j_observer, constraints));
+  ExtractNativePC(jni, j_pc)->CreateOffer(observer, constraints);
+}
+
+JOW(void, PeerConnection_createAnswer)(
+    JNIEnv* jni, jobject j_pc, jobject j_observer, jobject j_constraints) {
+  ConstraintsWrapper* constraints =
+      new ConstraintsWrapper(jni, j_constraints);
+  rtc::scoped_refptr<CreateSdpObserverWrapper> observer(
+      new rtc::RefCountedObject<CreateSdpObserverWrapper>(
+          jni, j_observer, constraints));
+  ExtractNativePC(jni, j_pc)->CreateAnswer(observer, constraints);
+}
+
+// Helper to create a native SessionDescriptionInterface from a Java
+// SessionDescription.
+static SessionDescriptionInterface* JavaSdpToNativeSdp(
+    JNIEnv* jni, jobject j_sdp) {
+  jfieldID j_type_id = GetFieldID(
+      jni, GetObjectClass(jni, j_sdp), "type",
+      "Lorg/webrtc/SessionDescription$Type;");
+  jobject j_type = GetObjectField(jni, j_sdp, j_type_id);
+  jmethodID j_canonical_form_id = GetMethodID(
+      jni, GetObjectClass(jni, j_type), "canonicalForm",
+      "()Ljava/lang/String;");
+  jstring j_type_string = (jstring)jni->CallObjectMethod(
+      j_type, j_canonical_form_id);
+  CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+  std::string std_type = JavaToStdString(jni, j_type_string);
+
+  jfieldID j_description_id = GetFieldID(
+      jni, GetObjectClass(jni, j_sdp), "description", "Ljava/lang/String;");
+  jstring j_description = (jstring)GetObjectField(jni, j_sdp, j_description_id);
+  std::string std_description = JavaToStdString(jni, j_description);
+
+  return webrtc::CreateSessionDescription(
+      std_type, std_description, NULL);
+}
+
+JOW(void, PeerConnection_setLocalDescription)(
+    JNIEnv* jni, jobject j_pc,
+    jobject j_observer, jobject j_sdp) {
+  rtc::scoped_refptr<SetSdpObserverWrapper> observer(
+      new rtc::RefCountedObject<SetSdpObserverWrapper>(
+          jni, j_observer, reinterpret_cast<ConstraintsWrapper*>(NULL)));
+  ExtractNativePC(jni, j_pc)->SetLocalDescription(
+      observer, JavaSdpToNativeSdp(jni, j_sdp));
+}
+
+JOW(void, PeerConnection_setRemoteDescription)(
+    JNIEnv* jni, jobject j_pc,
+    jobject j_observer, jobject j_sdp) {
+  rtc::scoped_refptr<SetSdpObserverWrapper> observer(
+      new rtc::RefCountedObject<SetSdpObserverWrapper>(
+          jni, j_observer, reinterpret_cast<ConstraintsWrapper*>(NULL)));
+  ExtractNativePC(jni, j_pc)->SetRemoteDescription(
+      observer, JavaSdpToNativeSdp(jni, j_sdp));
+}
+
+JOW(jboolean, PeerConnection_setConfiguration)(
+    JNIEnv* jni, jobject j_pc, jobject j_rtc_config) {
+  PeerConnectionInterface::RTCConfiguration rtc_config;
+  JavaRTCConfigurationToJsepRTCConfiguration(jni, j_rtc_config, &rtc_config);
+  return ExtractNativePC(jni, j_pc)->SetConfiguration(rtc_config);
+}
+
+JOW(jboolean, PeerConnection_nativeAddIceCandidate)(
+    JNIEnv* jni, jobject j_pc, jstring j_sdp_mid,
+    jint j_sdp_mline_index, jstring j_candidate_sdp) {
+  std::string sdp_mid = JavaToStdString(jni, j_sdp_mid);
+  std::string sdp = JavaToStdString(jni, j_candidate_sdp);
+  scoped_ptr<IceCandidateInterface> candidate(
+      webrtc::CreateIceCandidate(sdp_mid, j_sdp_mline_index, sdp, NULL));
+  return ExtractNativePC(jni, j_pc)->AddIceCandidate(candidate.get());
+}
+
+JOW(jboolean, PeerConnection_nativeAddLocalStream)(
+    JNIEnv* jni, jobject j_pc, jlong native_stream) {
+  return ExtractNativePC(jni, j_pc)->AddStream(
+      reinterpret_cast<MediaStreamInterface*>(native_stream));
+}
+
+JOW(void, PeerConnection_nativeRemoveLocalStream)(
+    JNIEnv* jni, jobject j_pc, jlong native_stream) {
+  ExtractNativePC(jni, j_pc)->RemoveStream(
+      reinterpret_cast<MediaStreamInterface*>(native_stream));
+}
+
+JOW(jobject, PeerConnection_nativeCreateSender)(
+    JNIEnv* jni, jobject j_pc, jstring j_kind, jstring j_stream_id) {
+  jclass j_rtp_sender_class = FindClass(jni, "org/webrtc/RtpSender");
+  jmethodID j_rtp_sender_ctor =
+      GetMethodID(jni, j_rtp_sender_class, "<init>", "(J)V");
+
+  std::string kind = JavaToStdString(jni, j_kind);
+  std::string stream_id = JavaToStdString(jni, j_stream_id);
+  rtc::scoped_refptr<RtpSenderInterface> sender =
+      ExtractNativePC(jni, j_pc)->CreateSender(kind, stream_id);
+  if (!sender.get()) {
+    return nullptr;
+  }
+  jlong nativeSenderPtr = jlongFromPointer(sender.get());
+  jobject j_sender =
+      jni->NewObject(j_rtp_sender_class, j_rtp_sender_ctor, nativeSenderPtr);
+  CHECK_EXCEPTION(jni) << "error during NewObject";
+  // Sender is now owned by the Java object, and will be freed from
+  // RtpSender.dispose(), called by PeerConnection.dispose() or getSenders().
+  sender->AddRef();
+  return j_sender;
+}
+
+JOW(jobject, PeerConnection_nativeGetSenders)(JNIEnv* jni, jobject j_pc) {
+  jclass j_array_list_class = FindClass(jni, "java/util/ArrayList");
+  jmethodID j_array_list_ctor =
+      GetMethodID(jni, j_array_list_class, "<init>", "()V");
+  jmethodID j_array_list_add =
+      GetMethodID(jni, j_array_list_class, "add", "(Ljava/lang/Object;)Z");
+  jobject j_senders = jni->NewObject(j_array_list_class, j_array_list_ctor);
+  CHECK_EXCEPTION(jni) << "error during NewObject";
+
+  jclass j_rtp_sender_class = FindClass(jni, "org/webrtc/RtpSender");
+  jmethodID j_rtp_sender_ctor =
+      GetMethodID(jni, j_rtp_sender_class, "<init>", "(J)V");
+
+  auto senders = ExtractNativePC(jni, j_pc)->GetSenders();
+  for (const auto& sender : senders) {
+    jlong nativeSenderPtr = jlongFromPointer(sender.get());
+    jobject j_sender =
+        jni->NewObject(j_rtp_sender_class, j_rtp_sender_ctor, nativeSenderPtr);
+    CHECK_EXCEPTION(jni) << "error during NewObject";
+    // Sender is now owned by the Java object, and will be freed from
+    // RtpSender.dispose(), called by PeerConnection.dispose() or getSenders().
+    sender->AddRef();
+    jni->CallBooleanMethod(j_senders, j_array_list_add, j_sender);
+    CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
+  }
+  return j_senders;
+}
+
+JOW(jobject, PeerConnection_nativeGetReceivers)(JNIEnv* jni, jobject j_pc) {
+  jclass j_array_list_class = FindClass(jni, "java/util/ArrayList");
+  jmethodID j_array_list_ctor =
+      GetMethodID(jni, j_array_list_class, "<init>", "()V");
+  jmethodID j_array_list_add =
+      GetMethodID(jni, j_array_list_class, "add", "(Ljava/lang/Object;)Z");
+  jobject j_receivers = jni->NewObject(j_array_list_class, j_array_list_ctor);
+  CHECK_EXCEPTION(jni) << "error during NewObject";
+
+  jclass j_rtp_receiver_class = FindClass(jni, "org/webrtc/RtpReceiver");
+  jmethodID j_rtp_receiver_ctor =
+      GetMethodID(jni, j_rtp_receiver_class, "<init>", "(J)V");
+
+  auto receivers = ExtractNativePC(jni, j_pc)->GetReceivers();
+  for (const auto& receiver : receivers) {
+    jlong nativeReceiverPtr = jlongFromPointer(receiver.get());
+    jobject j_receiver = jni->NewObject(j_rtp_receiver_class,
+                                        j_rtp_receiver_ctor, nativeReceiverPtr);
+    CHECK_EXCEPTION(jni) << "error during NewObject";
+    // Receiver is now owned by Java object, and will be freed from there.
+    receiver->AddRef();
+    jni->CallBooleanMethod(j_receivers, j_array_list_add, j_receiver);
+    CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
+  }
+  return j_receivers;
+}
+
+JOW(jboolean, PeerConnection_nativeGetStats)(
+    JNIEnv* jni, jobject j_pc, jobject j_observer, jlong native_track) {
+  rtc::scoped_refptr<StatsObserverWrapper> observer(
+      new rtc::RefCountedObject<StatsObserverWrapper>(jni, j_observer));
+  return ExtractNativePC(jni, j_pc)->GetStats(
+      observer,
+      reinterpret_cast<MediaStreamTrackInterface*>(native_track),
+      PeerConnectionInterface::kStatsOutputLevelStandard);
+}
+
+JOW(jobject, PeerConnection_signalingState)(JNIEnv* jni, jobject j_pc) {
+  PeerConnectionInterface::SignalingState state =
+      ExtractNativePC(jni, j_pc)->signaling_state();
+  return JavaEnumFromIndex(jni, "PeerConnection$SignalingState", state);
+}
+
+JOW(jobject, PeerConnection_iceConnectionState)(JNIEnv* jni, jobject j_pc) {
+  PeerConnectionInterface::IceConnectionState state =
+      ExtractNativePC(jni, j_pc)->ice_connection_state();
+  return JavaEnumFromIndex(jni, "PeerConnection$IceConnectionState", state);
+}
+
+JOW(jobject, PeerConnection_iceGatheringState)(JNIEnv* jni, jobject j_pc) {
+  PeerConnectionInterface::IceGatheringState state =
+      ExtractNativePC(jni, j_pc)->ice_gathering_state();
+  return JavaEnumFromIndex(jni, "PeerConnection$IceGatheringState", state);
+}
+
+JOW(void, PeerConnection_close)(JNIEnv* jni, jobject j_pc) {
+  ExtractNativePC(jni, j_pc)->Close();
+}
+
+JOW(jobject, MediaSource_nativeState)(JNIEnv* jni, jclass, jlong j_p) {
+  rtc::scoped_refptr<MediaSourceInterface> p(
+      reinterpret_cast<MediaSourceInterface*>(j_p));
+  return JavaEnumFromIndex(jni, "MediaSource$State", p->state());
+}
+
+JOW(jlong, VideoRenderer_nativeWrapVideoRenderer)(
+    JNIEnv* jni, jclass, jobject j_callbacks) {
+  scoped_ptr<JavaVideoRendererWrapper> renderer(
+      new JavaVideoRendererWrapper(jni, j_callbacks));
+  return (jlong)renderer.release();
+}
+
+JOW(void, VideoRenderer_nativeCopyPlane)(
+    JNIEnv* jni, jclass, jobject j_src_buffer, jint width, jint height,
+    jint src_stride, jobject j_dst_buffer, jint dst_stride) {
+  size_t src_size = jni->GetDirectBufferCapacity(j_src_buffer);
+  size_t dst_size = jni->GetDirectBufferCapacity(j_dst_buffer);
+  RTC_CHECK(src_stride >= width) << "Wrong source stride " << src_stride;
+  RTC_CHECK(dst_stride >= width) << "Wrong destination stride " << dst_stride;
+  RTC_CHECK(src_size >= src_stride * height)
+      << "Insufficient source buffer capacity " << src_size;
+  RTC_CHECK(dst_size >= dst_stride * height)
+      << "Isufficient destination buffer capacity " << dst_size;
+  uint8_t *src =
+      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_buffer));
+  uint8_t *dst =
+      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_buffer));
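+  // When the strides match, the plane is contiguous in memory and a single
+  // memcpy suffices; otherwise copy row by row, advancing each pointer by
+  // its own stride.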
+  if (src_stride == dst_stride) {
+    memcpy(dst, src, src_stride * height);
+  } else {
+    for (int i = 0; i < height; i++) {
+      memcpy(dst, src, width);
+      src += src_stride;
+      dst += dst_stride;
+    }
+  }
+}
+
+JOW(void, VideoSource_stop)(JNIEnv* jni, jclass, jlong j_p) {
+  reinterpret_cast<VideoSourceInterface*>(j_p)->Stop();
+}
+
+JOW(void, VideoSource_restart)(
+    JNIEnv* jni, jclass, jlong j_p_source, jlong j_p_format) {
+  reinterpret_cast<VideoSourceInterface*>(j_p_source)->Restart();
+}
+
+JOW(jstring, MediaStreamTrack_nativeId)(JNIEnv* jni, jclass, jlong j_p) {
+  return JavaStringFromStdString(
+      jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->id());
+}
+
+JOW(jstring, MediaStreamTrack_nativeKind)(JNIEnv* jni, jclass, jlong j_p) {
+  return JavaStringFromStdString(
+      jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->kind());
+}
+
+JOW(jboolean, MediaStreamTrack_nativeEnabled)(JNIEnv* jni, jclass, jlong j_p) {
+  return reinterpret_cast<MediaStreamTrackInterface*>(j_p)->enabled();
+}
+
+JOW(jobject, MediaStreamTrack_nativeState)(JNIEnv* jni, jclass, jlong j_p) {
+  return JavaEnumFromIndex(
+      jni,
+      "MediaStreamTrack$State",
+      reinterpret_cast<MediaStreamTrackInterface*>(j_p)->state());
+}
+
+JOW(jboolean, MediaStreamTrack_nativeSetState)(
+    JNIEnv* jni, jclass, jlong j_p, jint j_new_state) {
+  MediaStreamTrackInterface::TrackState new_state =
+      (MediaStreamTrackInterface::TrackState)j_new_state;
+  return reinterpret_cast<MediaStreamTrackInterface*>(j_p)
+      ->set_state(new_state);
+}
+
+JOW(jboolean, MediaStreamTrack_nativeSetEnabled)(
+    JNIEnv* jni, jclass, jlong j_p, jboolean enabled) {
+  return reinterpret_cast<MediaStreamTrackInterface*>(j_p)
+      ->set_enabled(enabled);
+}
+
+JOW(void, VideoTrack_nativeAddRenderer)(
+    JNIEnv* jni, jclass,
+    jlong j_video_track_pointer, jlong j_renderer_pointer) {
+  reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)->AddRenderer(
+      reinterpret_cast<VideoRendererInterface*>(j_renderer_pointer));
+}
+
+JOW(void, VideoTrack_nativeRemoveRenderer)(
+    JNIEnv* jni, jclass,
+    jlong j_video_track_pointer, jlong j_renderer_pointer) {
+  reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)->RemoveRenderer(
+      reinterpret_cast<VideoRendererInterface*>(j_renderer_pointer));
+}
+
+JOW(jlong, CallSessionFileRotatingLogSink_nativeAddSink)(
+    JNIEnv* jni, jclass,
+    jstring j_dirPath, jint j_maxFileSize, jint j_severity) {
+  std::string dir_path = JavaToStdString(jni, j_dirPath);
+  rtc::CallSessionFileRotatingLogSink* sink =
+      new rtc::CallSessionFileRotatingLogSink(dir_path, j_maxFileSize);
+  if (!sink->Init()) {
+    LOG_V(rtc::LoggingSeverity::LS_WARNING) <<
+        "Failed to init CallSessionFileRotatingLogSink for path " << dir_path;
+    delete sink;
+    return 0;
+  }
+  rtc::LogMessage::AddLogToStream(
+      sink, static_cast<rtc::LoggingSeverity>(j_severity));
+  return (jlong) sink;
+}
+
+JOW(void, CallSessionFileRotatingLogSink_nativeDeleteSink)(
+    JNIEnv* jni, jclass, jlong j_sink) {
+  rtc::CallSessionFileRotatingLogSink* sink =
+      reinterpret_cast<rtc::CallSessionFileRotatingLogSink*>(j_sink);
+  rtc::LogMessage::RemoveLogToStream(sink);
+  delete sink;
+}
+
+JOW(jbyteArray, CallSessionFileRotatingLogSink_nativeGetLogData)(
+    JNIEnv* jni, jclass, jstring j_dirPath) {
+  std::string dir_path = JavaToStdString(jni, j_dirPath);
+  rtc::scoped_ptr<rtc::CallSessionFileRotatingStream> stream(
+      new rtc::CallSessionFileRotatingStream(dir_path));
+  if (!stream->Open()) {
+    LOG_V(rtc::LoggingSeverity::LS_WARNING) <<
+        "Failed to open CallSessionFileRotatingStream for path " << dir_path;
+    return jni->NewByteArray(0);
+  }
+  size_t log_size = 0;
+  if (!stream->GetSize(&log_size) || log_size == 0) {
+    LOG_V(rtc::LoggingSeverity::LS_WARNING) <<
+        "CallSessionFileRotatingStream returns 0 size for path " << dir_path;
+    return jni->NewByteArray(0);
+  }
+
+  size_t read = 0;
+  rtc::scoped_ptr<jbyte[]> buffer(new jbyte[log_size]);
+  stream->ReadAll(buffer.get(), log_size, &read, nullptr);
+
+  jbyteArray result = jni->NewByteArray(read);
+  jni->SetByteArrayRegion(result, 0, read, buffer.get());
+
+  return result;
+}
+
+JOW(jboolean, RtpSender_nativeSetTrack)(JNIEnv* jni,
+                                    jclass,
+                                    jlong j_rtp_sender_pointer,
+                                    jlong j_track_pointer) {
+  return reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+      ->SetTrack(reinterpret_cast<MediaStreamTrackInterface*>(j_track_pointer));
+}
+
+JOW(jlong, RtpSender_nativeGetTrack)(JNIEnv* jni,
+                                  jclass,
+                                  jlong j_rtp_sender_pointer,
+                                  jlong j_track_pointer) {
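+  // track() returns a scoped_refptr; release() transfers that reference to
+  // the Java side, which is responsible for eventually releasing it.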
+  return jlongFromPointer(
+      reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+          ->track()
+          .release());
+}
+
+JOW(jstring, RtpSender_nativeId)(
+    JNIEnv* jni, jclass, jlong j_rtp_sender_pointer) {
+  return JavaStringFromStdString(
+      jni, reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)->id());
+}
+
+JOW(void, RtpSender_free)(JNIEnv* jni, jclass, jlong j_rtp_sender_pointer) {
+  reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)->Release();
+}
+
+JOW(jlong, RtpReceiver_nativeGetTrack)(JNIEnv* jni,
+                                    jclass,
+                                    jlong j_rtp_receiver_pointer,
+                                    jlong j_track_pointer) {
+  return jlongFromPointer(
+      reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)
+          ->track()
+          .release());
+}
+
+JOW(jstring, RtpReceiver_nativeId)(
+    JNIEnv* jni, jclass, jlong j_rtp_receiver_pointer) {
+  return JavaStringFromStdString(
+      jni,
+      reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)->id());
+}
+
+JOW(void, RtpReceiver_free)(JNIEnv* jni, jclass, jlong j_rtp_receiver_pointer) {
+  reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)->Release();
+}
+
+}  // namespace webrtc_jni
diff --git a/webrtc/api/java/jni/surfacetexturehelper_jni.cc b/webrtc/api/java/jni/surfacetexturehelper_jni.cc
new file mode 100644
index 0000000..335081d
--- /dev/null
+++ b/webrtc/api/java/jni/surfacetexturehelper_jni.cc
@@ -0,0 +1,68 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+
+#include "webrtc/api/java/jni/surfacetexturehelper_jni.h"
+
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+
+namespace webrtc_jni {
+
+SurfaceTextureHelper::SurfaceTextureHelper(
+    JNIEnv* jni, jobject surface_texture_helper)
+  : j_surface_texture_helper_(jni, surface_texture_helper),
+    j_return_texture_method_(
+        GetMethodID(jni,
+                    FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
+                    "returnTextureFrame",
+                    "()V")) {
+  CHECK_EXCEPTION(jni) << "error during initialization of SurfaceTextureHelper";
+}
+
+SurfaceTextureHelper::~SurfaceTextureHelper() {
+}
+
+void SurfaceTextureHelper::ReturnTextureFrame() const {
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  jni->CallVoidMethod(*j_surface_texture_helper_, j_return_texture_method_);
+
+  CHECK_EXCEPTION(
+      jni) << "error during SurfaceTextureHelper.returnTextureFrame";
+}
+
+rtc::scoped_refptr<webrtc::VideoFrameBuffer>
+SurfaceTextureHelper::CreateTextureFrame(int width, int height,
+    const NativeHandleImpl& native_handle) {
+  return new rtc::RefCountedObject<AndroidTextureBuffer>(
+      width, height, native_handle, *j_surface_texture_helper_,
+      rtc::Bind(&SurfaceTextureHelper::ReturnTextureFrame, this));
+}
+
+}  // namespace webrtc_jni
diff --git a/webrtc/api/java/jni/surfacetexturehelper_jni.h b/webrtc/api/java/jni/surfacetexturehelper_jni.h
new file mode 100644
index 0000000..8953b02
--- /dev/null
+++ b/webrtc/api/java/jni/surfacetexturehelper_jni.h
@@ -0,0 +1,79 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_SURFACETEXTUREHELPER_JNI_H_
+#define WEBRTC_API_JAVA_JNI_SURFACETEXTUREHELPER_JNI_H_
+
+#include <jni.h>
+
+#include "webrtc/api/java/jni/jni_helpers.h"
+#include "webrtc/api/java/jni/native_handle_impl.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/common_video/include/video_frame_buffer.h"
+
+namespace webrtc_jni {
+
+// Helper class to create and synchronize access to an Android SurfaceTexture.
+// It is used for creating webrtc::VideoFrameBuffers from a SurfaceTexture when
+// the SurfaceTexture has been updated.
+// When the VideoFrameBuffer is released, this class returns the buffer to the
+// Java SurfaceTextureHelper so it can be updated safely. The VideoFrameBuffer
+// can be released on an arbitrary thread.
+// SurfaceTextureHelper is reference counted to make sure that it is not
+// destroyed while a VideoFrameBuffer is in use.
+// This class is the C++ counterpart of the java class SurfaceTextureHelper.
+// Usage:
+// 1. Create a Java instance of SurfaceTextureHelper.
+// 2. Create an instance of this class.
+// 3. Register a listener on the Java SurfaceTextureHelper and start producing
+//    new buffers.
+// 4. Call CreateTextureFrame to wrap the Java texture in a VideoFrameBuffer.
+class SurfaceTextureHelper : public rtc::RefCountInterface {
+ public:
+  SurfaceTextureHelper(JNIEnv* jni, jobject surface_texture_helper);
+
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateTextureFrame(
+      int width,
+      int height,
+      const NativeHandleImpl& native_handle);
+
+ protected:
+  ~SurfaceTextureHelper();
+
+ private:
+  //  May be called on arbitrary thread.
+  void ReturnTextureFrame() const;
+
+  const ScopedGlobalRef<jobject> j_surface_texture_helper_;
+  const jmethodID j_return_texture_method_;
+};
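+
+// A minimal usage sketch (illustrative; |jni| and |j_helper| are assumed to
+// be a valid JNIEnv* and a Java SurfaceTextureHelper reference):
+//   rtc::scoped_refptr<SurfaceTextureHelper> helper(
+//       new rtc::RefCountedObject<SurfaceTextureHelper>(jni, j_helper));
+//   rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
+//       helper->CreateTextureFrame(width, height, native_handle);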
+
+}  // namespace webrtc_jni
+
+#endif  // WEBRTC_API_JAVA_JNI_SURFACETEXTUREHELPER_JNI_H_
diff --git a/webrtc/api/java/src/org/webrtc/AudioSource.java b/webrtc/api/java/src/org/webrtc/AudioSource.java
new file mode 100644
index 0000000..06177a6
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/AudioSource.java
@@ -0,0 +1,38 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/**
+ * Java wrapper for a C++ AudioSourceInterface.  Used as the source for one or
+ * more {@code AudioTrack} objects.
+ */
+public class AudioSource extends MediaSource {
+  public AudioSource(long nativeSource) {
+    super(nativeSource);
+  }
+}
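+
+// A minimal usage sketch (illustrative; |factory| is assumed to be an
+// initialized PeerConnectionFactory, which lives outside this file):
+//   AudioSource source = factory.createAudioSource(new MediaConstraints());
+//   AudioTrack track = factory.createAudioTrack("audio0", source);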
diff --git a/webrtc/api/java/src/org/webrtc/AudioTrack.java b/webrtc/api/java/src/org/webrtc/AudioTrack.java
new file mode 100644
index 0000000..3200080
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/AudioTrack.java
@@ -0,0 +1,35 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ AudioTrackInterface */
+public class AudioTrack extends MediaStreamTrack {
+  public AudioTrack(long nativeTrack) {
+    super(nativeTrack);
+  }
+}
diff --git a/webrtc/api/java/src/org/webrtc/CallSessionFileRotatingLogSink.java b/webrtc/api/java/src/org/webrtc/CallSessionFileRotatingLogSink.java
new file mode 100644
index 0000000..f7032a7
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/CallSessionFileRotatingLogSink.java
@@ -0,0 +1,57 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+public class CallSessionFileRotatingLogSink {
+  static {
+    System.loadLibrary("jingle_peerconnection_so");
+  }
+
+  private long nativeSink;
+
+  public static byte[] getLogData(String dirPath) {
+    return nativeGetLogData(dirPath);
+  }
+
+  public CallSessionFileRotatingLogSink(
+      String dirPath, int maxFileSize, Logging.Severity severity) {
+    nativeSink = nativeAddSink(dirPath, maxFileSize, severity.ordinal());
+  }
+
+  public void dispose() {
+    if (nativeSink != 0) {
+      nativeDeleteSink(nativeSink);
+      nativeSink = 0;
+    }
+  }
+
+  private static native long nativeAddSink(
+      String dirPath, int maxFileSize, int severity);
+  private static native void nativeDeleteSink(long nativeSink);
+  private static native byte[] nativeGetLogData(String dirPath);
+}
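+
+// A minimal usage sketch (illustrative; LS_INFO is assumed to be one of the
+// Logging.Severity values):
+//   CallSessionFileRotatingLogSink sink = new CallSessionFileRotatingLogSink(
+//       dirPath, 10 * 1024 * 1024, Logging.Severity.LS_INFO);
+//   ... run the call ...
+//   sink.dispose();
+//   byte[] logData = CallSessionFileRotatingLogSink.getLogData(dirPath);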
diff --git a/webrtc/api/java/src/org/webrtc/DataChannel.java b/webrtc/api/java/src/org/webrtc/DataChannel.java
new file mode 100644
index 0000000..1866098
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/DataChannel.java
@@ -0,0 +1,143 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+
+/** Java wrapper for a C++ DataChannelInterface. */
+public class DataChannel {
+  /** Java wrapper for WebIDL RTCDataChannel. */
+  public static class Init {
+    public boolean ordered = true;
+    // Optional unsigned short in WebIDL, -1 means unspecified.
+    public int maxRetransmitTimeMs = -1;
+    // Optional unsigned short in WebIDL, -1 means unspecified.
+    public int maxRetransmits = -1;
+    public String protocol = "";
+    public boolean negotiated = false;
+    // Optional unsigned short in WebIDL, -1 means unspecified.
+    public int id = -1;
+
+    public Init() {}
+
+    // Called only by native code.
+    private Init(
+        boolean ordered, int maxRetransmitTimeMs, int maxRetransmits,
+        String protocol, boolean negotiated, int id) {
+      this.ordered = ordered;
+      this.maxRetransmitTimeMs = maxRetransmitTimeMs;
+      this.maxRetransmits = maxRetransmits;
+      this.protocol = protocol;
+      this.negotiated = negotiated;
+      this.id = id;
+    }
+  }
+
+  /** Java version of C++ DataBuffer.  The atom of data in a DataChannel. */
+  public static class Buffer {
+    /** The underlying data. */
+    public final ByteBuffer data;
+
+    /**
+     * Indicates whether |data| contains UTF-8 text or "binary data"
+     * (i.e. anything else).
+     */
+    public final boolean binary;
+
+    public Buffer(ByteBuffer data, boolean binary) {
+      this.data = data;
+      this.binary = binary;
+    }
+  }
+
+  /** Java version of C++ DataChannelObserver. */
+  public interface Observer {
+    /** The data channel's bufferedAmount has changed. */
+    public void onBufferedAmountChange(long previousAmount);
+    /** The data channel state has changed. */
+    public void onStateChange();
+    /**
+     * A data buffer was successfully received.  NOTE: |buffer.data| will be
+     * freed once this function returns so callers who want to use the data
+     * asynchronously must make sure to copy it first.
+     */
+    public void onMessage(Buffer buffer);
+  }
+
+  /** Keep in sync with DataChannelInterface::DataState. */
+  public enum State { CONNECTING, OPEN, CLOSING, CLOSED };
+
+  private final long nativeDataChannel;
+  private long nativeObserver;
+
+  public DataChannel(long nativeDataChannel) {
+    this.nativeDataChannel = nativeDataChannel;
+  }
+
+  /** Register |observer|, replacing any previously-registered observer. */
+  public void registerObserver(Observer observer) {
+    if (nativeObserver != 0) {
+      unregisterObserverNative(nativeObserver);
+    }
+    nativeObserver = registerObserverNative(observer);
+  }
+  private native long registerObserverNative(Observer observer);
+
+  /** Unregister the (only) observer. */
+  public void unregisterObserver() {
+    unregisterObserverNative(nativeObserver);
+  }
+  private native void unregisterObserverNative(long nativeObserver);
+
+  public native String label();
+
+  public native State state();
+
+  /**
+   * Return the number of bytes of application data (UTF-8 text and binary data)
+   * that have been queued using SendBuffer but have not yet been transmitted
+   * to the network.
+   */
+  public native long bufferedAmount();
+
+  /** Close the channel. */
+  public native void close();
+
+  /** Send |data| to the remote peer; return success. */
+  public boolean send(Buffer buffer) {
+    // TODO(fischman): this could be cleverer about avoiding copies if the
+    // ByteBuffer is direct and/or is backed by an array.
+    byte[] data = new byte[buffer.data.remaining()];
+    buffer.data.get(data);
+    return sendNative(data, buffer.binary);
+  }
+  private native boolean sendNative(byte[] data, boolean binary);
+
+  /** Dispose of native resources attached to this channel. */
+  public native void dispose();
+}
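+
+// A minimal usage sketch (illustrative; |pc| is assumed to be a
+// PeerConnection whose createDataChannel() lives outside this file):
+//   DataChannel channel = pc.createDataChannel("chat", new DataChannel.Init());
+//   channel.registerObserver(observer);  // some DataChannel.Observer
+//   ByteBuffer payload = ByteBuffer.wrap(bytes);
+//   channel.send(new DataChannel.Buffer(payload, true /* binary */));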
diff --git a/webrtc/api/java/src/org/webrtc/IceCandidate.java b/webrtc/api/java/src/org/webrtc/IceCandidate.java
new file mode 100644
index 0000000..eb42ce4
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/IceCandidate.java
@@ -0,0 +1,48 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/**
+ * Representation of a single ICE Candidate, mirroring
+ * {@code IceCandidateInterface} in the C++ API.
+ */
+public class IceCandidate {
+  public final String sdpMid;
+  public final int sdpMLineIndex;
+  public final String sdp;
+
+  public IceCandidate(String sdpMid, int sdpMLineIndex, String sdp) {
+    this.sdpMid = sdpMid;
+    this.sdpMLineIndex = sdpMLineIndex;
+    this.sdp = sdp;
+  }
+
+  public String toString() {
+    return sdpMid + ":" + sdpMLineIndex + ":" + sdp;
+  }
+}
diff --git a/webrtc/api/java/src/org/webrtc/MediaCodecVideoDecoder.java b/webrtc/api/java/src/org/webrtc/MediaCodecVideoDecoder.java
new file mode 100644
index 0000000..1288d41
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/MediaCodecVideoDecoder.java
@@ -0,0 +1,701 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaCodecList;
+import android.media.MediaFormat;
+import android.os.Build;
+import android.os.SystemClock;
+import android.view.Surface;
+
+import org.webrtc.Logging;
+
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Set;
+import java.util.Queue;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+// Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
+// This class is an implementation detail of the Java PeerConnection API.
+@SuppressWarnings("deprecation")
+public class MediaCodecVideoDecoder {
+  // This class is constructed, operated, and destroyed by its C++ incarnation,
+  // so the class and its methods have non-public visibility.  The API this
+  // class exposes aims to mimic the webrtc::VideoDecoder API as closely as
+  // possible to minimize the amount of translation work necessary.
+
+  private static final String TAG = "MediaCodecVideoDecoder";
+
+  // Tracks webrtc::VideoCodecType.
+  public enum VideoCodecType {
+    VIDEO_CODEC_VP8,
+    VIDEO_CODEC_VP9,
+    VIDEO_CODEC_H264
+  }
+
+  // Timeout for input buffer dequeue, in microseconds.
+  private static final int DEQUEUE_INPUT_TIMEOUT = 500000;
+  // Timeout for codec releasing.
+  private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
+  // Max number of output buffers queued before starting to drop decoded frames.
+  private static final int MAX_QUEUED_OUTPUTBUFFERS = 3;
+  // Active running decoder instance. Set in initDecode() (called from native code)
+  // and reset to null in release() call.
+  private static MediaCodecVideoDecoder runningInstance = null;
+  private static MediaCodecVideoDecoderErrorCallback errorCallback = null;
+  private static int codecErrors = 0;
+  // List of disabled codec types - can be set from application.
+  private static Set<String> hwDecoderDisabledTypes = new HashSet<String>();
+
+  private Thread mediaCodecThread;
+  private MediaCodec mediaCodec;
+  private ByteBuffer[] inputBuffers;
+  private ByteBuffer[] outputBuffers;
+  private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
+  private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
+  private static final String H264_MIME_TYPE = "video/avc";
+  // List of supported HW VP8 decoders.
+  private static final String[] supportedVp8HwCodecPrefixes =
+    {"OMX.qcom.", "OMX.Nvidia.", "OMX.Exynos.", "OMX.Intel." };
+  // List of supported HW VP9 decoders.
+  private static final String[] supportedVp9HwCodecPrefixes =
+    {"OMX.qcom.", "OMX.Exynos." };
+  // List of supported HW H.264 decoders.
+  private static final String[] supportedH264HwCodecPrefixes =
+    {"OMX.qcom.", "OMX.Intel." };
+  // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
+  // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
+  private static final int
+    COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
+  // Allowable color formats supported by codec - in order of preference.
+  private static final List<Integer> supportedColorList = Arrays.asList(
+    CodecCapabilities.COLOR_FormatYUV420Planar,
+    CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
+    CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
+    COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m);
+  private int colorFormat;
+  private int width;
+  private int height;
+  private int stride;
+  private int sliceHeight;
+  private boolean hasDecodedFirstFrame;
+  private final Queue<TimeStamps> decodeStartTimeMs = new LinkedList<TimeStamps>();
+  private boolean useSurface;
+
+  // The below variables are only used when decoding to a Surface.
+  private TextureListener textureListener;
+  private int droppedFrames;
+  private Surface surface = null;
+  private final Queue<DecodedOutputBuffer>
+      dequeuedSurfaceOutputBuffers = new LinkedList<DecodedOutputBuffer>();
+
+  // MediaCodec error handler - invoked when a critical error occurs that may
+  // prevent further use of the MediaCodec API. Currently this means that one
+  // of the MediaCodec instances is hanging and can no longer be used.
+  public interface MediaCodecVideoDecoderErrorCallback {
+    void onMediaCodecVideoDecoderCriticalError(int codecErrors);
+  }
+
+  public static void setErrorCallback(MediaCodecVideoDecoderErrorCallback errorCallback) {
+    Logging.d(TAG, "Set error callback");
+    MediaCodecVideoDecoder.errorCallback = errorCallback;
+  }
+
+  // Functions to disable HW decoding - can be called from applications for platforms
+  // which have known HW decoding problems.
+  public static void disableVp8HwCodec() {
+    Logging.w(TAG, "VP8 decoding is disabled by application.");
+    hwDecoderDisabledTypes.add(VP8_MIME_TYPE);
+  }
+
+  public static void disableVp9HwCodec() {
+    Logging.w(TAG, "VP9 decoding is disabled by application.");
+    hwDecoderDisabledTypes.add(VP9_MIME_TYPE);
+  }
+
+  public static void disableH264HwCodec() {
+    Logging.w(TAG, "H.264 decoding is disabled by application.");
+    hwDecoderDisabledTypes.add(H264_MIME_TYPE);
+  }
+
+  // Functions to query if HW decoding is supported.
+  public static boolean isVp8HwSupported() {
+    return !hwDecoderDisabledTypes.contains(VP8_MIME_TYPE) &&
+        (findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null);
+  }
+
+  public static boolean isVp9HwSupported() {
+    return !hwDecoderDisabledTypes.contains(VP9_MIME_TYPE) &&
+        (findDecoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes) != null);
+  }
+
+  public static boolean isH264HwSupported() {
+    return !hwDecoderDisabledTypes.contains(H264_MIME_TYPE) &&
+        (findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null);
+  }
+
+  public static void printStackTrace() {
+    if (runningInstance != null && runningInstance.mediaCodecThread != null) {
+      StackTraceElement[] mediaCodecStackTraces = runningInstance.mediaCodecThread.getStackTrace();
+      if (mediaCodecStackTraces.length > 0) {
+        Logging.d(TAG, "MediaCodecVideoDecoder stacks trace:");
+        for (StackTraceElement stackTrace : mediaCodecStackTraces) {
+          Logging.d(TAG, stackTrace.toString());
+        }
+      }
+    }
+  }
+
+  // Helper struct for findDecoder() below.
+  private static class DecoderProperties {
+    public DecoderProperties(String codecName, int colorFormat) {
+      this.codecName = codecName;
+      this.colorFormat = colorFormat;
+    }
+    public final String codecName; // OpenMax component name for VP8 codec.
+    public final int colorFormat;  // Color format supported by codec.
+  }
+
+  private static DecoderProperties findDecoder(
+      String mime, String[] supportedCodecPrefixes) {
+    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
+      return null; // MediaCodec.setParameters is missing.
+    }
+    Logging.d(TAG, "Trying to find HW decoder for mime " + mime);
+    for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
+      MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
+      if (info.isEncoder()) {
+        continue;
+      }
+      String name = null;
+      for (String mimeType : info.getSupportedTypes()) {
+        if (mimeType.equals(mime)) {
+          name = info.getName();
+          break;
+        }
+      }
+      if (name == null) {
+        continue;  // No HW support in this codec; try the next one.
+      }
+      Logging.d(TAG, "Found candidate decoder " + name);
+
+      // Check if this is a supported decoder.
+      boolean supportedCodec = false;
+      for (String codecPrefix : supportedCodecPrefixes) {
+        if (name.startsWith(codecPrefix)) {
+          supportedCodec = true;
+          break;
+        }
+      }
+      if (!supportedCodec) {
+        continue;
+      }
+
+      // Check if codec supports either yuv420 or nv12.
+      CodecCapabilities capabilities =
+          info.getCapabilitiesForType(mime);
+      for (int colorFormat : capabilities.colorFormats) {
+        Logging.v(TAG, "   Color: 0x" + Integer.toHexString(colorFormat));
+      }
+      for (int supportedColorFormat : supportedColorList) {
+        for (int codecColorFormat : capabilities.colorFormats) {
+          if (codecColorFormat == supportedColorFormat) {
+            // Found supported HW decoder.
+            Logging.d(TAG, "Found target decoder " + name +
+                ". Color: 0x" + Integer.toHexString(codecColorFormat));
+            return new DecoderProperties(name, codecColorFormat);
+          }
+        }
+      }
+    }
+    Logging.d(TAG, "No HW decoder found for mime " + mime);
+    return null;  // No HW decoder.
+  }
+
+  private void checkOnMediaCodecThread() throws IllegalStateException {
+    if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
+      throw new IllegalStateException(
+          "MediaCodecVideoDecoder previously operated on " + mediaCodecThread +
+          " but is now called on " + Thread.currentThread());
+    }
+  }
+
+  // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
+  private boolean initDecode(
+      VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
+    if (mediaCodecThread != null) {
+      throw new RuntimeException("Forgot to release()?");
+    }
+    useSurface = (surfaceTextureHelper != null);
+    String mime = null;
+    String[] supportedCodecPrefixes = null;
+    if (type == VideoCodecType.VIDEO_CODEC_VP8) {
+      mime = VP8_MIME_TYPE;
+      supportedCodecPrefixes = supportedVp8HwCodecPrefixes;
+    } else if (type == VideoCodecType.VIDEO_CODEC_VP9) {
+      mime = VP9_MIME_TYPE;
+      supportedCodecPrefixes = supportedVp9HwCodecPrefixes;
+    } else if (type == VideoCodecType.VIDEO_CODEC_H264) {
+      mime = H264_MIME_TYPE;
+      supportedCodecPrefixes = supportedH264HwCodecPrefixes;
+    } else {
+      throw new RuntimeException("Non supported codec " + type);
+    }
+    DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes);
+    if (properties == null) {
+      throw new RuntimeException("Cannot find HW decoder for " + type);
+    }
+    Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
+        ". Color: 0x" + Integer.toHexString(properties.colorFormat) +
+        ". Use Surface: " + useSurface);
+    runningInstance = this; // Decoder is now running and can be queried for stack traces.
+    mediaCodecThread = Thread.currentThread();
+    try {
+      this.width = width;
+      this.height = height;
+      stride = width;
+      sliceHeight = height;
+
+      if (useSurface) {
+        textureListener = new TextureListener(surfaceTextureHelper);
+        surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
+      }
+
+      MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
+      if (!useSurface) {
+        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
+      }
+      Logging.d(TAG, "  Format: " + format);
+      mediaCodec =
+          MediaCodecVideoEncoder.createByCodecName(properties.codecName);
+      if (mediaCodec == null) {
+        Logging.e(TAG, "Can not create media decoder");
+        return false;
+      }
+      mediaCodec.configure(format, surface, null, 0);
+      mediaCodec.start();
+      colorFormat = properties.colorFormat;
+      outputBuffers = mediaCodec.getOutputBuffers();
+      inputBuffers = mediaCodec.getInputBuffers();
+      decodeStartTimeMs.clear();
+      hasDecodedFirstFrame = false;
+      dequeuedSurfaceOutputBuffers.clear();
+      droppedFrames = 0;
+      Logging.d(TAG, "Input buffers: " + inputBuffers.length +
+          ". Output buffers: " + outputBuffers.length);
+      return true;
+    } catch (IllegalStateException e) {
+      Logging.e(TAG, "initDecode failed", e);
+      return false;
+    }
+  }
+
+  private void release() {
+    Logging.d(TAG, "Java releaseDecoder. Total number of dropped frames: " + droppedFrames);
+    checkOnMediaCodecThread();
+
+    // Run MediaCodec stop() and release() on a separate thread since
+    // MediaCodec.stop() may sometimes hang.
+    final CountDownLatch releaseDone = new CountDownLatch(1);
+
+    Runnable runMediaCodecRelease = new Runnable() {
+      @Override
+      public void run() {
+        try {
+          Logging.d(TAG, "Java releaseDecoder on release thread");
+          mediaCodec.stop();
+          mediaCodec.release();
+          Logging.d(TAG, "Java releaseDecoder on release thread done");
+        } catch (Exception e) {
+          Logging.e(TAG, "Media decoder release failed", e);
+        }
+        releaseDone.countDown();
+      }
+    };
+    new Thread(runMediaCodecRelease).start();
+
+    if (!ThreadUtils.awaitUninterruptibly(releaseDone, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
+      Logging.e(TAG, "Media decoder release timeout");
+      codecErrors++;
+      if (errorCallback != null) {
+        Logging.e(TAG, "Invoke codec error callback. Errors: " + codecErrors);
+        errorCallback.onMediaCodecVideoDecoderCriticalError(codecErrors);
+      }
+    }
+
+    mediaCodec = null;
+    mediaCodecThread = null;
+    runningInstance = null;
+    if (useSurface) {
+      surface.release();
+      surface = null;
+      textureListener.release();
+    }
+    Logging.d(TAG, "Java releaseDecoder done");
+  }
+
+  // Dequeue an input buffer and return its index, -1 if no input buffer is
+  // available, or -2 if the codec is no longer operative.
+  private int dequeueInputBuffer() {
+    checkOnMediaCodecThread();
+    try {
+      return mediaCodec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT);
+    } catch (IllegalStateException e) {
+      Logging.e(TAG, "dequeueIntputBuffer failed", e);
+      return -2;
+    }
+  }
+
+  private boolean queueInputBuffer(int inputBufferIndex, int size, long presentationTimeStampUs,
+      long timeStampMs, long ntpTimeStamp) {
+    checkOnMediaCodecThread();
+    try {
+      inputBuffers[inputBufferIndex].position(0);
+      inputBuffers[inputBufferIndex].limit(size);
+      decodeStartTimeMs.add(new TimeStamps(SystemClock.elapsedRealtime(), timeStampMs,
+          ntpTimeStamp));
+      mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, presentationTimeStampUs, 0);
+      return true;
+    } catch (IllegalStateException e) {
+      Logging.e(TAG, "decode failed", e);
+      return false;
+    }
+  }
+
+  private static class TimeStamps {
+    public TimeStamps(long decodeStartTimeMs, long timeStampMs, long ntpTimeStampMs) {
+      this.decodeStartTimeMs = decodeStartTimeMs;
+      this.timeStampMs = timeStampMs;
+      this.ntpTimeStampMs = ntpTimeStampMs;
+    }
+    // Time when this frame was queued for decoding.
+    private final long decodeStartTimeMs;
+    // Only used for bookkeeping in Java. Stores C++ inputImage._timeStamp value for input frame.
+    private final long timeStampMs;
+    // Only used for bookkeeping in Java. Stores C++ inputImage.ntp_time_ms_ value for input frame.
+    private final long ntpTimeStampMs;
+  }
+
+  // Helper struct for dequeueOutputBuffer() below.
+  private static class DecodedOutputBuffer {
+    public DecodedOutputBuffer(int index, int offset, int size, long presentationTimeStampMs,
+        long timeStampMs, long ntpTimeStampMs, long decodeTime, long endDecodeTime) {
+      this.index = index;
+      this.offset = offset;
+      this.size = size;
+      this.presentationTimeStampMs = presentationTimeStampMs;
+      this.timeStampMs = timeStampMs;
+      this.ntpTimeStampMs = ntpTimeStampMs;
+      this.decodeTimeMs = decodeTime;
+      this.endDecodeTimeMs = endDecodeTime;
+    }
+
+    private final int index;
+    private final int offset;
+    private final int size;
+    // Presentation timestamp returned in dequeueOutputBuffer call.
+    private final long presentationTimeStampMs;
+    // C++ inputImage._timeStamp value for output frame.
+    private final long timeStampMs;
+    // C++ inputImage.ntp_time_ms_ value for output frame.
+    private final long ntpTimeStampMs;
+    // Number of ms it took to decode this frame.
+    private final long decodeTimeMs;
+    // System time when this frame decoding finished.
+    private final long endDecodeTimeMs;
+  }
+
+  // Helper struct for dequeueTextureBuffer() below.
+  private static class DecodedTextureBuffer {
+    private final int textureID;
+    private final float[] transformMatrix;
+    // Presentation timestamp returned in dequeueOutputBuffer call.
+    private final long presentationTimeStampMs;
+    // C++ inputImage._timeStamp value for output frame.
+    private final long timeStampMs;
+    // C++ inputImage.ntp_time_ms_ value for output frame.
+    private final long ntpTimeStampMs;
+    // Number of ms it took to decode this frame.
+    private final long decodeTimeMs;
+    // Interval from when the frame finished decoding until this buffer has been created.
+    // Since there is only one texture, this interval depends on the time from when
+    // a frame is decoded and provided to C++ until that frame is returned to the MediaCodec
+    // so that the texture can be updated with the next decoded frame.
+    private final long frameDelayMs;
+
+    // A DecodedTextureBuffer with zero |textureID| has special meaning and represents a frame
+    // that was dropped.
+    public DecodedTextureBuffer(int textureID, float[] transformMatrix,
+        long presentationTimeStampMs, long timeStampMs, long ntpTimeStampMs, long decodeTimeMs,
+        long frameDelay) {
+      this.textureID = textureID;
+      this.transformMatrix = transformMatrix;
+      this.presentationTimeStampMs = presentationTimeStampMs;
+      this.timeStampMs = timeStampMs;
+      this.ntpTimeStampMs = ntpTimeStampMs;
+      this.decodeTimeMs = decodeTimeMs;
+      this.frameDelayMs = frameDelay;
+    }
+  }
+
+  // Poll-based texture listener.
+  private static class TextureListener
+      implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
+    private final SurfaceTextureHelper surfaceTextureHelper;
+    // |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll().
+    private final Object newFrameLock = new Object();
+    // |bufferToRender| is non-null when waiting for transition between addBufferToRender() to
+    // onTextureFrameAvailable().
+    private DecodedOutputBuffer bufferToRender;
+    private DecodedTextureBuffer renderedBuffer;
+
+    public TextureListener(SurfaceTextureHelper surfaceTextureHelper) {
+      this.surfaceTextureHelper = surfaceTextureHelper;
+      surfaceTextureHelper.setListener(this);
+    }
+
+    public void addBufferToRender(DecodedOutputBuffer buffer) {
+      if (bufferToRender != null) {
+        Logging.e(TAG,
+            "Unexpected addBufferToRender() called while waiting for a texture.");
+        throw new IllegalStateException("Waiting for a texture.");
+      }
+      bufferToRender = buffer;
+    }
+
+    public boolean isWaitingForTexture() {
+      synchronized (newFrameLock) {
+        return bufferToRender != null;
+      }
+    }
+
+    // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
+    @Override
+    public void onTextureFrameAvailable(
+        int oesTextureId, float[] transformMatrix, long timestampNs) {
+      synchronized (newFrameLock) {
+        if (renderedBuffer != null) {
+          Logging.e(TAG,
+              "Unexpected onTextureFrameAvailable() called while already holding a texture.");
+          throw new IllegalStateException("Already holding a texture.");
+        }
+        // |timestampNs| is always zero on some Android versions.
+        renderedBuffer = new DecodedTextureBuffer(oesTextureId, transformMatrix,
+            bufferToRender.presentationTimeStampMs, bufferToRender.timeStampMs,
+            bufferToRender.ntpTimeStampMs, bufferToRender.decodeTimeMs,
+            SystemClock.elapsedRealtime() - bufferToRender.endDecodeTimeMs);
+        bufferToRender = null;
+        newFrameLock.notifyAll();
+      }
+    }
+
+    // Dequeues and returns a DecodedTextureBuffer if available, or null otherwise.
+    public DecodedTextureBuffer dequeueTextureBuffer(int timeoutMs) {
+      synchronized (newFrameLock) {
+        if (renderedBuffer == null && timeoutMs > 0 && isWaitingForTexture()) {
+          try {
+            newFrameLock.wait(timeoutMs);
+          } catch (InterruptedException e) {
+            // Restore the interrupted status by reinterrupting the thread.
+            Thread.currentThread().interrupt();
+          }
+        }
+        DecodedTextureBuffer returnedBuffer = renderedBuffer;
+        renderedBuffer = null;
+        return returnedBuffer;
+      }
+    }
+
+    public void release() {
+      // SurfaceTextureHelper.disconnect() will block until any onTextureFrameAvailable() in
+      // progress is done. Therefore, the call to disconnect() must be outside any synchronized
+      // statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks.
+      surfaceTextureHelper.disconnect();
+      synchronized (newFrameLock) {
+        if (renderedBuffer != null) {
+          surfaceTextureHelper.returnTextureFrame();
+          renderedBuffer = null;
+        }
+      }
+    }
+  }
+
+  // Returns null if no decoded buffer is available, and otherwise a DecodedOutputBuffer.
+  // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
+  // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
+  // upon codec error.
+  private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) {
+    checkOnMediaCodecThread();
+    if (decodeStartTimeMs.isEmpty()) {
+      return null;
+    }
+    // Drain the decoder until receiving a decoded buffer or hitting
+    // MediaCodec.INFO_TRY_AGAIN_LATER.
+    final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+    while (true) {
+      final int result = mediaCodec.dequeueOutputBuffer(
+          info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
+      switch (result) {
+        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
+          outputBuffers = mediaCodec.getOutputBuffers();
+          Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
+          if (hasDecodedFirstFrame) {
+            throw new RuntimeException("Unexpected output buffer change event.");
+          }
+          break;
+        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
+          MediaFormat format = mediaCodec.getOutputFormat();
+          Logging.d(TAG, "Decoder format changed: " + format.toString());
+          int new_width = format.getInteger(MediaFormat.KEY_WIDTH);
+          int new_height = format.getInteger(MediaFormat.KEY_HEIGHT);
+          if (hasDecodedFirstFrame && (new_width != width || new_height != height)) {
+            throw new RuntimeException("Unexpected size change. Configured " + width + "*" +
+                height + ". New " + new_width + "*" + new_height);
+          }
+          width = format.getInteger(MediaFormat.KEY_WIDTH);
+          height = format.getInteger(MediaFormat.KEY_HEIGHT);
+
+          if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
+            colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
+            Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
+            if (!supportedColorList.contains(colorFormat)) {
+              throw new IllegalStateException("Non supported color format: " + colorFormat);
+            }
+          }
+          if (format.containsKey("stride")) {
+            stride = format.getInteger("stride");
+          }
+          if (format.containsKey("slice-height")) {
+            sliceHeight = format.getInteger("slice-height");
+          }
+          Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
+          stride = Math.max(width, stride);
+          sliceHeight = Math.max(height, sliceHeight);
+          break;
+        case MediaCodec.INFO_TRY_AGAIN_LATER:
+          return null;
+        default:
+          hasDecodedFirstFrame = true;
+          TimeStamps timeStamps = decodeStartTimeMs.remove();
+          return new DecodedOutputBuffer(result,
+              info.offset,
+              info.size,
+              TimeUnit.MICROSECONDS.toMillis(info.presentationTimeUs),
+              timeStamps.timeStampMs,
+              timeStamps.ntpTimeStampMs,
+              SystemClock.elapsedRealtime() - timeStamps.decodeStartTimeMs,
+              SystemClock.elapsedRealtime());
+        }
+    }
+  }
+
+  // Returns null if no decoded buffer is available, and otherwise a DecodedTextureBuffer.
+  // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
+  // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
+  // upon codec error. If |dequeueTimeoutMs| > 0, the oldest decoded frame will be dropped if
+  // a frame can't be returned.
+  private DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) {
+    checkOnMediaCodecThread();
+    if (!useSurface) {
+      throw new IllegalStateException("dequeueTexture() called for byte buffer decoding.");
+    }
+    DecodedOutputBuffer outputBuffer = dequeueOutputBuffer(dequeueTimeoutMs);
+    if (outputBuffer != null) {
+      dequeuedSurfaceOutputBuffers.add(outputBuffer);
+    }
+
+    maybeRenderDecodedTextureBuffer();
+    // Check if a texture is ready now by waiting at most |dequeueTimeoutMs|.
+    DecodedTextureBuffer renderedBuffer = textureListener.dequeueTextureBuffer(dequeueTimeoutMs);
+    if (renderedBuffer != null) {
+      maybeRenderDecodedTextureBuffer();
+      return renderedBuffer;
+    }
+
+    if ((dequeuedSurfaceOutputBuffers.size()
+         >= Math.min(MAX_QUEUED_OUTPUTBUFFERS, outputBuffers.length)
+         || (dequeueTimeoutMs > 0 && !dequeuedSurfaceOutputBuffers.isEmpty()))) {
+      ++droppedFrames;
+      // Drop the oldest frame still in dequeuedSurfaceOutputBuffers.
+      // The oldest frame is owned by |textureListener| and can't be dropped since
+      // mediaCodec.releaseOutputBuffer has already been called.
+      final DecodedOutputBuffer droppedFrame = dequeuedSurfaceOutputBuffers.remove();
+      if (dequeueTimeoutMs > 0) {
+        // TODO(perkj): Re-add the below log when VideoRenderGUI has been removed or fixed to
+        // return the one and only texture even if it does not render.
+        Logging.w(TAG, "Draining decoder. Dropping frame with TS: "
+            + droppedFrame.presentationTimeStampMs +
+            ". Total number of dropped frames: " + droppedFrames);
+      } else {
+        Logging.w(TAG, "Too many output buffers " + dequeuedSurfaceOutputBuffers.size() +
+            ". Dropping frame with TS: " + droppedFrame.presentationTimeStampMs +
+            ". Total number of dropped frames: " + droppedFrames);
+      }
+
+      mediaCodec.releaseOutputBuffer(droppedFrame.index, false /* render */);
+      return new DecodedTextureBuffer(0, null,
+          droppedFrame.presentationTimeStampMs, droppedFrame.timeStampMs,
+          droppedFrame.ntpTimeStampMs, droppedFrame.decodeTimeMs,
+          SystemClock.elapsedRealtime() - droppedFrame.endDecodeTimeMs);
+    }
+    return null;
+  }
+
+  private void maybeRenderDecodedTextureBuffer() {
+    if (dequeuedSurfaceOutputBuffers.isEmpty() || textureListener.isWaitingForTexture()) {
+      return;
+    }
+    // Get the first frame in the queue and render to the decoder output surface.
+    final DecodedOutputBuffer buffer = dequeuedSurfaceOutputBuffers.remove();
+    textureListener.addBufferToRender(buffer);
+    mediaCodec.releaseOutputBuffer(buffer.index, true /* render */);
+  }
+
+  // Release a dequeued output byte buffer back to the codec for re-use. Should only be called for
+  // non-surface decoding.
+  // Throws IllegalStateException if the call is made on the wrong thread, if codec is configured
+  // for surface decoding, or if |mediaCodec| is not in the Executing state. Throws
+  // MediaCodec.CodecException upon codec error.
+  private void returnDecodedOutputBuffer(int index)
+      throws IllegalStateException, MediaCodec.CodecException {
+    checkOnMediaCodecThread();
+    if (useSurface) {
+      throw new IllegalStateException("returnDecodedOutputBuffer() called for surface decoding.");
+    }
+    mediaCodec.releaseOutputBuffer(index, false /* render */);
+  }
+}
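The frame-drop decision at the end of dequeueTextureBuffer() above is compact and easy to misread, so here is a standalone sketch of just that condition. The constant value and the queue type are assumptions for illustration; only the boolean logic mirrors the code above.

    // Sketch of the frame-drop decision in dequeueTextureBuffer(). A frame is
    // dropped when too many decoded buffers wait for the output surface, or
    // when the caller is draining (positive timeout) and anything is queued.
    import java.util.ArrayDeque;

    class DropPolicySketch {
      private static final int MAX_QUEUED_OUTPUTBUFFERS = 3;  // assumed value

      static boolean shouldDropOldestFrame(ArrayDeque<Object> queuedBuffers,
          int codecOutputBufferCount, int dequeueTimeoutMs) {
        return queuedBuffers.size()
                >= Math.min(MAX_QUEUED_OUTPUTBUFFERS, codecOutputBufferCount)
            || (dequeueTimeoutMs > 0 && !queuedBuffers.isEmpty());
      }
    }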
diff --git a/webrtc/api/java/src/org/webrtc/MediaCodecVideoEncoder.java b/webrtc/api/java/src/org/webrtc/MediaCodecVideoEncoder.java
new file mode 100644
index 0000000..8b8ee71
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/MediaCodecVideoEncoder.java
@@ -0,0 +1,602 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.annotation.TargetApi;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecList;
+import android.media.MediaFormat;
+import android.opengl.GLES20;
+import android.os.Build;
+import android.os.Bundle;
+import android.view.Surface;
+
+import org.webrtc.Logging;
+
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+// Java-side of peerconnection_jni.cc:MediaCodecVideoEncoder.
+// This class is an implementation detail of the Java PeerConnection API.
+@TargetApi(19)
+@SuppressWarnings("deprecation")
+public class MediaCodecVideoEncoder {
+  // This class is constructed, operated, and destroyed by its C++ incarnation,
+  // so the class and its methods have non-public visibility.  The API this
+  // class exposes aims to mimic the webrtc::VideoEncoder API as closely as
+  // possible to minimize the amount of translation work necessary.
+
+  private static final String TAG = "MediaCodecVideoEncoder";
+
+  // Tracks webrtc::VideoCodecType.
+  public enum VideoCodecType {
+    VIDEO_CODEC_VP8,
+    VIDEO_CODEC_VP9,
+    VIDEO_CODEC_H264
+  }
+
+  private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; // Timeout for codec releasing.
+  private static final int DEQUEUE_TIMEOUT = 0;  // Non-blocking, no wait.
+  // Active running encoder instance. Set in initEncode() (called from native code)
+  // and reset to null in release() call.
+  private static MediaCodecVideoEncoder runningInstance = null;
+  private static MediaCodecVideoEncoderErrorCallback errorCallback = null;
+  private static int codecErrors = 0;
+  // List of disabled codec types - can be set from application.
+  private static Set<String> hwEncoderDisabledTypes = new HashSet<String>();
+
+  private Thread mediaCodecThread;
+  private MediaCodec mediaCodec;
+  private ByteBuffer[] outputBuffers;
+  private EglBase14 eglBase;
+  private int width;
+  private int height;
+  private Surface inputSurface;
+  private GlRectDrawer drawer;
+  private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
+  private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
+  private static final String H264_MIME_TYPE = "video/avc";
+  // List of supported HW VP8 encoders.
+  private static final String[] supportedVp8HwCodecPrefixes =
+    {"OMX.qcom.", "OMX.Intel."};
+  // List of supported HW VP9 encoders.
+  private static final String[] supportedVp9HwCodecPrefixes =
+    {"OMX.qcom."};
+  // List of supported HW H.264 encoders.
+  private static final String[] supportedH264HwCodecPrefixes =
+    {"OMX.qcom."};
+  // List of devices with poor H.264 encoder quality.
+  private static final String[] H264_HW_EXCEPTION_MODELS = new String[] {
+    // The HW H.264 encoder on the devices below has poor bitrate control -
+    // actual bitrates deviate a lot from the target value.
+    "SAMSUNG-SGH-I337",
+    "Nexus 7",
+    "Nexus 4"
+  };
+
+  // Bitrate modes - should be in sync with OMX_VIDEO_CONTROLRATETYPE defined
+  // in OMX_Video.h
+  private static final int VIDEO_ControlRateConstant = 2;
+  // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
+  // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
+  private static final int
+    COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
+  // Allowable color formats supported by codec - in order of preference.
+  private static final int[] supportedColorList = {
+    CodecCapabilities.COLOR_FormatYUV420Planar,
+    CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
+    CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
+    COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m
+  };
+  private static final int[] supportedSurfaceColorList = {
+    CodecCapabilities.COLOR_FormatSurface
+  };
+  private VideoCodecType type;
+  private int colorFormat;  // Used by native code.
+
+  // SPS and PPS NALs (Config frame) for H.264.
+  private ByteBuffer configData = null;
+
+  // MediaCodec error handler - invoked when a critical error happens that may
+  // prevent further use of the MediaCodec API. Currently this means that one
+  // of the MediaCodec instances is hanging and can no longer be used.
+  public static interface MediaCodecVideoEncoderErrorCallback {
+    void onMediaCodecVideoEncoderCriticalError(int codecErrors);
+  }
+
+  public static void setErrorCallback(MediaCodecVideoEncoderErrorCallback errorCallback) {
+    Logging.d(TAG, "Set error callback");
+    MediaCodecVideoEncoder.errorCallback = errorCallback;
+  }
+
+  // Functions to disable HW encoding - can be called from applications for
+  // platforms which have known HW encoding problems.
+  public static void disableVp8HwCodec() {
+    Logging.w(TAG, "VP8 encoding is disabled by application.");
+    hwEncoderDisabledTypes.add(VP8_MIME_TYPE);
+  }
+
+  public static void disableVp9HwCodec() {
+    Logging.w(TAG, "VP9 encoding is disabled by application.");
+    hwEncoderDisabledTypes.add(VP9_MIME_TYPE);
+  }
+
+  public static void disableH264HwCodec() {
+    Logging.w(TAG, "H.264 encoding is disabled by application.");
+    hwEncoderDisabledTypes.add(H264_MIME_TYPE);
+  }
+
+  // Functions to query if HW encoding is supported.
+  public static boolean isVp8HwSupported() {
+    return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE) &&
+        (findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes, supportedColorList) != null);
+  }
+
+  public static boolean isVp9HwSupported() {
+    return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE) &&
+        (findHwEncoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes, supportedColorList) != null);
+  }
+
+  public static boolean isH264HwSupported() {
+    return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE) &&
+        (findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes, supportedColorList) != null);
+  }
+
+  public static boolean isVp8HwSupportedUsingTextures() {
+    return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE) && (findHwEncoder(
+        VP8_MIME_TYPE, supportedVp8HwCodecPrefixes, supportedSurfaceColorList) != null);
+  }
+
+  public static boolean isVp9HwSupportedUsingTextures() {
+    return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE) && (findHwEncoder(
+        VP9_MIME_TYPE, supportedVp9HwCodecPrefixes, supportedSurfaceColorList) != null);
+  }
+
+  public static boolean isH264HwSupportedUsingTextures() {
+    return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE) && (findHwEncoder(
+        H264_MIME_TYPE, supportedH264HwCodecPrefixes, supportedSurfaceColorList) != null);
+  }
+
+  // Helper struct for findHwEncoder() below.
+  private static class EncoderProperties {
+    public EncoderProperties(String codecName, int colorFormat) {
+      this.codecName = codecName;
+      this.colorFormat = colorFormat;
+    }
+    public final String codecName; // OpenMax component name for HW codec.
+    public final int colorFormat;  // Color format supported by codec.
+  }
+
+  private static EncoderProperties findHwEncoder(
+      String mime, String[] supportedHwCodecPrefixes, int[] colorList) {
+    // MediaCodec.setParameters is missing for JB and below, so the bitrate
+    // cannot be adjusted dynamically.
+    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
+      return null;
+    }
+
+    // Check if device is in H.264 exception list.
+    if (mime.equals(H264_MIME_TYPE)) {
+      List<String> exceptionModels = Arrays.asList(H264_HW_EXCEPTION_MODELS);
+      if (exceptionModels.contains(Build.MODEL)) {
+        Logging.w(TAG, "Model: " + Build.MODEL + " has black listed H.264 encoder.");
+        return null;
+      }
+    }
+
+    for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
+      MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
+      if (!info.isEncoder()) {
+        continue;
+      }
+      String name = null;
+      for (String mimeType : info.getSupportedTypes()) {
+        if (mimeType.equals(mime)) {
+          name = info.getName();
+          break;
+        }
+      }
+      if (name == null) {
+        continue;  // No HW support in this codec; try the next one.
+      }
+      Logging.v(TAG, "Found candidate encoder " + name);
+
+      // Check if this is a supported HW encoder.
+      boolean supportedCodec = false;
+      for (String hwCodecPrefix : supportedHwCodecPrefixes) {
+        if (name.startsWith(hwCodecPrefix)) {
+          supportedCodec = true;
+          break;
+        }
+      }
+      if (!supportedCodec) {
+        continue;
+      }
+
+      CodecCapabilities capabilities = info.getCapabilitiesForType(mime);
+      for (int colorFormat : capabilities.colorFormats) {
+        Logging.v(TAG, "   Color: 0x" + Integer.toHexString(colorFormat));
+      }
+
+      for (int supportedColorFormat : colorList) {
+        for (int codecColorFormat : capabilities.colorFormats) {
+          if (codecColorFormat == supportedColorFormat) {
+            // Found supported HW encoder.
+            Logging.d(TAG, "Found target encoder for mime " + mime + " : " + name +
+                ". Color: 0x" + Integer.toHexString(codecColorFormat));
+            return new EncoderProperties(name, codecColorFormat);
+          }
+        }
+      }
+    }
+    return null;  // No HW encoder.
+  }
+
+  private void checkOnMediaCodecThread() {
+    if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
+      throw new RuntimeException(
+          "MediaCodecVideoEncoder previously operated on " + mediaCodecThread +
+          " but is now called on " + Thread.currentThread());
+    }
+  }
+
+  public static void printStackTrace() {
+    if (runningInstance != null && runningInstance.mediaCodecThread != null) {
+      StackTraceElement[] mediaCodecStackTraces = runningInstance.mediaCodecThread.getStackTrace();
+      if (mediaCodecStackTraces.length > 0) {
+        Logging.d(TAG, "MediaCodecVideoEncoder stacks trace:");
+        for (StackTraceElement stackTrace : mediaCodecStackTraces) {
+          Logging.d(TAG, stackTrace.toString());
+        }
+      }
+    }
+  }
+
+  static MediaCodec createByCodecName(String codecName) {
+    try {
+      // On Android L and later this call can throw IOException, so catch a
+      // generic Exception in order to work on all SDK levels.
+      return MediaCodec.createByCodecName(codecName);
+    } catch (Exception e) {
+      return null;
+    }
+  }
+
+  boolean initEncode(VideoCodecType type, int width, int height, int kbps, int fps,
+      EglBase14.Context sharedContext) {
+    final boolean useSurface = sharedContext != null;
+    Logging.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height +
+        ". @ " + kbps + " kbps. Fps: " + fps + ". Encode from texture : " + useSurface);
+
+    this.width = width;
+    this.height = height;
+    if (mediaCodecThread != null) {
+      throw new RuntimeException("Forgot to release()?");
+    }
+    EncoderProperties properties = null;
+    String mime = null;
+    int keyFrameIntervalSec = 0;
+    if (type == VideoCodecType.VIDEO_CODEC_VP8) {
+      mime = VP8_MIME_TYPE;
+      properties = findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes,
+          useSurface ? supportedSurfaceColorList : supportedColorList);
+      keyFrameIntervalSec = 100;
+    } else if (type == VideoCodecType.VIDEO_CODEC_VP9) {
+      mime = VP9_MIME_TYPE;
+      properties = findHwEncoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes,
+          useSurface ? supportedSurfaceColorList : supportedColorList);
+      keyFrameIntervalSec = 100;
+    } else if (type == VideoCodecType.VIDEO_CODEC_H264) {
+      mime = H264_MIME_TYPE;
+      properties = findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes,
+          useSurface ? supportedSurfaceColorList : supportedColorList);
+      keyFrameIntervalSec = 20;
+    }
+    if (properties == null) {
+      throw new RuntimeException("Can not find HW encoder for " + type);
+    }
+    runningInstance = this; // Encoder is now running and can be queried for stack traces.
+    colorFormat = properties.colorFormat;
+    Logging.d(TAG, "Color format: " + colorFormat);
+
+    mediaCodecThread = Thread.currentThread();
+    try {
+      MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
+      format.setInteger(MediaFormat.KEY_BIT_RATE, 1000 * kbps);
+      format.setInteger("bitrate-mode", VIDEO_ControlRateConstant);
+      format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
+      format.setInteger(MediaFormat.KEY_FRAME_RATE, fps);
+      format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec);
+      Logging.d(TAG, "  Format: " + format);
+      mediaCodec = createByCodecName(properties.codecName);
+      this.type = type;
+      if (mediaCodec == null) {
+        Logging.e(TAG, "Can not create media encoder");
+        return false;
+      }
+      mediaCodec.configure(
+          format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+
+      if (useSurface) {
+        eglBase = new EglBase14(sharedContext, EglBase.CONFIG_RECORDABLE);
+        // Create an input surface and keep a reference since we must release the surface when done.
+        inputSurface = mediaCodec.createInputSurface();
+        eglBase.createSurface(inputSurface);
+        drawer = new GlRectDrawer();
+      }
+      mediaCodec.start();
+      outputBuffers = mediaCodec.getOutputBuffers();
+      Logging.d(TAG, "Output buffers: " + outputBuffers.length);
+
+    } catch (IllegalStateException e) {
+      Logging.e(TAG, "initEncode failed", e);
+      return false;
+    }
+    return true;
+  }
+
+  ByteBuffer[] getInputBuffers() {
+    ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
+    Logging.d(TAG, "Input buffers: " + inputBuffers.length);
+    return inputBuffers;
+  }
+
+  boolean encodeBuffer(
+      boolean isKeyframe, int inputBuffer, int size,
+      long presentationTimestampUs) {
+    checkOnMediaCodecThread();
+    try {
+      if (isKeyframe) {
+        // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
+        // indicate this in queueInputBuffer() below and guarantee _this_ frame
+        // be encoded as a key frame, but sadly that flag is ignored.  Instead,
+        // we request a key frame "soon".
+        Logging.d(TAG, "Sync frame request");
+        Bundle b = new Bundle();
+        b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
+        mediaCodec.setParameters(b);
+      }
+      mediaCodec.queueInputBuffer(
+          inputBuffer, 0, size, presentationTimestampUs, 0);
+      return true;
+    } catch (IllegalStateException e) {
+      Logging.e(TAG, "encodeBuffer failed", e);
+      return false;
+    }
+  }
+
+  boolean encodeTexture(boolean isKeyframe, int oesTextureId, float[] transformationMatrix,
+      long presentationTimestampUs) {
+    checkOnMediaCodecThread();
+    try {
+      if (isKeyframe) {
+        Logging.d(TAG, "Sync frame request");
+        Bundle b = new Bundle();
+        b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
+        mediaCodec.setParameters(b);
+      }
+      eglBase.makeCurrent();
+      // TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
+      // but it's a workaround for bug webrtc:5147.
+      GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+      drawer.drawOes(oesTextureId, transformationMatrix, 0, 0, width, height);
+      eglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs));
+      return true;
+    } catch (RuntimeException e) {
+      Logging.e(TAG, "encodeTexture failed", e);
+      return false;
+    }
+  }
+
+  void release() {
+    Logging.d(TAG, "Java releaseEncoder");
+    checkOnMediaCodecThread();
+
+    // Run MediaCodec stop() and release() on a separate thread since
+    // MediaCodec.stop() may sometimes hang.
+    final CountDownLatch releaseDone = new CountDownLatch(1);
+
+    Runnable runMediaCodecRelease = new Runnable() {
+      @Override
+      public void run() {
+        try {
+          Logging.d(TAG, "Java releaseEncoder on release thread");
+          mediaCodec.stop();
+          mediaCodec.release();
+          Logging.d(TAG, "Java releaseEncoder on release thread done");
+        } catch (Exception e) {
+          Logging.e(TAG, "Media encoder release failed", e);
+        }
+        releaseDone.countDown();
+      }
+    };
+    new Thread(runMediaCodecRelease).start();
+
+    if (!ThreadUtils.awaitUninterruptibly(releaseDone, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
+      Logging.e(TAG, "Media encoder release timeout");
+      codecErrors++;
+      if (errorCallback != null) {
+        Logging.e(TAG, "Invoke codec error callback. Errors: " + codecErrors);
+        errorCallback.onMediaCodecVideoEncoderCriticalError(codecErrors);
+      }
+    }
+
+    mediaCodec = null;
+    mediaCodecThread = null;
+    if (drawer != null) {
+      drawer.release();
+      drawer = null;
+    }
+    if (eglBase != null) {
+      eglBase.release();
+      eglBase = null;
+    }
+    if (inputSurface != null) {
+      inputSurface.release();
+      inputSurface = null;
+    }
+    runningInstance = null;
+    Logging.d(TAG, "Java releaseEncoder done");
+  }
+
+  private boolean setRates(int kbps, int frameRateIgnored) {
+    // frameRate argument is ignored - HW encoder is supposed to use
+    // video frame timestamps for bit allocation.
+    checkOnMediaCodecThread();
+    Logging.v(TAG, "setRates: " + kbps + " kbps. Fps: " + frameRateIgnored);
+    try {
+      Bundle params = new Bundle();
+      params.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, 1000 * kbps);
+      mediaCodec.setParameters(params);
+      return true;
+    } catch (IllegalStateException e) {
+      Logging.e(TAG, "setRates failed", e);
+      return false;
+    }
+  }
+
+  // Dequeue an input buffer and return its index, -1 if no input buffer is
+  // available, or -2 if the codec is no longer operative.
+  int dequeueInputBuffer() {
+    checkOnMediaCodecThread();
+    try {
+      return mediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT);
+    } catch (IllegalStateException e) {
+      Logging.e(TAG, "dequeueIntputBuffer failed", e);
+      return -2;
+    }
+  }
+
+  // Helper struct for dequeueOutputBuffer() below.
+  static class OutputBufferInfo {
+    public OutputBufferInfo(
+        int index, ByteBuffer buffer,
+        boolean isKeyFrame, long presentationTimestampUs) {
+      this.index = index;
+      this.buffer = buffer;
+      this.isKeyFrame = isKeyFrame;
+      this.presentationTimestampUs = presentationTimestampUs;
+    }
+
+    public final int index;
+    public final ByteBuffer buffer;
+    public final boolean isKeyFrame;
+    public final long presentationTimestampUs;
+  }
+
+  // Dequeue and return an output buffer, or null if no output is ready.  Return
+  // a fake OutputBufferInfo with index -1 if the codec is no longer operable.
+  OutputBufferInfo dequeueOutputBuffer() {
+    checkOnMediaCodecThread();
+    try {
+      MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+      int result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
+      // Check if this is a config frame and save the configuration data.
+      if (result >= 0) {
+        boolean isConfigFrame =
+            (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
+        if (isConfigFrame) {
+          Logging.d(TAG, "Config frame generated. Offset: " + info.offset +
+              ". Size: " + info.size);
+          configData = ByteBuffer.allocateDirect(info.size);
+          outputBuffers[result].position(info.offset);
+          outputBuffers[result].limit(info.offset + info.size);
+          configData.put(outputBuffers[result]);
+          // Release buffer back.
+          mediaCodec.releaseOutputBuffer(result, false);
+          // Query next output.
+          result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
+        }
+      }
+      if (result >= 0) {
+        // MediaCodec doesn't care about Buffer position/remaining/etc so we can
+        // mess with them to get a slice and avoid having to pass extra
+        // (BufferInfo-related) parameters back to C++.
+        ByteBuffer outputBuffer = outputBuffers[result].duplicate();
+        outputBuffer.position(info.offset);
+        outputBuffer.limit(info.offset + info.size);
+        // Check key frame flag.
+        boolean isKeyFrame =
+            (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
+        if (isKeyFrame) {
+          Logging.d(TAG, "Sync frame generated");
+        }
+        if (isKeyFrame && type == VideoCodecType.VIDEO_CODEC_H264) {
+          Logging.d(TAG, "Appending config frame of size " + configData.capacity() +
+              " to output buffer with offset " + info.offset + ", size " +
+              info.size);
+          // For H.264 key frames, append the SPS and PPS NALs at the start.
+          ByteBuffer keyFrameBuffer = ByteBuffer.allocateDirect(
+              configData.capacity() + info.size);
+          configData.rewind();
+          keyFrameBuffer.put(configData);
+          keyFrameBuffer.put(outputBuffer);
+          keyFrameBuffer.position(0);
+          return new OutputBufferInfo(result, keyFrameBuffer,
+              isKeyFrame, info.presentationTimeUs);
+        } else {
+          return new OutputBufferInfo(result, outputBuffer.slice(),
+              isKeyFrame, info.presentationTimeUs);
+        }
+      } else if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
+        outputBuffers = mediaCodec.getOutputBuffers();
+        return dequeueOutputBuffer();
+      } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+        return dequeueOutputBuffer();
+      } else if (result == MediaCodec.INFO_TRY_AGAIN_LATER) {
+        return null;
+      }
+      throw new RuntimeException("dequeueOutputBuffer: " + result);
+    } catch (IllegalStateException e) {
+      Logging.e(TAG, "dequeueOutputBuffer failed", e);
+      return new OutputBufferInfo(-1, null, false, -1);
+    }
+  }
+
+  // Release a dequeued output buffer back to the codec for re-use.  Return
+  // false if the codec is no longer operable.
+  boolean releaseOutputBuffer(int index) {
+    checkOnMediaCodecThread();
+    try {
+      mediaCodec.releaseOutputBuffer(index, false);
+      return true;
+    } catch (IllegalStateException e) {
+      Logging.e(TAG, "releaseOutputBuffer failed", e);
+      return false;
+    }
+  }
+}
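A subtle piece of dequeueOutputBuffer() above is the H.264 key-frame path, which prepends the cached SPS/PPS config frame to every key frame before handing it back to native code. A self-contained sketch of just that ByteBuffer handling (the class and method names here are illustrative):

    import java.nio.ByteBuffer;

    class KeyFrameSketch {
      // Mirrors the SPS/PPS prepend step in dequeueOutputBuffer().
      static ByteBuffer prependConfig(ByteBuffer configData, ByteBuffer keyFrame) {
        ByteBuffer out =
            ByteBuffer.allocateDirect(configData.capacity() + keyFrame.remaining());
        configData.rewind();  // re-read the cached config frame from the start
        out.put(configData);  // SPS and PPS NALs first...
        out.put(keyFrame);    // ...then the encoded key frame payload
        out.position(0);      // rewind so the consumer reads from the beginning
        return out;
      }
    }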
diff --git a/webrtc/api/java/src/org/webrtc/MediaConstraints.java b/webrtc/api/java/src/org/webrtc/MediaConstraints.java
new file mode 100644
index 0000000..730df35
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/MediaConstraints.java
@@ -0,0 +1,101 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Description of media constraints for {@code MediaStream} and
+ * {@code PeerConnection}.
+ */
+public class MediaConstraints {
+  /** Simple String key/value pair. */
+  public static class KeyValuePair {
+    private final String key;
+    private final String value;
+
+    public KeyValuePair(String key, String value) {
+      this.key = key;
+      this.value = value;
+    }
+
+    public String getKey() {
+      return key;
+    }
+
+    public String getValue() {
+      return value;
+    }
+
+    public String toString() {
+      return key + ": " + value;
+    }
+
+    @Override
+    public boolean equals(Object other) {
+      if (this == other) {
+        return true;
+      }
+      if (other == null || getClass() != other.getClass()) {
+        return false;
+      }
+      KeyValuePair that = (KeyValuePair)other;
+      return key.equals(that.key) && value.equals(that.value);
+    }
+
+    @Override
+    public int hashCode() {
+      return key.hashCode() + value.hashCode();
+    }
+  }
+
+  public final List<KeyValuePair> mandatory;
+  public final List<KeyValuePair> optional;
+
+  public MediaConstraints() {
+    mandatory = new LinkedList<KeyValuePair>();
+    optional = new LinkedList<KeyValuePair>();
+  }
+
+  private static String stringifyKeyValuePairList(List<KeyValuePair> list) {
+    StringBuilder builder = new StringBuilder("[");
+    for (KeyValuePair pair : list) {
+      if (builder.length() > 1) {
+        builder.append(", ");
+      }
+      builder.append(pair.toString());
+    }
+    return builder.append("]").toString();
+  }
+
+  public String toString() {
+    return "mandatory: " + stringifyKeyValuePairList(mandatory) +
+        ", optional: " + stringifyKeyValuePairList(optional);
+  }
+}
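Applications typically just populate the two lists before passing the object to createOffer()/createAnswer() or a source factory. A short usage sketch; the constraint names are common examples, not values defined by this class:

    MediaConstraints constraints = new MediaConstraints();
    // Keys and values are plain strings; these particular names are illustrative.
    constraints.mandatory.add(
        new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
    constraints.optional.add(
        new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));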
diff --git a/webrtc/api/java/src/org/webrtc/MediaSource.java b/webrtc/api/java/src/org/webrtc/MediaSource.java
new file mode 100644
index 0000000..d79b462
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/MediaSource.java
@@ -0,0 +1,55 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+
+package org.webrtc;
+
+/** Java wrapper for a C++ MediaSourceInterface. */
+public class MediaSource {
+  /** Tracks MediaSourceInterface.SourceState */
+  public enum State {
+    INITIALIZING, LIVE, ENDED, MUTED
+  }
+
+  final long nativeSource;  // Package-protected for PeerConnectionFactory.
+
+  public MediaSource(long nativeSource) {
+    this.nativeSource = nativeSource;
+  }
+
+  public State state() {
+    return nativeState(nativeSource);
+  }
+
+  public void dispose() {
+    free(nativeSource);
+  }
+
+  private static native State nativeState(long pointer);
+
+  private static native void free(long nativeSource);
+}
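As elsewhere in this API, the Java wrapper does not release the native object automatically; dispose() must be called explicitly. A hypothetical fragment, assuming |factory| and |capturer| exist and that VideoSource subclasses MediaSource:

    VideoSource source =
        factory.createVideoSource(capturer, new MediaConstraints());
    if (source.state() == MediaSource.State.LIVE) {
      // The native source is producing frames; hand it to createVideoTrack().
    }
    source.dispose();  // frees the underlying native MediaSourceInterface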
diff --git a/webrtc/api/java/src/org/webrtc/MediaStream.java b/webrtc/api/java/src/org/webrtc/MediaStream.java
new file mode 100644
index 0000000..be00f13
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/MediaStream.java
@@ -0,0 +1,134 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import java.util.LinkedList;
+
+/** Java wrapper for a C++ MediaStreamInterface. */
+public class MediaStream {
+  public final LinkedList<AudioTrack> audioTracks;
+  public final LinkedList<VideoTrack> videoTracks;
+  public final LinkedList<VideoTrack> preservedVideoTracks;
+  // Package-protected for PeerConnection.
+  final long nativeStream;
+
+  public MediaStream(long nativeStream) {
+    audioTracks = new LinkedList<AudioTrack>();
+    videoTracks = new LinkedList<VideoTrack>();
+    preservedVideoTracks = new LinkedList<VideoTrack>();
+    this.nativeStream = nativeStream;
+  }
+
+  public boolean addTrack(AudioTrack track) {
+    if (nativeAddAudioTrack(nativeStream, track.nativeTrack)) {
+      audioTracks.add(track);
+      return true;
+    }
+    return false;
+  }
+
+  public boolean addTrack(VideoTrack track) {
+    if (nativeAddVideoTrack(nativeStream, track.nativeTrack)) {
+      videoTracks.add(track);
+      return true;
+    }
+    return false;
+  }
+
+  // Tracks added via addTrack() are automatically released when
+  // MediaStream.dispose() is called. If a video track needs to be preserved
+  // after the MediaStream is destroyed, add it with addPreservedTrack() instead.
+  public boolean addPreservedTrack(VideoTrack track) {
+    if (nativeAddVideoTrack(nativeStream, track.nativeTrack)) {
+      preservedVideoTracks.add(track);
+      return true;
+    }
+    return false;
+  }
+
+  public boolean removeTrack(AudioTrack track) {
+    if (nativeRemoveAudioTrack(nativeStream, track.nativeTrack)) {
+      audioTracks.remove(track);
+      return true;
+    }
+    return false;
+  }
+
+  public boolean removeTrack(VideoTrack track) {
+    if (nativeRemoveVideoTrack(nativeStream, track.nativeTrack)) {
+      videoTracks.remove(track);
+      preservedVideoTracks.remove(track);
+      return true;
+    }
+    return false;
+  }
+
+  public void dispose() {
+    // Remove and release previously added audio and video tracks.
+    while (!audioTracks.isEmpty()) {
+      AudioTrack track = audioTracks.getFirst();
+      removeTrack(track);
+      track.dispose();
+    }
+    while (!videoTracks.isEmpty()) {
+      VideoTrack track = videoTracks.getFirst();
+      removeTrack(track);
+      track.dispose();
+    }
+    // Remove, but do not release preserved video tracks.
+    while (!preservedVideoTracks.isEmpty()) {
+      removeTrack(preservedVideoTracks.getFirst());
+    }
+    free(nativeStream);
+  }
+
+  public String label() {
+    return nativeLabel(nativeStream);
+  }
+
+  public String toString() {
+    return "[" + label() + ":A=" + audioTracks.size() +
+        ":V=" + videoTracks.size() + "]";
+  }
+
+  private static native boolean nativeAddAudioTrack(
+      long nativeStream, long nativeAudioTrack);
+
+  private static native boolean nativeAddVideoTrack(
+      long nativeStream, long nativeVideoTrack);
+
+  private static native boolean nativeRemoveAudioTrack(
+      long nativeStream, long nativeAudioTrack);
+
+  private static native boolean nativeRemoveVideoTrack(
+      long nativeStream, long nativeVideoTrack);
+
+  private static native String nativeLabel(long nativeStream);
+
+  private static native void free(long nativeStream);
+}
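The ownership rules above lead to a usage pattern like this sketch, where |factory| and the tracks are assumed to exist and the stream label is arbitrary:

    MediaStream stream = factory.createLocalMediaStream("ARDAMS");
    stream.addTrack(audioTrack);           // released by stream.dispose()
    stream.addPreservedTrack(videoTrack);  // survives stream.dispose()
    // ... use the stream ...
    stream.dispose();      // releases audioTrack; removes but keeps videoTrack
    videoTrack.dispose();  // the caller releases preserved tracks itself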
diff --git a/webrtc/api/java/src/org/webrtc/MediaStreamTrack.java b/webrtc/api/java/src/org/webrtc/MediaStreamTrack.java
new file mode 100644
index 0000000..3965069
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/MediaStreamTrack.java
@@ -0,0 +1,86 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ MediaStreamTrackInterface. */
+public class MediaStreamTrack {
+  /** Tracks MediaStreamTrackInterface.TrackState */
+  public enum State {
+    INITIALIZING, LIVE, ENDED, FAILED
+  }
+
+  final long nativeTrack;
+
+  public MediaStreamTrack(long nativeTrack) {
+    this.nativeTrack = nativeTrack;
+  }
+
+  public String id() {
+    return nativeId(nativeTrack);
+  }
+
+  public String kind() {
+    return nativeKind(nativeTrack);
+  }
+
+  public boolean enabled() {
+    return nativeEnabled(nativeTrack);
+  }
+
+  public boolean setEnabled(boolean enable) {
+    return nativeSetEnabled(nativeTrack, enable);
+  }
+
+  public State state() {
+    return nativeState(nativeTrack);
+  }
+
+  public boolean setState(State newState) {
+    return nativeSetState(nativeTrack, newState.ordinal());
+  }
+
+  public void dispose() {
+    free(nativeTrack);
+  }
+
+  private static native String nativeId(long nativeTrack);
+
+  private static native String nativeKind(long nativeTrack);
+
+  private static native boolean nativeEnabled(long nativeTrack);
+
+  private static native boolean nativeSetEnabled(
+      long nativeTrack, boolean enabled);
+
+  private static native State nativeState(long nativeTrack);
+
+  private static native boolean nativeSetState(
+      long nativeTrack, int newState);
+
+  private static native void free(long nativeTrack);
+}
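A common pattern on top of this wrapper is muting by flipping the enabled flag rather than removing the track from its stream; |track| is assumed to be an existing MediaStreamTrack:

    track.setEnabled(false);  // mute: the track stays attached to its stream
    // ... later ...
    track.setEnabled(true);   // unmute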
diff --git a/webrtc/api/java/src/org/webrtc/PeerConnection.java b/webrtc/api/java/src/org/webrtc/PeerConnection.java
new file mode 100644
index 0000000..36cd075
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/PeerConnection.java
@@ -0,0 +1,305 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+
+package org.webrtc;
+
+import java.util.Collections;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Java-land version of the PeerConnection APIs; wraps the C++ API
+ * http://www.webrtc.org/reference/native-apis, which in turn is inspired by the
+ * JS APIs: http://dev.w3.org/2011/webrtc/editor/webrtc.html and
+ * http://www.w3.org/TR/mediacapture-streams/
+ */
+public class PeerConnection {
+  static {
+    System.loadLibrary("jingle_peerconnection_so");
+  }
+
+  /** Tracks PeerConnectionInterface::IceGatheringState */
+  public enum IceGatheringState { NEW, GATHERING, COMPLETE };
+
+  /** Tracks PeerConnectionInterface::IceConnectionState */
+  public enum IceConnectionState {
+    NEW, CHECKING, CONNECTED, COMPLETED, FAILED, DISCONNECTED, CLOSED
+  };
+
+  /** Tracks PeerConnectionInterface::SignalingState */
+  public enum SignalingState {
+    STABLE, HAVE_LOCAL_OFFER, HAVE_LOCAL_PRANSWER, HAVE_REMOTE_OFFER,
+    HAVE_REMOTE_PRANSWER, CLOSED
+  };
+
+  /** Java version of PeerConnectionObserver. */
+  public static interface Observer {
+    /** Triggered when the SignalingState changes. */
+    public void onSignalingChange(SignalingState newState);
+
+    /** Triggered when the IceConnectionState changes. */
+    public void onIceConnectionChange(IceConnectionState newState);
+
+    /** Triggered when the ICE connection receiving status changes. */
+    public void onIceConnectionReceivingChange(boolean receiving);
+
+    /** Triggered when the IceGatheringState changes. */
+    public void onIceGatheringChange(IceGatheringState newState);
+
+    /** Triggered when a new ICE candidate has been found. */
+    public void onIceCandidate(IceCandidate candidate);
+
+    /** Triggered when media is received on a new stream from remote peer. */
+    public void onAddStream(MediaStream stream);
+
+    /** Triggered when a remote peer closes a stream. */
+    public void onRemoveStream(MediaStream stream);
+
+    /** Triggered when a remote peer opens a DataChannel. */
+    public void onDataChannel(DataChannel dataChannel);
+
+    /** Triggered when renegotiation is necessary. */
+    public void onRenegotiationNeeded();
+  }
+
+  /** Java version of PeerConnectionInterface.IceServer. */
+  public static class IceServer {
+    public final String uri;
+    public final String username;
+    public final String password;
+
+    /** Convenience constructor for STUN servers. */
+    public IceServer(String uri) {
+      this(uri, "", "");
+    }
+
+    public IceServer(String uri, String username, String password) {
+      this.uri = uri;
+      this.username = username;
+      this.password = password;
+    }
+
+    public String toString() {
+      return uri + "[" + username + ":" + password + "]";
+    }
+  }
+
+  /** Java version of PeerConnectionInterface.IceTransportsType */
+  public enum IceTransportsType {
+    NONE, RELAY, NOHOST, ALL
+  };
+
+  /** Java version of PeerConnectionInterface.BundlePolicy */
+  public enum BundlePolicy {
+    BALANCED, MAXBUNDLE, MAXCOMPAT
+  };
+
+  /** Java version of PeerConnectionInterface.RtcpMuxPolicy */
+  public enum RtcpMuxPolicy {
+    NEGOTIATE, REQUIRE
+  };
+
+  /** Java version of PeerConnectionInterface.TcpCandidatePolicy */
+  public enum TcpCandidatePolicy {
+    ENABLED, DISABLED
+  };
+
+  /** Java version of rtc::KeyType */
+  public enum KeyType {
+    RSA, ECDSA
+  }
+
+  /** Java version of PeerConnectionInterface.ContinualGatheringPolicy */
+  public enum ContinualGatheringPolicy {
+    GATHER_ONCE, GATHER_CONTINUALLY
+  }
+
+  /** Java version of PeerConnectionInterface.RTCConfiguration */
+  public static class RTCConfiguration {
+    public IceTransportsType iceTransportsType;
+    public List<IceServer> iceServers;
+    public BundlePolicy bundlePolicy;
+    public RtcpMuxPolicy rtcpMuxPolicy;
+    public TcpCandidatePolicy tcpCandidatePolicy;
+    public int audioJitterBufferMaxPackets;
+    public boolean audioJitterBufferFastAccelerate;
+    public int iceConnectionReceivingTimeout;
+    public int iceBackupCandidatePairPingInterval;
+    public KeyType keyType;
+    public ContinualGatheringPolicy continualGatheringPolicy;
+
+    public RTCConfiguration(List<IceServer> iceServers) {
+      iceTransportsType = IceTransportsType.ALL;
+      bundlePolicy = BundlePolicy.BALANCED;
+      rtcpMuxPolicy = RtcpMuxPolicy.NEGOTIATE;
+      tcpCandidatePolicy = TcpCandidatePolicy.ENABLED;
+      this.iceServers = iceServers;
+      audioJitterBufferMaxPackets = 50;
+      audioJitterBufferFastAccelerate = false;
+      iceConnectionReceivingTimeout = -1;
+      iceBackupCandidatePairPingInterval = -1;
+      keyType = KeyType.ECDSA;
+      continualGatheringPolicy = ContinualGatheringPolicy.GATHER_ONCE;
+    }
+  };
+
+  private final List<MediaStream> localStreams;
+  private final long nativePeerConnection;
+  private final long nativeObserver;
+  private List<RtpSender> senders;
+  private List<RtpReceiver> receivers;
+
+  PeerConnection(long nativePeerConnection, long nativeObserver) {
+    this.nativePeerConnection = nativePeerConnection;
+    this.nativeObserver = nativeObserver;
+    localStreams = new LinkedList<MediaStream>();
+    senders = new LinkedList<RtpSender>();
+    receivers = new LinkedList<RtpReceiver>();
+  }
+
+  // JsepInterface.
+  public native SessionDescription getLocalDescription();
+
+  public native SessionDescription getRemoteDescription();
+
+  public native DataChannel createDataChannel(
+      String label, DataChannel.Init init);
+
+  public native void createOffer(
+      SdpObserver observer, MediaConstraints constraints);
+
+  public native void createAnswer(
+      SdpObserver observer, MediaConstraints constraints);
+
+  public native void setLocalDescription(
+      SdpObserver observer, SessionDescription sdp);
+
+  public native void setRemoteDescription(
+      SdpObserver observer, SessionDescription sdp);
+
+  public native boolean setConfiguration(RTCConfiguration config);
+
+  public boolean addIceCandidate(IceCandidate candidate) {
+    return nativeAddIceCandidate(
+        candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp);
+  }
+
+  public boolean addStream(MediaStream stream) {
+    boolean ret = nativeAddLocalStream(stream.nativeStream);
+    if (!ret) {
+      return false;
+    }
+    localStreams.add(stream);
+    return true;
+  }
+
+  public void removeStream(MediaStream stream) {
+    nativeRemoveLocalStream(stream.nativeStream);
+    localStreams.remove(stream);
+  }
+
+  public RtpSender createSender(String kind, String streamId) {
+    RtpSender newSender = nativeCreateSender(kind, streamId);
+    if (newSender != null) {
+      senders.add(newSender);
+    }
+    return newSender;
+  }
+
+  // Note that calling getSenders will dispose of the senders previously
+  // returned (and same goes for getReceivers).
+  public List<RtpSender> getSenders() {
+    for (RtpSender sender : senders) {
+      sender.dispose();
+    }
+    senders = nativeGetSenders();
+    return Collections.unmodifiableList(senders);
+  }
+
+  public List<RtpReceiver> getReceivers() {
+    for (RtpReceiver receiver : receivers) {
+      receiver.dispose();
+    }
+    receivers = nativeGetReceivers();
+    return Collections.unmodifiableList(receivers);
+  }
+
+  public boolean getStats(StatsObserver observer, MediaStreamTrack track) {
+    return nativeGetStats(observer, (track == null) ? 0 : track.nativeTrack);
+  }
+
+  // TODO(fischman): add support for DTMF-related methods once that API
+  // stabilizes.
+  public native SignalingState signalingState();
+
+  public native IceConnectionState iceConnectionState();
+
+  public native IceGatheringState iceGatheringState();
+
+  public native void close();
+
+  public void dispose() {
+    close();
+    for (MediaStream stream : localStreams) {
+      nativeRemoveLocalStream(stream.nativeStream);
+      stream.dispose();
+    }
+    localStreams.clear();
+    for (RtpSender sender : senders) {
+      sender.dispose();
+    }
+    senders.clear();
+    for (RtpReceiver receiver : receivers) {
+      receiver.dispose();
+    }
+    receivers.clear();
+    freePeerConnection(nativePeerConnection);
+    freeObserver(nativeObserver);
+  }
+
+  private static native void freePeerConnection(long nativePeerConnection);
+
+  private static native void freeObserver(long nativeObserver);
+
+  private native boolean nativeAddIceCandidate(
+      String sdpMid, int sdpMLineIndex, String iceCandidateSdp);
+
+  private native boolean nativeAddLocalStream(long nativeStream);
+
+  private native void nativeRemoveLocalStream(long nativeStream);
+
+  private native boolean nativeGetStats(
+      StatsObserver observer, long nativeTrack);
+
+  private native RtpSender nativeCreateSender(String kind, String streamId);
+
+  private native List<RtpSender> nativeGetSenders();
+
+  private native List<RtpReceiver> nativeGetReceivers();
+}
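Tying the API together, a minimal hypothetical setup looks like the fragment below; the STUN URI, |factory|, |constraints| and |observer| are placeholders. Note the caveat above: each getSenders() call disposes the objects returned by the previous call, so hold on to the latest list only.

    List<PeerConnection.IceServer> iceServers =
        new LinkedList<PeerConnection.IceServer>();
    iceServers.add(new PeerConnection.IceServer("stun:stun.example.org"));
    PeerConnection.RTCConfiguration config =
        new PeerConnection.RTCConfiguration(iceServers);
    config.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE;
    PeerConnection pc =
        factory.createPeerConnection(config, constraints, observer);
    List<RtpSender> senders = pc.getSenders();  // invalidated by the next call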
diff --git a/webrtc/api/java/src/org/webrtc/PeerConnectionFactory.java b/webrtc/api/java/src/org/webrtc/PeerConnectionFactory.java
new file mode 100644
index 0000000..e6b3205
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/PeerConnectionFactory.java
@@ -0,0 +1,271 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+
+package org.webrtc;
+
+import java.util.List;
+
+/**
+ * Java wrapper for a C++ PeerConnectionFactoryInterface.  Main entry point to
+ * the PeerConnection API for clients.
+ */
+public class PeerConnectionFactory {
+  static {
+    System.loadLibrary("jingle_peerconnection_so");
+  }
+
+  private static final String TAG = "PeerConnectionFactory";
+  private final long nativeFactory;
+  private static Thread workerThread;
+  private static Thread signalingThread;
+
+  public static class Options {
+    // Keep in sync with webrtc/base/network.h!
+    static final int ADAPTER_TYPE_UNKNOWN = 0;
+    static final int ADAPTER_TYPE_ETHERNET = 1 << 0;
+    static final int ADAPTER_TYPE_WIFI = 1 << 1;
+    static final int ADAPTER_TYPE_CELLULAR = 1 << 2;
+    static final int ADAPTER_TYPE_VPN = 1 << 3;
+    static final int ADAPTER_TYPE_LOOPBACK = 1 << 4;
+
+    public int networkIgnoreMask;
+    public boolean disableEncryption;
+    public boolean disableNetworkMonitor;
+  }
+
+  // |context| is an android.content.Context object, but we keep it untyped here
+  // to allow building on non-Android platforms.
+  // Callers may specify either |initializeAudio| or |initializeVideo| as false
+  // to skip initializing the respective engine (and avoid the need for the
+  // respective permissions).
+  // |videoHwAcceleration| enables HW video decoding to texture; the shared
+  // EGL contexts used for this are provided separately via
+  // setVideoHwAccelerationOptions().
+  public static native boolean initializeAndroidGlobals(
+      Object context, boolean initializeAudio, boolean initializeVideo,
+      boolean videoHwAcceleration);
+
+  // Field trial initialization. Must be called before PeerConnectionFactory
+  // is created.
+  public static native void initializeFieldTrials(String fieldTrialsInitString);
+  // Internal tracing initialization. Must be called before PeerConnectionFactory is created to
+  // prevent racing with tracing code.
+  public static native void initializeInternalTracer();
+  // Internal tracing shutdown, called to prevent resource leaks. Must be called after
+  // PeerConnectionFactory is gone to prevent races with code performing tracing.
+  public static native void shutdownInternalTracer();
+  // Start/stop capturing of internal tracing.
+  public static native boolean startInternalTracingCapture(String tracingFilename);
+  public static native void stopInternalTracingCapture();
+
+  @Deprecated
+  public PeerConnectionFactory() {
+    this(null);
+  }
+
+  public PeerConnectionFactory(Options options) {
+    nativeFactory = nativeCreatePeerConnectionFactory(options);
+    if (nativeFactory == 0) {
+      throw new RuntimeException("Failed to initialize PeerConnectionFactory!");
+    }
+  }
+
+  public PeerConnection createPeerConnection(
+      PeerConnection.RTCConfiguration rtcConfig,
+      MediaConstraints constraints,
+      PeerConnection.Observer observer) {
+    long nativeObserver = nativeCreateObserver(observer);
+    if (nativeObserver == 0) {
+      return null;
+    }
+    long nativePeerConnection = nativeCreatePeerConnection(
+        nativeFactory, rtcConfig, constraints, nativeObserver);
+    if (nativePeerConnection == 0) {
+      return null;
+    }
+    return new PeerConnection(nativePeerConnection, nativeObserver);
+  }
+
+  public PeerConnection createPeerConnection(
+      List<PeerConnection.IceServer> iceServers,
+      MediaConstraints constraints,
+      PeerConnection.Observer observer) {
+    PeerConnection.RTCConfiguration rtcConfig =
+        new PeerConnection.RTCConfiguration(iceServers);
+    return createPeerConnection(rtcConfig, constraints, observer);
+  }
+
+  public MediaStream createLocalMediaStream(String label) {
+    return new MediaStream(
+        nativeCreateLocalMediaStream(nativeFactory, label));
+  }
+
+  public VideoSource createVideoSource(
+      VideoCapturer capturer, MediaConstraints constraints) {
+    return new VideoSource(nativeCreateVideoSource(
+        nativeFactory, capturer.takeNativeVideoCapturer(), constraints));
+  }
+
+  public VideoTrack createVideoTrack(String id, VideoSource source) {
+    return new VideoTrack(nativeCreateVideoTrack(
+        nativeFactory, id, source.nativeSource));
+  }
+
+  public AudioSource createAudioSource(MediaConstraints constraints) {
+    return new AudioSource(nativeCreateAudioSource(nativeFactory, constraints));
+  }
+
+  public AudioTrack createAudioTrack(String id, AudioSource source) {
+    return new AudioTrack(nativeCreateAudioTrack(
+        nativeFactory, id, source.nativeSource));
+  }
+
+  // Starts recording an AEC dump. Ownership of the file is transferred to the
+  // native code. If an AEC dump is already in progress, it will be stopped and
+  // a new one will start using the provided file.
+  public boolean startAecDump(int fileDescriptor, int filesizeLimitBytes) {
+    return nativeStartAecDump(nativeFactory, fileDescriptor, filesizeLimitBytes);
+  }
+
+  // Stops recording an AEC dump. If no AEC dump is currently being recorded,
+  // this call will have no effect.
+  public void stopAecDump() {
+    nativeStopAecDump(nativeFactory);
+  }
+
+  // Starts recording an RTC event log. Ownership of the file is transferred to
+  // the native code. If an RTC event log is already being recorded, it will be
+  // stopped and a new one will start using the provided file.
+  public boolean startRtcEventLog(int fileDescriptor) {
+    return nativeStartRtcEventLog(nativeFactory, fileDescriptor);
+  }
+
+  // Stops recording an RTC event log. If no RTC event log is currently being
+  // recorded, this call will have no effect.
+  public void stopRtcEventLog() {
+    nativeStopRtcEventLog(nativeFactory);
+  }
+
+  @Deprecated
+  public void setOptions(Options options) {
+    nativeSetOptions(nativeFactory, options);
+  }
+
+  /** Set the EGL context used by HW video encoding and decoding.
+   *
+   * @param localEGLContext   An instance of EglBase.Context.
+   *                          Must be the same as used by VideoCapturerAndroid and any local
+   *                          video renderer.
+   * @param remoteEGLContext  An instance of EglBase.Context.
+   *                          Must be the same as used by any remote video renderer.
+   */
+  public void setVideoHwAccelerationOptions(Object localEGLContext, Object remoteEGLContext) {
+    nativeSetVideoHwAccelerationOptions(nativeFactory, localEGLContext, remoteEGLContext);
+  }
+
+  public void dispose() {
+    nativeFreeFactory(nativeFactory);
+    signalingThread = null;
+    workerThread = null;
+  }
+
+  public void threadsCallbacks() {
+    nativeThreadsCallbacks(nativeFactory);
+  }
+
+  private static void printStackTrace(Thread thread, String threadName) {
+    if (thread != null) {
+      StackTraceElement[] stackTraces = thread.getStackTrace();
+      if (stackTraces.length > 0) {
+        Logging.d(TAG, threadName + " stack trace:");
+        for (StackTraceElement stackTrace : stackTraces) {
+          Logging.d(TAG, stackTrace.toString());
+        }
+      }
+    }
+  }
+
+  public static void printStackTraces() {
+    printStackTrace(workerThread, "Worker thread");
+    printStackTrace(signalingThread, "Signaling thread");
+  }
+
+  private static void onWorkerThreadReady() {
+    workerThread = Thread.currentThread();
+    Logging.d(TAG, "onWorkerThreadReady");
+  }
+
+  private static void onSignalingThreadReady() {
+    signalingThread = Thread.currentThread();
+    Logging.d(TAG, "onSignalingThreadReady");
+  }
+
+  private static native long nativeCreatePeerConnectionFactory(Options options);
+
+  private static native long nativeCreateObserver(
+      PeerConnection.Observer observer);
+
+  private static native long nativeCreatePeerConnection(
+      long nativeFactory, PeerConnection.RTCConfiguration rtcConfig,
+      MediaConstraints constraints, long nativeObserver);
+
+  private static native long nativeCreateLocalMediaStream(
+      long nativeFactory, String label);
+
+  private static native long nativeCreateVideoSource(
+      long nativeFactory, long nativeVideoCapturer,
+      MediaConstraints constraints);
+
+  private static native long nativeCreateVideoTrack(
+      long nativeFactory, String id, long nativeVideoSource);
+
+  private static native long nativeCreateAudioSource(
+      long nativeFactory, MediaConstraints constraints);
+
+  private static native long nativeCreateAudioTrack(
+      long nativeFactory, String id, long nativeSource);
+
+  private static native boolean nativeStartAecDump(
+      long nativeFactory, int fileDescriptor, int filesizeLimitBytes);
+
+  private static native void nativeStopAecDump(long nativeFactory);
+
+  private static native boolean nativeStartRtcEventLog(long nativeFactory, int fileDescriptor);
+
+  private static native void nativeStopRtcEventLog(long nativeFactory);
+
+  @Deprecated
+  public native void nativeSetOptions(long nativeFactory, Options options);
+
+  private static native void nativeSetVideoHwAccelerationOptions(
+      long nativeFactory, Object localEGLContext, Object remoteEGLContext);
+
+  private static native void nativeThreadsCallbacks(long nativeFactory);
+
+  private static native void nativeFreeFactory(long nativeFactory);
+}
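
The factory is the root object of the Java API. As a rough usage sketch (not part of this CL; |context| and |capturer| are assumed to come from the embedding app, e.g. an Activity and a VideoCapturerAndroid):

    PeerConnectionFactory.initializeAndroidGlobals(
        context, true /* initializeAudio */, true /* initializeVideo */,
        true /* videoHwAcceleration */);
    PeerConnectionFactory factory =
        new PeerConnectionFactory(new PeerConnectionFactory.Options());
    VideoSource videoSource = factory.createVideoSource(capturer, new MediaConstraints());
    VideoTrack videoTrack = factory.createVideoTrack("video0", videoSource);
    AudioSource audioSource = factory.createAudioSource(new MediaConstraints());
    AudioTrack audioTrack = factory.createAudioTrack("audio0", audioSource);
    MediaStream stream = factory.createLocalMediaStream("stream0");
    // ...add the tracks to |stream|, create a PeerConnection, then eventually:
    factory.dispose();

Note that createPeerConnection() returns null if either the native observer or the native peer connection fails to be created, so callers should null-check the result.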
diff --git a/webrtc/api/java/src/org/webrtc/RtpReceiver.java b/webrtc/api/java/src/org/webrtc/RtpReceiver.java
new file mode 100644
index 0000000..597f441
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/RtpReceiver.java
@@ -0,0 +1,63 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ RtpReceiverInterface. */
+public class RtpReceiver {
+  final long nativeRtpReceiver;
+
+  private MediaStreamTrack cachedTrack;
+
+  public RtpReceiver(long nativeRtpReceiver) {
+    this.nativeRtpReceiver = nativeRtpReceiver;
+    long track = nativeGetTrack(nativeRtpReceiver);
+    // We can assume that an RtpReceiver always has an associated track.
+    cachedTrack = new MediaStreamTrack(track);
+  }
+
+  public MediaStreamTrack track() {
+    return cachedTrack;
+  }
+
+  public String id() {
+    return nativeId(nativeRtpReceiver);
+  }
+
+  public void dispose() {
+    cachedTrack.dispose();
+    free(nativeRtpReceiver);
+  }
+
+  // This should increment the reference count of the track.
+  // Will be released in dispose().
+  private static native long nativeGetTrack(long nativeRtpReceiver);
+
+  private static native String nativeId(long nativeRtpReceiver);
+
+  private static native void free(long nativeRtpReceiver);
+}
diff --git a/webrtc/api/java/src/org/webrtc/RtpSender.java b/webrtc/api/java/src/org/webrtc/RtpSender.java
new file mode 100644
index 0000000..9ac2e70
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/RtpSender.java
@@ -0,0 +1,88 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ RtpSenderInterface. */
+public class RtpSender {
+  final long nativeRtpSender;
+
+  private MediaStreamTrack cachedTrack;
+  private boolean ownsTrack = true;
+
+  public RtpSender(long nativeRtpSender) {
+    this.nativeRtpSender = nativeRtpSender;
+    long track = nativeGetTrack(nativeRtpSender);
+    // It may be possible for an RtpSender to be created without a track.
+    cachedTrack = (track == 0) ? null : new MediaStreamTrack(track);
+  }
+
+  // If |takeOwnership| is true, the RtpSender takes ownership of the track
+  // from the caller, and will auto-dispose of it when no longer needed.
+  // |takeOwnership| should only be used if the caller owns the track; it is
+  // not appropriate when the track is owned by, for example, another RtpSender
+  // or a MediaStream.
+  public boolean setTrack(MediaStreamTrack track, boolean takeOwnership) {
+    if (!nativeSetTrack(nativeRtpSender,
+                        (track == null) ? 0 : track.nativeTrack)) {
+      return false;
+    }
+    if (cachedTrack != null && ownsTrack) {
+      cachedTrack.dispose();
+    }
+    cachedTrack = track;
+    ownsTrack = takeOwnership;
+    return true;
+  }
+
+  public MediaStreamTrack track() {
+    return cachedTrack;
+  }
+
+  public String id() {
+    return nativeId(nativeRtpSender);
+  }
+
+  public void dispose() {
+    if (cachedTrack != null && ownsTrack) {
+      cachedTrack.dispose();
+    }
+    free(nativeRtpSender);
+  }
+
+  private static native boolean nativeSetTrack(long nativeRtpSender,
+                                               long nativeTrack);
+
+  // This should increment the reference count of the track.
+  // Will be released in dispose() or setTrack().
+  private static native long nativeGetTrack(long nativeRtpSender);
+
+  private static native String nativeId(long nativeRtpSender);
+
+  private static native void free(long nativeRtpSender);
+}
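
The |takeOwnership| contract above is the subtle part of this class; a sketch of both cases, assuming |sender| was obtained from the PeerConnection API (not shown in this file):

    // The caller created this track, so ownership can be handed to the sender;
    // the sender will dispose it when the track is replaced or in dispose().
    VideoTrack track = factory.createVideoTrack("video0", videoSource);
    sender.setTrack(track, true /* takeOwnership */);

    // A track owned elsewhere (e.g. by a MediaStream) must not be adopted:
    sender.setTrack(streamOwnedTrack, false /* takeOwnership */);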
diff --git a/webrtc/api/java/src/org/webrtc/SdpObserver.java b/webrtc/api/java/src/org/webrtc/SdpObserver.java
new file mode 100644
index 0000000..779bf1b
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/SdpObserver.java
@@ -0,0 +1,43 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/** Interface for observing SDP-related events. */
+public interface SdpObserver {
+  /** Called on success of Create{Offer,Answer}(). */
+  public void onCreateSuccess(SessionDescription sdp);
+
+  /** Called on success of Set{Local,Remote}Description(). */
+  public void onSetSuccess();
+
+  /** Called on error of Create{Offer,Answer}(). */
+  public void onCreateFailure(String error);
+
+  /** Called on error of Set{Local,Remote}Description(). */
+  public void onSetFailure(String error);
+}
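
All four callbacks must be implemented even when only some are interesting; a minimal sketch of a client-side observer:

    SdpObserver observer = new SdpObserver() {
      @Override
      public void onCreateSuccess(SessionDescription sdp) {
        // Typically: apply |sdp| locally and signal it to the remote peer.
      }
      @Override
      public void onSetSuccess() {}
      @Override
      public void onCreateFailure(String error) {
        Logging.d("SdpObserver", "create failed: " + error);
      }
      @Override
      public void onSetFailure(String error) {
        Logging.d("SdpObserver", "set failed: " + error);
      }
    };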
diff --git a/webrtc/api/java/src/org/webrtc/SessionDescription.java b/webrtc/api/java/src/org/webrtc/SessionDescription.java
new file mode 100644
index 0000000..c3dfcd4
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/SessionDescription.java
@@ -0,0 +1,57 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+
+package org.webrtc;
+
+/**
+ * Description of an RFC 4566 Session.
+ * SDPs are passed as serialized Strings in Java-land and are materialized
+ * to SessionDescriptionInterface as appropriate in the JNI layer.
+ */
+public class SessionDescription {
+  /** Java-land enum version of SessionDescriptionInterface's type() string. */
+  public enum Type {
+    OFFER, PRANSWER, ANSWER;
+
+    public String canonicalForm() {
+      return name().toLowerCase();
+    }
+
+    public static Type fromCanonicalForm(String canonical) {
+      return Type.valueOf(Type.class, canonical.toUpperCase());
+    }
+  }
+
+  public final Type type;
+  public final String description;
+
+  public SessionDescription(Type type, String description) {
+    this.type = type;
+    this.description = description;
+  }
+}
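
The enum's canonical form matches the lowercase type strings used on the wire (and by the C++ layer); a quick round-trip, for illustration (|sdpString| assumed to hold serialized SDP):

    SessionDescription.Type type = SessionDescription.Type.fromCanonicalForm("pranswer");
    // type == Type.PRANSWER
    SessionDescription sdp = new SessionDescription(type, sdpString);
    // sdp.type.canonicalForm() yields "pranswer" again.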
diff --git a/webrtc/api/java/src/org/webrtc/StatsObserver.java b/webrtc/api/java/src/org/webrtc/StatsObserver.java
new file mode 100644
index 0000000..99223ad
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/StatsObserver.java
@@ -0,0 +1,34 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/** Interface for observing Stats reports (see webrtc::StatsObserver). */
+public interface StatsObserver {
+  /** Called when the reports are ready. */
+  public void onComplete(StatsReport[] reports);
+}
diff --git a/webrtc/api/java/src/org/webrtc/StatsReport.java b/webrtc/api/java/src/org/webrtc/StatsReport.java
new file mode 100644
index 0000000..6e32543
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/StatsReport.java
@@ -0,0 +1,72 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/** Java version of webrtc::StatsReport. */
+public class StatsReport {
+
+  /** Java version of webrtc::StatsReport::Value. */
+  public static class Value {
+    public final String name;
+    public final String value;
+
+    public Value(String name, String value) {
+      this.name = name;
+      this.value = value;
+    }
+
+    @Override
+    public String toString() {
+      StringBuilder builder = new StringBuilder();
+      builder.append("[").append(name).append(": ").append(value).append("]");
+      return builder.toString();
+    }
+  }
+
+  public final String id;
+  public final String type;
+  // Time since 1970-01-01T00:00:00Z in milliseconds.
+  public final double timestamp;
+  public final Value[] values;
+
+  public StatsReport(String id, String type, double timestamp, Value[] values) {
+    this.id = id;
+    this.type = type;
+    this.timestamp = timestamp;
+    this.values = values;
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder builder = new StringBuilder();
+    builder.append("id: ").append(id).append(", type: ").append(type)
+        .append(", timestamp: ").append(timestamp).append(", values: ");
+    for (int i = 0; i < values.length; ++i) {
+      builder.append(values[i].toString()).append(", ");
+    }
+    return builder.toString();
+  }
+}
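
StatsObserver and StatsReport are consumed together. A sketch of dumping every value, assuming the PeerConnection getStats(observer, track) entry point, which lives outside this section:

    peerConnection.getStats(new StatsObserver() {
      @Override
      public void onComplete(StatsReport[] reports) {
        for (StatsReport report : reports) {
          for (StatsReport.Value value : report.values) {
            Logging.d("Stats", report.id + "/" + report.type + ": " + value);
          }
        }
      }
    }, null /* assumed: null selects stats for all tracks */);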
diff --git a/webrtc/api/java/src/org/webrtc/VideoCapturer.java b/webrtc/api/java/src/org/webrtc/VideoCapturer.java
new file mode 100644
index 0000000..ad41053
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/VideoCapturer.java
@@ -0,0 +1,62 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+/** Java version of cricket::VideoCapturer. */
+// TODO(perkj): Merge VideoCapturer and VideoCapturerAndroid.
+public class VideoCapturer {
+  private long nativeVideoCapturer;
+
+  protected VideoCapturer() {
+  }
+
+  // Sets |nativeCapturer| to be owned by VideoCapturer.
+  protected void setNativeCapturer(long nativeCapturer) {
+    this.nativeVideoCapturer = nativeCapturer;
+  }
+
+  // Package-visible for PeerConnectionFactory.
+  long takeNativeVideoCapturer() {
+    if (nativeVideoCapturer == 0) {
+      throw new RuntimeException("Capturer can only be taken once!");
+    }
+    long ret = nativeVideoCapturer;
+    nativeVideoCapturer = 0;
+    return ret;
+  }
+
+  public void dispose() {
+    // No-op iff this capturer is owned by a source (see comment on
+    // PeerConnectionFactoryInterface::CreateVideoSource()).
+    if (nativeVideoCapturer != 0) {
+      free(nativeVideoCapturer);
+    }
+  }
+
+  private static native void free(long nativeVideoCapturer);
+}
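
The one-shot transfer in takeNativeVideoCapturer() is why dispose() can legitimately be a no-op; both lifetimes for illustration (|factory| and the capturers assumed created elsewhere):

    // Handed to a source: the source now owns the native capturer, and
    // dispose() on the Java wrapper does nothing.
    VideoSource source = factory.createVideoSource(capturer, new MediaConstraints());
    capturer.dispose();  // no-op

    // Never handed off: dispose() frees the native capturer.
    unusedCapturer.dispose();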
diff --git a/webrtc/api/java/src/org/webrtc/VideoRenderer.java b/webrtc/api/java/src/org/webrtc/VideoRenderer.java
new file mode 100644
index 0000000..c14802e
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/VideoRenderer.java
@@ -0,0 +1,170 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Java version of VideoRendererInterface. Clients define their own rendering
+ * behavior by passing in a Callbacks object; every frame delivered to
+ * renderFrame() must later be released with renderFrameDone().
+ */
+public class VideoRenderer {
+  /**
+   * Java version of cricket::VideoFrame. Frames are only constructed from native code and test
+   * code.
+   */
+  public static class I420Frame {
+    public final int width;
+    public final int height;
+    public final int[] yuvStrides;
+    public ByteBuffer[] yuvPlanes;
+    public final boolean yuvFrame;
+    // Matrix that transforms standard coordinates to their proper sampling locations in
+    // the texture. This transform compensates for any properties of the video source that
+    // cause it to appear different from a normalized texture. This matrix does not take
+    // |rotationDegree| into account.
+    public final float[] samplingMatrix;
+    public int textureId;
+    // Frame pointer in C++.
+    private long nativeFramePointer;
+
+    // rotationDegree is the number of degrees the frame must be rotated
+    // clockwise to be rendered correctly.
+    public int rotationDegree;
+
+    /**
+     * Construct a frame of the given dimensions with the specified planar data.
+     */
+    I420Frame(int width, int height, int rotationDegree, int[] yuvStrides, ByteBuffer[] yuvPlanes,
+        long nativeFramePointer) {
+      this.width = width;
+      this.height = height;
+      this.yuvStrides = yuvStrides;
+      this.yuvPlanes = yuvPlanes;
+      this.yuvFrame = true;
+      this.rotationDegree = rotationDegree;
+      this.nativeFramePointer = nativeFramePointer;
+      if (rotationDegree % 90 != 0) {
+        throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
+      }
+      // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
+      // top-left corner of the image, but in glTexImage2D() the first element corresponds to the
+      // bottom-left corner. This discrepancy is corrected by setting a vertical flip as sampling
+      // matrix.
+      samplingMatrix = new float[] {
+          1,  0, 0, 0,
+          0, -1, 0, 0,
+          0,  0, 1, 0,
+          0,  1, 0, 1};
+    }
+
+    /**
+     * Construct a texture frame of the given dimensions with data in a SurfaceTexture.
+     */
+    I420Frame(int width, int height, int rotationDegree, int textureId, float[] samplingMatrix,
+        long nativeFramePointer) {
+      this.width = width;
+      this.height = height;
+      this.yuvStrides = null;
+      this.yuvPlanes = null;
+      this.samplingMatrix = samplingMatrix;
+      this.textureId = textureId;
+      this.yuvFrame = false;
+      this.rotationDegree = rotationDegree;
+      this.nativeFramePointer = nativeFramePointer;
+      if (rotationDegree % 90 != 0) {
+        throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
+      }
+    }
+
+    public int rotatedWidth() {
+      return (rotationDegree % 180 == 0) ? width : height;
+    }
+
+    public int rotatedHeight() {
+      return (rotationDegree % 180 == 0) ? height : width;
+    }
+
+    @Override
+    public String toString() {
+      if (!yuvFrame) {
+        // Texture frames carry no YUV strides.
+        return width + "x" + height + ":texture:" + textureId;
+      }
+      return width + "x" + height + ":" + yuvStrides[0] + ":" + yuvStrides[1] +
+          ":" + yuvStrides[2];
+    }
+  }
+
+  // Helper native function to do a video frame plane copying.
+  public static native void nativeCopyPlane(ByteBuffer src, int width,
+      int height, int srcStride, ByteBuffer dst, int dstStride);
+
+  /** The real meat of VideoRendererInterface. */
+  public interface Callbacks {
+    // |frame| might have pending rotation and implementation of Callbacks
+    // should handle that by applying rotation during rendering. The callee
+    // is responsible for signaling when it is done with |frame| by calling
+    // renderFrameDone(frame).
+    public void renderFrame(I420Frame frame);
+  }
+
+  /**
+   * This must be called after every renderFrame() to release the frame.
+   */
+  public static void renderFrameDone(I420Frame frame) {
+    frame.yuvPlanes = null;
+    frame.textureId = 0;
+    if (frame.nativeFramePointer != 0) {
+      releaseNativeFrame(frame.nativeFramePointer);
+      frame.nativeFramePointer = 0;
+    }
+  }
+
+  long nativeVideoRenderer;
+
+  public VideoRenderer(Callbacks callbacks) {
+    nativeVideoRenderer = nativeWrapVideoRenderer(callbacks);
+  }
+
+  private VideoRenderer(long nativeVideoRenderer) {
+    this.nativeVideoRenderer = nativeVideoRenderer;
+  }
+
+  public void dispose() {
+    if (nativeVideoRenderer == 0) {
+      // Already disposed.
+      return;
+    }
+
+    freeWrappedVideoRenderer(nativeVideoRenderer);
+    nativeVideoRenderer = 0;
+  }
+
+  private static native long nativeWrapVideoRenderer(Callbacks callbacks);
+  private static native void freeWrappedVideoRenderer(long nativeVideoRenderer);
+  private static native void releaseNativeFrame(long nativeFramePointer);
+}
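
Every frame delivered to renderFrame() must be released, or native frame buffers leak; a minimal Callbacks sketch that only logs:

    VideoRenderer renderer = new VideoRenderer(new VideoRenderer.Callbacks() {
      @Override
      public void renderFrame(VideoRenderer.I420Frame frame) {
        // A real renderer would draw here, honoring frame.rotationDegree
        // and frame.samplingMatrix.
        Logging.d("Renderer", "got " + frame.rotatedWidth() + "x" + frame.rotatedHeight());
        VideoRenderer.renderFrameDone(frame);  // mandatory release
      }
    });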
diff --git a/webrtc/api/java/src/org/webrtc/VideoSource.java b/webrtc/api/java/src/org/webrtc/VideoSource.java
new file mode 100644
index 0000000..7151748
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/VideoSource.java
@@ -0,0 +1,63 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+
+package org.webrtc;
+
+/**
+ * Java version of VideoSourceInterface, extended with stop/restart
+ * functionality to allow explicit control of the camera device on Android,
+ * where there is no support for multiple open capture devices and the cost of
+ * holding a camera open (even if MediaStreamTrack.setEnabled(false) is muting
+ * its output to the encoder) can be too high to bear.
+ */
+public class VideoSource extends MediaSource {
+
+  public VideoSource(long nativeSource) {
+    super(nativeSource);
+  }
+
+  // Stop capture feeding this source.
+  public void stop() {
+    stop(nativeSource);
+  }
+
+  // Restart capture feeding this source.  stop() must have been called since
+  // the last call to restart() (if any).  Note that this isn't "start()";
+  // sources are started by default at birth.
+  public void restart() {
+    restart(nativeSource);
+  }
+
+  @Override
+  public void dispose() {
+    super.dispose();
+  }
+
+  private static native void stop(long nativeSource);
+  private static native void restart(long nativeSource);
+}
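
In practice, stop()/restart() bracket Activity lifecycle events so the camera is released while the app is backgrounded; for illustration:

    // Activity.onPause(): release the camera, keep the source and tracks alive.
    videoSource.stop();
    // Activity.onResume(): the same source starts receiving frames again.
    videoSource.restart();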
diff --git a/webrtc/api/java/src/org/webrtc/VideoTrack.java b/webrtc/api/java/src/org/webrtc/VideoTrack.java
new file mode 100644
index 0000000..7333a90
--- /dev/null
+++ b/webrtc/api/java/src/org/webrtc/VideoTrack.java
@@ -0,0 +1,68 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import java.util.LinkedList;
+
+/** Java version of VideoTrackInterface. */
+public class VideoTrack extends MediaStreamTrack {
+  private final LinkedList<VideoRenderer> renderers =
+      new LinkedList<VideoRenderer>();
+
+  public VideoTrack(long nativeTrack) {
+    super(nativeTrack);
+  }
+
+  public void addRenderer(VideoRenderer renderer) {
+    renderers.add(renderer);
+    nativeAddRenderer(nativeTrack, renderer.nativeVideoRenderer);
+  }
+
+  public void removeRenderer(VideoRenderer renderer) {
+    if (!renderers.remove(renderer)) {
+      return;
+    }
+    nativeRemoveRenderer(nativeTrack, renderer.nativeVideoRenderer);
+    renderer.dispose();
+  }
+
+  public void dispose() {
+    while (!renderers.isEmpty()) {
+      removeRenderer(renderers.getFirst());
+    }
+    super.dispose();
+  }
+
+  private static native void free(long nativeTrack);
+
+  private static native void nativeAddRenderer(
+      long nativeTrack, long nativeRenderer);
+
+  private static native void nativeRemoveRenderer(
+      long nativeTrack, long nativeRenderer);
+}
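
Renderers are mirrored in a Java-side list so that dispose() can detach and dispose them; a short usage note (|callbacks| assumed):

    VideoRenderer renderer = new VideoRenderer(callbacks);
    videoTrack.addRenderer(renderer);
    // Either detach explicitly (removeRenderer() also disposes the renderer):
    videoTrack.removeRenderer(renderer);
    // ...or let videoTrack.dispose() remove and dispose all attached renderers.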
diff --git a/webrtc/api/jsep.h b/webrtc/api/jsep.h
new file mode 100644
index 0000000..c49a16b
--- /dev/null
+++ b/webrtc/api/jsep.h
@@ -0,0 +1,155 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// Interfaces matching the draft-ietf-rtcweb-jsep-01.
+
+#ifndef WEBRTC_API_JSEP_H_
+#define WEBRTC_API_JSEP_H_
+
+#include <string>
+#include <vector>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/refcount.h"
+
+namespace cricket {
+class SessionDescription;
+class Candidate;
+}  // namespace cricket
+
+namespace webrtc {
+
+struct SdpParseError {
+ public:
+  // The SDP line that caused the error.
+  std::string line;
+  // Explains the error.
+  std::string description;
+};
+
+// Class representation of an ICE candidate.
+// An instance of this interface is supposed to be owned by one class at
+// a time and is therefore not expected to be thread safe.
+class IceCandidateInterface {
+ public:
+  virtual ~IceCandidateInterface() {}
+  // If present, this contains the identifier of the "media stream
+  // identification" as defined in [RFC 3388] for the m-line this candidate is
+  // associated with.
+  virtual std::string sdp_mid() const = 0;
+  // This indicates the index (starting at zero) of the m-line in the SDP this
+  // candidate is associated with.
+  virtual int sdp_mline_index() const = 0;
+  virtual const cricket::Candidate& candidate() const = 0;
+  // Creates an SDP-ized form of this candidate.
+  virtual bool ToString(std::string* out) const = 0;
+};
+
+// Creates an IceCandidateInterface based on an SDP string.
+// Returns NULL if the SDP string can't be parsed.
+// |error| may be NULL if the caller doesn't care about the failure reason.
+IceCandidateInterface* CreateIceCandidate(const std::string& sdp_mid,
+                                          int sdp_mline_index,
+                                          const std::string& sdp,
+                                          SdpParseError* error);
+
+// This class represents a collection of candidates for a specific m-line.
+// This class is used in SessionDescriptionInterface to represent all known
+// candidates for a certain m-line.
+class IceCandidateCollection {
+ public:
+  virtual ~IceCandidateCollection() {}
+  virtual size_t count() const = 0;
+  // Returns true if an equivalent |candidate| exists in the collection.
+  virtual bool HasCandidate(const IceCandidateInterface* candidate) const = 0;
+  virtual const IceCandidateInterface* at(size_t index) const = 0;
+};
+
+// Class representation of a Session description.
+// An instance of this interface is supposed to be owned by one class at
+// a time and is therefore not expected to be thread safe.
+class SessionDescriptionInterface {
+ public:
+  // Supported types:
+  static const char kOffer[];
+  static const char kPrAnswer[];
+  static const char kAnswer[];
+
+  virtual ~SessionDescriptionInterface() {}
+  virtual cricket::SessionDescription* description() = 0;
+  virtual const cricket::SessionDescription* description() const = 0;
+  // Get the session id and session version, which are defined based on
+  // RFC 4566 for the SDP o= line.
+  virtual std::string session_id() const = 0;
+  virtual std::string session_version() const = 0;
+  virtual std::string type() const = 0;
+  // Adds the specified candidate to the description.
+  // Ownership is not transferred.
+  // Returns false if the session description does not have a media section that
+  // corresponds to the |candidate| label.
+  virtual bool AddCandidate(const IceCandidateInterface* candidate) = 0;
+  // Returns the number of m-lines in the session description.
+  virtual size_t number_of_mediasections() const = 0;
+  // Returns a collection of all candidates that belong to a certain m-line.
+  virtual const IceCandidateCollection* candidates(
+      size_t mediasection_index) const = 0;
+  // Serializes the description to SDP.
+  virtual bool ToString(std::string* out) const = 0;
+};
+
+// Creates a SessionDescriptionInterface based on an SDP string and its type.
+// Returns NULL if the SDP string can't be parsed or the type is unsupported.
+// |error| may be NULL if the caller doesn't care about the failure reason.
+SessionDescriptionInterface* CreateSessionDescription(const std::string& type,
+                                                      const std::string& sdp,
+                                                      SdpParseError* error);
+
+// Jsep CreateOffer and CreateAnswer callback interface.
+class CreateSessionDescriptionObserver : public rtc::RefCountInterface {
+ public:
+  // The implementation of the CreateSessionDescriptionObserver takes
+  // ownership of |desc|.
+  virtual void OnSuccess(SessionDescriptionInterface* desc) = 0;
+  virtual void OnFailure(const std::string& error) = 0;
+
+ protected:
+  ~CreateSessionDescriptionObserver() {}
+};
+
+// Jsep SetLocalDescription and SetRemoteDescription callback interface.
+class SetSessionDescriptionObserver : public rtc::RefCountInterface {
+ public:
+  virtual void OnSuccess() = 0;
+  virtual void OnFailure(const std::string& error) = 0;
+
+ protected:
+  ~SetSessionDescriptionObserver() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_JSEP_H_
diff --git a/webrtc/api/jsepicecandidate.cc b/webrtc/api/jsepicecandidate.cc
new file mode 100644
index 0000000..172c52e
--- /dev/null
+++ b/webrtc/api/jsepicecandidate.cc
@@ -0,0 +1,99 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/jsepicecandidate.h"
+
+#include <vector>
+
+#include "webrtc/api/webrtcsdp.h"
+#include "webrtc/base/stringencode.h"
+
+namespace webrtc {
+
+IceCandidateInterface* CreateIceCandidate(const std::string& sdp_mid,
+                                          int sdp_mline_index,
+                                          const std::string& sdp,
+                                          SdpParseError* error) {
+  JsepIceCandidate* jsep_ice = new JsepIceCandidate(sdp_mid, sdp_mline_index);
+  if (!jsep_ice->Initialize(sdp, error)) {
+    delete jsep_ice;
+    return NULL;
+  }
+  return jsep_ice;
+}
+
+JsepIceCandidate::JsepIceCandidate(const std::string& sdp_mid,
+                                   int sdp_mline_index)
+    : sdp_mid_(sdp_mid),
+      sdp_mline_index_(sdp_mline_index) {
+}
+
+JsepIceCandidate::JsepIceCandidate(const std::string& sdp_mid,
+                                   int sdp_mline_index,
+                                   const cricket::Candidate& candidate)
+    : sdp_mid_(sdp_mid),
+      sdp_mline_index_(sdp_mline_index),
+      candidate_(candidate) {
+}
+
+JsepIceCandidate::~JsepIceCandidate() {
+}
+
+bool JsepIceCandidate::Initialize(const std::string& sdp, SdpParseError* err) {
+  return SdpDeserializeCandidate(sdp, this, err);
+}
+
+bool JsepIceCandidate::ToString(std::string* out) const {
+  if (!out)
+    return false;
+  *out = SdpSerializeCandidate(*this);
+  return !out->empty();
+}
+
+JsepCandidateCollection::~JsepCandidateCollection() {
+  for (std::vector<JsepIceCandidate*>::iterator it = candidates_.begin();
+       it != candidates_.end(); ++it) {
+    delete *it;
+  }
+}
+
+bool JsepCandidateCollection::HasCandidate(
+    const IceCandidateInterface* candidate) const {
+  bool ret = false;
+  for (std::vector<JsepIceCandidate*>::const_iterator it = candidates_.begin();
+      it != candidates_.end(); ++it) {
+    if ((*it)->sdp_mid() == candidate->sdp_mid() &&
+        (*it)->sdp_mline_index() == candidate->sdp_mline_index() &&
+        (*it)->candidate().IsEquivalent(candidate->candidate())) {
+      ret = true;
+      break;
+    }
+  }
+  return ret;
+}
+
+}  // namespace webrtc
diff --git a/webrtc/api/jsepicecandidate.h b/webrtc/api/jsepicecandidate.h
new file mode 100644
index 0000000..957d7c0
--- /dev/null
+++ b/webrtc/api/jsepicecandidate.h
@@ -0,0 +1,92 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// Implements the IceCandidateInterface.
+
+#ifndef WEBRTC_API_JSEPICECANDIDATE_H_
+#define WEBRTC_API_JSEPICECANDIDATE_H_
+
+#include <string>
+
+#include "webrtc/api/jsep.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/p2p/base/candidate.h"
+
+namespace webrtc {
+
+class JsepIceCandidate : public IceCandidateInterface {
+ public:
+  JsepIceCandidate(const std::string& sdp_mid, int sdp_mline_index);
+  JsepIceCandidate(const std::string& sdp_mid, int sdp_mline_index,
+                   const cricket::Candidate& candidate);
+  ~JsepIceCandidate();
+  // |err| may be NULL if the caller doesn't care about the failure reason.
+  bool Initialize(const std::string& sdp, SdpParseError* err);
+  void SetCandidate(const cricket::Candidate& candidate) {
+    candidate_ = candidate;
+  }
+
+  virtual std::string sdp_mid() const { return sdp_mid_; }
+  virtual int sdp_mline_index() const { return sdp_mline_index_; }
+  virtual const cricket::Candidate& candidate() const {
+    return candidate_;
+  }
+
+  virtual bool ToString(std::string* out) const;
+
+ private:
+  std::string sdp_mid_;
+  int sdp_mline_index_;
+  cricket::Candidate candidate_;
+
+  RTC_DISALLOW_COPY_AND_ASSIGN(JsepIceCandidate);
+};
+
+// Implementation of IceCandidateCollection.
+// This implementation stores JsepIceCandidates.
+class JsepCandidateCollection : public IceCandidateCollection {
+ public:
+  ~JsepCandidateCollection();
+  virtual size_t count() const {
+    return candidates_.size();
+  }
+  virtual bool HasCandidate(const IceCandidateInterface* candidate) const;
+  // Adds and takes ownership of the JsepIceCandidate.
+  virtual void add(JsepIceCandidate* candidate) {
+    candidates_.push_back(candidate);
+  }
+  virtual const IceCandidateInterface* at(size_t index) const {
+    return candidates_[index];
+  }
+
+ private:
+  std::vector<JsepIceCandidate*> candidates_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_JSEPICECANDIDATE_H_
diff --git a/webrtc/api/jsepsessiondescription.cc b/webrtc/api/jsepsessiondescription.cc
new file mode 100644
index 0000000..2ffc2de
--- /dev/null
+++ b/webrtc/api/jsepsessiondescription.cc
@@ -0,0 +1,203 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/jsepsessiondescription.h"
+
+#include "webrtc/api/webrtcsdp.h"
+#include "talk/session/media/mediasession.h"
+#include "webrtc/base/arraysize.h"
+#include "webrtc/base/stringencode.h"
+
+using rtc::scoped_ptr;
+using cricket::SessionDescription;
+
+namespace webrtc {
+
+static const char* kSupportedTypes[] = {
+    JsepSessionDescription::kOffer,
+    JsepSessionDescription::kPrAnswer,
+    JsepSessionDescription::kAnswer
+};
+
+static bool IsTypeSupported(const std::string& type) {
+  bool type_supported = false;
+  for (size_t i = 0; i < arraysize(kSupportedTypes); ++i) {
+    if (kSupportedTypes[i] == type) {
+      type_supported = true;
+      break;
+    }
+  }
+  return type_supported;
+}
+
+const char SessionDescriptionInterface::kOffer[] = "offer";
+const char SessionDescriptionInterface::kPrAnswer[] = "pranswer";
+const char SessionDescriptionInterface::kAnswer[] = "answer";
+
+const int JsepSessionDescription::kDefaultVideoCodecId = 100;
+// This is effectively the maximum frame rate; 30 is the camera's default.
+const int JsepSessionDescription::kDefaultVideoCodecFramerate = 60;
+const char JsepSessionDescription::kDefaultVideoCodecName[] = "VP8";
+// Default max video codec size, used until it is available via signaling.
+#if defined(ANDROID) || defined(WEBRTC_IOS)
+// Limit default max video codec size for Android to avoid
+// HW VP8 codec initialization failure for resolutions higher
+// than 1280x720 or 720x1280.
+// Same patch for iOS to support 720P in portrait mode.
+const int JsepSessionDescription::kMaxVideoCodecWidth = 1280;
+const int JsepSessionDescription::kMaxVideoCodecHeight = 1280;
+#else
+const int JsepSessionDescription::kMaxVideoCodecWidth = 1920;
+const int JsepSessionDescription::kMaxVideoCodecHeight = 1080;
+#endif
+const int JsepSessionDescription::kDefaultVideoCodecPreference = 1;
+
+SessionDescriptionInterface* CreateSessionDescription(const std::string& type,
+                                                      const std::string& sdp,
+                                                      SdpParseError* error) {
+  if (!IsTypeSupported(type)) {
+    return NULL;
+  }
+
+  JsepSessionDescription* jsep_desc = new JsepSessionDescription(type);
+  if (!jsep_desc->Initialize(sdp, error)) {
+    delete jsep_desc;
+    return NULL;
+  }
+  return jsep_desc;
+}
+
+JsepSessionDescription::JsepSessionDescription(const std::string& type)
+    : type_(type) {
+}
+
+JsepSessionDescription::~JsepSessionDescription() {}
+
+bool JsepSessionDescription::Initialize(
+    cricket::SessionDescription* description,
+    const std::string& session_id,
+    const std::string& session_version) {
+  if (!description)
+    return false;
+
+  session_id_ = session_id;
+  session_version_ = session_version;
+  description_.reset(description);
+  candidate_collection_.resize(number_of_mediasections());
+  return true;
+}
+
+bool JsepSessionDescription::Initialize(const std::string& sdp,
+                                        SdpParseError* error) {
+  return SdpDeserialize(sdp, this, error);
+}
+
+bool JsepSessionDescription::AddCandidate(
+    const IceCandidateInterface* candidate) {
+  if (!candidate || candidate->sdp_mline_index() < 0)
+    return false;
+  size_t mediasection_index = 0;
+  if (!GetMediasectionIndex(candidate, &mediasection_index)) {
+    return false;
+  }
+  if (mediasection_index >= number_of_mediasections())
+    return false;
+  const std::string& content_name =
+      description_->contents()[mediasection_index].name;
+  const cricket::TransportInfo* transport_info =
+      description_->GetTransportInfoByName(content_name);
+  if (!transport_info) {
+    return false;
+  }
+
+  cricket::Candidate updated_candidate = candidate->candidate();
+  if (updated_candidate.username().empty()) {
+    updated_candidate.set_username(transport_info->description.ice_ufrag);
+  }
+  if (updated_candidate.password().empty()) {
+    updated_candidate.set_password(transport_info->description.ice_pwd);
+  }
+
+  scoped_ptr<JsepIceCandidate> updated_candidate_wrapper(
+      new JsepIceCandidate(candidate->sdp_mid(),
+                           static_cast<int>(mediasection_index),
+                           updated_candidate));
+  if (!candidate_collection_[mediasection_index].HasCandidate(
+          updated_candidate_wrapper.get()))
+    candidate_collection_[mediasection_index].add(
+        updated_candidate_wrapper.release());
+
+  return true;
+}
+
+size_t JsepSessionDescription::number_of_mediasections() const {
+  if (!description_)
+    return 0;
+  return description_->contents().size();
+}
+
+const IceCandidateCollection* JsepSessionDescription::candidates(
+    size_t mediasection_index) const {
+  if (mediasection_index >= candidate_collection_.size())
+    return NULL;
+  return &candidate_collection_[mediasection_index];
+}
+
+bool JsepSessionDescription::ToString(std::string* out) const {
+  if (!description_ || !out)
+    return false;
+  *out = SdpSerialize(*this);
+  return !out->empty();
+}
+
+bool JsepSessionDescription::GetMediasectionIndex(
+    const IceCandidateInterface* candidate,
+    size_t* index) {
+  if (!candidate || !index) {
+    return false;
+  }
+  *index = static_cast<size_t>(candidate->sdp_mline_index());
+  if (description_ && !candidate->sdp_mid().empty()) {
+    bool found = false;
+    // Try to match the sdp_mid with content name.
+    for (size_t i = 0; i < description_->contents().size(); ++i) {
+      if (candidate->sdp_mid() == description_->contents().at(i).name) {
+        *index = i;
+        found = true;
+        break;
+      }
+    }
+    if (!found) {
+      // If the sdp_mid is present but we can't find a match, we consider
+      // this an error.
+      return false;
+    }
+  }
+  return true;
+}
+
+}  // namespace webrtc
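
For orientation, a minimal sketch of the parse-then-trickle path implemented above; the
SDP text (|sdp_string|), the cricket::Candidate (|candidate|), and the error handling are
illustrative, not part of this CL:

  // Sketch: deserialize an offer, then attach a trickled ICE candidate.
  webrtc::SdpParseError error;
  rtc::scoped_ptr<webrtc::SessionDescriptionInterface> desc(
      webrtc::CreateSessionDescription("offer", sdp_string, &error));
  if (!desc) {
    // |error.line| holds the offending SDP line, |error.description| the reason.
    LOG(LS_ERROR) << "SDP parse failed: " << error.description;
    return;
  }
  // AddCandidate matches the media section by sdp_mid when present and falls
  // back to sdp_mline_index; empty ufrag/pwd are filled in from the transport.
  webrtc::JsepIceCandidate jsep_candidate("audio", 0, candidate);
  if (!desc->AddCandidate(&jsep_candidate)) {
    LOG(LS_WARNING) << "Candidate did not match any media section.";
  }
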
diff --git a/webrtc/api/jsepsessiondescription.h b/webrtc/api/jsepsessiondescription.h
new file mode 100644
index 0000000..b6e6348
--- /dev/null
+++ b/webrtc/api/jsepsessiondescription.h
@@ -0,0 +1,106 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// Implements the SessionDescriptionInterface.
+
+#ifndef WEBRTC_API_JSEPSESSIONDESCRIPTION_H_
+#define WEBRTC_API_JSEPSESSIONDESCRIPTION_H_
+
+#include <string>
+#include <vector>
+
+#include "webrtc/api/jsep.h"
+#include "webrtc/api/jsepicecandidate.h"
+#include "webrtc/base/scoped_ptr.h"
+
+namespace cricket {
+class SessionDescription;
+}
+
+namespace webrtc {
+
+class JsepSessionDescription : public SessionDescriptionInterface {
+ public:
+  explicit JsepSessionDescription(const std::string& type);
+  virtual ~JsepSessionDescription();
+
+  // |error| can be NULL if the caller doesn't care about the failure reason.
+  bool Initialize(const std::string& sdp, SdpParseError* error);
+
+  // Takes ownership of |description|.
+  bool Initialize(cricket::SessionDescription* description,
+      const std::string& session_id,
+      const std::string& session_version);
+
+  virtual cricket::SessionDescription* description() {
+    return description_.get();
+  }
+  virtual const cricket::SessionDescription* description() const {
+    return description_.get();
+  }
+  virtual std::string session_id() const {
+    return session_id_;
+  }
+  virtual std::string session_version() const {
+    return session_version_;
+  }
+  virtual std::string type() const {
+    return type_;
+  }
+  // Allow changing the type. Used for testing.
+  void set_type(const std::string& type) { type_ = type; }
+  virtual bool AddCandidate(const IceCandidateInterface* candidate);
+  virtual size_t number_of_mediasections() const;
+  virtual const IceCandidateCollection* candidates(
+      size_t mediasection_index) const;
+  virtual bool ToString(std::string* out) const;
+
+  // Default video encoder settings. The resolution is the max resolution.
+  // TODO(perkj): Implement proper negotiation of video resolution.
+  static const int kDefaultVideoCodecId;
+  static const int kDefaultVideoCodecFramerate;
+  static const char kDefaultVideoCodecName[];
+  static const int kMaxVideoCodecWidth;
+  static const int kMaxVideoCodecHeight;
+  static const int kDefaultVideoCodecPreference;
+
+ private:
+  rtc::scoped_ptr<cricket::SessionDescription> description_;
+  std::string session_id_;
+  std::string session_version_;
+  std::string type_;
+  std::vector<JsepCandidateCollection> candidate_collection_;
+
+  bool GetMediasectionIndex(const IceCandidateInterface* candidate,
+                            size_t* index);
+
+  RTC_DISALLOW_COPY_AND_ASSIGN(JsepSessionDescription);
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_JSEPSESSIONDESCRIPTION_H_
diff --git a/webrtc/api/jsepsessiondescription_unittest.cc b/webrtc/api/jsepsessiondescription_unittest.cc
new file mode 100644
index 0000000..90de058
--- /dev/null
+++ b/webrtc/api/jsepsessiondescription_unittest.cc
@@ -0,0 +1,238 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <string>
+
+#include "webrtc/api/jsepicecandidate.h"
+#include "webrtc/api/jsepsessiondescription.h"
+#include "talk/session/media/mediasession.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/helpers.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/ssladapter.h"
+#include "webrtc/base/stringencode.h"
+#include "webrtc/p2p/base/candidate.h"
+#include "webrtc/p2p/base/constants.h"
+#include "webrtc/p2p/base/sessiondescription.h"
+
+using webrtc::IceCandidateCollection;
+using webrtc::IceCandidateInterface;
+using webrtc::JsepIceCandidate;
+using webrtc::JsepSessionDescription;
+using webrtc::SessionDescriptionInterface;
+using rtc::scoped_ptr;
+
+static const char kCandidateUfrag[] = "ufrag";
+static const char kCandidatePwd[] = "pwd";
+static const char kCandidateUfragVoice[] = "ufrag_voice";
+static const char kCandidatePwdVoice[] = "pwd_voice";
+static const char kCandidateUfragVideo[] = "ufrag_video";
+static const char kCandidatePwdVideo[] = "pwd_video";
+
+// This creates a session description with both audio and video media contents.
+// In SDP this is described by two m-lines, one audio and one video.
+static cricket::SessionDescription* CreateCricketSessionDescription() {
+  cricket::SessionDescription* desc(new cricket::SessionDescription());
+  // AudioContentDescription
+  scoped_ptr<cricket::AudioContentDescription> audio(
+      new cricket::AudioContentDescription());
+
+  // VideoContentDescription
+  scoped_ptr<cricket::VideoContentDescription> video(
+      new cricket::VideoContentDescription());
+
+  audio->AddCodec(cricket::AudioCodec(103, "ISAC", 16000, 0, 0, 0));
+  desc->AddContent(cricket::CN_AUDIO, cricket::NS_JINGLE_RTP,
+                   audio.release());
+
+  video->AddCodec(cricket::VideoCodec(120, "VP8", 640, 480, 30, 0));
+  desc->AddContent(cricket::CN_VIDEO, cricket::NS_JINGLE_RTP,
+                   video.release());
+
+  EXPECT_TRUE(desc->AddTransportInfo(cricket::TransportInfo(
+      cricket::CN_AUDIO,
+      cricket::TransportDescription(
+          std::vector<std::string>(), kCandidateUfragVoice, kCandidatePwdVoice,
+          cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_NONE, NULL))));
+  EXPECT_TRUE(desc->AddTransportInfo(cricket::TransportInfo(
+      cricket::CN_VIDEO,
+      cricket::TransportDescription(
+          std::vector<std::string>(), kCandidateUfragVideo, kCandidatePwdVideo,
+          cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_NONE, NULL))));
+  return desc;
+}
+
+class JsepSessionDescriptionTest : public testing::Test {
+ protected:
+  virtual void SetUp() {
+    int port = 1234;
+    rtc::SocketAddress address("127.0.0.1", port++);
+    cricket::Candidate candidate(cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+                                 address, 1, "", "", "local", 0, "1");
+    candidate_ = candidate;
+    const std::string session_id =
+        rtc::ToString(rtc::CreateRandomId64());
+    const std::string session_version =
+        rtc::ToString(rtc::CreateRandomId());
+    jsep_desc_.reset(new JsepSessionDescription("dummy"));
+    ASSERT_TRUE(jsep_desc_->Initialize(CreateCricketSessionDescription(),
+        session_id, session_version));
+  }
+
+  std::string Serialize(const SessionDescriptionInterface* desc) {
+    std::string sdp;
+    EXPECT_TRUE(desc->ToString(&sdp));
+    EXPECT_FALSE(sdp.empty());
+    return sdp;
+  }
+
+  SessionDescriptionInterface* DeSerialize(const std::string& sdp) {
+    JsepSessionDescription* desc(new JsepSessionDescription("dummy"));
+    EXPECT_TRUE(desc->Initialize(sdp, NULL));
+    return desc;
+  }
+
+  cricket::Candidate candidate_;
+  rtc::scoped_ptr<JsepSessionDescription> jsep_desc_;
+};
+
+// Test that number_of_mediasections() returns the number of media contents in
+// a session description.
+TEST_F(JsepSessionDescriptionTest, CheckSessionDescription) {
+  EXPECT_EQ(2u, jsep_desc_->number_of_mediasections());
+}
+
+// Test that we can add a candidate to a session description.
+TEST_F(JsepSessionDescriptionTest, AddCandidateWithoutMid) {
+  JsepIceCandidate jsep_candidate("", 0, candidate_);
+  EXPECT_TRUE(jsep_desc_->AddCandidate(&jsep_candidate));
+  const IceCandidateCollection* ice_candidates = jsep_desc_->candidates(0);
+  ASSERT_TRUE(ice_candidates != NULL);
+  EXPECT_EQ(1u, ice_candidates->count());
+  const IceCandidateInterface* ice_candidate = ice_candidates->at(0);
+  ASSERT_TRUE(ice_candidate != NULL);
+  candidate_.set_username(kCandidateUfragVoice);
+  candidate_.set_password(kCandidatePwdVoice);
+  EXPECT_TRUE(ice_candidate->candidate().IsEquivalent(candidate_));
+  EXPECT_EQ(0, ice_candidate->sdp_mline_index());
+  EXPECT_EQ(0u, jsep_desc_->candidates(1)->count());
+}
+
+TEST_F(JsepSessionDescriptionTest, AddCandidateWithMid) {
+  // The mid and m-line index don't match; in this case, the mid is preferred.
+  JsepIceCandidate jsep_candidate("video", 0, candidate_);
+  EXPECT_TRUE(jsep_desc_->AddCandidate(&jsep_candidate));
+  EXPECT_EQ(0u, jsep_desc_->candidates(0)->count());
+  const IceCandidateCollection* ice_candidates = jsep_desc_->candidates(1);
+  ASSERT_TRUE(ice_candidates != NULL);
+  EXPECT_EQ(1u, ice_candidates->count());
+  const IceCandidateInterface* ice_candidate = ice_candidates->at(0);
+  ASSERT_TRUE(ice_candidate != NULL);
+  candidate_.set_username(kCandidateUfragVideo);
+  candidate_.set_password(kCandidatePwdVideo);
+  EXPECT_TRUE(ice_candidate->candidate().IsEquivalent(candidate_));
+  // The mline index should have been updated according to mid.
+  EXPECT_EQ(1, ice_candidate->sdp_mline_index());
+}
+
+TEST_F(JsepSessionDescriptionTest, AddCandidateAlreadyHasUfrag) {
+  candidate_.set_username(kCandidateUfrag);
+  candidate_.set_password(kCandidatePwd);
+  JsepIceCandidate jsep_candidate("audio", 0, candidate_);
+  EXPECT_TRUE(jsep_desc_->AddCandidate(&jsep_candidate));
+  const IceCandidateCollection* ice_candidates = jsep_desc_->candidates(0);
+  ASSERT_TRUE(ice_candidates != NULL);
+  EXPECT_EQ(1u, ice_candidates->count());
+  const IceCandidateInterface* ice_candidate = ice_candidates->at(0);
+  ASSERT_TRUE(ice_candidate != NULL);
+  candidate_.set_username(kCandidateUfrag);
+  candidate_.set_password(kCandidatePwd);
+  EXPECT_TRUE(ice_candidate->candidate().IsEquivalent(candidate_));
+
+  EXPECT_EQ(0u, jsep_desc_->candidates(1)->count());
+}
+
+// Test that we can not add a candidate if there is no corresponding media
+// content in the session description.
+TEST_F(JsepSessionDescriptionTest, AddBadCandidate) {
+  JsepIceCandidate bad_candidate1("", 55, candidate_);
+  EXPECT_FALSE(jsep_desc_->AddCandidate(&bad_candidate1));
+
+  JsepIceCandidate bad_candidate2("some weird mid", 0, candidate_);
+  EXPECT_FALSE(jsep_desc_->AddCandidate(&bad_candidate2));
+}
+
+// Tests that repeatedly adding the same candidate, with or without credentials,
+// does not increase the number of candidates in the description.
+TEST_F(JsepSessionDescriptionTest, AddCandidateDuplicates) {
+  JsepIceCandidate jsep_candidate("", 0, candidate_);
+  EXPECT_TRUE(jsep_desc_->AddCandidate(&jsep_candidate));
+  EXPECT_EQ(1u, jsep_desc_->candidates(0)->count());
+
+  // Add the same candidate again.  It should be ignored.
+  EXPECT_TRUE(jsep_desc_->AddCandidate(&jsep_candidate));
+  EXPECT_EQ(1u, jsep_desc_->candidates(0)->count());
+
+  // Create a new candidate, identical except that the ufrag and pwd are now
+  // populated.
+  candidate_.set_username(kCandidateUfragVoice);
+  candidate_.set_password(kCandidatePwdVoice);
+  JsepIceCandidate jsep_candidate_with_credentials("", 0, candidate_);
+
+  // This should also be identified as redundant and ignored.
+  EXPECT_TRUE(jsep_desc_->AddCandidate(&jsep_candidate_with_credentials));
+  EXPECT_EQ(1u, jsep_desc_->candidates(0)->count());
+}
+
+// Test that we can serialize a JsepSessionDescription and deserialize it again.
+TEST_F(JsepSessionDescriptionTest, SerializeDeserialize) {
+  std::string sdp = Serialize(jsep_desc_.get());
+
+  scoped_ptr<SessionDescriptionInterface> parsed_jsep_desc(DeSerialize(sdp));
+  EXPECT_EQ(2u, parsed_jsep_desc->number_of_mediasections());
+
+  std::string parsed_sdp = Serialize(parsed_jsep_desc.get());
+  EXPECT_EQ(sdp, parsed_sdp);
+}
+
+// Tests that we can serialize and deserialize a JsepSessionDescription
+// with candidates.
+TEST_F(JsepSessionDescriptionTest, SerializeDeserializeWithCandidates) {
+  std::string sdp = Serialize(jsep_desc_.get());
+
+  // Add a candidate and check that the serialized result is different.
+  JsepIceCandidate jsep_candidate("audio", 0, candidate_);
+  EXPECT_TRUE(jsep_desc_->AddCandidate(&jsep_candidate));
+  std::string sdp_with_candidate = Serialize(jsep_desc_.get());
+  EXPECT_NE(sdp, sdp_with_candidate);
+
+  scoped_ptr<SessionDescriptionInterface> parsed_jsep_desc(
+      DeSerialize(sdp_with_candidate));
+  std::string parsed_sdp_with_candidate = Serialize(parsed_jsep_desc.get());
+
+  EXPECT_EQ(sdp_with_candidate, parsed_sdp_with_candidate);
+}
diff --git a/webrtc/api/localaudiosource.cc b/webrtc/api/localaudiosource.cc
new file mode 100644
index 0000000..7aa05bb
--- /dev/null
+++ b/webrtc/api/localaudiosource.cc
@@ -0,0 +1,113 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/localaudiosource.h"
+
+#include <vector>
+
+#include "webrtc/api/mediaconstraintsinterface.h"
+#include "webrtc/media/base/mediaengine.h"
+
+using webrtc::MediaConstraintsInterface;
+using webrtc::MediaSourceInterface;
+
+namespace webrtc {
+
+namespace {
+
+// Convert constraints to audio options. Constraint values that cannot be
+// parsed as booleans are ignored, as are unrecognized keys.
+void FromConstraints(const MediaConstraintsInterface::Constraints& constraints,
+                     cricket::AudioOptions* options) {
+  // This design relies on the fact that all the audio constraints are actually
+  // "options", i.e. boolean-valued and always satisfiable.  If the constraints
+  // are extended to include non-boolean values or actual format constraints,
+  // a different algorithm will be required.
+  struct {
+    const char* name;
+    rtc::Optional<bool>& value;
+  } key_to_value[] = {
+      {MediaConstraintsInterface::kGoogEchoCancellation,
+       options->echo_cancellation},
+      {MediaConstraintsInterface::kExtendedFilterEchoCancellation,
+       options->extended_filter_aec},
+      {MediaConstraintsInterface::kDAEchoCancellation,
+       options->delay_agnostic_aec},
+      {MediaConstraintsInterface::kAutoGainControl, options->auto_gain_control},
+      {MediaConstraintsInterface::kExperimentalAutoGainControl,
+       options->experimental_agc},
+      {MediaConstraintsInterface::kNoiseSuppression,
+       options->noise_suppression},
+      {MediaConstraintsInterface::kExperimentalNoiseSuppression,
+       options->experimental_ns},
+      {MediaConstraintsInterface::kHighpassFilter, options->highpass_filter},
+      {MediaConstraintsInterface::kTypingNoiseDetection,
+       options->typing_detection},
+      {MediaConstraintsInterface::kAudioMirroring, options->stereo_swapping},
+      {MediaConstraintsInterface::kAecDump, options->aec_dump}
+  };
+
+  for (const auto& constraint : constraints) {
+    bool value = false;
+    if (!rtc::FromString(constraint.value, &value))
+      continue;
+
+    for (auto& entry : key_to_value) {
+      if (constraint.key.compare(entry.name) == 0)
+        entry.value = rtc::Optional<bool>(value);
+    }
+  }
+}
+
+}  // namespace
+
+rtc::scoped_refptr<LocalAudioSource> LocalAudioSource::Create(
+    const PeerConnectionFactoryInterface::Options& options,
+    const MediaConstraintsInterface* constraints) {
+  rtc::scoped_refptr<LocalAudioSource> source(
+      new rtc::RefCountedObject<LocalAudioSource>());
+  source->Initialize(options, constraints);
+  return source;
+}
+
+void LocalAudioSource::Initialize(
+    const PeerConnectionFactoryInterface::Options& options,
+    const MediaConstraintsInterface* constraints) {
+  if (!constraints)
+    return;
+
+  // Apply optional constraints first; any that conflict will be overwritten
+  // by mandatory constraints.
+  FromConstraints(constraints->GetOptional(), &options_);
+
+  cricket::AudioOptions mandatory_options;
+  FromConstraints(constraints->GetMandatory(), &mandatory_options);
+  options_.SetAll(mandatory_options);
+  source_state_ = kLive;
+}
+
+}  // namespace webrtc
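
As a sketch of the precedence rule in Initialize() above, using the FakeConstraints test
helper from webrtc/api/test/fakeconstraints.h (the constraint values are illustrative):

  // Sketch: optional constraints are applied first, then mandatory constraints
  // overwrite them through AudioOptions::SetAll().
  webrtc::FakeConstraints constraints;
  constraints.AddOptional(
      webrtc::MediaConstraintsInterface::kGoogEchoCancellation, true);
  constraints.AddMandatory(
      webrtc::MediaConstraintsInterface::kGoogEchoCancellation, false);
  rtc::scoped_refptr<webrtc::LocalAudioSource> source =
      webrtc::LocalAudioSource::Create(
          webrtc::PeerConnectionFactoryInterface::Options(), &constraints);
  // The mandatory value wins: echo_cancellation == rtc::Optional<bool>(false).
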
diff --git a/webrtc/api/localaudiosource.h b/webrtc/api/localaudiosource.h
new file mode 100644
index 0000000..5b6133a
--- /dev/null
+++ b/webrtc/api/localaudiosource.h
@@ -0,0 +1,73 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_LOCALAUDIOSOURCE_H_
+#define WEBRTC_API_LOCALAUDIOSOURCE_H_
+
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/api/notifier.h"
+#include "webrtc/api/peerconnectioninterface.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/media/base/mediachannel.h"
+
+// LocalAudioSource implements AudioSourceInterface. It contains the settings
+// for switching audio processing on and off.
+
+namespace webrtc {
+
+class MediaConstraintsInterface;
+
+class LocalAudioSource : public Notifier<AudioSourceInterface> {
+ public:
+  // Creates an instance of LocalAudioSource.
+  static rtc::scoped_refptr<LocalAudioSource> Create(
+      const PeerConnectionFactoryInterface::Options& options,
+      const MediaConstraintsInterface* constraints);
+
+  SourceState state() const override { return source_state_; }
+  bool remote() const override { return false; }
+
+  virtual const cricket::AudioOptions& options() const { return options_; }
+
+  void AddSink(AudioTrackSinkInterface* sink) override {}
+  void RemoveSink(AudioTrackSinkInterface* sink) override {}
+
+ protected:
+  LocalAudioSource() : source_state_(kInitializing) {}
+  ~LocalAudioSource() override {}
+
+ private:
+  void Initialize(const PeerConnectionFactoryInterface::Options& options,
+                  const MediaConstraintsInterface* constraints);
+
+  cricket::AudioOptions options_;
+  SourceState source_state_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_LOCALAUDIOSOURCE_H_
diff --git a/webrtc/api/localaudiosource_unittest.cc b/webrtc/api/localaudiosource_unittest.cc
new file mode 100644
index 0000000..01469e2
--- /dev/null
+++ b/webrtc/api/localaudiosource_unittest.cc
@@ -0,0 +1,117 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/localaudiosource.h"
+
+#include <string>
+#include <vector>
+
+#include "webrtc/api/test/fakeconstraints.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/media/base/fakemediaengine.h"
+#include "webrtc/media/base/fakevideorenderer.h"
+
+using webrtc::LocalAudioSource;
+using webrtc::MediaConstraintsInterface;
+using webrtc::MediaSourceInterface;
+using webrtc::PeerConnectionFactoryInterface;
+
+TEST(LocalAudioSourceTest, SetValidOptions) {
+  webrtc::FakeConstraints constraints;
+  constraints.AddMandatory(
+      MediaConstraintsInterface::kGoogEchoCancellation, false);
+  constraints.AddOptional(
+      MediaConstraintsInterface::kExtendedFilterEchoCancellation, true);
+  constraints.AddOptional(MediaConstraintsInterface::kDAEchoCancellation, true);
+  constraints.AddOptional(MediaConstraintsInterface::kAutoGainControl, true);
+  constraints.AddOptional(
+      MediaConstraintsInterface::kExperimentalAutoGainControl, true);
+  constraints.AddMandatory(MediaConstraintsInterface::kNoiseSuppression, false);
+  constraints.AddOptional(MediaConstraintsInterface::kHighpassFilter, true);
+  constraints.AddOptional(MediaConstraintsInterface::kAecDump, true);
+
+  rtc::scoped_refptr<LocalAudioSource> source =
+      LocalAudioSource::Create(PeerConnectionFactoryInterface::Options(),
+                               &constraints);
+
+  EXPECT_EQ(rtc::Optional<bool>(false), source->options().echo_cancellation);
+  EXPECT_EQ(rtc::Optional<bool>(true), source->options().extended_filter_aec);
+  EXPECT_EQ(rtc::Optional<bool>(true), source->options().delay_agnostic_aec);
+  EXPECT_EQ(rtc::Optional<bool>(true), source->options().auto_gain_control);
+  EXPECT_EQ(rtc::Optional<bool>(true), source->options().experimental_agc);
+  EXPECT_EQ(rtc::Optional<bool>(false), source->options().noise_suppression);
+  EXPECT_EQ(rtc::Optional<bool>(true), source->options().highpass_filter);
+  EXPECT_EQ(rtc::Optional<bool>(true), source->options().aec_dump);
+}
+
+TEST(LocalAudioSourceTest, OptionNotSet) {
+  webrtc::FakeConstraints constraints;
+  rtc::scoped_refptr<LocalAudioSource> source =
+      LocalAudioSource::Create(PeerConnectionFactoryInterface::Options(),
+                               &constraints);
+  EXPECT_EQ(rtc::Optional<bool>(), source->options().highpass_filter);
+}
+
+TEST(LocalAudioSourceTest, MandatoryOverridesOptional) {
+  webrtc::FakeConstraints constraints;
+  constraints.AddMandatory(
+      MediaConstraintsInterface::kGoogEchoCancellation, false);
+  constraints.AddOptional(
+      MediaConstraintsInterface::kGoogEchoCancellation, true);
+
+  rtc::scoped_refptr<LocalAudioSource> source =
+      LocalAudioSource::Create(PeerConnectionFactoryInterface::Options(),
+                               &constraints);
+
+  EXPECT_EQ(rtc::Optional<bool>(false), source->options().echo_cancellation);
+}
+
+TEST(LocalAudioSourceTest, InvalidOptional) {
+  webrtc::FakeConstraints constraints;
+  constraints.AddOptional(MediaConstraintsInterface::kHighpassFilter, false);
+  constraints.AddOptional("invalidKey", false);
+
+  rtc::scoped_refptr<LocalAudioSource> source =
+      LocalAudioSource::Create(PeerConnectionFactoryInterface::Options(),
+                               &constraints);
+
+  EXPECT_EQ(MediaSourceInterface::kLive, source->state());
+  EXPECT_EQ(rtc::Optional<bool>(false), source->options().highpass_filter);
+}
+
+TEST(LocalAudioSourceTest, InvalidMandatory) {
+  webrtc::FakeConstraints constraints;
+  constraints.AddMandatory(MediaConstraintsInterface::kHighpassFilter, false);
+  constraints.AddMandatory("invalidKey", false);
+
+  rtc::scoped_refptr<LocalAudioSource> source =
+      LocalAudioSource::Create(PeerConnectionFactoryInterface::Options(),
+                               &constraints);
+
+  EXPECT_EQ(MediaSourceInterface::kLive, source->state());
+  EXPECT_EQ(rtc::Optional<bool>(false), source->options().highpass_filter);
+}
diff --git a/webrtc/api/mediaconstraintsinterface.cc b/webrtc/api/mediaconstraintsinterface.cc
new file mode 100644
index 0000000..b8575ae
--- /dev/null
+++ b/webrtc/api/mediaconstraintsinterface.cc
@@ -0,0 +1,149 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/mediaconstraintsinterface.h"
+
+#include "webrtc/base/stringencode.h"
+
+namespace webrtc {
+
+const char MediaConstraintsInterface::kValueTrue[] = "true";
+const char MediaConstraintsInterface::kValueFalse[] = "false";
+
+// Constraints declared as static members in mediaconstraintsinterface.h.
+// Specified by draft-alvestrand-constraints-resolution-00b
+const char MediaConstraintsInterface::kMinAspectRatio[] = "minAspectRatio";
+const char MediaConstraintsInterface::kMaxAspectRatio[] = "maxAspectRatio";
+const char MediaConstraintsInterface::kMaxWidth[] = "maxWidth";
+const char MediaConstraintsInterface::kMinWidth[] = "minWidth";
+const char MediaConstraintsInterface::kMaxHeight[] = "maxHeight";
+const char MediaConstraintsInterface::kMinHeight[] = "minHeight";
+const char MediaConstraintsInterface::kMaxFrameRate[] = "maxFrameRate";
+const char MediaConstraintsInterface::kMinFrameRate[] = "minFrameRate";
+
+// Audio constraints.
+const char MediaConstraintsInterface::kEchoCancellation[] =
+    "echoCancellation";
+const char MediaConstraintsInterface::kGoogEchoCancellation[] =
+    "googEchoCancellation";
+const char MediaConstraintsInterface::kExtendedFilterEchoCancellation[] =
+    "googEchoCancellation2";
+const char MediaConstraintsInterface::kDAEchoCancellation[] =
+    "googDAEchoCancellation";
+const char MediaConstraintsInterface::kAutoGainControl[] =
+    "googAutoGainControl";
+const char MediaConstraintsInterface::kExperimentalAutoGainControl[] =
+    "googAutoGainControl2";
+const char MediaConstraintsInterface::kNoiseSuppression[] =
+    "googNoiseSuppression";
+const char MediaConstraintsInterface::kExperimentalNoiseSuppression[] =
+    "googNoiseSuppression2";
+const char MediaConstraintsInterface::kHighpassFilter[] =
+    "googHighpassFilter";
+const char MediaConstraintsInterface::kTypingNoiseDetection[] =
+    "googTypingNoiseDetection";
+const char MediaConstraintsInterface::kAudioMirroring[] = "googAudioMirroring";
+const char MediaConstraintsInterface::kAecDump[] = "audioDebugRecording";
+
+// Google-specific constraint keys for a local video source (getUserMedia).
+const char MediaConstraintsInterface::kNoiseReduction[] = "googNoiseReduction";
+
+// Constraint keys for CreateOffer / CreateAnswer defined in W3C specification.
+const char MediaConstraintsInterface::kOfferToReceiveAudio[] =
+    "OfferToReceiveAudio";
+const char MediaConstraintsInterface::kOfferToReceiveVideo[] =
+    "OfferToReceiveVideo";
+const char MediaConstraintsInterface::kVoiceActivityDetection[] =
+    "VoiceActivityDetection";
+const char MediaConstraintsInterface::kIceRestart[] =
+    "IceRestart";
+// Google specific constraint for BUNDLE enable/disable.
+const char MediaConstraintsInterface::kUseRtpMux[] =
+    "googUseRtpMUX";
+
+// The constraints below should be used during PeerConnection construction.
+const char MediaConstraintsInterface::kEnableDtlsSrtp[] =
+    "DtlsSrtpKeyAgreement";
+const char MediaConstraintsInterface::kEnableRtpDataChannels[] =
+    "RtpDataChannels";
+// Google-specific constraint keys.
+const char MediaConstraintsInterface::kEnableDscp[] = "googDscp";
+const char MediaConstraintsInterface::kEnableIPv6[] = "googIPv6";
+const char MediaConstraintsInterface::kEnableVideoSuspendBelowMinBitrate[] =
+    "googSuspendBelowMinBitrate";
+const char MediaConstraintsInterface::kCombinedAudioVideoBwe[] =
+    "googCombinedAudioVideoBwe";
+const char MediaConstraintsInterface::kScreencastMinBitrate[] =
+    "googScreencastMinBitrate";
+// TODO(ronghuawu): Remove once cpu overuse detection is stable.
+const char MediaConstraintsInterface::kCpuOveruseDetection[] =
+    "googCpuOveruseDetection";
+const char MediaConstraintsInterface::kPayloadPadding[] = "googPayloadPadding";
+
+// Set |value| to the value associated with the first appearance of |key|, or
+// return false if |key| is not found.
+bool MediaConstraintsInterface::Constraints::FindFirst(
+    const std::string& key, std::string* value) const {
+  for (Constraints::const_iterator iter = begin(); iter != end(); ++iter) {
+    if (iter->key == key) {
+      *value = iter->value;
+      return true;
+    }
+  }
+  return false;
+}
+
+// Find the highest-priority instance of the boolean-valued constraint named by
+// |key| and return its value as |value|. |constraints| can be null.
+// If |mandatory_constraints| is non-null, it is incremented if the key appears
+// among the mandatory constraints.
+// Returns true if the key was found and has a valid boolean value.
+// If the key appears multiple times as an optional constraint, appearances
+// after the first are ignored.
+// Note: Because this uses FindFirst, repeated optional constraints whose
+// first instance has an unrecognized value are not handled precisely in
+// accordance with the specification.
+bool FindConstraint(const MediaConstraintsInterface* constraints,
+                    const std::string& key, bool* value,
+                    size_t* mandatory_constraints) {
+  std::string string_value;
+  if (!constraints) {
+    return false;
+  }
+  if (constraints->GetMandatory().FindFirst(key, &string_value)) {
+    if (mandatory_constraints)
+      ++*mandatory_constraints;
+    return rtc::FromString(string_value, value);
+  }
+  if (constraints->GetOptional().FindFirst(key, &string_value)) {
+    return rtc::FromString(string_value, value);
+  }
+  return false;
+}
+
+}  // namespace webrtc
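
A short sketch of the FindConstraint() contract documented above; the |constraints|
pointer is assumed to come from the caller:

  // Sketch: mandatory constraints take precedence over optional ones, and
  // |mandatory_constraints| counts how many mandatory keys were recognized.
  bool enable_dtls = false;
  size_t mandatory_seen = 0;
  if (webrtc::FindConstraint(constraints,
                             webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
                             &enable_dtls, &mandatory_seen)) {
    // |enable_dtls| holds the parsed boolean; |mandatory_seen| was incremented
    // only if the key was found among the mandatory constraints.
  }
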
diff --git a/webrtc/api/mediaconstraintsinterface.h b/webrtc/api/mediaconstraintsinterface.h
new file mode 100644
index 0000000..d759e00
--- /dev/null
+++ b/webrtc/api/mediaconstraintsinterface.h
@@ -0,0 +1,141 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains the interface for MediaConstraints, corresponding to
+// the definition at
+// http://www.w3.org/TR/mediacapture-streams/#mediastreamconstraints and also
+// used in WebRTC: http://dev.w3.org/2011/webrtc/editor/webrtc.html#constraints.
+
+#ifndef WEBRTC_API_MEDIACONSTRAINTSINTERFACE_H_
+#define WEBRTC_API_MEDIACONSTRAINTSINTERFACE_H_
+
+#include <string>
+#include <vector>
+
+namespace webrtc {
+
+// MediaConstraintsInterface
+// Interface used for passing arguments about media constraints
+// to the MediaStream and PeerConnection implementation.
+class MediaConstraintsInterface {
+ public:
+  struct Constraint {
+    Constraint() {}
+    Constraint(const std::string& key, const std::string& value)
+        : key(key), value(value) {
+    }
+    std::string key;
+    std::string value;
+  };
+
+  class Constraints : public std::vector<Constraint> {
+   public:
+    bool FindFirst(const std::string& key, std::string* value) const;
+  };
+
+  virtual const Constraints& GetMandatory() const = 0;
+  virtual const Constraints& GetOptional() const = 0;
+
+  // Constraint keys used by a local video source.
+  // Specified by draft-alvestrand-constraints-resolution-00b
+  static const char kMinAspectRatio[];  // minAspectRatio
+  static const char kMaxAspectRatio[];  // maxAspectRatio
+  static const char kMaxWidth[];  // maxWidth
+  static const char kMinWidth[];  // minWidth
+  static const char kMaxHeight[];  // maxHeight
+  static const char kMinHeight[];  // minHeight
+  static const char kMaxFrameRate[];  // maxFrameRate
+  static const char kMinFrameRate[];  // minFrameRate
+
+  // Constraint keys used by a local audio source.
+  static const char kEchoCancellation[];  // echoCancellation
+
+  // These keys are google specific.
+  static const char kGoogEchoCancellation[];  // googEchoCancellation
+
+  static const char kExtendedFilterEchoCancellation[];  // googEchoCancellation2
+  static const char kDAEchoCancellation[];  // googDAEchoCancellation
+  static const char kAutoGainControl[];  // googAutoGainControl
+  static const char kExperimentalAutoGainControl[];  // googAutoGainControl2
+  static const char kNoiseSuppression[];  // googNoiseSuppression
+  static const char kExperimentalNoiseSuppression[];  // googNoiseSuppression2
+  static const char kHighpassFilter[];  // googHighpassFilter
+  static const char kTypingNoiseDetection[];  // googTypingNoiseDetection
+  static const char kAudioMirroring[];  // googAudioMirroring
+  static const char kAecDump[];  // audioDebugRecording
+
+  // Google-specific constraint keys for a local video source
+  static const char kNoiseReduction[];  // googNoiseReduction
+
+  // Constraint keys for CreateOffer / CreateAnswer
+  // Specified by the W3C PeerConnection spec
+  static const char kOfferToReceiveVideo[];  // OfferToReceiveVideo
+  static const char kOfferToReceiveAudio[];  // OfferToReceiveAudio
+  static const char kVoiceActivityDetection[];  // VoiceActivityDetection
+  static const char kIceRestart[];  // IceRestart
+  // These keys are google specific.
+  static const char kUseRtpMux[];  // googUseRtpMUX
+
+  // Constraints values.
+  static const char kValueTrue[];  // true
+  static const char kValueFalse[];  // false
+
+  // PeerConnection constraint keys.
+  // Temporary pseudo-constraints used to enable DTLS-SRTP
+  static const char kEnableDtlsSrtp[];  // Enable DTLS-SRTP
+  // Temporary pseudo-constraints used to enable DataChannels
+  static const char kEnableRtpDataChannels[];  // Enable RTP DataChannels
+  // Google-specific constraint keys.
+  // Temporary pseudo-constraint for enabling DSCP through JS.
+  static const char kEnableDscp[];  // googDscp
+  // Constraint to enable IPv6 through JS.
+  static const char kEnableIPv6[];  // googIPv6
+  // Temporary constraint to enable suspend below min bitrate feature.
+  static const char kEnableVideoSuspendBelowMinBitrate[];
+      // googSuspendBelowMinBitrate
+  // Constraint to enable combined audio+video bandwidth estimation.
+  static const char kCombinedAudioVideoBwe[];  // googCombinedAudioVideoBwe
+  static const char kScreencastMinBitrate[];  // googScreencastMinBitrate
+  static const char kCpuOveruseDetection[];  // googCpuOveruseDetection
+  static const char kPayloadPadding[];  // googPayloadPadding
+
+  // The prefix of internal-only constraints whose JS-set values should be
+  // stripped by Chrome before being passed down to libjingle.
+  static const char kInternalConstraintPrefix[];
+
+ protected:
+  // Dtor protected as objects shouldn't be deleted via this interface
+  virtual ~MediaConstraintsInterface() {}
+};
+
+bool FindConstraint(const MediaConstraintsInterface* constraints,
+                    const std::string& key, bool* value,
+                    size_t* mandatory_constraints);
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_MEDIACONSTRAINTSINTERFACE_H_
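
Since MediaConstraintsInterface is pure virtual, embedders supply the storage. A minimal
sketch of a concrete implementation; the class name is illustrative, and
webrtc/api/test/fakeconstraints.h plays this role for the tests in this CL:

  // Sketch: the smallest useful implementation -- two constraint vectors plus
  // the two accessors the interface requires.
  class SimpleConstraints : public webrtc::MediaConstraintsInterface {
   public:
    const Constraints& GetMandatory() const override { return mandatory_; }
    const Constraints& GetOptional() const override { return optional_; }
    void AddMandatory(const std::string& key, const std::string& value) {
      mandatory_.push_back(Constraint(key, value));
    }
    void AddOptional(const std::string& key, const std::string& value) {
      optional_.push_back(Constraint(key, value));
    }

   private:
    Constraints mandatory_;
    Constraints optional_;
  };
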
diff --git a/webrtc/api/mediacontroller.cc b/webrtc/api/mediacontroller.cc
new file mode 100644
index 0000000..7d94a1a
--- /dev/null
+++ b/webrtc/api/mediacontroller.cc
@@ -0,0 +1,96 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/mediacontroller.h"
+
+#include "talk/session/media/channelmanager.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/call.h"
+
+namespace {
+
+const int kMinBandwidthBps = 30000;
+const int kStartBandwidthBps = 300000;
+const int kMaxBandwidthBps = 2000000;
+
+class MediaController : public webrtc::MediaControllerInterface,
+                        public sigslot::has_slots<> {
+ public:
+  MediaController(rtc::Thread* worker_thread,
+                  cricket::ChannelManager* channel_manager)
+      : worker_thread_(worker_thread), channel_manager_(channel_manager) {
+    RTC_DCHECK(nullptr != worker_thread);
+    worker_thread_->Invoke<void>(
+        rtc::Bind(&MediaController::Construct_w, this,
+                  channel_manager_->media_engine()));
+  }
+  ~MediaController() override {
+    worker_thread_->Invoke<void>(rtc::Bind(&MediaController::Destruct_w, this));
+  }
+
+  webrtc::Call* call_w() override {
+    RTC_DCHECK(worker_thread_->IsCurrent());
+    return call_.get();
+  }
+
+  cricket::ChannelManager* channel_manager() const override {
+    return channel_manager_;
+  }
+
+ private:
+  void Construct_w(cricket::MediaEngineInterface* media_engine) {
+    RTC_DCHECK(worker_thread_->IsCurrent());
+    RTC_DCHECK(media_engine);
+    webrtc::Call::Config config;
+    config.audio_state = media_engine->GetAudioState();
+    config.bitrate_config.min_bitrate_bps = kMinBandwidthBps;
+    config.bitrate_config.start_bitrate_bps = kStartBandwidthBps;
+    config.bitrate_config.max_bitrate_bps = kMaxBandwidthBps;
+    call_.reset(webrtc::Call::Create(config));
+  }
+  void Destruct_w() {
+    RTC_DCHECK(worker_thread_->IsCurrent());
+    call_.reset();
+  }
+
+  rtc::Thread* const worker_thread_;
+  cricket::ChannelManager* const channel_manager_;
+  rtc::scoped_ptr<webrtc::Call> call_;
+
+  RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(MediaController);
+};
+}  // namespace
+
+namespace webrtc {
+
+MediaControllerInterface* MediaControllerInterface::Create(
+    rtc::Thread* worker_thread,
+    cricket::ChannelManager* channel_manager) {
+  return new MediaController(worker_thread, channel_manager);
+}
+}  // namespace webrtc
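
The worker-thread discipline above is the contract callers must follow: call_w() DCHECKs
that it runs on the worker thread. A sketch mirroring the rtc::Bind pattern used above;
ConfigureCall_w, |worker_thread|, and |channel_manager| are illustrative:

  // Sketch: create the controller on any thread, but touch the Call it owns
  // only from the worker thread.
  void ConfigureCall_w(webrtc::MediaControllerInterface* media_controller) {
    webrtc::Call* call = media_controller->call_w();  // On the worker thread.
    // ... create send/receive streams on |call| ...
  }

  rtc::scoped_ptr<webrtc::MediaControllerInterface> media_controller(
      webrtc::MediaControllerInterface::Create(worker_thread, channel_manager));
  worker_thread->Invoke<void>(
      rtc::Bind(&ConfigureCall_w, media_controller.get()));
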
diff --git a/webrtc/api/mediacontroller.h b/webrtc/api/mediacontroller.h
new file mode 100644
index 0000000..f07ddf5
--- /dev/null
+++ b/webrtc/api/mediacontroller.h
@@ -0,0 +1,55 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_MEDIACONTROLLER_H_
+#define WEBRTC_API_MEDIACONTROLLER_H_
+
+#include "webrtc/base/thread.h"
+
+namespace cricket {
+class ChannelManager;
+}  // namespace cricket
+
+namespace webrtc {
+class Call;
+class VoiceEngine;
+
+// The MediaController currently owns shared state between media channels, but
+// in the future will create and own RtpSenders and RtpReceivers.
+class MediaControllerInterface {
+ public:
+  static MediaControllerInterface* Create(
+      rtc::Thread* worker_thread,
+      cricket::ChannelManager* channel_manager);
+
+  virtual ~MediaControllerInterface() {}
+  virtual webrtc::Call* call_w() = 0;
+  virtual cricket::ChannelManager* channel_manager() const = 0;
+};
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_MEDIACONTROLLER_H_
diff --git a/webrtc/api/mediastream.cc b/webrtc/api/mediastream.cc
new file mode 100644
index 0000000..fe7db9f
--- /dev/null
+++ b/webrtc/api/mediastream.cc
@@ -0,0 +1,112 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/mediastream.h"
+#include "webrtc/base/logging.h"
+
+namespace webrtc {
+
+template <class V>
+static typename V::iterator FindTrack(V* vector,
+                                      const std::string& track_id) {
+  typename V::iterator it = vector->begin();
+  for (; it != vector->end(); ++it) {
+    if ((*it)->id() == track_id) {
+      break;
+    }
+  }
+  return it;
+}
+
+rtc::scoped_refptr<MediaStream> MediaStream::Create(
+    const std::string& label) {
+  rtc::RefCountedObject<MediaStream>* stream =
+      new rtc::RefCountedObject<MediaStream>(label);
+  return stream;
+}
+
+MediaStream::MediaStream(const std::string& label)
+    : label_(label) {
+}
+
+bool MediaStream::AddTrack(AudioTrackInterface* track) {
+  return AddTrack<AudioTrackVector, AudioTrackInterface>(&audio_tracks_, track);
+}
+
+bool MediaStream::AddTrack(VideoTrackInterface* track) {
+  return AddTrack<VideoTrackVector, VideoTrackInterface>(&video_tracks_, track);
+}
+
+bool MediaStream::RemoveTrack(AudioTrackInterface* track) {
+  return RemoveTrack<AudioTrackVector>(&audio_tracks_, track);
+}
+
+bool MediaStream::RemoveTrack(VideoTrackInterface* track) {
+  return RemoveTrack<VideoTrackVector>(&video_tracks_, track);
+}
+
+rtc::scoped_refptr<AudioTrackInterface>
+MediaStream::FindAudioTrack(const std::string& track_id) {
+  AudioTrackVector::iterator it = FindTrack(&audio_tracks_, track_id);
+  if (it == audio_tracks_.end())
+    return NULL;
+  return *it;
+}
+
+rtc::scoped_refptr<VideoTrackInterface>
+MediaStream::FindVideoTrack(const std::string& track_id) {
+  VideoTrackVector::iterator it = FindTrack(&video_tracks_, track_id);
+  if (it == video_tracks_.end())
+    return NULL;
+  return *it;
+}
+
+template <typename TrackVector, typename Track>
+bool MediaStream::AddTrack(TrackVector* tracks, Track* track) {
+  typename TrackVector::iterator it = FindTrack(tracks, track->id());
+  if (it != tracks->end())
+    return false;
+  tracks->push_back(track);
+  FireOnChanged();
+  return true;
+}
+
+template <typename TrackVector>
+bool MediaStream::RemoveTrack(TrackVector* tracks,
+                              MediaStreamTrackInterface* track) {
+  ASSERT(tracks != NULL);
+  if (!track)
+    return false;
+  typename TrackVector::iterator it = FindTrack(tracks, track->id());
+  if (it == tracks->end())
+    return false;
+  tracks->erase(it);
+  FireOnChanged();
+  return true;
+}
+
+}  // namespace webrtc
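
A brief sketch of the bookkeeping implemented above; |audio_track| is assumed to be an
existing rtc::scoped_refptr<webrtc::AudioTrackInterface>:

  // Sketch: AddTrack() de-dupes by track id and fires OnChanged() on
  // registered observers; FindAudioTrack() returns NULL for unknown ids.
  rtc::scoped_refptr<webrtc::MediaStream> stream =
      webrtc::MediaStream::Create("local_stream_1");
  stream->AddTrack(audio_track.get());  // Returns true: first add.
  stream->AddTrack(audio_track.get());  // Returns false: id already present.
  rtc::scoped_refptr<webrtc::AudioTrackInterface> found =
      stream->FindAudioTrack(audio_track->id());  // Non-NULL.
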
diff --git a/webrtc/api/mediastream.h b/webrtc/api/mediastream.h
new file mode 100644
index 0000000..94f21eb
--- /dev/null
+++ b/webrtc/api/mediastream.h
@@ -0,0 +1,75 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains the implementation of the MediaStreamInterface.
+
+#ifndef WEBRTC_API_MEDIASTREAM_H_
+#define WEBRTC_API_MEDIASTREAM_H_
+
+#include <string>
+#include <vector>
+
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/api/notifier.h"
+
+namespace webrtc {
+
+class MediaStream : public Notifier<MediaStreamInterface> {
+ public:
+  static rtc::scoped_refptr<MediaStream> Create(const std::string& label);
+
+  std::string label() const override { return label_; }
+
+  bool AddTrack(AudioTrackInterface* track) override;
+  bool AddTrack(VideoTrackInterface* track) override;
+  bool RemoveTrack(AudioTrackInterface* track) override;
+  bool RemoveTrack(VideoTrackInterface* track) override;
+  virtual rtc::scoped_refptr<AudioTrackInterface>
+      FindAudioTrack(const std::string& track_id);
+  virtual rtc::scoped_refptr<VideoTrackInterface>
+      FindVideoTrack(const std::string& track_id);
+
+  AudioTrackVector GetAudioTracks() override { return audio_tracks_; }
+  VideoTrackVector GetVideoTracks() override { return video_tracks_; }
+
+ protected:
+  explicit MediaStream(const std::string& label);
+
+ private:
+  template <typename TrackVector, typename Track>
+  bool AddTrack(TrackVector* Tracks, Track* track);
+  template <typename TrackVector>
+  bool RemoveTrack(TrackVector* Tracks, MediaStreamTrackInterface* track);
+
+  std::string label_;
+  AudioTrackVector audio_tracks_;
+  VideoTrackVector video_tracks_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_MEDIASTREAM_H_
diff --git a/webrtc/api/mediastream_unittest.cc b/webrtc/api/mediastream_unittest.cc
new file mode 100644
index 0000000..5d6d15d
--- /dev/null
+++ b/webrtc/api/mediastream_unittest.cc
@@ -0,0 +1,175 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <string>
+
+#include "webrtc/api/audiotrack.h"
+#include "webrtc/api/mediastream.h"
+#include "webrtc/api/videotrack.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/scoped_ptr.h"
+
+static const char kStreamLabel1[] = "local_stream_1";
+static const char kVideoTrackId[] = "dummy_video_cam_1";
+static const char kAudioTrackId[] = "dummy_microphone_1";
+
+using rtc::scoped_refptr;
+using ::testing::Exactly;
+
+namespace webrtc {
+
+// Helper class to test Observer.
+class MockObserver : public ObserverInterface {
+ public:
+  explicit MockObserver(NotifierInterface* notifier) : notifier_(notifier) {
+    notifier_->RegisterObserver(this);
+  }
+
+  ~MockObserver() { Unregister(); }
+
+  void Unregister() {
+    if (notifier_) {
+      notifier_->UnregisterObserver(this);
+      notifier_ = nullptr;
+    }
+  }
+
+  MOCK_METHOD0(OnChanged, void());
+
+ private:
+  NotifierInterface* notifier_;
+};
+
+class MediaStreamTest : public testing::Test {
+ protected:
+  virtual void SetUp() {
+    stream_ = MediaStream::Create(kStreamLabel1);
+    ASSERT_TRUE(stream_.get() != NULL);
+
+    video_track_ = VideoTrack::Create(kVideoTrackId, NULL);
+    ASSERT_TRUE(video_track_.get() != NULL);
+    EXPECT_EQ(MediaStreamTrackInterface::kInitializing, video_track_->state());
+
+    audio_track_ = AudioTrack::Create(kAudioTrackId, NULL);
+
+    ASSERT_TRUE(audio_track_.get() != NULL);
+    EXPECT_EQ(MediaStreamTrackInterface::kInitializing, audio_track_->state());
+
+    EXPECT_TRUE(stream_->AddTrack(video_track_));
+    EXPECT_FALSE(stream_->AddTrack(video_track_));
+    EXPECT_TRUE(stream_->AddTrack(audio_track_));
+    EXPECT_FALSE(stream_->AddTrack(audio_track_));
+  }
+
+  void ChangeTrack(MediaStreamTrackInterface* track) {
+    MockObserver observer(track);
+
+    EXPECT_CALL(observer, OnChanged())
+        .Times(Exactly(1));
+    track->set_enabled(false);
+    EXPECT_FALSE(track->enabled());
+
+    EXPECT_CALL(observer, OnChanged())
+        .Times(Exactly(1));
+    track->set_state(MediaStreamTrackInterface::kLive);
+    EXPECT_EQ(MediaStreamTrackInterface::kLive, track->state());
+  }
+
+  scoped_refptr<MediaStreamInterface> stream_;
+  scoped_refptr<AudioTrackInterface> audio_track_;
+  scoped_refptr<VideoTrackInterface> video_track_;
+};
+
+TEST_F(MediaStreamTest, GetTrackInfo) {
+  ASSERT_EQ(1u, stream_->GetVideoTracks().size());
+  ASSERT_EQ(1u, stream_->GetAudioTracks().size());
+
+  // Verify the video track.
+  scoped_refptr<webrtc::MediaStreamTrackInterface> video_track(
+      stream_->GetVideoTracks()[0]);
+  EXPECT_EQ(0, video_track->id().compare(kVideoTrackId));
+  EXPECT_TRUE(video_track->enabled());
+
+  ASSERT_EQ(1u, stream_->GetVideoTracks().size());
+  EXPECT_TRUE(stream_->GetVideoTracks()[0].get() == video_track.get());
+  EXPECT_TRUE(stream_->FindVideoTrack(video_track->id()).get()
+              == video_track.get());
+  video_track = stream_->GetVideoTracks()[0];
+  EXPECT_EQ(0, video_track->id().compare(kVideoTrackId));
+  EXPECT_TRUE(video_track->enabled());
+
+  // Verify the audio track.
+  scoped_refptr<webrtc::MediaStreamTrackInterface> audio_track(
+      stream_->GetAudioTracks()[0]);
+  EXPECT_EQ(0, audio_track->id().compare(kAudioTrackId));
+  EXPECT_TRUE(audio_track->enabled());
+  ASSERT_EQ(1u, stream_->GetAudioTracks().size());
+  EXPECT_TRUE(stream_->GetAudioTracks()[0].get() == audio_track.get());
+  EXPECT_TRUE(stream_->FindAudioTrack(audio_track->id()).get()
+              == audio_track.get());
+  audio_track = stream_->GetAudioTracks()[0];
+  EXPECT_EQ(0, audio_track->id().compare(kAudioTrackId));
+  EXPECT_TRUE(audio_track->enabled());
+}
+
+TEST_F(MediaStreamTest, RemoveTrack) {
+  MockObserver observer(stream_);
+
+  EXPECT_CALL(observer, OnChanged())
+      .Times(Exactly(2));
+
+  EXPECT_TRUE(stream_->RemoveTrack(audio_track_));
+  EXPECT_FALSE(stream_->RemoveTrack(audio_track_));
+  EXPECT_EQ(0u, stream_->GetAudioTracks().size());
+
+  EXPECT_TRUE(stream_->RemoveTrack(video_track_));
+  EXPECT_FALSE(stream_->RemoveTrack(video_track_));
+
+  EXPECT_EQ(0u, stream_->GetVideoTracks().size());
+
+  EXPECT_FALSE(stream_->RemoveTrack(static_cast<AudioTrackInterface*>(NULL)));
+  EXPECT_FALSE(stream_->RemoveTrack(static_cast<VideoTrackInterface*>(NULL)));
+}
+
+TEST_F(MediaStreamTest, ChangeVideoTrack) {
+  scoped_refptr<webrtc::VideoTrackInterface> video_track(
+      stream_->GetVideoTracks()[0]);
+  ChangeTrack(video_track.get());
+}
+
+TEST_F(MediaStreamTest, ChangeAudioTrack) {
+  scoped_refptr<webrtc::AudioTrackInterface> audio_track(
+      stream_->GetAudioTracks()[0]);
+  ChangeTrack(audio_track.get());
+}
+
+}  // namespace webrtc
diff --git a/webrtc/api/mediastreamhandler.cc b/webrtc/api/mediastreamhandler.cc
new file mode 100644
index 0000000..be493f1
--- /dev/null
+++ b/webrtc/api/mediastreamhandler.cc
@@ -0,0 +1,29 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// TODO(deadbeef): Remove this file once Chrome build files no longer reference
+// it.
diff --git a/webrtc/api/mediastreamhandler.h b/webrtc/api/mediastreamhandler.h
new file mode 100644
index 0000000..be493f1
--- /dev/null
+++ b/webrtc/api/mediastreamhandler.h
@@ -0,0 +1,29 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// TODO(deadbeef): Remove this file once Chrome build files no longer reference
+// it.
diff --git a/webrtc/api/mediastreaminterface.h b/webrtc/api/mediastreaminterface.h
new file mode 100644
index 0000000..28f243a
--- /dev/null
+++ b/webrtc/api/mediastreaminterface.h
@@ -0,0 +1,296 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains interfaces for MediaStream, MediaTrack and MediaSource.
+// These interfaces are used for implementing MediaStream and MediaTrack as
+// defined in http://dev.w3.org/2011/webrtc/editor/webrtc.html#stream-api.
+// These interfaces must only be used with PeerConnection. The
+// PeerConnectionManager interface provides the factory methods to create
+// MediaStreams and MediaTracks.
+
+#ifndef WEBRTC_API_MEDIASTREAMINTERFACE_H_
+#define WEBRTC_API_MEDIASTREAMINTERFACE_H_
+
+#include <string>
+#include <vector>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/media/base/videosinkinterface.h"
+
+namespace cricket {
+
+class AudioRenderer;
+class VideoCapturer;
+class VideoRenderer;
+class VideoFrame;
+
+}  // namespace cricket
+
+namespace webrtc {
+
+// Generic observer interface.
+class ObserverInterface {
+ public:
+  virtual void OnChanged() = 0;
+
+ protected:
+  virtual ~ObserverInterface() {}
+};
+
+class NotifierInterface {
+ public:
+  virtual void RegisterObserver(ObserverInterface* observer) = 0;
+  virtual void UnregisterObserver(ObserverInterface* observer) = 0;
+
+  virtual ~NotifierInterface() {}
+};
+
+// Base class for sources. A MediaStreamTrack has an underlying source that
+// provides media. A source can be shared by multiple tracks.
+class MediaSourceInterface : public rtc::RefCountInterface,
+                             public NotifierInterface {
+ public:
+  enum SourceState {
+    kInitializing,
+    kLive,
+    kEnded,
+    kMuted
+  };
+
+  virtual SourceState state() const = 0;
+
+  virtual bool remote() const = 0;
+
+ protected:
+  virtual ~MediaSourceInterface() {}
+};
+
+// Information about a track.
+class MediaStreamTrackInterface : public rtc::RefCountInterface,
+                                  public NotifierInterface {
+ public:
+  enum TrackState {
+    kInitializing,  // Track is being negotiated.
+    kLive = 1,  // Track is alive.
+    kEnded = 2,  // Track has ended.
+    kFailed = 3,  // Track negotiation failed.
+  };
+
+  static const char kAudioKind[];
+  static const char kVideoKind[];
+
+  virtual std::string kind() const = 0;
+  virtual std::string id() const = 0;
+  virtual bool enabled() const = 0;
+  virtual TrackState state() const = 0;
+  virtual bool set_enabled(bool enable) = 0;
+  // This method should be called by the implementation only.
+  virtual bool set_state(TrackState new_state) = 0;
+
+ protected:
+  virtual ~MediaStreamTrackInterface() {}
+};
+
+// Interface for rendering VideoFrames from a VideoTrack.
+class VideoRendererInterface
+    : public rtc::VideoSinkInterface<cricket::VideoFrame> {
+ public:
+  // |frame| may have pending rotation. For clients which can't apply rotation,
+  // |frame|->GetCopyWithRotationApplied() will return a frame that has the
+  // rotation applied.
+  virtual void RenderFrame(const cricket::VideoFrame* frame) = 0;
+  // Intended to replace RenderFrame.
+  void OnFrame(const cricket::VideoFrame& frame) override {
+    RenderFrame(&frame);
+  }
+
+ protected:
+  // The destructor is protected to prevent deletion via the interface.
+  // This is so that we allow reference counted classes, where the destructor
+  // should never be public, to implement the interface.
+  virtual ~VideoRendererInterface() {}
+};
+
+class VideoSourceInterface;
+
+class VideoTrackInterface : public MediaStreamTrackInterface {
+ public:
+  // Register a renderer that will render all frames received on this track.
+  virtual void AddRenderer(VideoRendererInterface* renderer) = 0;
+  // Deregister a renderer.
+  virtual void RemoveRenderer(VideoRendererInterface* renderer) = 0;
+
+  virtual VideoSourceInterface* GetSource() const = 0;
+
+  // Returns the track's input sink, i.e., frames sent to this sink are
+  // propagated to all renderers registered with the track. The
+  // returned sink must not change between calls. Currently, this
+  // method is used for remote tracks (VideoRtpReceiver); further
+  // refactoring is planned for this path, and it's unclear whether this
+  // method belongs here long term.
+
+  // We do this instead of simply implementing the
+  // VideoSourceInterface directly, because if we did the latter, we'd
+  // need an OnFrame method in VideoTrackProxy, with a thread jump on
+  // each call.
+
+  // TODO(nisse): This has a default implementation so that mock
+  // objects, in particular Chrome's MockWebRtcVideoTrack, don't
+  // need to know about it. Consider removing the implementation (or
+  // this comment) after the refactoring dust settles.
+  virtual rtc::VideoSinkInterface<cricket::VideoFrame>* GetSink() {
+    return nullptr;
+  }
+
+ protected:
+  virtual ~VideoTrackInterface() {}
+};
+
+// Interface for receiving audio data from an AudioTrack.
+class AudioTrackSinkInterface {
+ public:
+  virtual void OnData(const void* audio_data,
+                      int bits_per_sample,
+                      int sample_rate,
+                      size_t number_of_channels,
+                      size_t number_of_frames) = 0;
+
+ protected:
+  virtual ~AudioTrackSinkInterface() {}
+};
+
+// AudioSourceInterface is a reference counted source used for AudioTracks.
+// The same source can be used in multiple AudioTracks.
+class AudioSourceInterface : public MediaSourceInterface {
+ public:
+  class AudioObserver {
+   public:
+    virtual void OnSetVolume(double volume) = 0;
+
+   protected:
+    virtual ~AudioObserver() {}
+  };
+
+  // TODO(xians): Make all the interface methods pure virtual after Chrome
+  // has its implementations.
+  // Sets the volume of the source. |volume| is in the range [0, 10].
+  // TODO(tommi): This method should be on the track and ideally volume should
+  // be applied in the track in a way that does not affect clones of the track.
+  virtual void SetVolume(double volume) {}
+
+  // Registers/unregisters observer to the audio source.
+  virtual void RegisterAudioObserver(AudioObserver* observer) {}
+  virtual void UnregisterAudioObserver(AudioObserver* observer) {}
+
+  // TODO(tommi): Make pure virtual.
+  virtual void AddSink(AudioTrackSinkInterface* sink) {}
+  virtual void RemoveSink(AudioTrackSinkInterface* sink) {}
+};
+
+// Interface of the audio processor used by the audio track to collect
+// statistics.
+class AudioProcessorInterface : public rtc::RefCountInterface {
+ public:
+  struct AudioProcessorStats {
+    AudioProcessorStats() : typing_noise_detected(false),
+                            echo_return_loss(0),
+                            echo_return_loss_enhancement(0),
+                            echo_delay_median_ms(0),
+                            aec_quality_min(0.0),
+                            echo_delay_std_ms(0) {}
+    ~AudioProcessorStats() {}
+
+    bool typing_noise_detected;
+    int echo_return_loss;
+    int echo_return_loss_enhancement;
+    int echo_delay_median_ms;
+    float aec_quality_min;
+    int echo_delay_std_ms;
+  };
+
+  // Get audio processor statistics.
+  virtual void GetStats(AudioProcessorStats* stats) = 0;
+
+ protected:
+  virtual ~AudioProcessorInterface() {}
+};
+
+class AudioTrackInterface : public MediaStreamTrackInterface {
+ public:
+  // TODO(xians): Figure out if the following interface should be const or not.
+  virtual AudioSourceInterface* GetSource() const = 0;
+
+  // Add/Remove a sink that will receive the audio data from the track.
+  virtual void AddSink(AudioTrackSinkInterface* sink) = 0;
+  virtual void RemoveSink(AudioTrackSinkInterface* sink) = 0;
+
+  // Get the signal level from the audio track.
+  // Return true on success, otherwise false.
+  // TODO(xians): Change the interface to int GetSignalLevel() and pure virtual
+  // after Chrome has the correct implementation of the interface.
+  virtual bool GetSignalLevel(int* level) { return false; }
+
+  // Get the audio processor used by the audio track. Return NULL if the track
+  // does not have any processor.
+  // TODO(xians): Make the interface pure virtual.
+  virtual rtc::scoped_refptr<AudioProcessorInterface>
+      GetAudioProcessor() { return NULL; }
+
+ protected:
+  virtual ~AudioTrackInterface() {}
+};
+
+typedef std::vector<rtc::scoped_refptr<AudioTrackInterface> >
+    AudioTrackVector;
+typedef std::vector<rtc::scoped_refptr<VideoTrackInterface> >
+    VideoTrackVector;
+
+class MediaStreamInterface : public rtc::RefCountInterface,
+                             public NotifierInterface {
+ public:
+  virtual std::string label() const = 0;
+
+  virtual AudioTrackVector GetAudioTracks() = 0;
+  virtual VideoTrackVector GetVideoTracks() = 0;
+  virtual rtc::scoped_refptr<AudioTrackInterface>
+      FindAudioTrack(const std::string& track_id) = 0;
+  virtual rtc::scoped_refptr<VideoTrackInterface>
+      FindVideoTrack(const std::string& track_id) = 0;
+
+  virtual bool AddTrack(AudioTrackInterface* track) = 0;
+  virtual bool AddTrack(VideoTrackInterface* track) = 0;
+  virtual bool RemoveTrack(AudioTrackInterface* track) = 0;
+  virtual bool RemoveTrack(VideoTrackInterface* track) = 0;
+
+ protected:
+  virtual ~MediaStreamInterface() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_MEDIASTREAMINTERFACE_H_
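
A sink registered on an AudioTrackInterface receives raw PCM through
OnData(). As a minimal sketch (assuming 16-bit samples; the class name is
hypothetical), a sink that records the peak amplitude could look like:

    #include <algorithm>
    #include <cstdint>
    #include <cstdlib>

    #include "webrtc/api/mediastreaminterface.h"

    // Hypothetical sink; only 16-bit PCM is handled in this sketch.
    class PeakLevelSink : public webrtc::AudioTrackSinkInterface {
     public:
      void OnData(const void* audio_data,
                  int bits_per_sample,
                  int sample_rate,
                  size_t number_of_channels,
                  size_t number_of_frames) override {
        if (bits_per_sample != 16)
          return;
        const int16_t* samples = static_cast<const int16_t*>(audio_data);
        size_t count = number_of_channels * number_of_frames;
        for (size_t i = 0; i < count; ++i)
          peak_ = std::max<int>(peak_, std::abs(samples[i]));
      }
      int peak() const { return peak_; }

     private:
      int peak_ = 0;
    };

The sink is attached with audio_track->AddSink(&sink) and must be detached
with RemoveSink() before it is destroyed.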
diff --git a/webrtc/api/mediastreamobserver.cc b/webrtc/api/mediastreamobserver.cc
new file mode 100644
index 0000000..3f47f13
--- /dev/null
+++ b/webrtc/api/mediastreamobserver.cc
@@ -0,0 +1,101 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/mediastreamobserver.h"
+
+#include <algorithm>
+
+namespace webrtc {
+
+MediaStreamObserver::MediaStreamObserver(MediaStreamInterface* stream)
+    : stream_(stream),
+      cached_audio_tracks_(stream->GetAudioTracks()),
+      cached_video_tracks_(stream->GetVideoTracks()) {
+  stream_->RegisterObserver(this);
+}
+
+MediaStreamObserver::~MediaStreamObserver() {
+  stream_->UnregisterObserver(this);
+}
+
+void MediaStreamObserver::OnChanged() {
+  AudioTrackVector new_audio_tracks = stream_->GetAudioTracks();
+  VideoTrackVector new_video_tracks = stream_->GetVideoTracks();
+
+  // Find removed audio tracks.
+  for (const auto& cached_track : cached_audio_tracks_) {
+    auto it = std::find_if(
+        new_audio_tracks.begin(), new_audio_tracks.end(),
+        [cached_track](const AudioTrackVector::value_type& new_track) {
+          return new_track->id().compare(cached_track->id()) == 0;
+        });
+    if (it == new_audio_tracks.end()) {
+      SignalAudioTrackRemoved(cached_track.get(), stream_);
+    }
+  }
+
+  // Find added audio tracks.
+  for (const auto& new_track : new_audio_tracks) {
+    auto it = std::find_if(
+        cached_audio_tracks_.begin(), cached_audio_tracks_.end(),
+        [new_track](const AudioTrackVector::value_type& cached_track) {
+          return new_track->id().compare(cached_track->id()) == 0;
+        });
+    if (it == cached_audio_tracks_.end()) {
+      SignalAudioTrackAdded(new_track.get(), stream_);
+    }
+  }
+
+  // Find removed video tracks.
+  for (const auto& cached_track : cached_video_tracks_) {
+    auto it = std::find_if(
+        new_video_tracks.begin(), new_video_tracks.end(),
+        [cached_track](const VideoTrackVector::value_type& new_track) {
+          return new_track->id().compare(cached_track->id()) == 0;
+        });
+    if (it == new_video_tracks.end()) {
+      SignalVideoTrackRemoved(cached_track.get(), stream_);
+    }
+  }
+
+  // Find added video tracks.
+  for (const auto& new_track : new_video_tracks) {
+    auto it = std::find_if(
+        cached_video_tracks_.begin(), cached_video_tracks_.end(),
+        [new_track](const VideoTrackVector::value_type& cached_track) {
+          return new_track->id().compare(cached_track->id()) == 0;
+        });
+    if (it == cached_video_tracks_.end()) {
+      SignalVideoTrackAdded(new_track.get(), stream_);
+    }
+  }
+
+  cached_audio_tracks_ = new_audio_tracks;
+  cached_video_tracks_ = new_video_tracks;
+}
+
+}  // namespace webrtc
diff --git a/webrtc/api/mediastreamobserver.h b/webrtc/api/mediastreamobserver.h
new file mode 100644
index 0000000..1546942
--- /dev/null
+++ b/webrtc/api/mediastreamobserver.h
@@ -0,0 +1,65 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_MEDIASTREAMOBSERVER_H_
+#define WEBRTC_API_MEDIASTREAMOBSERVER_H_
+
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/sigslot.h"
+
+namespace webrtc {
+
+// Helper class that listens for changes to a stream and emits the
+// corresponding signals.
+class MediaStreamObserver : public ObserverInterface {
+ public:
+  explicit MediaStreamObserver(MediaStreamInterface* stream);
+  ~MediaStreamObserver();
+
+  const MediaStreamInterface* stream() const { return stream_; }
+
+  void OnChanged() override;
+
+  sigslot::signal2<AudioTrackInterface*, MediaStreamInterface*>
+      SignalAudioTrackAdded;
+  sigslot::signal2<AudioTrackInterface*, MediaStreamInterface*>
+      SignalAudioTrackRemoved;
+  sigslot::signal2<VideoTrackInterface*, MediaStreamInterface*>
+      SignalVideoTrackAdded;
+  sigslot::signal2<VideoTrackInterface*, MediaStreamInterface*>
+      SignalVideoTrackRemoved;
+
+ private:
+  rtc::scoped_refptr<MediaStreamInterface> stream_;
+  AudioTrackVector cached_audio_tracks_;
+  VideoTrackVector cached_video_tracks_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_MEDIASTREAMOBSERVER_H_
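
Clients consume these signals through sigslot, so a listener derives from
sigslot::has_slots<>. A sketch (class and method names are illustrative):

    #include "webrtc/api/mediastreamobserver.h"
    #include "webrtc/base/sigslot.h"

    class StreamWatcher : public sigslot::has_slots<> {
     public:
      explicit StreamWatcher(webrtc::MediaStreamInterface* stream)
          : observer_(stream) {
        observer_.SignalAudioTrackAdded.connect(
            this, &StreamWatcher::OnAudioTrackAdded);
        observer_.SignalAudioTrackRemoved.connect(
            this, &StreamWatcher::OnAudioTrackRemoved);
      }

     private:
      void OnAudioTrackAdded(webrtc::AudioTrackInterface* track,
                             webrtc::MediaStreamInterface* stream) {
        // React to the new track, e.g. wire up a receiver for it.
      }
      void OnAudioTrackRemoved(webrtc::AudioTrackInterface* track,
                               webrtc::MediaStreamInterface* stream) {}

      webrtc::MediaStreamObserver observer_;
    };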
diff --git a/webrtc/api/mediastreamprovider.h b/webrtc/api/mediastreamprovider.h
new file mode 100644
index 0000000..4ee27aa
--- /dev/null
+++ b/webrtc/api/mediastreamprovider.h
@@ -0,0 +1,108 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_MEDIASTREAMPROVIDER_H_
+#define WEBRTC_API_MEDIASTREAMPROVIDER_H_
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/media/base/videosinkinterface.h"
+
+namespace cricket {
+
+class AudioRenderer;
+class VideoCapturer;
+class VideoFrame;
+class VideoRenderer;
+struct AudioOptions;
+struct VideoOptions;
+
+}  // namespace cricket
+
+namespace webrtc {
+
+class AudioSinkInterface;
+
+// TODO(deadbeef): Change the key from an ssrc to a "sender_id" or
+// "receiver_id" string, which will be the MSID in the short term and MID in
+// the long term.
+
+// TODO(deadbeef): These interfaces are effectively just a way for the
+// RtpSenders/Receivers to get to the BaseChannels. These interfaces should be
+// refactored away eventually, as the classes converge.
+
+// This interface is called by AudioRtpSender/Receivers to change the settings
+// of an audio track connected to a certain PeerConnection.
+class AudioProviderInterface {
+ public:
+  // Enable/disable the audio playout of a remote audio track with |ssrc|.
+  virtual void SetAudioPlayout(uint32_t ssrc, bool enable) = 0;
+  // Enable/disable sending audio on the local audio track with |ssrc|.
+  // When |enable| is true, |options| should be applied to the audio track.
+  virtual void SetAudioSend(uint32_t ssrc,
+                            bool enable,
+                            const cricket::AudioOptions& options,
+                            cricket::AudioRenderer* renderer) = 0;
+
+  // Sets the audio playout volume of a remote audio track with |ssrc|.
+  // |volume| is in the range of [0, 10].
+  virtual void SetAudioPlayoutVolume(uint32_t ssrc, double volume) = 0;
+
+  // Allows for setting a direct audio sink for an incoming audio source.
+  // Only one audio sink is supported per ssrc, and ownership of the sink is
+  // passed to the provider.
+  virtual void SetRawAudioSink(
+      uint32_t ssrc,
+      rtc::scoped_ptr<webrtc::AudioSinkInterface> sink) = 0;
+
+ protected:
+  virtual ~AudioProviderInterface() {}
+};
+
+// This interface is called by VideoRtpSender/Receivers to change the settings
+// of a video track connected to a certain PeerConnection.
+class VideoProviderInterface {
+ public:
+  virtual bool SetCaptureDevice(uint32_t ssrc,
+                                cricket::VideoCapturer* camera) = 0;
+  // Enable/disable the video playout of a remote video track with |ssrc|.
+  virtual void SetVideoPlayout(
+      uint32_t ssrc,
+      bool enable,
+      rtc::VideoSinkInterface<cricket::VideoFrame>* sink) = 0;
+  // Enable sending video on the local video track with |ssrc|.
+  virtual void SetVideoSend(uint32_t ssrc,
+                            bool enable,
+                            const cricket::VideoOptions* options) = 0;
+
+ protected:
+  virtual ~VideoProviderInterface() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_MEDIASTREAMPROVIDER_H_
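
Since these interfaces exist only to let RtpSenders/Receivers reach the
BaseChannels, tests can stub them out. A hypothetical no-op
AudioProviderInterface (the class name and bookkeeping are illustrative):

    #include <stdint.h>

    #include "webrtc/api/mediastreamprovider.h"
    #include "webrtc/base/scoped_ptr.h"

    class FakeAudioProvider : public webrtc::AudioProviderInterface {
     public:
      void SetAudioPlayout(uint32_t ssrc, bool enable) override {
        if (enable)
          last_playout_ssrc_ = ssrc;  // Record the last enabled ssrc.
      }
      void SetAudioSend(uint32_t ssrc,
                        bool enable,
                        const cricket::AudioOptions& options,
                        cricket::AudioRenderer* renderer) override {}
      void SetAudioPlayoutVolume(uint32_t ssrc, double volume) override {}
      void SetRawAudioSink(
          uint32_t ssrc,
          rtc::scoped_ptr<webrtc::AudioSinkInterface> sink) override {}

      uint32_t last_playout_ssrc_ = 0;
    };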
diff --git a/webrtc/api/mediastreamproxy.h b/webrtc/api/mediastreamproxy.h
new file mode 100644
index 0000000..635f458
--- /dev/null
+++ b/webrtc/api/mediastreamproxy.h
@@ -0,0 +1,54 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_MEDIASTREAMPROXY_H_
+#define WEBRTC_API_MEDIASTREAMPROXY_H_
+
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/api/proxy.h"
+
+namespace webrtc {
+
+BEGIN_PROXY_MAP(MediaStream)
+  PROXY_CONSTMETHOD0(std::string, label)
+  PROXY_METHOD0(AudioTrackVector, GetAudioTracks)
+  PROXY_METHOD0(VideoTrackVector, GetVideoTracks)
+  PROXY_METHOD1(rtc::scoped_refptr<AudioTrackInterface>,
+                FindAudioTrack, const std::string&)
+  PROXY_METHOD1(rtc::scoped_refptr<VideoTrackInterface>,
+                FindVideoTrack, const std::string&)
+  PROXY_METHOD1(bool, AddTrack, AudioTrackInterface*)
+  PROXY_METHOD1(bool, AddTrack, VideoTrackInterface*)
+  PROXY_METHOD1(bool, RemoveTrack, AudioTrackInterface*)
+  PROXY_METHOD1(bool, RemoveTrack, VideoTrackInterface*)
+  PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
+  PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
+END_PROXY()
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_MEDIASTREAMPROXY_H_
diff --git a/webrtc/api/mediastreamtrack.h b/webrtc/api/mediastreamtrack.h
new file mode 100644
index 0000000..2e9f774
--- /dev/null
+++ b/webrtc/api/mediastreamtrack.h
@@ -0,0 +1,81 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_MEDIASTREAMTRACK_H_
+#define WEBRTC_API_MEDIASTREAMTRACK_H_
+
+#include <string>
+
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/api/notifier.h"
+
+namespace webrtc {
+
+// MediaStreamTrack implements the functionality common to
+// AudioTrackInterface and VideoTrackInterface.
+template <typename T>
+class MediaStreamTrack : public Notifier<T> {
+ public:
+  typedef typename T::TrackState TypedTrackState;
+
+  virtual std::string id() const { return id_; }
+  virtual MediaStreamTrackInterface::TrackState state() const {
+    return state_;
+  }
+  virtual bool enabled() const { return enabled_; }
+  virtual bool set_enabled(bool enable) {
+    bool fire_on_change = (enable != enabled_);
+    enabled_ = enable;
+    if (fire_on_change) {
+      Notifier<T>::FireOnChanged();
+    }
+    return fire_on_change;
+  }
+  virtual bool set_state(MediaStreamTrackInterface::TrackState new_state) {
+    bool fire_on_change = (state_ != new_state);
+    state_ = new_state;
+    if (fire_on_change)
+      Notifier<T>::FireOnChanged();
+    return true;
+  }
+
+ protected:
+  explicit MediaStreamTrack(const std::string& id)
+      : enabled_(true),
+        id_(id),
+        state_(MediaStreamTrackInterface::kInitializing) {
+  }
+
+ private:
+  bool enabled_;
+  std::string id_;
+  MediaStreamTrackInterface::TrackState state_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_MEDIASTREAMTRACK_H_
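
A concrete track derives from this helper and only has to fill in the
media-specific pure virtuals; id(), state() and enabled() come for free. A
hypothetical minimal audio track (the class name is illustrative):

    #include <string>

    #include "webrtc/api/mediastreamtrack.h"

    class FakeAudioTrack
        : public webrtc::MediaStreamTrack<webrtc::AudioTrackInterface> {
     public:
      explicit FakeAudioTrack(const std::string& id)
          : webrtc::MediaStreamTrack<webrtc::AudioTrackInterface>(id) {}

      std::string kind() const override {
        return webrtc::MediaStreamTrackInterface::kAudioKind;
      }
      webrtc::AudioSourceInterface* GetSource() const override {
        return nullptr;  // No real source in this sketch.
      }
      void AddSink(webrtc::AudioTrackSinkInterface* sink) override {}
      void RemoveSink(webrtc::AudioTrackSinkInterface* sink) override {}
    };

Instantiation goes through rtc::RefCountedObject, which supplies the
AddRef()/Release() implementations the interface inherits from
rtc::RefCountInterface, e.g.
new rtc::RefCountedObject<FakeAudioTrack>("fake_audio").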
diff --git a/webrtc/api/mediastreamtrackproxy.h b/webrtc/api/mediastreamtrackproxy.h
new file mode 100644
index 0000000..eabb0cf
--- /dev/null
+++ b/webrtc/api/mediastreamtrackproxy.h
@@ -0,0 +1,77 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file includes proxy classes for tracks. The purpose is
+// to make sure tracks are only accessed from the signaling thread.
+
+#ifndef WEBRTC_API_MEDIASTREAMTRACKPROXY_H_
+#define WEBRTC_API_MEDIASTREAMTRACKPROXY_H_
+
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/api/proxy.h"
+
+namespace webrtc {
+
+BEGIN_PROXY_MAP(AudioTrack)
+  PROXY_CONSTMETHOD0(std::string, kind)
+  PROXY_CONSTMETHOD0(std::string, id)
+  PROXY_CONSTMETHOD0(TrackState, state)
+  PROXY_CONSTMETHOD0(bool, enabled)
+  PROXY_CONSTMETHOD0(AudioSourceInterface*, GetSource)
+  PROXY_METHOD1(void, AddSink, AudioTrackSinkInterface*)
+  PROXY_METHOD1(void, RemoveSink, AudioTrackSinkInterface*)
+  PROXY_METHOD1(bool, GetSignalLevel, int*)
+  PROXY_METHOD0(rtc::scoped_refptr<AudioProcessorInterface>,
+                GetAudioProcessor)
+
+  PROXY_METHOD1(bool, set_enabled, bool)
+  PROXY_METHOD1(bool, set_state, TrackState)
+
+  PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
+  PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
+END_PROXY()
+
+BEGIN_PROXY_MAP(VideoTrack)
+  PROXY_CONSTMETHOD0(std::string, kind)
+  PROXY_CONSTMETHOD0(std::string, id)
+  PROXY_CONSTMETHOD0(TrackState, state)
+  PROXY_CONSTMETHOD0(bool, enabled)
+  PROXY_METHOD1(bool, set_enabled, bool)
+  PROXY_METHOD1(bool, set_state, TrackState)
+
+  PROXY_METHOD1(void, AddRenderer, VideoRendererInterface*)
+  PROXY_METHOD1(void, RemoveRenderer, VideoRendererInterface*)
+  PROXY_CONSTMETHOD0(VideoSourceInterface*, GetSource)
+  PROXY_METHOD0(rtc::VideoSinkInterface<cricket::VideoFrame>*, GetSink)
+
+  PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
+  PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
+END_PROXY()
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_MEDIASTREAMTRACKPROXY_H_
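
The BEGIN_PROXY_MAP/PROXY_METHOD macros come from webrtc/api/proxy.h, which
is outside this section of the CL. Assuming they generate a
Create(signaling_thread, impl) factory (an assumption based on how the
proxies are used elsewhere in the tree), wrapping a track looks roughly
like:

    #include "webrtc/api/mediastreamtrackproxy.h"
    #include "webrtc/base/thread.h"

    rtc::scoped_refptr<webrtc::AudioTrackInterface> WrapForSignalingThread(
        rtc::Thread* signaling_thread,
        webrtc::AudioTrackInterface* track) {
      // Every call through the returned proxy is marshalled to
      // |signaling_thread| before it reaches |track|.
      return webrtc::AudioTrackProxy::Create(signaling_thread, track);
    }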
diff --git a/webrtc/api/notifier.h b/webrtc/api/notifier.h
new file mode 100644
index 0000000..a6dbba7
--- /dev/null
+++ b/webrtc/api/notifier.h
@@ -0,0 +1,77 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_NOTIFIER_H_
+#define WEBRTC_API_NOTIFIER_H_
+
+#include <list>
+
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/base/common.h"
+
+namespace webrtc {
+
+// Implements a template version of a notifier.
+template <class T>
+class Notifier : public T {
+ public:
+  Notifier() {
+  }
+
+  virtual void RegisterObserver(ObserverInterface* observer) {
+    ASSERT(observer != NULL);
+    observers_.push_back(observer);
+  }
+
+  virtual void UnregisterObserver(ObserverInterface* observer) {
+    for (std::list<ObserverInterface*>::iterator it = observers_.begin();
+         it != observers_.end(); it++) {
+      if (*it == observer) {
+        observers_.erase(it);
+        break;
+      }
+    }
+  }
+
+  void FireOnChanged() {
+    // Copy the list of observers to avoid a crash if an observer
+    // unregisters as a result of the OnChanged() call. If the same list were
+    // used, UnregisterObserver() would modify it and invalidate the iterator.
+    std::list<ObserverInterface*> observers = observers_;
+    for (std::list<ObserverInterface*>::iterator it = observers.begin();
+         it != observers.end(); ++it) {
+      (*it)->OnChanged();
+    }
+  }
+
+ protected:
+  std::list<ObserverInterface*> observers_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_NOTIFIER_H_
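
The copy taken in FireOnChanged() is what makes it safe for an observer to
unregister itself from inside OnChanged(). A small sketch of an observer
that relies on this (the class name is hypothetical):

    #include "webrtc/api/notifier.h"

    class OneShotObserver : public webrtc::ObserverInterface {
     public:
      explicit OneShotObserver(webrtc::NotifierInterface* notifier)
          : notifier_(notifier) {
        notifier_->RegisterObserver(this);
      }
      void OnChanged() override {
        // Safe: FireOnChanged() iterates over a copy of the observer list.
        notifier_->UnregisterObserver(this);
      }

     private:
      webrtc::NotifierInterface* notifier_;
    };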
diff --git a/webrtc/api/objc/RTCAudioTrack+Private.h b/webrtc/api/objc/RTCAudioTrack+Private.h
index 36f72c7..ce3298e 100644
--- a/webrtc/api/objc/RTCAudioTrack+Private.h
+++ b/webrtc/api/objc/RTCAudioTrack+Private.h
@@ -10,7 +10,7 @@
 
 #import "RTCAudioTrack.h"
 
-#include "talk/app/webrtc/mediastreaminterface.h"
+#include "webrtc/api/mediastreaminterface.h"
 
 NS_ASSUME_NONNULL_BEGIN
 
diff --git a/webrtc/api/objc/RTCConfiguration+Private.h b/webrtc/api/objc/RTCConfiguration+Private.h
index e14f92b..001dac6 100644
--- a/webrtc/api/objc/RTCConfiguration+Private.h
+++ b/webrtc/api/objc/RTCConfiguration+Private.h
@@ -10,7 +10,7 @@
 
 #import "RTCConfiguration.h"
 
-#include "talk/app/webrtc/peerconnectioninterface.h"
+#include "webrtc/api/peerconnectioninterface.h"
 
 NS_ASSUME_NONNULL_BEGIN
 
diff --git a/webrtc/api/objc/RTCDataChannel+Private.h b/webrtc/api/objc/RTCDataChannel+Private.h
index cc44923..179192c 100644
--- a/webrtc/api/objc/RTCDataChannel+Private.h
+++ b/webrtc/api/objc/RTCDataChannel+Private.h
@@ -10,7 +10,7 @@
 
 #import "RTCDataChannel.h"
 
-#include "talk/app/webrtc/datachannelinterface.h"
+#include "webrtc/api/datachannelinterface.h"
 #include "webrtc/base/scoped_ref_ptr.h"
 
 NS_ASSUME_NONNULL_BEGIN
diff --git a/webrtc/api/objc/RTCDataChannelConfiguration+Private.h b/webrtc/api/objc/RTCDataChannelConfiguration+Private.h
index e99ba7c..13478e7 100644
--- a/webrtc/api/objc/RTCDataChannelConfiguration+Private.h
+++ b/webrtc/api/objc/RTCDataChannelConfiguration+Private.h
@@ -10,7 +10,7 @@
 
 #import "RTCDataChannelConfiguration.h"
 
-#include "talk/app/webrtc/datachannelinterface.h"
+#include "webrtc/api/datachannelinterface.h"
 
 NS_ASSUME_NONNULL_BEGIN
 
diff --git a/webrtc/api/objc/RTCIceCandidate+Private.h b/webrtc/api/objc/RTCIceCandidate+Private.h
index ca95a43..b65f113 100644
--- a/webrtc/api/objc/RTCIceCandidate+Private.h
+++ b/webrtc/api/objc/RTCIceCandidate+Private.h
@@ -10,7 +10,7 @@
 
 #import "RTCIceCandidate.h"
 
-#include "talk/app/webrtc/jsep.h"
+#include "webrtc/api/jsep.h"
 #include "webrtc/base/scoped_ptr.h"
 
 NS_ASSUME_NONNULL_BEGIN
diff --git a/webrtc/api/objc/RTCIceServer+Private.h b/webrtc/api/objc/RTCIceServer+Private.h
index 3890567..556936d 100644
--- a/webrtc/api/objc/RTCIceServer+Private.h
+++ b/webrtc/api/objc/RTCIceServer+Private.h
@@ -10,7 +10,7 @@
 
 #import "RTCIceServer.h"
 
-#include "talk/app/webrtc/peerconnectioninterface.h"
+#include "webrtc/api/peerconnectioninterface.h"
 
 NS_ASSUME_NONNULL_BEGIN
 
diff --git a/webrtc/api/objc/RTCMediaConstraints+Private.h b/webrtc/api/objc/RTCMediaConstraints+Private.h
index 2c4b722..fa582ec 100644
--- a/webrtc/api/objc/RTCMediaConstraints+Private.h
+++ b/webrtc/api/objc/RTCMediaConstraints+Private.h
@@ -10,7 +10,7 @@
 
 #import "RTCMediaConstraints.h"
 
-#include "talk/app/webrtc/mediaconstraintsinterface.h"
+#include "webrtc/api/mediaconstraintsinterface.h"
 #include "webrtc/base/scoped_ptr.h"
 
 namespace webrtc {
diff --git a/webrtc/api/objc/RTCMediaStream+Private.h b/webrtc/api/objc/RTCMediaStream+Private.h
index 2c2662b..4c83288 100644
--- a/webrtc/api/objc/RTCMediaStream+Private.h
+++ b/webrtc/api/objc/RTCMediaStream+Private.h
@@ -10,7 +10,7 @@
 
 #import "RTCMediaStream.h"
 
-#include "talk/app/webrtc/mediastreaminterface.h"
+#include "webrtc/api/mediastreaminterface.h"
 
 NS_ASSUME_NONNULL_BEGIN
 
diff --git a/webrtc/api/objc/RTCMediaStreamTrack+Private.h b/webrtc/api/objc/RTCMediaStreamTrack+Private.h
index fcdcdad..155e312 100644
--- a/webrtc/api/objc/RTCMediaStreamTrack+Private.h
+++ b/webrtc/api/objc/RTCMediaStreamTrack+Private.h
@@ -10,7 +10,7 @@
 
 #import "RTCMediaStreamTrack.h"
 
-#include "talk/app/webrtc/mediastreaminterface.h"
+#include "webrtc/api/mediastreaminterface.h"
 #include "webrtc/base/scoped_ptr.h"
 
 typedef NS_ENUM(NSInteger, RTCMediaStreamTrackType) {
diff --git a/webrtc/api/objc/RTCPeerConnectionFactory+Private.h b/webrtc/api/objc/RTCPeerConnectionFactory+Private.h
index a5f2350..55a473b 100644
--- a/webrtc/api/objc/RTCPeerConnectionFactory+Private.h
+++ b/webrtc/api/objc/RTCPeerConnectionFactory+Private.h
@@ -10,7 +10,7 @@
 
 #import "RTCPeerConnectionFactory.h"
 
-#include "talk/app/webrtc/peerconnectionfactory.h"
+#include "webrtc/api/peerconnectionfactory.h"
 #include "webrtc/base/scoped_ref_ptr.h"
 
 NS_ASSUME_NONNULL_BEGIN
diff --git a/webrtc/api/objc/RTCSessionDescription+Private.h b/webrtc/api/objc/RTCSessionDescription+Private.h
index aa0314d..b5c0fff 100644
--- a/webrtc/api/objc/RTCSessionDescription+Private.h
+++ b/webrtc/api/objc/RTCSessionDescription+Private.h
@@ -10,7 +10,7 @@
 
 #import "RTCSessionDescription.h"
 
-#include "talk/app/webrtc/jsep.h"
+#include "webrtc/api/jsep.h"
 
 NS_ASSUME_NONNULL_BEGIN
 
diff --git a/webrtc/api/objc/RTCStatsReport+Private.h b/webrtc/api/objc/RTCStatsReport+Private.h
index 5b7dc32..5ce5801 100644
--- a/webrtc/api/objc/RTCStatsReport+Private.h
+++ b/webrtc/api/objc/RTCStatsReport+Private.h
@@ -10,7 +10,7 @@
 
 #import "RTCStatsReport.h"
 
-#include "talk/app/webrtc/statstypes.h"
+#include "webrtc/api/statstypes.h"
 
 NS_ASSUME_NONNULL_BEGIN
 
diff --git a/webrtc/api/objc/RTCVideoRendererAdapter+Private.h b/webrtc/api/objc/RTCVideoRendererAdapter+Private.h
index 807eea4..c181b9b 100644
--- a/webrtc/api/objc/RTCVideoRendererAdapter+Private.h
+++ b/webrtc/api/objc/RTCVideoRendererAdapter+Private.h
@@ -10,7 +10,7 @@
 
 #import "RTCVideoRendererAdapter.h"
 
-#include "talk/app/webrtc/mediastreaminterface.h"
+#include "webrtc/api/mediastreaminterface.h"
 
 #import "RTCVideoRenderer.h"
 
diff --git a/webrtc/api/objc/RTCVideoSource+Private.h b/webrtc/api/objc/RTCVideoSource+Private.h
index 2300848..c363d33 100644
--- a/webrtc/api/objc/RTCVideoSource+Private.h
+++ b/webrtc/api/objc/RTCVideoSource+Private.h
@@ -10,7 +10,7 @@
 
 #import "RTCVideoSource.h"
 
-#include "talk/app/webrtc/videosourceinterface.h"
+#include "webrtc/api/videosourceinterface.h"
 
 NS_ASSUME_NONNULL_BEGIN
 
diff --git a/webrtc/api/objc/RTCVideoTrack+Private.h b/webrtc/api/objc/RTCVideoTrack+Private.h
index 4f55481..cd7de48 100644
--- a/webrtc/api/objc/RTCVideoTrack+Private.h
+++ b/webrtc/api/objc/RTCVideoTrack+Private.h
@@ -10,7 +10,7 @@
 
 #import "RTCVideoTrack.h"
 
-#include "talk/app/webrtc/mediastreaminterface.h"
+#include "webrtc/api/mediastreaminterface.h"
 
 NS_ASSUME_NONNULL_BEGIN
 
diff --git a/webrtc/api/objctests/RTCIceCandidateTest.mm b/webrtc/api/objctests/RTCIceCandidateTest.mm
index 391db44..2163ce2 100644
--- a/webrtc/api/objctests/RTCIceCandidateTest.mm
+++ b/webrtc/api/objctests/RTCIceCandidateTest.mm
@@ -1,11 +1,28 @@
 /*
- *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ * libjingle
+ * Copyright 2015 Google Inc.
  *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */
 
 #import <Foundation/Foundation.h>
diff --git a/webrtc/api/objctests/RTCIceServerTest.mm b/webrtc/api/objctests/RTCIceServerTest.mm
index 2e6fb25..1ddb13c 100644
--- a/webrtc/api/objctests/RTCIceServerTest.mm
+++ b/webrtc/api/objctests/RTCIceServerTest.mm
@@ -1,11 +1,28 @@
 /*
- *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ * libjingle
+ * Copyright 2015 Google Inc.
  *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */
 
 #import <Foundation/Foundation.h>
diff --git a/webrtc/api/objctests/RTCMediaConstraintsTest.mm b/webrtc/api/objctests/RTCMediaConstraintsTest.mm
index 44ffe3d..c1e1886 100644
--- a/webrtc/api/objctests/RTCMediaConstraintsTest.mm
+++ b/webrtc/api/objctests/RTCMediaConstraintsTest.mm
@@ -1,11 +1,28 @@
 /*
- *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ * libjingle
+ * Copyright 2015 Google Inc.
  *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */
 
 #import <Foundation/Foundation.h>
diff --git a/webrtc/api/objctests/RTCSessionDescriptionTest.mm b/webrtc/api/objctests/RTCSessionDescriptionTest.mm
index 2404dedd..6eaa36f 100644
--- a/webrtc/api/objctests/RTCSessionDescriptionTest.mm
+++ b/webrtc/api/objctests/RTCSessionDescriptionTest.mm
@@ -1,11 +1,28 @@
 /*
- *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ * libjingle
+ * Copyright 2015 Google Inc.
  *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */
 
 #import <Foundation/Foundation.h>
diff --git a/webrtc/api/peerconnection.cc b/webrtc/api/peerconnection.cc
new file mode 100644
index 0000000..cdc5861
--- /dev/null
+++ b/webrtc/api/peerconnection.cc
@@ -0,0 +1,2091 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/peerconnection.h"
+
+#include <algorithm>
+#include <cctype>  // for isdigit
+#include <utility>
+#include <vector>
+
+#include "talk/session/media/channelmanager.h"
+#include "webrtc/api/audiotrack.h"
+#include "webrtc/api/dtmfsender.h"
+#include "webrtc/api/jsepicecandidate.h"
+#include "webrtc/api/jsepsessiondescription.h"
+#include "webrtc/api/mediaconstraintsinterface.h"
+#include "webrtc/api/mediastream.h"
+#include "webrtc/api/mediastreamobserver.h"
+#include "webrtc/api/mediastreamproxy.h"
+#include "webrtc/api/mediastreamtrackproxy.h"
+#include "webrtc/api/remoteaudiosource.h"
+#include "webrtc/api/remotevideocapturer.h"
+#include "webrtc/api/rtpreceiver.h"
+#include "webrtc/api/rtpsender.h"
+#include "webrtc/api/streamcollection.h"
+#include "webrtc/api/videosource.h"
+#include "webrtc/api/videotrack.h"
+#include "webrtc/base/arraysize.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/stringencode.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/base/trace_event.h"
+#include "webrtc/media/sctp/sctpdataengine.h"
+#include "webrtc/p2p/client/basicportallocator.h"
+#include "webrtc/system_wrappers/include/field_trial.h"
+
+namespace {
+
+using webrtc::DataChannel;
+using webrtc::MediaConstraintsInterface;
+using webrtc::MediaStreamInterface;
+using webrtc::PeerConnectionInterface;
+using webrtc::RtpSenderInterface;
+using webrtc::StreamCollection;
+
+static const char kDefaultStreamLabel[] = "default";
+static const char kDefaultAudioTrackLabel[] = "defaulta0";
+static const char kDefaultVideoTrackLabel[] = "defaultv0";
+
+// The number of tokens in a TURN host URI that includes a username,
+// e.g. user@turn.example.org
+static const size_t kTurnHostTokensNum = 2;
+// The number of tokens present when a TURN URI includes a transport param.
+static const size_t kTurnTransportTokensNum = 2;
+// The default STUN port.
+static const int kDefaultStunPort = 3478;
+static const int kDefaultStunTlsPort = 5349;
+static const char kTransport[] = "transport";
+
+// NOTE: Must be in the same order as the ServiceType enum.
+static const char* kValidIceServiceTypes[] = {"stun", "stuns", "turn", "turns"};
+
+// NOTE: The loop below assumes that the first value of this enum is 0 and
+// the remaining values are consecutive.
+enum ServiceType {
+  STUN = 0,  // Indicates a STUN server.
+  STUNS,     // Indicates a STUN server used with a TLS session.
+  TURN,      // Indicates a TURN server.
+  TURNS,     // Indicates a TURN server used with a TLS session.
+  INVALID,   // Unknown.
+};
+static_assert(INVALID == arraysize(kValidIceServiceTypes),
+              "kValidIceServiceTypes must have as many strings as ServiceType "
+              "has values.");
+
+enum {
+  MSG_SET_SESSIONDESCRIPTION_SUCCESS = 0,
+  MSG_SET_SESSIONDESCRIPTION_FAILED,
+  MSG_CREATE_SESSIONDESCRIPTION_FAILED,
+  MSG_GETSTATS,
+  MSG_FREE_DATACHANNELS,
+};
+
+struct SetSessionDescriptionMsg : public rtc::MessageData {
+  explicit SetSessionDescriptionMsg(
+      webrtc::SetSessionDescriptionObserver* observer)
+      : observer(observer) {
+  }
+
+  rtc::scoped_refptr<webrtc::SetSessionDescriptionObserver> observer;
+  std::string error;
+};
+
+struct CreateSessionDescriptionMsg : public rtc::MessageData {
+  explicit CreateSessionDescriptionMsg(
+      webrtc::CreateSessionDescriptionObserver* observer)
+      : observer(observer) {}
+
+  rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserver> observer;
+  std::string error;
+};
+
+struct GetStatsMsg : public rtc::MessageData {
+  GetStatsMsg(webrtc::StatsObserver* observer,
+              webrtc::MediaStreamTrackInterface* track)
+      : observer(observer), track(track) {
+  }
+  rtc::scoped_refptr<webrtc::StatsObserver> observer;
+  rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> track;
+};
+
+// |in_str| should be of the format:
+// stunURI       = scheme ":" stun-host [ ":" stun-port ]
+// scheme        = "stun" / "stuns"
+// stun-host     = IP-literal / IPv4address / reg-name
+// stun-port     = *DIGIT
+//
+// draft-petithuguenin-behave-turn-uris-01
+// turnURI       = scheme ":" turn-host [ ":" turn-port ]
+// turn-host     = username@IP-literal / IPv4address / reg-name
+bool GetServiceTypeAndHostnameFromUri(const std::string& in_str,
+                                      ServiceType* service_type,
+                                      std::string* hostname) {
+  const std::string::size_type colonpos = in_str.find(':');
+  if (colonpos == std::string::npos) {
+    LOG(LS_WARNING) << "Missing ':' in ICE URI: " << in_str;
+    return false;
+  }
+  if ((colonpos + 1) == in_str.length()) {
+    LOG(LS_WARNING) << "Empty hostname in ICE URI: " << in_str;
+    return false;
+  }
+  *service_type = INVALID;
+  for (size_t i = 0; i < arraysize(kValidIceServiceTypes); ++i) {
+    if (in_str.compare(0, colonpos, kValidIceServiceTypes[i]) == 0) {
+      *service_type = static_cast<ServiceType>(i);
+      break;
+    }
+  }
+  if (*service_type == INVALID) {
+    return false;
+  }
+  *hostname = in_str.substr(colonpos + 1, std::string::npos);
+  return true;
+}
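+
+// For illustration, GetServiceTypeAndHostnameFromUri maps hypothetical
+// inputs as follows:
+//   "stun:stun.example.org"      -> STUN, hostname "stun.example.org"
+//   "turns:turn.example.org:443" -> TURNS, hostname "turn.example.org:443"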
+
+bool ParsePort(const std::string& in_str, int* port) {
+  // Make sure port only contains digits. FromString doesn't check this.
+  for (const char& c : in_str) {
+    if (!std::isdigit(c)) {
+      return false;
+    }
+  }
+  return rtc::FromString(in_str, port);
+}
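+
+// For illustration: ParsePort("3478", &port) succeeds, while "34a8" and
+// "-1" fail the digit check.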
+
+// This method parses IPv6 and IPv4 literal strings, along with hostnames in
+// standard hostname:port format.
+// The following formats are considered correct:
+// |hostname:port|, |[IPv6 address]:port|, |IPv4 address:port|,
+// |hostname|, |[IPv6 address]|, |IPv4 address|.
+bool ParseHostnameAndPortFromString(const std::string& in_str,
+                                    std::string* host,
+                                    int* port) {
+  RTC_DCHECK(host->empty());
+  if (in_str.at(0) == '[') {
+    std::string::size_type closebracket = in_str.rfind(']');
+    if (closebracket != std::string::npos) {
+      std::string::size_type colonpos = in_str.find(':', closebracket);
+      if (std::string::npos != colonpos) {
+        if (!ParsePort(in_str.substr(closebracket + 2, std::string::npos),
+                       port)) {
+          return false;
+        }
+      }
+      *host = in_str.substr(1, closebracket - 1);
+    } else {
+      return false;
+    }
+  } else {
+    std::string::size_type colonpos = in_str.find(':');
+    if (std::string::npos != colonpos) {
+      if (!ParsePort(in_str.substr(colonpos + 1, std::string::npos), port)) {
+        return false;
+      }
+      *host = in_str.substr(0, colonpos);
+    } else {
+      *host = in_str;
+    }
+  }
+  return !host->empty();
+}
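+
+// For illustration, ParseHostnameAndPortFromString maps hypothetical inputs
+// as follows:
+//   "turn.example.org:3478" -> host "turn.example.org", port 3478
+//   "[::1]:3478"            -> host "::1", port 3478
+//   "192.0.2.1"             -> host "192.0.2.1", |port| left unchanged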
+
+// Adds a STUN or TURN server to the appropriate list,
+// by parsing |url| and using the username/password in |server|.
+bool ParseIceServerUrl(const PeerConnectionInterface::IceServer& server,
+                       const std::string& url,
+                       cricket::ServerAddresses* stun_servers,
+                       std::vector<cricket::RelayServerConfig>* turn_servers) {
+  // draft-nandakumar-rtcweb-stun-uri-01
+  // stunURI       = scheme ":" stun-host [ ":" stun-port ]
+  // scheme        = "stun" / "stuns"
+  // stun-host     = IP-literal / IPv4address / reg-name
+  // stun-port     = *DIGIT
+
+  // draft-petithuguenin-behave-turn-uris-01
+  // turnURI       = scheme ":" turn-host [ ":" turn-port ]
+  //                 [ "?transport=" transport ]
+  // scheme        = "turn" / "turns"
+  // transport     = "udp" / "tcp" / transport-ext
+  // transport-ext = 1*unreserved
+  // turn-host     = IP-literal / IPv4address / reg-name
+  // turn-port     = *DIGIT
+  RTC_DCHECK(stun_servers != nullptr);
+  RTC_DCHECK(turn_servers != nullptr);
+  std::vector<std::string> tokens;
+  cricket::ProtocolType turn_transport_type = cricket::PROTO_UDP;
+  RTC_DCHECK(!url.empty());
+  rtc::tokenize(url, '?', &tokens);
+  std::string uri_without_transport = tokens[0];
+  // Check the transport= param, if it exists.
+  if (tokens.size() == kTurnTransportTokensNum) {  // ?transport= is present.
+    std::string uri_transport_param = tokens[1];
+    rtc::tokenize(uri_transport_param, '=', &tokens);
+    if (tokens[0] == kTransport) {
+      // Per the grammar above, the transport param consists of lower-case
+      // letters.
+      if (!cricket::StringToProto(tokens[1].c_str(), &turn_transport_type) ||
+          (turn_transport_type != cricket::PROTO_UDP &&
+           turn_transport_type != cricket::PROTO_TCP)) {
+        LOG(LS_WARNING) << "Transport param should always be udp or tcp.";
+        return false;
+      }
+    }
+  }
+
+  std::string hoststring;
+  ServiceType service_type;
+  if (!GetServiceTypeAndHostnameFromUri(uri_without_transport, &service_type,
+                                        &hoststring)) {
+    LOG(LS_WARNING) << "Invalid transport parameter in ICE URI: " << url;
+    return false;
+  }
+
+  // GetServiceTypeAndHostnameFromUri should never give an empty hoststring.
+  RTC_DCHECK(!hoststring.empty());
+
+  // Split off the username from the hostname, if one is present.
+  tokens.clear();
+  rtc::tokenize_with_empty_tokens(hoststring, '@', &tokens);
+
+  std::string username(server.username);
+  if (tokens.size() > kTurnHostTokensNum) {
+    LOG(LS_WARNING) << "Invalid user@hostname format: " << hoststring;
+    return false;
+  }
+  if (tokens.size() == kTurnHostTokensNum) {
+    if (tokens[0].empty() || tokens[1].empty()) {
+      LOG(LS_WARNING) << "Invalid user@hostname format: " << hoststring;
+      return false;
+    }
+    username.assign(rtc::s_url_decode(tokens[0]));
+    hoststring = tokens[1];
+  } else {
+    hoststring = tokens[0];
+  }
+
+  int port = kDefaultStunPort;
+  if (service_type == TURNS) {
+    port = kDefaultStunTlsPort;
+    turn_transport_type = cricket::PROTO_TCP;
+  }
+
+  std::string address;
+  if (!ParseHostnameAndPortFromString(hoststring, &address, &port)) {
+    LOG(WARNING) << "Invalid hostname format: " << uri_without_transport;
+    return false;
+  }
+
+  if (port <= 0 || port > 0xffff) {
+    LOG(WARNING) << "Invalid port: " << port;
+    return false;
+  }
+
+  switch (service_type) {
+    case STUN:
+    case STUNS:
+      stun_servers->insert(rtc::SocketAddress(address, port));
+      break;
+    case TURN:
+    case TURNS: {
+      bool secure = (service_type == TURNS);
+      turn_servers->push_back(
+          cricket::RelayServerConfig(address, port, username, server.password,
+                                     turn_transport_type, secure));
+      break;
+    }
+    case INVALID:
+    default:
+      LOG(LS_WARNING) << "Configuration not supported: " << url;
+      return false;
+  }
+  return true;
+}
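+
+// For illustration, ParseIceServerUrl on a hypothetical
+// "turn:user@turn.example.org?transport=tcp" with password "pass" appends
+// RelayServerConfig("turn.example.org", 3478, "user", "pass", PROTO_TCP,
+// false) to |turn_servers|.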
+
+// Check if we can send |new_stream| on a PeerConnection.
+bool CanAddLocalMediaStream(webrtc::StreamCollectionInterface* current_streams,
+                            webrtc::MediaStreamInterface* new_stream) {
+  if (!new_stream || !current_streams) {
+    return false;
+  }
+  if (current_streams->find(new_stream->label()) != nullptr) {
+    LOG(LS_ERROR) << "MediaStream with label " << new_stream->label()
+                  << " is already added.";
+    return false;
+  }
+  return true;
+}
+
+bool MediaContentDirectionHasSend(cricket::MediaContentDirection dir) {
+  return dir == cricket::MD_SENDONLY || dir == cricket::MD_SENDRECV;
+}
+
+// If the direction is "recvonly" or "inactive", treat the description
+// as containing no streams.
+// See: https://code.google.com/p/webrtc/issues/detail?id=5054
+std::vector<cricket::StreamParams> GetActiveStreams(
+    const cricket::MediaContentDescription* desc) {
+  return MediaContentDirectionHasSend(desc->direction())
+             ? desc->streams()
+             : std::vector<cricket::StreamParams>();
+}
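+
+// For illustration: GetActiveStreams on a "recvonly" or "inactive" audio
+// description returns an empty stream list, per the issue linked above.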
+
+bool IsValidOfferToReceiveMedia(int value) {
+  typedef PeerConnectionInterface::RTCOfferAnswerOptions Options;
+  return (value >= Options::kUndefined) &&
+         (value <= Options::kMaxOfferToReceiveMedia);
+}
+
+// Add the stream and RTP data channel info to |session_options|.
+void AddSendStreams(
+    cricket::MediaSessionOptions* session_options,
+    const std::vector<rtc::scoped_refptr<RtpSenderInterface>>& senders,
+    const std::map<std::string, rtc::scoped_refptr<DataChannel>>&
+        rtp_data_channels) {
+  session_options->streams.clear();
+  for (const auto& sender : senders) {
+    session_options->AddSendStream(sender->media_type(), sender->id(),
+                                   sender->stream_id());
+  }
+
+  // Check for data channels.
+  for (const auto& kv : rtp_data_channels) {
+    const DataChannel* channel = kv.second;
+    if (channel->state() == DataChannel::kConnecting ||
+        channel->state() == DataChannel::kOpen) {
+      // |streamid| and |sync_label| are both set to the DataChannel label
+      // here so they can be signaled the same way as MediaStreams and Tracks.
+      // For MediaStreams, the sync_label is the MediaStream label and the
+      // track label is the same as |streamid|.
+      const std::string& streamid = channel->label();
+      const std::string& sync_label = channel->label();
+      session_options->AddSendStream(cricket::MEDIA_TYPE_DATA, streamid,
+                                     sync_label);
+    }
+  }
+}
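+
+// For illustration: AddSendStreams signals an open RTP DataChannel labeled
+// "chat" as a data send stream with streamid and sync_label both "chat".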
+
+}  // namespace
+
+namespace webrtc {
+
+// Factory class for creating remote MediaStreams and MediaStreamTracks.
+class RemoteMediaStreamFactory {
+ public:
+  explicit RemoteMediaStreamFactory(rtc::Thread* signaling_thread,
+                                    cricket::ChannelManager* channel_manager)
+      : signaling_thread_(signaling_thread),
+        channel_manager_(channel_manager) {}
+
+  rtc::scoped_refptr<MediaStreamInterface> CreateMediaStream(
+      const std::string& stream_label) {
+    return MediaStreamProxy::Create(signaling_thread_,
+                                    MediaStream::Create(stream_label));
+  }
+
+  AudioTrackInterface* AddAudioTrack(uint32_t ssrc,
+                                     AudioProviderInterface* provider,
+                                     webrtc::MediaStreamInterface* stream,
+                                     const std::string& track_id) {
+    return AddTrack<AudioTrackInterface, AudioTrack, AudioTrackProxy>(
+        stream, track_id, RemoteAudioSource::Create(ssrc, provider));
+  }
+
+  VideoTrackInterface* AddVideoTrack(webrtc::MediaStreamInterface* stream,
+                                     const std::string& track_id) {
+    return AddTrack<VideoTrackInterface, VideoTrack, VideoTrackProxy>(
+        stream, track_id,
+        VideoSource::Create(channel_manager_, new RemoteVideoCapturer(),
+                            nullptr, true)
+            .get());
+  }
+
+ private:
+  template <typename TI, typename T, typename TP, typename S>
+  TI* AddTrack(MediaStreamInterface* stream,
+               const std::string& track_id,
+               const S& source) {
+    rtc::scoped_refptr<TI> track(
+        TP::Create(signaling_thread_, T::Create(track_id, source)));
+    track->set_state(webrtc::MediaStreamTrackInterface::kLive);
+    if (stream->AddTrack(track)) {
+      return track;
+    }
+    return nullptr;
+  }
+
+  rtc::Thread* signaling_thread_;
+  cricket::ChannelManager* channel_manager_;
+};
+
+bool ConvertRtcOptionsForOffer(
+    const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options,
+    cricket::MediaSessionOptions* session_options) {
+  typedef PeerConnectionInterface::RTCOfferAnswerOptions RTCOfferAnswerOptions;
+  if (!IsValidOfferToReceiveMedia(rtc_options.offer_to_receive_audio) ||
+      !IsValidOfferToReceiveMedia(rtc_options.offer_to_receive_video)) {
+    return false;
+  }
+
+  if (rtc_options.offer_to_receive_audio != RTCOfferAnswerOptions::kUndefined) {
+    session_options->recv_audio = (rtc_options.offer_to_receive_audio > 0);
+  }
+  if (rtc_options.offer_to_receive_video != RTCOfferAnswerOptions::kUndefined) {
+    session_options->recv_video = (rtc_options.offer_to_receive_video > 0);
+  }
+
+  session_options->vad_enabled = rtc_options.voice_activity_detection;
+  session_options->audio_transport_options.ice_restart =
+      rtc_options.ice_restart;
+  session_options->video_transport_options.ice_restart =
+      rtc_options.ice_restart;
+  session_options->data_transport_options.ice_restart = rtc_options.ice_restart;
+  session_options->bundle_enabled = rtc_options.use_rtp_mux;
+
+  return true;
+}
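+
+// For illustration: ConvertRtcOptionsForOffer with hypothetical options
+// {offer_to_receive_audio: 1, offer_to_receive_video: 0, ice_restart: true}
+// yields recv_audio = true, recv_video = false, and ice_restart = true on
+// all three transports.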
+
+bool ParseConstraintsForAnswer(const MediaConstraintsInterface* constraints,
+                               cricket::MediaSessionOptions* session_options) {
+  bool value = false;
+  size_t mandatory_constraints_satisfied = 0;
+
+  // kOfferToReceiveAudio defaults to true according to spec.
+  if (!FindConstraint(constraints,
+                      MediaConstraintsInterface::kOfferToReceiveAudio, &value,
+                      &mandatory_constraints_satisfied) ||
+      value) {
+    session_options->recv_audio = true;
+  }
+
+  // kOfferToReceiveVideo defaults to false according to spec. But if this is
+  // an answer and video is offered, we should still accept video by default.
+  value = false;
+  if (!FindConstraint(constraints,
+                      MediaConstraintsInterface::kOfferToReceiveVideo, &value,
+                      &mandatory_constraints_satisfied) ||
+      value) {
+    session_options->recv_video = true;
+  }
+
+  if (FindConstraint(constraints,
+                     MediaConstraintsInterface::kVoiceActivityDetection, &value,
+                     &mandatory_constraints_satisfied)) {
+    session_options->vad_enabled = value;
+  }
+
+  if (FindConstraint(constraints, MediaConstraintsInterface::kUseRtpMux, &value,
+                     &mandatory_constraints_satisfied)) {
+    session_options->bundle_enabled = value;
+  } else {
+    // kUseRtpMux defaults to true according to spec.
+    session_options->bundle_enabled = true;
+  }
+
+  if (FindConstraint(constraints, MediaConstraintsInterface::kIceRestart,
+                     &value, &mandatory_constraints_satisfied)) {
+    session_options->audio_transport_options.ice_restart = value;
+    session_options->video_transport_options.ice_restart = value;
+    session_options->data_transport_options.ice_restart = value;
+  } else {
+    // kIceRestart defaults to false according to spec.
+    session_options->audio_transport_options.ice_restart = false;
+    session_options->video_transport_options.ice_restart = false;
+    session_options->data_transport_options.ice_restart = false;
+  }
+
+  if (!constraints) {
+    return true;
+  }
+  return mandatory_constraints_satisfied == constraints->GetMandatory().size();
+}
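+
+// For illustration: ParseConstraintsForAnswer with null |constraints| leaves
+// recv_audio and recv_video true, enables BUNDLE, and disables ICE restart,
+// matching the spec defaults noted above.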
+
+bool ParseIceServers(const PeerConnectionInterface::IceServers& servers,
+                     cricket::ServerAddresses* stun_servers,
+                     std::vector<cricket::RelayServerConfig>* turn_servers) {
+  for (const webrtc::PeerConnectionInterface::IceServer& server : servers) {
+    if (!server.urls.empty()) {
+      for (const std::string& url : server.urls) {
+        if (url.empty()) {
+          LOG(LS_ERROR) << "Empty uri.";
+          return false;
+        }
+        if (!ParseIceServerUrl(server, url, stun_servers, turn_servers)) {
+          return false;
+        }
+      }
+    } else if (!server.uri.empty()) {
+      // Fallback to old .uri if new .urls isn't present.
+      if (!ParseIceServerUrl(server, server.uri, stun_servers, turn_servers)) {
+        return false;
+      }
+    } else {
+      LOG(LS_ERROR) << "Empty uri.";
+      return false;
+    }
+  }
+  // Candidates must have unique priorities, so that connectivity checks
+  // are performed in a well-defined order.
+  int priority = static_cast<int>(turn_servers->size() - 1);
+  for (cricket::RelayServerConfig& turn_server : *turn_servers) {
+    // First in the list gets highest priority.
+    turn_server.priority = priority--;
+  }
+  return true;
+}
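+
+// For illustration: ParseIceServers on a hypothetical configuration listing
+// one STUN and two TURN URLs yields one entry in |stun_servers| and two in
+// |turn_servers|, with priorities 1 and 0 (the first listed TURN server gets
+// the highest priority).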
+
+PeerConnection::PeerConnection(PeerConnectionFactory* factory)
+    : factory_(factory),
+      observer_(NULL),
+      uma_observer_(NULL),
+      signaling_state_(kStable),
+      ice_state_(kIceNew),
+      ice_connection_state_(kIceConnectionNew),
+      ice_gathering_state_(kIceGatheringNew),
+      local_streams_(StreamCollection::Create()),
+      remote_streams_(StreamCollection::Create()) {}
+
+PeerConnection::~PeerConnection() {
+  TRACE_EVENT0("webrtc", "PeerConnection::~PeerConnection");
+  RTC_DCHECK(signaling_thread()->IsCurrent());
+  // Need to detach RTP senders/receivers from WebRtcSession,
+  // since it's about to be destroyed.
+  for (const auto& sender : senders_) {
+    sender->Stop();
+  }
+  for (const auto& receiver : receivers_) {
+    receiver->Stop();
+  }
+}
+
+bool PeerConnection::Initialize(
+    const PeerConnectionInterface::RTCConfiguration& configuration,
+    const MediaConstraintsInterface* constraints,
+    rtc::scoped_ptr<cricket::PortAllocator> allocator,
+    rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+    PeerConnectionObserver* observer) {
+  TRACE_EVENT0("webrtc", "PeerConnection::Initialize");
+  RTC_DCHECK(observer != nullptr);
+  if (!observer) {
+    return false;
+  }
+  observer_ = observer;
+
+  port_allocator_ = std::move(allocator);
+
+  cricket::ServerAddresses stun_servers;
+  std::vector<cricket::RelayServerConfig> turn_servers;
+  if (!ParseIceServers(configuration.servers, &stun_servers, &turn_servers)) {
+    return false;
+  }
+  port_allocator_->SetIceServers(stun_servers, turn_servers);
+
+  // To handle both internally and externally created port allocators, we
+  // enable BUNDLE here.
+  int portallocator_flags = port_allocator_->flags();
+  portallocator_flags |= cricket::PORTALLOCATOR_ENABLE_SHARED_SOCKET |
+                         cricket::PORTALLOCATOR_ENABLE_IPV6;
+  bool value;
+  // If the IPv6 flag was specified, don't override it with the field trial.
+  if (FindConstraint(constraints, MediaConstraintsInterface::kEnableIPv6,
+                     &value, nullptr)) {
+    if (!value) {
+      portallocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6);
+    }
+  } else if (webrtc::field_trial::FindFullName("WebRTC-IPv6Default") ==
+             "Disabled") {
+    portallocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6);
+  }
+
+  if (configuration.tcp_candidate_policy == kTcpCandidatePolicyDisabled) {
+    portallocator_flags |= cricket::PORTALLOCATOR_DISABLE_TCP;
+    LOG(LS_INFO) << "TCP candidates are disabled.";
+  }
+
+  port_allocator_->set_flags(portallocator_flags);
+  // No step delay is used while allocating ports.
+  port_allocator_->set_step_delay(cricket::kMinimumStepDelay);
+
+  media_controller_.reset(factory_->CreateMediaController());
+
+  remote_stream_factory_.reset(new RemoteMediaStreamFactory(
+      factory_->signaling_thread(), media_controller_->channel_manager()));
+
+  session_.reset(
+      new WebRtcSession(media_controller_.get(), factory_->signaling_thread(),
+                        factory_->worker_thread(), port_allocator_.get()));
+  stats_.reset(new StatsCollector(this));
+
+  // Initialize the WebRtcSession. It creates transport channels etc.
+  if (!session_->Initialize(factory_->options(), constraints,
+                            std::move(dtls_identity_store), configuration)) {
+    return false;
+  }
+
+  // Register PeerConnection as the receiver of local ICE candidates.
+  // All the callbacks will be posted to the application from PeerConnection.
+  session_->RegisterIceObserver(this);
+  session_->SignalState.connect(this, &PeerConnection::OnSessionStateChange);
+  session_->SignalVoiceChannelDestroyed.connect(
+      this, &PeerConnection::OnVoiceChannelDestroyed);
+  session_->SignalVideoChannelDestroyed.connect(
+      this, &PeerConnection::OnVideoChannelDestroyed);
+  session_->SignalDataChannelCreated.connect(
+      this, &PeerConnection::OnDataChannelCreated);
+  session_->SignalDataChannelDestroyed.connect(
+      this, &PeerConnection::OnDataChannelDestroyed);
+  session_->SignalDataChannelOpenMessage.connect(
+      this, &PeerConnection::OnDataChannelOpenMessage);
+  return true;
+}
+
+rtc::scoped_refptr<StreamCollectionInterface>
+PeerConnection::local_streams() {
+  return local_streams_;
+}
+
+rtc::scoped_refptr<StreamCollectionInterface>
+PeerConnection::remote_streams() {
+  return remote_streams_;
+}
+
+bool PeerConnection::AddStream(MediaStreamInterface* local_stream) {
+  TRACE_EVENT0("webrtc", "PeerConnection::AddStream");
+  if (IsClosed()) {
+    return false;
+  }
+  if (!CanAddLocalMediaStream(local_streams_, local_stream)) {
+    return false;
+  }
+
+  local_streams_->AddStream(local_stream);
+  MediaStreamObserver* observer = new MediaStreamObserver(local_stream);
+  observer->SignalAudioTrackAdded.connect(this,
+                                          &PeerConnection::OnAudioTrackAdded);
+  observer->SignalAudioTrackRemoved.connect(
+      this, &PeerConnection::OnAudioTrackRemoved);
+  observer->SignalVideoTrackAdded.connect(this,
+                                          &PeerConnection::OnVideoTrackAdded);
+  observer->SignalVideoTrackRemoved.connect(
+      this, &PeerConnection::OnVideoTrackRemoved);
+  stream_observers_.push_back(rtc::scoped_ptr<MediaStreamObserver>(observer));
+
+  for (const auto& track : local_stream->GetAudioTracks()) {
+    OnAudioTrackAdded(track.get(), local_stream);
+  }
+  for (const auto& track : local_stream->GetVideoTracks()) {
+    OnVideoTrackAdded(track.get(), local_stream);
+  }
+
+  stats_->AddStream(local_stream);
+  observer_->OnRenegotiationNeeded();
+  return true;
+}
+
+void PeerConnection::RemoveStream(MediaStreamInterface* local_stream) {
+  TRACE_EVENT0("webrtc", "PeerConnection::RemoveStream");
+  for (const auto& track : local_stream->GetAudioTracks()) {
+    OnAudioTrackRemoved(track.get(), local_stream);
+  }
+  for (const auto& track : local_stream->GetVideoTracks()) {
+    OnVideoTrackRemoved(track.get(), local_stream);
+  }
+
+  local_streams_->RemoveStream(local_stream);
+  stream_observers_.erase(
+      std::remove_if(
+          stream_observers_.begin(), stream_observers_.end(),
+          [local_stream](const rtc::scoped_ptr<MediaStreamObserver>& observer) {
+            return observer->stream()->label().compare(local_stream->label()) ==
+                   0;
+          }),
+      stream_observers_.end());
+
+  if (IsClosed()) {
+    return;
+  }
+  observer_->OnRenegotiationNeeded();
+}
+
+rtc::scoped_refptr<RtpSenderInterface> PeerConnection::AddTrack(
+    MediaStreamTrackInterface* track,
+    std::vector<MediaStreamInterface*> streams) {
+  TRACE_EVENT0("webrtc", "PeerConnection::AddTrack");
+  if (IsClosed()) {
+    return nullptr;
+  }
+  if (streams.size() >= 2) {
+    LOG(LS_ERROR)
+        << "Adding a track with two streams is not currently supported.";
+    return nullptr;
+  }
+  // TODO(deadbeef): Support adding a track to two different senders.
+  if (FindSenderForTrack(track) != senders_.end()) {
+    LOG(LS_ERROR) << "Sender for track " << track->id() << " already exists.";
+    return nullptr;
+  }
+
+  // TODO(deadbeef): Support adding a track to multiple streams.
+  rtc::scoped_refptr<RtpSenderInterface> new_sender;
+  if (track->kind() == MediaStreamTrackInterface::kAudioKind) {
+    new_sender = RtpSenderProxy::Create(
+        signaling_thread(),
+        new AudioRtpSender(static_cast<AudioTrackInterface*>(track),
+                           session_.get(), stats_.get()));
+    if (!streams.empty()) {
+      new_sender->set_stream_id(streams[0]->label());
+    }
+    const TrackInfo* track_info = FindTrackInfo(
+        local_audio_tracks_, new_sender->stream_id(), track->id());
+    if (track_info) {
+      new_sender->SetSsrc(track_info->ssrc);
+    }
+  } else if (track->kind() == MediaStreamTrackInterface::kVideoKind) {
+    new_sender = RtpSenderProxy::Create(
+        signaling_thread(),
+        new VideoRtpSender(static_cast<VideoTrackInterface*>(track),
+                           session_.get()));
+    if (!streams.empty()) {
+      new_sender->set_stream_id(streams[0]->label());
+    }
+    const TrackInfo* track_info = FindTrackInfo(
+        local_video_tracks_, new_sender->stream_id(), track->id());
+    if (track_info) {
+      new_sender->SetSsrc(track_info->ssrc);
+    }
+  } else {
+    LOG(LS_ERROR) << "CreateSender called with invalid kind: " << track->kind();
+    return rtc::scoped_refptr<RtpSenderInterface>();
+  }
+
+  senders_.push_back(new_sender);
+  observer_->OnRenegotiationNeeded();
+  return new_sender;
+}
+
+bool PeerConnection::RemoveTrack(RtpSenderInterface* sender) {
+  TRACE_EVENT0("webrtc", "PeerConnection::RemoveTrack");
+  if (IsClosed()) {
+    return false;
+  }
+
+  auto it = std::find(senders_.begin(), senders_.end(), sender);
+  if (it == senders_.end()) {
+    LOG(LS_ERROR) << "Couldn't find sender " << sender->id() << " to remove.";
+    return false;
+  }
+  (*it)->Stop();
+  senders_.erase(it);
+
+  observer_->OnRenegotiationNeeded();
+  return true;
+}
+
+rtc::scoped_refptr<DtmfSenderInterface> PeerConnection::CreateDtmfSender(
+    AudioTrackInterface* track) {
+  TRACE_EVENT0("webrtc", "PeerConnection::CreateDtmfSender");
+  if (!track) {
+    LOG(LS_ERROR) << "CreateDtmfSender - track is NULL.";
+    return NULL;
+  }
+  if (!local_streams_->FindAudioTrack(track->id())) {
+    LOG(LS_ERROR) << "CreateDtmfSender is called with a non local audio track.";
+    return NULL;
+  }
+
+  rtc::scoped_refptr<DtmfSenderInterface> sender(
+      DtmfSender::Create(track, signaling_thread(), session_.get()));
+  if (!sender.get()) {
+    LOG(LS_ERROR) << "CreateDtmfSender failed on DtmfSender::Create.";
+    return NULL;
+  }
+  return DtmfSenderProxy::Create(signaling_thread(), sender.get());
+}
+
+rtc::scoped_refptr<RtpSenderInterface> PeerConnection::CreateSender(
+    const std::string& kind,
+    const std::string& stream_id) {
+  TRACE_EVENT0("webrtc", "PeerConnection::CreateSender");
+  rtc::scoped_refptr<RtpSenderInterface> new_sender;
+  if (kind == MediaStreamTrackInterface::kAudioKind) {
+    new_sender = RtpSenderProxy::Create(
+        signaling_thread(), new AudioRtpSender(session_.get(), stats_.get()));
+  } else if (kind == MediaStreamTrackInterface::kVideoKind) {
+    new_sender = RtpSenderProxy::Create(signaling_thread(),
+                                        new VideoRtpSender(session_.get()));
+  } else {
+    LOG(LS_ERROR) << "CreateSender called with invalid kind: " << kind;
+    return new_sender;
+  }
+  if (!stream_id.empty()) {
+    new_sender->set_stream_id(stream_id);
+  }
+  senders_.push_back(new_sender);
+  return new_sender;
+}
+
+std::vector<rtc::scoped_refptr<RtpSenderInterface>> PeerConnection::GetSenders()
+    const {
+  return senders_;
+}
+
+std::vector<rtc::scoped_refptr<RtpReceiverInterface>>
+PeerConnection::GetReceivers() const {
+  return receivers_;
+}
+
+bool PeerConnection::GetStats(StatsObserver* observer,
+                              MediaStreamTrackInterface* track,
+                              StatsOutputLevel level) {
+  TRACE_EVENT0("webrtc", "PeerConnection::GetStats");
+  RTC_DCHECK(signaling_thread()->IsCurrent());
+  if (!VERIFY(observer != NULL)) {
+    LOG(LS_ERROR) << "GetStats - observer is NULL.";
+    return false;
+  }
+
+  stats_->UpdateStats(level);
+  signaling_thread()->Post(this, MSG_GETSTATS,
+                           new GetStatsMsg(observer, track));
+  return true;
+}
+
+PeerConnectionInterface::SignalingState PeerConnection::signaling_state() {
+  return signaling_state_;
+}
+
+PeerConnectionInterface::IceState PeerConnection::ice_state() {
+  return ice_state_;
+}
+
+PeerConnectionInterface::IceConnectionState
+PeerConnection::ice_connection_state() {
+  return ice_connection_state_;
+}
+
+PeerConnectionInterface::IceGatheringState
+PeerConnection::ice_gathering_state() {
+  return ice_gathering_state_;
+}
+
+rtc::scoped_refptr<DataChannelInterface>
+PeerConnection::CreateDataChannel(
+    const std::string& label,
+    const DataChannelInit* config) {
+  TRACE_EVENT0("webrtc", "PeerConnection::CreateDataChannel");
+  bool first_datachannel = !HasDataChannels();
+
+  rtc::scoped_ptr<InternalDataChannelInit> internal_config;
+  if (config) {
+    internal_config.reset(new InternalDataChannelInit(*config));
+  }
+  rtc::scoped_refptr<DataChannelInterface> channel(
+      InternalCreateDataChannel(label, internal_config.get()));
+  if (!channel.get()) {
+    return nullptr;
+  }
+
+  // Trigger the onRenegotiationNeeded event for every new RTP DataChannel, or
+  // the first SCTP DataChannel.
+  if (session_->data_channel_type() == cricket::DCT_RTP || first_datachannel) {
+    observer_->OnRenegotiationNeeded();
+  }
+
+  return DataChannelProxy::Create(signaling_thread(), channel.get());
+}
+
+void PeerConnection::CreateOffer(CreateSessionDescriptionObserver* observer,
+                                 const MediaConstraintsInterface* constraints) {
+  TRACE_EVENT0("webrtc", "PeerConnection::CreateOffer");
+  if (!VERIFY(observer != nullptr)) {
+    LOG(LS_ERROR) << "CreateOffer - observer is NULL.";
+    return;
+  }
+  RTCOfferAnswerOptions options;
+
+  bool value;
+  size_t mandatory_constraints = 0;
+
+  if (FindConstraint(constraints,
+                     MediaConstraintsInterface::kOfferToReceiveAudio,
+                     &value,
+                     &mandatory_constraints)) {
+    options.offer_to_receive_audio =
+        value ? RTCOfferAnswerOptions::kOfferToReceiveMediaTrue : 0;
+  }
+
+  if (FindConstraint(constraints,
+                     MediaConstraintsInterface::kOfferToReceiveVideo,
+                     &value,
+                     &mandatory_constraints)) {
+    options.offer_to_receive_video =
+        value ? RTCOfferAnswerOptions::kOfferToReceiveMediaTrue : 0;
+  }
+
+  if (FindConstraint(constraints,
+                     MediaConstraintsInterface::kVoiceActivityDetection,
+                     &value,
+                     &mandatory_constraints)) {
+    options.voice_activity_detection = value;
+  }
+
+  if (FindConstraint(constraints,
+                     MediaConstraintsInterface::kIceRestart,
+                     &value,
+                     &mandatory_constraints)) {
+    options.ice_restart = value;
+  }
+
+  if (FindConstraint(constraints,
+                     MediaConstraintsInterface::kUseRtpMux,
+                     &value,
+                     &mandatory_constraints)) {
+    options.use_rtp_mux = value;
+  }
+
+  CreateOffer(observer, options);
+}
+
+void PeerConnection::CreateOffer(CreateSessionDescriptionObserver* observer,
+                                 const RTCOfferAnswerOptions& options) {
+  TRACE_EVENT0("webrtc", "PeerConnection::CreateOffer");
+  if (!VERIFY(observer != nullptr)) {
+    LOG(LS_ERROR) << "CreateOffer - observer is NULL.";
+    return;
+  }
+
+  cricket::MediaSessionOptions session_options;
+  if (!GetOptionsForOffer(options, &session_options)) {
+    std::string error = "CreateOffer called with invalid options.";
+    LOG(LS_ERROR) << error;
+    PostCreateSessionDescriptionFailure(observer, error);
+    return;
+  }
+
+  session_->CreateOffer(observer, options, session_options);
+}
+
+void PeerConnection::CreateAnswer(
+    CreateSessionDescriptionObserver* observer,
+    const MediaConstraintsInterface* constraints) {
+  TRACE_EVENT0("webrtc", "PeerConnection::CreateAnswer");
+  if (!VERIFY(observer != nullptr)) {
+    LOG(LS_ERROR) << "CreateAnswer - observer is NULL.";
+    return;
+  }
+
+  cricket::MediaSessionOptions session_options;
+  if (!GetOptionsForAnswer(constraints, &session_options)) {
+    std::string error = "CreateAnswer called with invalid constraints.";
+    LOG(LS_ERROR) << error;
+    PostCreateSessionDescriptionFailure(observer, error);
+    return;
+  }
+
+  session_->CreateAnswer(observer, constraints, session_options);
+}
+
+void PeerConnection::SetLocalDescription(
+    SetSessionDescriptionObserver* observer,
+    SessionDescriptionInterface* desc) {
+  TRACE_EVENT0("webrtc", "PeerConnection::SetLocalDescription");
+  if (!VERIFY(observer != nullptr)) {
+    LOG(LS_ERROR) << "SetLocalDescription - observer is NULL.";
+    return;
+  }
+  if (!desc) {
+    PostSetSessionDescriptionFailure(observer, "SessionDescription is NULL.");
+    return;
+  }
+  // Update stats here so that we have the most recent stats for tracks and
+  // streams that might be removed by updating the session description.
+  stats_->UpdateStats(kStatsOutputLevelStandard);
+  std::string error;
+  if (!session_->SetLocalDescription(desc, &error)) {
+    PostSetSessionDescriptionFailure(observer, error);
+    return;
+  }
+
+  // If setting the description decided our SSL role, allocate any necessary
+  // SCTP sids.
+  rtc::SSLRole role;
+  if (session_->data_channel_type() == cricket::DCT_SCTP &&
+      session_->GetSslRole(session_->data_channel(), &role)) {
+    AllocateSctpSids(role);
+  }
+
+  // Update state and SSRC of local MediaStreams and DataChannels based on the
+  // local session description.
+  const cricket::ContentInfo* audio_content =
+      GetFirstAudioContent(desc->description());
+  if (audio_content) {
+    if (audio_content->rejected) {
+      RemoveTracks(cricket::MEDIA_TYPE_AUDIO);
+    } else {
+      const cricket::AudioContentDescription* audio_desc =
+          static_cast<const cricket::AudioContentDescription*>(
+              audio_content->description);
+      UpdateLocalTracks(audio_desc->streams(), audio_desc->type());
+    }
+  }
+
+  const cricket::ContentInfo* video_content =
+      GetFirstVideoContent(desc->description());
+  if (video_content) {
+    if (video_content->rejected) {
+      RemoveTracks(cricket::MEDIA_TYPE_VIDEO);
+    } else {
+      const cricket::VideoContentDescription* video_desc =
+          static_cast<const cricket::VideoContentDescription*>(
+              video_content->description);
+      UpdateLocalTracks(video_desc->streams(), video_desc->type());
+    }
+  }
+
+  const cricket::ContentInfo* data_content =
+      GetFirstDataContent(desc->description());
+  if (data_content) {
+    const cricket::DataContentDescription* data_desc =
+        static_cast<const cricket::DataContentDescription*>(
+            data_content->description);
+    if (rtc::starts_with(data_desc->protocol().data(),
+                         cricket::kMediaProtocolRtpPrefix)) {
+      UpdateLocalRtpDataChannels(data_desc->streams());
+    }
+  }
+
+  SetSessionDescriptionMsg* msg = new SetSessionDescriptionMsg(observer);
+  signaling_thread()->Post(this, MSG_SET_SESSIONDESCRIPTION_SUCCESS, msg);
+
+  // MaybeStartGathering needs to be called after posting
+  // MSG_SET_SESSIONDESCRIPTION_SUCCESS, so that we don't signal any candidates
+  // before signaling that SetLocalDescription completed.
+  session_->MaybeStartGathering();
+}
+
+void PeerConnection::SetRemoteDescription(
+    SetSessionDescriptionObserver* observer,
+    SessionDescriptionInterface* desc) {
+  TRACE_EVENT0("webrtc", "PeerConnection::SetRemoteDescription");
+  if (!VERIFY(observer != nullptr)) {
+    LOG(LS_ERROR) << "SetRemoteDescription - observer is NULL.";
+    return;
+  }
+  if (!desc) {
+    PostSetSessionDescriptionFailure(observer, "SessionDescription is NULL.");
+    return;
+  }
+  // Update stats here so that we have the most recent stats for tracks and
+  // streams that might be removed by updating the session description.
+  stats_->UpdateStats(kStatsOutputLevelStandard);
+  std::string error;
+  if (!session_->SetRemoteDescription(desc, &error)) {
+    PostSetSessionDescriptionFailure(observer, error);
+    return;
+  }
+
+  // If setting the description decided our SSL role, allocate any necessary
+  // SCTP sids.
+  rtc::SSLRole role;
+  if (session_->data_channel_type() == cricket::DCT_SCTP &&
+      session_->GetSslRole(session_->data_channel(), &role)) {
+    AllocateSctpSids(role);
+  }
+
+  const cricket::SessionDescription* remote_desc = desc->description();
+  const cricket::ContentInfo* audio_content = GetFirstAudioContent(remote_desc);
+  const cricket::ContentInfo* video_content = GetFirstVideoContent(remote_desc);
+  const cricket::AudioContentDescription* audio_desc =
+      GetFirstAudioContentDescription(remote_desc);
+  const cricket::VideoContentDescription* video_desc =
+      GetFirstVideoContentDescription(remote_desc);
+  const cricket::DataContentDescription* data_desc =
+      GetFirstDataContentDescription(remote_desc);
+
+  // Check if the descriptions include streams, just in case the peer supports
+  // MSID, but doesn't indicate so with "a=msid-semantic".
+  if (remote_desc->msid_supported() ||
+      (audio_desc && !audio_desc->streams().empty()) ||
+      (video_desc && !video_desc->streams().empty())) {
+    remote_peer_supports_msid_ = true;
+  }
+
+  // We wait to signal new streams until we finish processing the description,
+  // since only at that point will new streams have all their tracks.
+  rtc::scoped_refptr<StreamCollection> new_streams(StreamCollection::Create());
+
+  // Find all audio rtp streams and create corresponding remote AudioTracks
+  // and MediaStreams.
+  if (audio_content) {
+    if (audio_content->rejected) {
+      RemoveTracks(cricket::MEDIA_TYPE_AUDIO);
+    } else {
+      bool default_audio_track_needed =
+          !remote_peer_supports_msid_ &&
+          MediaContentDirectionHasSend(audio_desc->direction());
+      UpdateRemoteStreamsList(GetActiveStreams(audio_desc),
+                              default_audio_track_needed, audio_desc->type(),
+                              new_streams);
+    }
+  }
+
+  // Find all video rtp streams and create corresponding remote VideoTracks
+  // and MediaStreams.
+  if (video_content) {
+    if (video_content->rejected) {
+      RemoveTracks(cricket::MEDIA_TYPE_VIDEO);
+    } else {
+      bool default_video_track_needed =
+          !remote_peer_supports_msid_ &&
+          MediaContentDirectionHasSend(video_desc->direction());
+      UpdateRemoteStreamsList(GetActiveStreams(video_desc),
+                              default_video_track_needed, video_desc->type(),
+                              new_streams);
+    }
+  }
+
+  // Update the DataChannels with the information from the remote peer.
+  if (data_desc) {
+    if (rtc::starts_with(data_desc->protocol().data(),
+                         cricket::kMediaProtocolRtpPrefix)) {
+      UpdateRemoteRtpDataChannels(GetActiveStreams(data_desc));
+    }
+  }
+
+  // Iterate new_streams and notify the observer about new MediaStreams.
+  for (size_t i = 0; i < new_streams->count(); ++i) {
+    MediaStreamInterface* new_stream = new_streams->at(i);
+    stats_->AddStream(new_stream);
+    observer_->OnAddStream(new_stream);
+  }
+
+  UpdateEndedRemoteMediaStreams();
+
+  SetSessionDescriptionMsg* msg = new SetSessionDescriptionMsg(observer);
+  signaling_thread()->Post(this, MSG_SET_SESSIONDESCRIPTION_SUCCESS, msg);
+}
+
+bool PeerConnection::SetConfiguration(const RTCConfiguration& config) {
+  TRACE_EVENT0("webrtc", "PeerConnection::SetConfiguration");
+  if (port_allocator_) {
+    cricket::ServerAddresses stun_servers;
+    std::vector<cricket::RelayServerConfig> turn_servers;
+    if (!ParseIceServers(config.servers, &stun_servers, &turn_servers)) {
+      return false;
+    }
+    port_allocator_->SetIceServers(stun_servers, turn_servers);
+  }
+  session_->SetIceConfig(session_->ParseIceConfig(config));
+  return session_->SetIceTransports(config.type);
+}
+
+bool PeerConnection::AddIceCandidate(
+    const IceCandidateInterface* ice_candidate) {
+  TRACE_EVENT0("webrtc", "PeerConnection::AddIceCandidate");
+  return session_->ProcessIceMessage(ice_candidate);
+}
+
+void PeerConnection::RegisterUMAObserver(UMAObserver* observer) {
+  TRACE_EVENT0("webrtc", "PeerConnection::RegisterUmaObserver");
+  uma_observer_ = observer;
+
+  if (session_) {
+    session_->set_metrics_observer(uma_observer_);
+  }
+
+  // Send information about IPv4/IPv6 status.
+  if (uma_observer_ && port_allocator_) {
+    if (port_allocator_->flags() & cricket::PORTALLOCATOR_ENABLE_IPV6) {
+      uma_observer_->IncrementEnumCounter(
+          kEnumCounterAddressFamily, kPeerConnection_IPv6,
+          kPeerConnectionAddressFamilyCounter_Max);
+    } else {
+      uma_observer_->IncrementEnumCounter(
+          kEnumCounterAddressFamily, kPeerConnection_IPv4,
+          kPeerConnectionAddressFamilyCounter_Max);
+    }
+  }
+}
+
+const SessionDescriptionInterface* PeerConnection::local_description() const {
+  return session_->local_description();
+}
+
+const SessionDescriptionInterface* PeerConnection::remote_description() const {
+  return session_->remote_description();
+}
+
+void PeerConnection::Close() {
+  TRACE_EVENT0("webrtc", "PeerConnection::Close");
+  // Update stats here so that we have the most recent stats for tracks and
+  // streams before the channels are closed.
+  stats_->UpdateStats(kStatsOutputLevelStandard);
+
+  session_->Close();
+}
+
+void PeerConnection::OnSessionStateChange(WebRtcSession* /*session*/,
+                                          WebRtcSession::State state) {
+  switch (state) {
+    case WebRtcSession::STATE_INIT:
+      ChangeSignalingState(PeerConnectionInterface::kStable);
+      break;
+    case WebRtcSession::STATE_SENTOFFER:
+      ChangeSignalingState(PeerConnectionInterface::kHaveLocalOffer);
+      break;
+    case WebRtcSession::STATE_SENTPRANSWER:
+      ChangeSignalingState(PeerConnectionInterface::kHaveLocalPrAnswer);
+      break;
+    case WebRtcSession::STATE_RECEIVEDOFFER:
+      ChangeSignalingState(PeerConnectionInterface::kHaveRemoteOffer);
+      break;
+    case WebRtcSession::STATE_RECEIVEDPRANSWER:
+      ChangeSignalingState(PeerConnectionInterface::kHaveRemotePrAnswer);
+      break;
+    case WebRtcSession::STATE_INPROGRESS:
+      ChangeSignalingState(PeerConnectionInterface::kStable);
+      break;
+    case WebRtcSession::STATE_CLOSED:
+      ChangeSignalingState(PeerConnectionInterface::kClosed);
+      break;
+    default:
+      break;
+  }
+}
+
+void PeerConnection::OnMessage(rtc::Message* msg) {
+  switch (msg->message_id) {
+    case MSG_SET_SESSIONDESCRIPTION_SUCCESS: {
+      SetSessionDescriptionMsg* param =
+          static_cast<SetSessionDescriptionMsg*>(msg->pdata);
+      param->observer->OnSuccess();
+      delete param;
+      break;
+    }
+    case MSG_SET_SESSIONDESCRIPTION_FAILED: {
+      SetSessionDescriptionMsg* param =
+          static_cast<SetSessionDescriptionMsg*>(msg->pdata);
+      param->observer->OnFailure(param->error);
+      delete param;
+      break;
+    }
+    case MSG_CREATE_SESSIONDESCRIPTION_FAILED: {
+      CreateSessionDescriptionMsg* param =
+          static_cast<CreateSessionDescriptionMsg*>(msg->pdata);
+      param->observer->OnFailure(param->error);
+      delete param;
+      break;
+    }
+    case MSG_GETSTATS: {
+      GetStatsMsg* param = static_cast<GetStatsMsg*>(msg->pdata);
+      StatsReports reports;
+      stats_->GetStats(param->track, &reports);
+      param->observer->OnComplete(reports);
+      delete param;
+      break;
+    }
+    case MSG_FREE_DATACHANNELS: {
+      sctp_data_channels_to_free_.clear();
+      break;
+    }
+    default:
+      RTC_DCHECK(false && "Not implemented");
+      break;
+  }
+}
+
+void PeerConnection::CreateAudioReceiver(MediaStreamInterface* stream,
+                                         AudioTrackInterface* audio_track,
+                                         uint32_t ssrc) {
+  receivers_.push_back(RtpReceiverProxy::Create(
+      signaling_thread(),
+      new AudioRtpReceiver(audio_track, ssrc, session_.get())));
+}
+
+void PeerConnection::CreateVideoReceiver(MediaStreamInterface* stream,
+                                         VideoTrackInterface* video_track,
+                                         uint32_t ssrc) {
+  receivers_.push_back(RtpReceiverProxy::Create(
+      signaling_thread(),
+      new VideoRtpReceiver(video_track, ssrc, session_.get())));
+}
+
+// TODO(deadbeef): Keep RtpReceivers around even if track goes away in remote
+// description.
+void PeerConnection::DestroyAudioReceiver(MediaStreamInterface* stream,
+                                          AudioTrackInterface* audio_track) {
+  auto it = FindReceiverForTrack(audio_track);
+  if (it == receivers_.end()) {
+    LOG(LS_WARNING) << "RtpReceiver for track with id " << audio_track->id()
+                    << " doesn't exist.";
+  } else {
+    (*it)->Stop();
+    receivers_.erase(it);
+  }
+}
+
+void PeerConnection::DestroyVideoReceiver(MediaStreamInterface* stream,
+                                          VideoTrackInterface* video_track) {
+  auto it = FindReceiverForTrack(video_track);
+  if (it == receivers_.end()) {
+    LOG(LS_WARNING) << "RtpReceiver for track with id " << video_track->id()
+                    << " doesn't exist.";
+  } else {
+    (*it)->Stop();
+    receivers_.erase(it);
+  }
+}
+
+void PeerConnection::OnIceConnectionChange(
+    PeerConnectionInterface::IceConnectionState new_state) {
+  RTC_DCHECK(signaling_thread()->IsCurrent());
+  // After transitioning to "closed", ignore any additional states from
+  // WebRtcSession (such as "disconnected").
+  if (IsClosed()) {
+    return;
+  }
+  ice_connection_state_ = new_state;
+  observer_->OnIceConnectionChange(ice_connection_state_);
+}
+
+void PeerConnection::OnIceGatheringChange(
+    PeerConnectionInterface::IceGatheringState new_state) {
+  RTC_DCHECK(signaling_thread()->IsCurrent());
+  if (IsClosed()) {
+    return;
+  }
+  ice_gathering_state_ = new_state;
+  observer_->OnIceGatheringChange(ice_gathering_state_);
+}
+
+void PeerConnection::OnIceCandidate(const IceCandidateInterface* candidate) {
+  RTC_DCHECK(signaling_thread()->IsCurrent());
+  observer_->OnIceCandidate(candidate);
+}
+
+void PeerConnection::OnIceConnectionReceivingChange(bool receiving) {
+  RTC_DCHECK(signaling_thread()->IsCurrent());
+  observer_->OnIceConnectionReceivingChange(receiving);
+}
+
+void PeerConnection::ChangeSignalingState(
+    PeerConnectionInterface::SignalingState signaling_state) {
+  signaling_state_ = signaling_state;
+  if (signaling_state == kClosed) {
+    ice_connection_state_ = kIceConnectionClosed;
+    observer_->OnIceConnectionChange(ice_connection_state_);
+    if (ice_gathering_state_ != kIceGatheringComplete) {
+      ice_gathering_state_ = kIceGatheringComplete;
+      observer_->OnIceGatheringChange(ice_gathering_state_);
+    }
+  }
+  observer_->OnSignalingChange(signaling_state_);
+}
+
+void PeerConnection::OnAudioTrackAdded(AudioTrackInterface* track,
+                                       MediaStreamInterface* stream) {
+  auto sender = FindSenderForTrack(track);
+  if (sender != senders_.end()) {
+    // We already have a sender for this track, so just change the stream_id
+    // so that it's correct in the next call to CreateOffer.
+    (*sender)->set_stream_id(stream->label());
+    return;
+  }
+
+  // Normal case; we've never seen this track before.
+  rtc::scoped_refptr<RtpSenderInterface> new_sender = RtpSenderProxy::Create(
+      signaling_thread(),
+      new AudioRtpSender(track, stream->label(), session_.get(), stats_.get()));
+  senders_.push_back(new_sender);
+  // If the sender has already been configured in SDP, we call SetSsrc,
+  // which will connect the sender to the underlying transport. This can
+  // occur if a local session description that contains the ID of the sender
+  // is set before AddStream is called. It can also occur if the local
+  // session description is not changed and RemoveStream is called, and
+  // later AddStream is called again with the same stream.
+  const TrackInfo* track_info =
+      FindTrackInfo(local_audio_tracks_, stream->label(), track->id());
+  if (track_info) {
+    new_sender->SetSsrc(track_info->ssrc);
+  }
+}
+
+// TODO(deadbeef): Don't destroy RtpSenders here; they should be kept around
+// indefinitely once we have unified plan SDP.
+void PeerConnection::OnAudioTrackRemoved(AudioTrackInterface* track,
+                                         MediaStreamInterface* stream) {
+  auto sender = FindSenderForTrack(track);
+  if (sender == senders_.end()) {
+    LOG(LS_WARNING) << "RtpSender for track with id " << track->id()
+                    << " doesn't exist.";
+    return;
+  }
+  (*sender)->Stop();
+  senders_.erase(sender);
+}
+
+void PeerConnection::OnVideoTrackAdded(VideoTrackInterface* track,
+                                       MediaStreamInterface* stream) {
+  auto sender = FindSenderForTrack(track);
+  if (sender != senders_.end()) {
+    // We already have a sender for this track, so just change the stream_id
+    // so that it's correct in the next call to CreateOffer.
+    (*sender)->set_stream_id(stream->label());
+    return;
+  }
+
+  // Normal case; we've never seen this track before.
+  rtc::scoped_refptr<RtpSenderInterface> new_sender = RtpSenderProxy::Create(
+      signaling_thread(),
+      new VideoRtpSender(track, stream->label(), session_.get()));
+  senders_.push_back(new_sender);
+  const TrackInfo* track_info =
+      FindTrackInfo(local_video_tracks_, stream->label(), track->id());
+  if (track_info) {
+    new_sender->SetSsrc(track_info->ssrc);
+  }
+}
+
+void PeerConnection::OnVideoTrackRemoved(VideoTrackInterface* track,
+                                         MediaStreamInterface* stream) {
+  auto sender = FindSenderForTrack(track);
+  if (sender == senders_.end()) {
+    LOG(LS_WARNING) << "RtpSender for track with id " << track->id()
+                    << " doesn't exist.";
+    return;
+  }
+  (*sender)->Stop();
+  senders_.erase(sender);
+}
+
+void PeerConnection::PostSetSessionDescriptionFailure(
+    SetSessionDescriptionObserver* observer,
+    const std::string& error) {
+  SetSessionDescriptionMsg* msg = new SetSessionDescriptionMsg(observer);
+  msg->error = error;
+  signaling_thread()->Post(this, MSG_SET_SESSIONDESCRIPTION_FAILED, msg);
+}
+
+void PeerConnection::PostCreateSessionDescriptionFailure(
+    CreateSessionDescriptionObserver* observer,
+    const std::string& error) {
+  CreateSessionDescriptionMsg* msg = new CreateSessionDescriptionMsg(observer);
+  msg->error = error;
+  signaling_thread()->Post(this, MSG_CREATE_SESSIONDESCRIPTION_FAILED, msg);
+}
+
+bool PeerConnection::GetOptionsForOffer(
+    const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options,
+    cricket::MediaSessionOptions* session_options) {
+  if (!ConvertRtcOptionsForOffer(rtc_options, session_options)) {
+    return false;
+  }
+
+  AddSendStreams(session_options, senders_, rtp_data_channels_);
+  // Offer to receive audio/video if the constraint is not set and there are
+  // send streams, or we're currently receiving.
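+  // (Illustrative:) with offer_to_receive_audio left unset, no local audio
+  // sender, and no remote audio track, recv_audio ends up false; an explicit
+  // 0/1 has already been applied by ConvertRtcOptionsForOffer above.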
+  if (rtc_options.offer_to_receive_audio == RTCOfferAnswerOptions::kUndefined) {
+    session_options->recv_audio =
+        session_options->HasSendMediaStream(cricket::MEDIA_TYPE_AUDIO) ||
+        !remote_audio_tracks_.empty();
+  }
+  if (rtc_options.offer_to_receive_video == RTCOfferAnswerOptions::kUndefined) {
+    session_options->recv_video =
+        session_options->HasSendMediaStream(cricket::MEDIA_TYPE_VIDEO) ||
+        !remote_video_tracks_.empty();
+  }
+  session_options->bundle_enabled =
+      session_options->bundle_enabled &&
+      (session_options->has_audio() || session_options->has_video() ||
+       session_options->has_data());
+
+  if (session_->data_channel_type() == cricket::DCT_SCTP && HasDataChannels()) {
+    session_options->data_channel_type = cricket::DCT_SCTP;
+  }
+  return true;
+}
+
+bool PeerConnection::GetOptionsForAnswer(
+    const MediaConstraintsInterface* constraints,
+    cricket::MediaSessionOptions* session_options) {
+  session_options->recv_audio = false;
+  session_options->recv_video = false;
+  if (!ParseConstraintsForAnswer(constraints, session_options)) {
+    return false;
+  }
+
+  AddSendStreams(session_options, senders_, rtp_data_channels_);
+  session_options->bundle_enabled =
+      session_options->bundle_enabled &&
+      (session_options->has_audio() || session_options->has_video() ||
+       session_options->has_data());
+
+  // RTP data channels are handled in MediaSessionOptions::AddStream. SCTP
+  // streams are not signaled in the SDP, so they do not go through that path
+  // and must be handled here.
+  if (session_->data_channel_type() == cricket::DCT_SCTP) {
+    session_options->data_channel_type = cricket::DCT_SCTP;
+  }
+  return true;
+}
+
+void PeerConnection::RemoveTracks(cricket::MediaType media_type) {
+  UpdateLocalTracks(std::vector<cricket::StreamParams>(), media_type);
+  UpdateRemoteStreamsList(std::vector<cricket::StreamParams>(), false,
+                          media_type, nullptr);
+}
+
+void PeerConnection::UpdateRemoteStreamsList(
+    const cricket::StreamParamsVec& streams,
+    bool default_track_needed,
+    cricket::MediaType media_type,
+    StreamCollection* new_streams) {
+  TrackInfos* current_tracks = GetRemoteTracks(media_type);
+
+  // Find removed tracks, i.e. tracks where the track id or ssrc doesn't match
+  // the new StreamParams.
+  auto track_it = current_tracks->begin();
+  while (track_it != current_tracks->end()) {
+    const TrackInfo& info = *track_it;
+    const cricket::StreamParams* params =
+        cricket::GetStreamBySsrc(streams, info.ssrc);
+    bool track_exists = params && params->id == info.track_id;
+    // If this is a default track, and we still need it, don't remove it.
+    if ((info.stream_label == kDefaultStreamLabel && default_track_needed) ||
+        track_exists) {
+      ++track_it;
+    } else {
+      OnRemoteTrackRemoved(info.stream_label, info.track_id, media_type);
+      track_it = current_tracks->erase(track_it);
+    }
+  }
+
+  // Find new and active tracks.
+  for (const cricket::StreamParams& params : streams) {
+    // The sync_label is the MediaStream label and |params.id| is the
+    // track id.
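+    // (Illustrative:) a remote "a=ssrc:1234 msid:stream_a track_a" arrives
+    // here as params.sync_label == "stream_a" and params.id == "track_a".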
+    const std::string& stream_label = params.sync_label;
+    const std::string& track_id = params.id;
+    uint32_t ssrc = params.first_ssrc();
+
+    rtc::scoped_refptr<MediaStreamInterface> stream =
+        remote_streams_->find(stream_label);
+    if (!stream) {
+      // This is a new MediaStream. Create a new remote MediaStream.
+      stream = remote_stream_factory_->CreateMediaStream(stream_label);
+      remote_streams_->AddStream(stream);
+      new_streams->AddStream(stream);
+    }
+
+    const TrackInfo* track_info =
+        FindTrackInfo(*current_tracks, stream_label, track_id);
+    if (!track_info) {
+      current_tracks->push_back(TrackInfo(stream_label, track_id, ssrc));
+      OnRemoteTrackSeen(stream_label, track_id, ssrc, media_type);
+    }
+  }
+
+  // Add default track if necessary.
+  if (default_track_needed) {
+    rtc::scoped_refptr<MediaStreamInterface> default_stream =
+        remote_streams_->find(kDefaultStreamLabel);
+    if (!default_stream) {
+      // Create the new default MediaStream.
+      default_stream =
+          remote_stream_factory_->CreateMediaStream(kDefaultStreamLabel);
+      remote_streams_->AddStream(default_stream);
+      new_streams->AddStream(default_stream);
+    }
+    std::string default_track_id = (media_type == cricket::MEDIA_TYPE_AUDIO)
+                                       ? kDefaultAudioTrackLabel
+                                       : kDefaultVideoTrackLabel;
+    const TrackInfo* default_track_info =
+        FindTrackInfo(*current_tracks, kDefaultStreamLabel, default_track_id);
+    if (!default_track_info) {
+      current_tracks->push_back(
+          TrackInfo(kDefaultStreamLabel, default_track_id, 0));
+      OnRemoteTrackSeen(kDefaultStreamLabel, default_track_id, 0, media_type);
+    }
+  }
+}
+
+void PeerConnection::OnRemoteTrackSeen(const std::string& stream_label,
+                                       const std::string& track_id,
+                                       uint32_t ssrc,
+                                       cricket::MediaType media_type) {
+  MediaStreamInterface* stream = remote_streams_->find(stream_label);
+
+  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+    AudioTrackInterface* audio_track = remote_stream_factory_->AddAudioTrack(
+        ssrc, session_.get(), stream, track_id);
+    CreateAudioReceiver(stream, audio_track, ssrc);
+  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+    VideoTrackInterface* video_track =
+        remote_stream_factory_->AddVideoTrack(stream, track_id);
+    CreateVideoReceiver(stream, video_track, ssrc);
+  } else {
+    RTC_DCHECK(false && "Invalid media type");
+  }
+}
+
+void PeerConnection::OnRemoteTrackRemoved(const std::string& stream_label,
+                                          const std::string& track_id,
+                                          cricket::MediaType media_type) {
+  MediaStreamInterface* stream = remote_streams_->find(stream_label);
+
+  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+    rtc::scoped_refptr<AudioTrackInterface> audio_track =
+        stream->FindAudioTrack(track_id);
+    if (audio_track) {
+      audio_track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
+      stream->RemoveTrack(audio_track);
+      DestroyAudioReceiver(stream, audio_track);
+    }
+  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+    rtc::scoped_refptr<VideoTrackInterface> video_track =
+        stream->FindVideoTrack(track_id);
+    if (video_track) {
+      video_track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
+      stream->RemoveTrack(video_track);
+      DestroyVideoReceiver(stream, video_track);
+    }
+  } else {
+    RTC_DCHECK(false && "Invalid media type");
+  }
+}
+
+void PeerConnection::UpdateEndedRemoteMediaStreams() {
+  std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams_to_remove;
+  for (size_t i = 0; i < remote_streams_->count(); ++i) {
+    MediaStreamInterface* stream = remote_streams_->at(i);
+    if (stream->GetAudioTracks().empty() && stream->GetVideoTracks().empty()) {
+      streams_to_remove.push_back(stream);
+    }
+  }
+
+  for (const auto& stream : streams_to_remove) {
+    remote_streams_->RemoveStream(stream);
+    observer_->OnRemoveStream(stream);
+  }
+}
+
+void PeerConnection::EndRemoteTracks(cricket::MediaType media_type) {
+  TrackInfos* current_tracks = GetRemoteTracks(media_type);
+  for (TrackInfos::iterator track_it = current_tracks->begin();
+       track_it != current_tracks->end(); ++track_it) {
+    const TrackInfo& info = *track_it;
+    MediaStreamInterface* stream = remote_streams_->find(info.stream_label);
+    if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+      AudioTrackInterface* track = stream->FindAudioTrack(info.track_id);
+      // There's no guarantee the track is still available, e.g. the track may
+      // have been removed from the stream by JavaScript.
+      if (track) {
+        track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
+      }
+    }
+    if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+      VideoTrackInterface* track = stream->FindVideoTrack(info.track_id);
+      // There's no guarantee the track is still available, e.g. the track may
+      // have been removed from the stream by JavaScript.
+      if (track) {
+        track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
+      }
+    }
+  }
+}
+
+void PeerConnection::UpdateLocalTracks(
+    const std::vector<cricket::StreamParams>& streams,
+    cricket::MediaType media_type) {
+  TrackInfos* current_tracks = GetLocalTracks(media_type);
+
+  // Find removed tracks, i.e. tracks where the track id, stream label, or
+  // ssrc doesn't match the new StreamParams.
+  TrackInfos::iterator track_it = current_tracks->begin();
+  while (track_it != current_tracks->end()) {
+    const TrackInfo& info = *track_it;
+    const cricket::StreamParams* params =
+        cricket::GetStreamBySsrc(streams, info.ssrc);
+    if (!params || params->id != info.track_id ||
+        params->sync_label != info.stream_label) {
+      OnLocalTrackRemoved(info.stream_label, info.track_id, info.ssrc,
+                          media_type);
+      track_it = current_tracks->erase(track_it);
+    } else {
+      ++track_it;
+    }
+  }
+
+  // Find new and active tracks.
+  for (const cricket::StreamParams& params : streams) {
+    // The sync_label is the MediaStream label and |params.id| is the
+    // track id.
+    const std::string& stream_label = params.sync_label;
+    const std::string& track_id = params.id;
+    uint32_t ssrc = params.first_ssrc();
+    const TrackInfo* track_info =
+        FindTrackInfo(*current_tracks, stream_label, track_id);
+    if (!track_info) {
+      current_tracks->push_back(TrackInfo(stream_label, track_id, ssrc));
+      OnLocalTrackSeen(stream_label, track_id, params.first_ssrc(), media_type);
+    }
+  }
+}
+
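+// (Sketch:) if the local description gains "a=ssrc:5678 msid:stream_a
+// track_a", the sender with id "track_a" is looked up below and bound to
+// SSRC 5678, which connects it to the underlying media channel.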
+void PeerConnection::OnLocalTrackSeen(const std::string& stream_label,
+                                      const std::string& track_id,
+                                      uint32_t ssrc,
+                                      cricket::MediaType media_type) {
+  RtpSenderInterface* sender = FindSenderById(track_id);
+  if (!sender) {
+    LOG(LS_WARNING) << "An unknown RtpSender with id " << track_id
+                    << " has been configured in the local description.";
+    return;
+  }
+
+  if (sender->media_type() != media_type) {
+    LOG(LS_WARNING) << "An RtpSender has been configured in the local"
+                    << " description with an unexpected media type.";
+    return;
+  }
+
+  sender->set_stream_id(stream_label);
+  sender->SetSsrc(ssrc);
+}
+
+void PeerConnection::OnLocalTrackRemoved(const std::string& stream_label,
+                                         const std::string& track_id,
+                                         uint32_t ssrc,
+                                         cricket::MediaType media_type) {
+  RtpSenderInterface* sender = FindSenderById(track_id);
+  if (!sender) {
+    // This is the normal case, i.e. RemoveStream has been called and the
+    // SessionDescription has been renegotiated.
+    return;
+  }
+
+  // A sender has been removed from the SessionDescription but it's still
+  // associated with the PeerConnection. This only occurs if the SDP doesn't
+  // match the calls to CreateSender, AddStream, and RemoveStream.
+  if (sender->media_type() != media_type) {
+    LOG(LS_WARNING) << "An RtpSender has been configured in the local"
+                    << " description with an unexpected media type.";
+    return;
+  }
+
+  sender->SetSsrc(0);
+}
+
+void PeerConnection::UpdateLocalRtpDataChannels(
+    const cricket::StreamParamsVec& streams) {
+  std::vector<std::string> existing_channels;
+
+  // Find new and active data channels.
+  for (const cricket::StreamParams& params : streams) {
+    // |params.sync_label| is actually the data channel label. The reason is
+    // that we use the same naming of data channels as we do for MediaStreams
+    // and Tracks. For MediaStreams, the sync_label is the MediaStream label
+    // and the track label is the same as |streamid|.
+    const std::string& channel_label = params.sync_label;
+    auto data_channel_it = rtp_data_channels_.find(channel_label);
+    if (!VERIFY(data_channel_it != rtp_data_channels_.end())) {
+      continue;
+    }
+    // Set the SSRC the data channel should use for sending.
+    data_channel_it->second->SetSendSsrc(params.first_ssrc());
+    existing_channels.push_back(data_channel_it->first);
+  }
+
+  UpdateClosingRtpDataChannels(existing_channels, true);
+}
+
+void PeerConnection::UpdateRemoteRtpDataChannels(
+    const cricket::StreamParamsVec& streams) {
+  std::vector<std::string> existing_channels;
+
+  // Find new and active data channels.
+  for (const cricket::StreamParams& params : streams) {
+    // The data channel label is either the mslabel or, if the mslabel does
+    // not exist, the SSRC. E.g. a=ssrc:444330170 mslabel:test1.
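+    // (Illustrative:) with that line the label becomes "test1"; with no
+    // mslabel attribute the label falls back to "444330170".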
+    std::string label = params.sync_label.empty()
+                            ? rtc::ToString(params.first_ssrc())
+                            : params.sync_label;
+    auto data_channel_it = rtp_data_channels_.find(label);
+    if (data_channel_it == rtp_data_channels_.end()) {
+      // This is a new data channel.
+      CreateRemoteRtpDataChannel(label, params.first_ssrc());
+    } else {
+      data_channel_it->second->SetReceiveSsrc(params.first_ssrc());
+    }
+    existing_channels.push_back(label);
+  }
+
+  UpdateClosingRtpDataChannels(existing_channels, false);
+}
+
+void PeerConnection::UpdateClosingRtpDataChannels(
+    const std::vector<std::string>& active_channels,
+    bool is_local_update) {
+  auto it = rtp_data_channels_.begin();
+  while (it != rtp_data_channels_.end()) {
+    DataChannel* data_channel = it->second;
+    if (std::find(active_channels.begin(), active_channels.end(),
+                  data_channel->label()) != active_channels.end()) {
+      ++it;
+      continue;
+    }
+
+    if (is_local_update) {
+      data_channel->SetSendSsrc(0);
+    } else {
+      data_channel->RemotePeerRequestClose();
+    }
+
+    if (data_channel->state() == DataChannel::kClosed) {
+      it = rtp_data_channels_.erase(it);
+    } else {
+      ++it;
+    }
+  }
+}
+
+void PeerConnection::CreateRemoteRtpDataChannel(const std::string& label,
+                                                uint32_t remote_ssrc) {
+  rtc::scoped_refptr<DataChannel> channel(
+      InternalCreateDataChannel(label, nullptr));
+  if (!channel.get()) {
+    LOG(LS_WARNING) << "Remote peer requested a DataChannel but"
+                    << "CreateDataChannel failed.";
+    return;
+  }
+  channel->SetReceiveSsrc(remote_ssrc);
+  observer_->OnDataChannel(
+      DataChannelProxy::Create(signaling_thread(), channel));
+}
+
+rtc::scoped_refptr<DataChannel> PeerConnection::InternalCreateDataChannel(
+    const std::string& label,
+    const InternalDataChannelInit* config) {
+  if (IsClosed()) {
+    return nullptr;
+  }
+  if (session_->data_channel_type() == cricket::DCT_NONE) {
+    LOG(LS_ERROR)
+        << "InternalCreateDataChannel: Data is not supported in this call.";
+    return nullptr;
+  }
+  InternalDataChannelInit new_config =
+      config ? (*config) : InternalDataChannelInit();
+  if (session_->data_channel_type() == cricket::DCT_SCTP) {
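+    // A sketch of the id rule (see SctpSidAllocator): ids are partitioned by
+    // DTLS role so the two endpoints never pick the same id (one side uses
+    // even ids, the other odd). If the role isn't known yet, the channel
+    // keeps id -1 and is assigned one later in AllocateSctpSids().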
+    if (new_config.id < 0) {
+      rtc::SSLRole role;
+      if ((session_->GetSslRole(session_->data_channel(), &role)) &&
+          !sid_allocator_.AllocateSid(role, &new_config.id)) {
+        LOG(LS_ERROR) << "No id can be allocated for the SCTP data channel.";
+        return nullptr;
+      }
+    } else if (!sid_allocator_.ReserveSid(new_config.id)) {
+      LOG(LS_ERROR) << "Failed to create a SCTP data channel "
+                    << "because the id is already in use or out of range.";
+      return nullptr;
+    }
+  }
+
+  rtc::scoped_refptr<DataChannel> channel(DataChannel::Create(
+      session_.get(), session_->data_channel_type(), label, new_config));
+  if (!channel) {
+    sid_allocator_.ReleaseSid(new_config.id);
+    return nullptr;
+  }
+
+  if (channel->data_channel_type() == cricket::DCT_RTP) {
+    if (rtp_data_channels_.find(channel->label()) != rtp_data_channels_.end()) {
+      LOG(LS_ERROR) << "DataChannel with label " << channel->label()
+                    << " already exists.";
+      return nullptr;
+    }
+    rtp_data_channels_[channel->label()] = channel;
+  } else {
+    RTC_DCHECK(channel->data_channel_type() == cricket::DCT_SCTP);
+    sctp_data_channels_.push_back(channel);
+    channel->SignalClosed.connect(this,
+                                  &PeerConnection::OnSctpDataChannelClosed);
+  }
+
+  return channel;
+}
+
+bool PeerConnection::HasDataChannels() const {
+  return !rtp_data_channels_.empty() || !sctp_data_channels_.empty();
+}
+
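+// (Assumed call site:) invoked by WebRtcSession once the DTLS handshake has
+// settled the SSL role, so channels created earlier with id -1 receive their
+// final id here.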
+void PeerConnection::AllocateSctpSids(rtc::SSLRole role) {
+  for (const auto& channel : sctp_data_channels_) {
+    if (channel->id() < 0) {
+      int sid;
+      if (!sid_allocator_.AllocateSid(role, &sid)) {
+        LOG(LS_ERROR) << "Failed to allocate SCTP sid.";
+        continue;
+      }
+      channel->SetSctpSid(sid);
+    }
+  }
+}
+
+void PeerConnection::OnSctpDataChannelClosed(DataChannel* channel) {
+  RTC_DCHECK(signaling_thread()->IsCurrent());
+  for (auto it = sctp_data_channels_.begin(); it != sctp_data_channels_.end();
+       ++it) {
+    if (it->get() == channel) {
+      if (channel->id() >= 0) {
+        sid_allocator_.ReleaseSid(channel->id());
+      }
+      // Since this method is triggered by a signal from the DataChannel,
+      // we can't free it directly here; we need to free it asynchronously.
+      sctp_data_channels_to_free_.push_back(*it);
+      sctp_data_channels_.erase(it);
+      signaling_thread()->Post(this, MSG_FREE_DATACHANNELS, nullptr);
+      return;
+    }
+  }
+}
+
+void PeerConnection::OnVoiceChannelDestroyed() {
+  EndRemoteTracks(cricket::MEDIA_TYPE_AUDIO);
+}
+
+void PeerConnection::OnVideoChannelDestroyed() {
+  EndRemoteTracks(cricket::MEDIA_TYPE_VIDEO);
+}
+
+void PeerConnection::OnDataChannelCreated() {
+  for (const auto& channel : sctp_data_channels_) {
+    channel->OnTransportChannelCreated();
+  }
+}
+
+void PeerConnection::OnDataChannelDestroyed() {
+  // Use a temporary copy of the RTP/SCTP DataChannel list because the
+  // DataChannel may callback to us and try to modify the list.
+  std::map<std::string, rtc::scoped_refptr<DataChannel>> temp_rtp_dcs;
+  temp_rtp_dcs.swap(rtp_data_channels_);
+  for (const auto& kv : temp_rtp_dcs) {
+    kv.second->OnTransportChannelDestroyed();
+  }
+
+  std::vector<rtc::scoped_refptr<DataChannel>> temp_sctp_dcs;
+  temp_sctp_dcs.swap(sctp_data_channels_);
+  for (const auto& channel : temp_sctp_dcs) {
+    channel->OnTransportChannelDestroyed();
+  }
+}
+
+void PeerConnection::OnDataChannelOpenMessage(
+    const std::string& label,
+    const InternalDataChannelInit& config) {
+  rtc::scoped_refptr<DataChannel> channel(
+      InternalCreateDataChannel(label, &config));
+  if (!channel.get()) {
+    LOG(LS_ERROR) << "Failed to create DataChannel from the OPEN message.";
+    return;
+  }
+
+  observer_->OnDataChannel(
+      DataChannelProxy::Create(signaling_thread(), channel));
+}
+
+RtpSenderInterface* PeerConnection::FindSenderById(const std::string& id) {
+  auto it =
+      std::find_if(senders_.begin(), senders_.end(),
+                   [id](const rtc::scoped_refptr<RtpSenderInterface>& sender) {
+                     return sender->id() == id;
+                   });
+  return it != senders_.end() ? it->get() : nullptr;
+}
+
+std::vector<rtc::scoped_refptr<RtpSenderInterface>>::iterator
+PeerConnection::FindSenderForTrack(MediaStreamTrackInterface* track) {
+  return std::find_if(
+      senders_.begin(), senders_.end(),
+      [track](const rtc::scoped_refptr<RtpSenderInterface>& sender) {
+        return sender->track() == track;
+      });
+}
+
+std::vector<rtc::scoped_refptr<RtpReceiverInterface>>::iterator
+PeerConnection::FindReceiverForTrack(MediaStreamTrackInterface* track) {
+  return std::find_if(
+      receivers_.begin(), receivers_.end(),
+      [track](const rtc::scoped_refptr<RtpReceiverInterface>& receiver) {
+        return receiver->track() == track;
+      });
+}
+
+PeerConnection::TrackInfos* PeerConnection::GetRemoteTracks(
+    cricket::MediaType media_type) {
+  RTC_DCHECK(media_type == cricket::MEDIA_TYPE_AUDIO ||
+             media_type == cricket::MEDIA_TYPE_VIDEO);
+  return (media_type == cricket::MEDIA_TYPE_AUDIO) ? &remote_audio_tracks_
+                                                   : &remote_video_tracks_;
+}
+
+PeerConnection::TrackInfos* PeerConnection::GetLocalTracks(
+    cricket::MediaType media_type) {
+  RTC_DCHECK(media_type == cricket::MEDIA_TYPE_AUDIO ||
+             media_type == cricket::MEDIA_TYPE_VIDEO);
+  return (media_type == cricket::MEDIA_TYPE_AUDIO) ? &local_audio_tracks_
+                                                   : &local_video_tracks_;
+}
+
+const PeerConnection::TrackInfo* PeerConnection::FindTrackInfo(
+    const PeerConnection::TrackInfos& infos,
+    const std::string& stream_label,
+    const std::string& track_id) const {
+  for (const TrackInfo& track_info : infos) {
+    if (track_info.stream_label == stream_label &&
+        track_info.track_id == track_id) {
+      return &track_info;
+    }
+  }
+  return nullptr;
+}
+
+DataChannel* PeerConnection::FindDataChannelBySid(int sid) const {
+  for (const auto& channel : sctp_data_channels_) {
+    if (channel->id() == sid) {
+      return channel;
+    }
+  }
+  return nullptr;
+}
+
+}  // namespace webrtc
diff --git a/webrtc/api/peerconnection.h b/webrtc/api/peerconnection.h
new file mode 100644
index 0000000..c7de19d
--- /dev/null
+++ b/webrtc/api/peerconnection.h
@@ -0,0 +1,399 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_PEERCONNECTION_H_
+#define WEBRTC_API_PEERCONNECTION_H_
+
+#include <string>
+
+#include "webrtc/api/dtlsidentitystore.h"
+#include "webrtc/api/peerconnectionfactory.h"
+#include "webrtc/api/peerconnectioninterface.h"
+#include "webrtc/api/rtpreceiverinterface.h"
+#include "webrtc/api/rtpsenderinterface.h"
+#include "webrtc/api/statscollector.h"
+#include "webrtc/api/streamcollection.h"
+#include "webrtc/api/webrtcsession.h"
+#include "webrtc/base/scoped_ptr.h"
+
+namespace webrtc {
+
+class MediaStreamObserver;
+class RemoteMediaStreamFactory;
+
+// Populates |session_options| from |rtc_options|, and returns true if options
+// are valid.
+bool ConvertRtcOptionsForOffer(
+    const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options,
+    cricket::MediaSessionOptions* session_options);
+
+// Populates |session_options| from |constraints|, and returns true if all
+// mandatory constraints are satisfied.
+bool ParseConstraintsForAnswer(const MediaConstraintsInterface* constraints,
+                               cricket::MediaSessionOptions* session_options);
+
+// Parses the URLs for each server in |servers| to build |stun_servers| and
+// |turn_servers|.
+bool ParseIceServers(const PeerConnectionInterface::IceServers& servers,
+                     cricket::ServerAddresses* stun_servers,
+                     std::vector<cricket::RelayServerConfig>* turn_servers);
+
+// PeerConnection implements the PeerConnectionInterface interface.
+// It uses WebRtcSession to implement the PeerConnection functionality.
+class PeerConnection : public PeerConnectionInterface,
+                       public IceObserver,
+                       public rtc::MessageHandler,
+                       public sigslot::has_slots<> {
+ public:
+  explicit PeerConnection(PeerConnectionFactory* factory);
+
+  bool Initialize(
+      const PeerConnectionInterface::RTCConfiguration& configuration,
+      const MediaConstraintsInterface* constraints,
+      rtc::scoped_ptr<cricket::PortAllocator> allocator,
+      rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+      PeerConnectionObserver* observer);
+
+  rtc::scoped_refptr<StreamCollectionInterface> local_streams() override;
+  rtc::scoped_refptr<StreamCollectionInterface> remote_streams() override;
+  bool AddStream(MediaStreamInterface* local_stream) override;
+  void RemoveStream(MediaStreamInterface* local_stream) override;
+
+  rtc::scoped_refptr<RtpSenderInterface> AddTrack(
+      MediaStreamTrackInterface* track,
+      std::vector<MediaStreamInterface*> streams) override;
+  bool RemoveTrack(RtpSenderInterface* sender) override;
+
+  virtual WebRtcSession* session() { return session_.get(); }
+
+  rtc::scoped_refptr<DtmfSenderInterface> CreateDtmfSender(
+      AudioTrackInterface* track) override;
+
+  rtc::scoped_refptr<RtpSenderInterface> CreateSender(
+      const std::string& kind,
+      const std::string& stream_id) override;
+
+  std::vector<rtc::scoped_refptr<RtpSenderInterface>> GetSenders()
+      const override;
+  std::vector<rtc::scoped_refptr<RtpReceiverInterface>> GetReceivers()
+      const override;
+
+  rtc::scoped_refptr<DataChannelInterface> CreateDataChannel(
+      const std::string& label,
+      const DataChannelInit* config) override;
+  bool GetStats(StatsObserver* observer,
+                webrtc::MediaStreamTrackInterface* track,
+                StatsOutputLevel level) override;
+
+  SignalingState signaling_state() override;
+
+  // TODO(bemasc): Remove ice_state() when callers are removed.
+  IceState ice_state() override;
+  IceConnectionState ice_connection_state() override;
+  IceGatheringState ice_gathering_state() override;
+
+  const SessionDescriptionInterface* local_description() const override;
+  const SessionDescriptionInterface* remote_description() const override;
+
+  // JSEP01
+  void CreateOffer(CreateSessionDescriptionObserver* observer,
+                   const MediaConstraintsInterface* constraints) override;
+  void CreateOffer(CreateSessionDescriptionObserver* observer,
+                   const RTCOfferAnswerOptions& options) override;
+  void CreateAnswer(CreateSessionDescriptionObserver* observer,
+                    const MediaConstraintsInterface* constraints) override;
+  void SetLocalDescription(SetSessionDescriptionObserver* observer,
+                           SessionDescriptionInterface* desc) override;
+  void SetRemoteDescription(SetSessionDescriptionObserver* observer,
+                            SessionDescriptionInterface* desc) override;
+  bool SetConfiguration(
+      const PeerConnectionInterface::RTCConfiguration& config) override;
+  bool AddIceCandidate(const IceCandidateInterface* candidate) override;
+
+  void RegisterUMAObserver(UMAObserver* observer) override;
+
+  void Close() override;
+
+  // Virtual for unit tests.
+  virtual const std::vector<rtc::scoped_refptr<DataChannel>>&
+  sctp_data_channels() const {
+    return sctp_data_channels_;
+  }
+
+ protected:
+  ~PeerConnection() override;
+
+ private:
+  struct TrackInfo {
+    TrackInfo() : ssrc(0) {}
+    TrackInfo(const std::string& stream_label,
+              const std::string& track_id,
+              uint32_t ssrc)
+        : stream_label(stream_label), track_id(track_id), ssrc(ssrc) {}
+    bool operator==(const TrackInfo& other) const {
+      return this->stream_label == other.stream_label &&
+             this->track_id == other.track_id && this->ssrc == other.ssrc;
+    }
+    std::string stream_label;
+    std::string track_id;
+    uint32_t ssrc;
+  };
+  typedef std::vector<TrackInfo> TrackInfos;
+
+  // Implements MessageHandler.
+  void OnMessage(rtc::Message* msg) override;
+
+  void CreateAudioReceiver(MediaStreamInterface* stream,
+                           AudioTrackInterface* audio_track,
+                           uint32_t ssrc);
+  void CreateVideoReceiver(MediaStreamInterface* stream,
+                           VideoTrackInterface* video_track,
+                           uint32_t ssrc);
+  void DestroyAudioReceiver(MediaStreamInterface* stream,
+                            AudioTrackInterface* audio_track);
+  void DestroyVideoReceiver(MediaStreamInterface* stream,
+                            VideoTrackInterface* video_track);
+  void DestroyAudioSender(MediaStreamInterface* stream,
+                          AudioTrackInterface* audio_track,
+                          uint32_t ssrc);
+  void DestroyVideoSender(MediaStreamInterface* stream,
+                          VideoTrackInterface* video_track);
+
+  // Implements IceObserver
+  void OnIceConnectionChange(IceConnectionState new_state) override;
+  void OnIceGatheringChange(IceGatheringState new_state) override;
+  void OnIceCandidate(const IceCandidateInterface* candidate) override;
+  void OnIceConnectionReceivingChange(bool receiving) override;
+
+  // Signals from WebRtcSession.
+  void OnSessionStateChange(WebRtcSession* session, WebRtcSession::State state);
+  void ChangeSignalingState(SignalingState signaling_state);
+
+  // Signals from MediaStreamObserver.
+  void OnAudioTrackAdded(AudioTrackInterface* track,
+                         MediaStreamInterface* stream);
+  void OnAudioTrackRemoved(AudioTrackInterface* track,
+                           MediaStreamInterface* stream);
+  void OnVideoTrackAdded(VideoTrackInterface* track,
+                         MediaStreamInterface* stream);
+  void OnVideoTrackRemoved(VideoTrackInterface* track,
+                           MediaStreamInterface* stream);
+
+  rtc::Thread* signaling_thread() const {
+    return factory_->signaling_thread();
+  }
+
+  void PostSetSessionDescriptionFailure(SetSessionDescriptionObserver* observer,
+                                        const std::string& error);
+  void PostCreateSessionDescriptionFailure(
+      CreateSessionDescriptionObserver* observer,
+      const std::string& error);
+
+  bool IsClosed() const {
+    return signaling_state_ == PeerConnectionInterface::kClosed;
+  }
+
+  // Returns a MediaSessionOptions struct with options decided by |options|,
+  // the local MediaStreams and DataChannels.
+  virtual bool GetOptionsForOffer(
+      const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options,
+      cricket::MediaSessionOptions* session_options);
+
+  // Returns a MediaSessionOptions struct with options decided by
+  // |constraints|, the local MediaStreams and DataChannels.
+  virtual bool GetOptionsForAnswer(
+      const MediaConstraintsInterface* constraints,
+      cricket::MediaSessionOptions* session_options);
+
+  // Remove all local and remote tracks of type |media_type|.
+  // Called when a media type is rejected (m-line set to port 0).
+  void RemoveTracks(cricket::MediaType media_type);
+
+  // Makes sure a MediaStreamTrack is created for each StreamParam in |streams|,
+  // and existing MediaStreamTracks are removed if there is no corresponding
+  // StreamParam. If |default_track_needed| is true, a default MediaStreamTrack
+  // is created if it doesn't exist; if false, it's removed if it exists.
+  // |media_type| is the type of the |streams| and can be either audio or video.
+  // If a new MediaStream is created it is added to |new_streams|.
+  void UpdateRemoteStreamsList(
+      const std::vector<cricket::StreamParams>& streams,
+      bool default_track_needed,
+      cricket::MediaType media_type,
+      StreamCollection* new_streams);
+
+  // Triggered when a remote track has been seen for the first time in a remote
+  // session description. It creates a remote MediaStreamTrackInterface
+  // implementation and triggers CreateAudioReceiver or CreateVideoReceiver.
+  void OnRemoteTrackSeen(const std::string& stream_label,
+                         const std::string& track_id,
+                         uint32_t ssrc,
+                         cricket::MediaType media_type);
+
+  // Triggered when a remote track has been removed from a remote session
+  // description. It removes the remote track with id |track_id| from a remote
+  // MediaStream and triggers DestroyAudioReceiver or DestroyVideoReceiver.
+  void OnRemoteTrackRemoved(const std::string& stream_label,
+                            const std::string& track_id,
+                            cricket::MediaType media_type);
+
+  // Finds remote MediaStreams without any tracks and removes them from
+  // |remote_streams_| and notifies the observer that the MediaStreams no longer
+  // exist.
+  void UpdateEndedRemoteMediaStreams();
+
+  // Set the MediaStreamTrackInterface::TrackState to |kEnded| on all remote
+  // tracks of type |media_type|.
+  void EndRemoteTracks(cricket::MediaType media_type);
+
+  // Loops through the vector of |streams| and finds added and removed
+  // StreamParams since last time this method was called.
+  // For each new or removed StreamParam, OnLocalTrackSeen or
+  // OnLocalTrackRemoved is invoked.
+  void UpdateLocalTracks(const std::vector<cricket::StreamParams>& streams,
+                         cricket::MediaType media_type);
+
+  // Triggered when a local track has been seen for the first time in a local
+  // session description.
+  // This method triggers CreateAudioSender or CreateVideoSender if the rtp
+  // streams in the local SessionDescription can be mapped to a MediaStreamTrack
+  // in a MediaStream in |local_streams_|.
+  void OnLocalTrackSeen(const std::string& stream_label,
+                        const std::string& track_id,
+                        uint32_t ssrc,
+                        cricket::MediaType media_type);
+
+  // Triggered when a local track has been removed from a local session
+  // description.
+  // This method triggers DestroyAudioSender or DestroyVideoSender if a stream
+  // has been removed from the local SessionDescription and the stream can be
+  // mapped to a MediaStreamTrack in a MediaStream in |local_streams_|.
+  void OnLocalTrackRemoved(const std::string& stream_label,
+                           const std::string& track_id,
+                           uint32_t ssrc,
+                           cricket::MediaType media_type);
+
+  void UpdateLocalRtpDataChannels(const cricket::StreamParamsVec& streams);
+  void UpdateRemoteRtpDataChannels(const cricket::StreamParamsVec& streams);
+  void UpdateClosingRtpDataChannels(
+      const std::vector<std::string>& active_channels,
+      bool is_local_update);
+  void CreateRemoteRtpDataChannel(const std::string& label,
+                                  uint32_t remote_ssrc);
+
+  // Creates channel and adds it to the collection of DataChannels that will
+  // be offered in a SessionDescription.
+  rtc::scoped_refptr<DataChannel> InternalCreateDataChannel(
+      const std::string& label,
+      const InternalDataChannelInit* config);
+
+  // Checks if any data channel has been added.
+  bool HasDataChannels() const;
+
+  void AllocateSctpSids(rtc::SSLRole role);
+  void OnSctpDataChannelClosed(DataChannel* channel);
+
+  // Notifications from WebRtcSession relating to BaseChannels.
+  void OnVoiceChannelDestroyed();
+  void OnVideoChannelDestroyed();
+  void OnDataChannelCreated();
+  void OnDataChannelDestroyed();
+  // Called when the cricket::DataChannel receives a message indicating that a
+  // webrtc::DataChannel should be opened.
+  void OnDataChannelOpenMessage(const std::string& label,
+                                const InternalDataChannelInit& config);
+
+  RtpSenderInterface* FindSenderById(const std::string& id);
+
+  std::vector<rtc::scoped_refptr<RtpSenderInterface>>::iterator
+  FindSenderForTrack(MediaStreamTrackInterface* track);
+  std::vector<rtc::scoped_refptr<RtpReceiverInterface>>::iterator
+  FindReceiverForTrack(MediaStreamTrackInterface* track);
+
+  TrackInfos* GetRemoteTracks(cricket::MediaType media_type);
+  TrackInfos* GetLocalTracks(cricket::MediaType media_type);
+  const TrackInfo* FindTrackInfo(const TrackInfos& infos,
+                                 const std::string& stream_label,
+                                 const std::string& track_id) const;
+
+  // Returns the specified SCTP DataChannel in sctp_data_channels_,
+  // or nullptr if not found.
+  DataChannel* FindDataChannelBySid(int sid) const;
+
+  // Storing the factory as a scoped reference pointer ensures that the memory
+  // in the PeerConnectionFactoryImpl remains available as long as the
+  // PeerConnection is running. It is passed to PeerConnection as a raw pointer.
+  // However, since the reference counting is done in the
+  // PeerConnectionFactoryInterface, all instances created using the raw
+  // pointer
+  // will refer to the same reference count.
+  rtc::scoped_refptr<PeerConnectionFactory> factory_;
+  PeerConnectionObserver* observer_;
+  UMAObserver* uma_observer_;
+  SignalingState signaling_state_;
+  // TODO(bemasc): Remove ice_state_.
+  IceState ice_state_;
+  IceConnectionState ice_connection_state_;
+  IceGatheringState ice_gathering_state_;
+
+  rtc::scoped_ptr<cricket::PortAllocator> port_allocator_;
+  rtc::scoped_ptr<MediaControllerInterface> media_controller_;
+
+  // Streams added via AddStream.
+  rtc::scoped_refptr<StreamCollection> local_streams_;
+  // Streams created as a result of SetRemoteDescription.
+  rtc::scoped_refptr<StreamCollection> remote_streams_;
+
+  std::vector<rtc::scoped_ptr<MediaStreamObserver>> stream_observers_;
+
+  // These lists store track info seen in local/remote descriptions.
+  TrackInfos remote_audio_tracks_;
+  TrackInfos remote_video_tracks_;
+  TrackInfos local_audio_tracks_;
+  TrackInfos local_video_tracks_;
+
+  SctpSidAllocator sid_allocator_;
+  // label -> DataChannel
+  std::map<std::string, rtc::scoped_refptr<DataChannel>> rtp_data_channels_;
+  std::vector<rtc::scoped_refptr<DataChannel>> sctp_data_channels_;
+  std::vector<rtc::scoped_refptr<DataChannel>> sctp_data_channels_to_free_;
+
+  bool remote_peer_supports_msid_ = false;
+  rtc::scoped_ptr<RemoteMediaStreamFactory> remote_stream_factory_;
+
+  std::vector<rtc::scoped_refptr<RtpSenderInterface>> senders_;
+  std::vector<rtc::scoped_refptr<RtpReceiverInterface>> receivers_;
+
+  // The session_ scoped_ptr is declared at the bottom of PeerConnection
+  // because its destruction fires signals (such as VoiceChannelDestroyed)
+  // which will trigger some final actions in PeerConnection...
+  rtc::scoped_ptr<WebRtcSession> session_;
+  // ... But stats_ depends on session_ so it should be destroyed even earlier.
+  rtc::scoped_ptr<StatsCollector> stats_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_PEERCONNECTION_H_
diff --git a/webrtc/api/peerconnection_unittest.cc b/webrtc/api/peerconnection_unittest.cc
new file mode 100644
index 0000000..c1e7e3d
--- /dev/null
+++ b/webrtc/api/peerconnection_unittest.cc
@@ -0,0 +1,2029 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <stdio.h>
+
+#include <algorithm>
+#include <list>
+#include <map>
+#include <utility>
+#include <vector>
+
+#include "talk/session/media/mediasession.h"
+#include "webrtc/api/dtmfsender.h"
+#include "webrtc/api/fakemetricsobserver.h"
+#include "webrtc/api/localaudiosource.h"
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/api/peerconnection.h"
+#include "webrtc/api/peerconnectionfactory.h"
+#include "webrtc/api/peerconnectioninterface.h"
+#include "webrtc/api/test/fakeaudiocapturemodule.h"
+#include "webrtc/api/test/fakeconstraints.h"
+#include "webrtc/api/test/fakedtlsidentitystore.h"
+#include "webrtc/api/test/fakeperiodicvideocapturer.h"
+#include "webrtc/api/test/fakevideotrackrenderer.h"
+#include "webrtc/api/test/mockpeerconnectionobservers.h"
+#include "webrtc/api/videosourceinterface.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/physicalsocketserver.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/ssladapter.h"
+#include "webrtc/base/sslstreamadapter.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/virtualsocketserver.h"
+#include "webrtc/media/webrtc/fakewebrtcvideoengine.h"
+#include "webrtc/p2p/base/constants.h"
+#include "webrtc/p2p/base/sessiondescription.h"
+#include "webrtc/p2p/client/fakeportallocator.h"
+
+#define MAYBE_SKIP_TEST(feature)                    \
+  if (!(feature())) {                               \
+    LOG(LS_INFO) << "Feature disabled... skipping"; \
+    return;                                         \
+  }
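+// Typical use (illustrative):
+//   MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);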
+
+using cricket::ContentInfo;
+using cricket::FakeWebRtcVideoDecoder;
+using cricket::FakeWebRtcVideoDecoderFactory;
+using cricket::FakeWebRtcVideoEncoder;
+using cricket::FakeWebRtcVideoEncoderFactory;
+using cricket::MediaContentDescription;
+using webrtc::DataBuffer;
+using webrtc::DataChannelInterface;
+using webrtc::DtmfSender;
+using webrtc::DtmfSenderInterface;
+using webrtc::DtmfSenderObserverInterface;
+using webrtc::FakeConstraints;
+using webrtc::MediaConstraintsInterface;
+using webrtc::MediaStreamInterface;
+using webrtc::MediaStreamTrackInterface;
+using webrtc::MockCreateSessionDescriptionObserver;
+using webrtc::MockDataChannelObserver;
+using webrtc::MockSetSessionDescriptionObserver;
+using webrtc::MockStatsObserver;
+using webrtc::ObserverInterface;
+using webrtc::PeerConnectionInterface;
+using webrtc::PeerConnectionFactory;
+using webrtc::SessionDescriptionInterface;
+using webrtc::StreamCollectionInterface;
+
+static const int kMaxWaitMs = 10000;
+// Disable for TSan v2, see
+// https://code.google.com/p/webrtc/issues/detail?id=1205 for details.
+// This declaration is also #ifdef'd as it causes uninitialized-variable
+// warnings.
+#if !defined(THREAD_SANITIZER)
+static const int kMaxWaitForStatsMs = 3000;
+#endif
+static const int kMaxWaitForActivationMs = 5000;
+static const int kMaxWaitForFramesMs = 10000;
+static const int kEndAudioFrameCount = 3;
+static const int kEndVideoFrameCount = 3;
+
+static const char kStreamLabelBase[] = "stream_label";
+static const char kVideoTrackLabelBase[] = "video_track";
+static const char kAudioTrackLabelBase[] = "audio_track";
+static const char kDataChannelLabel[] = "data_channel";
+
+// Disable for TSan v2, see
+// https://code.google.com/p/webrtc/issues/detail?id=1205 for details.
+// This declaration is also #ifdef'd as it causes unused-variable errors.
+#if !defined(THREAD_SANITIZER)
+// SRTP crypto suite negotiated by the tests. This must be updated if the
+// default changes.
+static const int kDefaultSrtpCryptoSuite = rtc::SRTP_AES128_CM_SHA1_32;
+#endif
+
+static void RemoveLinesFromSdp(const std::string& line_start,
+                               std::string* sdp) {
+  const char kSdpLineEnd[] = "\r\n";
+  size_t ssrc_pos = 0;
+  while ((ssrc_pos = sdp->find(line_start, ssrc_pos)) !=
+      std::string::npos) {
+    size_t end_ssrc = sdp->find(kSdpLineEnd, ssrc_pos);
+    sdp->erase(ssrc_pos, end_ssrc - ssrc_pos + strlen(kSdpLineEnd));
+  }
+}
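+// E.g. (illustrative) RemoveLinesFromSdp("a=ssrc:", &sdp) strips every
+// "a=ssrc:..." line, simulating a peer that doesn't signal ssrcs.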
+
+class SignalingMessageReceiver {
+ public:
+  virtual void ReceiveSdpMessage(const std::string& type,
+                                 std::string& msg) = 0;
+  virtual void ReceiveIceMessage(const std::string& sdp_mid,
+                                 int sdp_mline_index,
+                                 const std::string& msg) = 0;
+
+ protected:
+  SignalingMessageReceiver() {}
+  virtual ~SignalingMessageReceiver() {}
+};
+
+class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
+                                 public SignalingMessageReceiver,
+                                 public ObserverInterface {
+ public:
+  static PeerConnectionTestClient* CreateClientWithDtlsIdentityStore(
+      const std::string& id,
+      const MediaConstraintsInterface* constraints,
+      const PeerConnectionFactory::Options* options,
+      rtc::scoped_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store) {
+    PeerConnectionTestClient* client(new PeerConnectionTestClient(id));
+    if (!client->Init(constraints, options, std::move(dtls_identity_store))) {
+      delete client;
+      return nullptr;
+    }
+    return client;
+  }
+
+  static PeerConnectionTestClient* CreateClient(
+      const std::string& id,
+      const MediaConstraintsInterface* constraints,
+      const PeerConnectionFactory::Options* options) {
+    rtc::scoped_ptr<FakeDtlsIdentityStore> dtls_identity_store(
+        rtc::SSLStreamAdapter::HaveDtlsSrtp() ? new FakeDtlsIdentityStore()
+                                              : nullptr);
+
+    return CreateClientWithDtlsIdentityStore(id, constraints, options,
+                                             std::move(dtls_identity_store));
+  }
+
+  ~PeerConnectionTestClient() {}
+
+  void Negotiate() { Negotiate(true, true); }
+
+  void Negotiate(bool audio, bool video) {
+    rtc::scoped_ptr<SessionDescriptionInterface> offer;
+    ASSERT_TRUE(DoCreateOffer(offer.use()));
+
+    if (offer->description()->GetContentByName("audio")) {
+      offer->description()->GetContentByName("audio")->rejected = !audio;
+    }
+    if (offer->description()->GetContentByName("video")) {
+      offer->description()->GetContentByName("video")->rejected = !video;
+    }
+
+    std::string sdp;
+    EXPECT_TRUE(offer->ToString(&sdp));
+    EXPECT_TRUE(DoSetLocalDescription(offer.release()));
+    signaling_message_receiver_->ReceiveSdpMessage(
+        webrtc::SessionDescriptionInterface::kOffer, sdp);
+  }
+
+  // SignalingMessageReceiver callback.
+  void ReceiveSdpMessage(const std::string& type, std::string& msg) override {
+    FilterIncomingSdpMessage(&msg);
+    if (type == webrtc::SessionDescriptionInterface::kOffer) {
+      HandleIncomingOffer(msg);
+    } else {
+      HandleIncomingAnswer(msg);
+    }
+  }
+
+  // SignalingMessageReceiver callback.
+  void ReceiveIceMessage(const std::string& sdp_mid,
+                         int sdp_mline_index,
+                         const std::string& msg) override {
+    LOG(INFO) << id_ << "ReceiveIceMessage";
+    rtc::scoped_ptr<webrtc::IceCandidateInterface> candidate(
+        webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, msg, nullptr));
+    EXPECT_TRUE(pc()->AddIceCandidate(candidate.get()));
+  }
+
+  // PeerConnectionObserver callbacks.
+  void OnSignalingChange(
+      webrtc::PeerConnectionInterface::SignalingState new_state) override {
+    EXPECT_EQ(pc()->signaling_state(), new_state);
+  }
+  void OnAddStream(MediaStreamInterface* media_stream) override {
+    media_stream->RegisterObserver(this);
+    for (size_t i = 0; i < media_stream->GetVideoTracks().size(); ++i) {
+      const std::string id = media_stream->GetVideoTracks()[i]->id();
+      ASSERT_TRUE(fake_video_renderers_.find(id) ==
+                  fake_video_renderers_.end());
+      fake_video_renderers_[id].reset(new webrtc::FakeVideoTrackRenderer(
+          media_stream->GetVideoTracks()[i]));
+    }
+  }
+  void OnRemoveStream(MediaStreamInterface* media_stream) override {}
+  void OnRenegotiationNeeded() override {}
+  void OnIceConnectionChange(
+      webrtc::PeerConnectionInterface::IceConnectionState new_state) override {
+    EXPECT_EQ(pc()->ice_connection_state(), new_state);
+  }
+  void OnIceGatheringChange(
+      webrtc::PeerConnectionInterface::IceGatheringState new_state) override {
+    EXPECT_EQ(pc()->ice_gathering_state(), new_state);
+  }
+  void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override {
+    LOG(INFO) << id_ << "OnIceCandidate";
+
+    std::string ice_sdp;
+    EXPECT_TRUE(candidate->ToString(&ice_sdp));
+    if (signaling_message_receiver_ == nullptr) {
+      // The remote party may have been deleted.
+      return;
+    }
+    signaling_message_receiver_->ReceiveIceMessage(
+        candidate->sdp_mid(), candidate->sdp_mline_index(), ice_sdp);
+  }
+
+  // MediaStreamInterface callback
+  void OnChanged() override {
+    // Track added or removed from MediaStream, so update our renderers.
+    rtc::scoped_refptr<StreamCollectionInterface> remote_streams =
+        pc()->remote_streams();
+    // Remove renderers for tracks that were removed.
+    for (auto it = fake_video_renderers_.begin();
+         it != fake_video_renderers_.end();) {
+      if (remote_streams->FindVideoTrack(it->first) == nullptr) {
+        auto to_remove = it++;
+        removed_fake_video_renderers_.push_back(std::move(to_remove->second));
+        fake_video_renderers_.erase(to_remove);
+      } else {
+        ++it;
+      }
+    }
+    // Create renderers for new video tracks.
+    for (size_t stream_index = 0; stream_index < remote_streams->count();
+         ++stream_index) {
+      MediaStreamInterface* remote_stream = remote_streams->at(stream_index);
+      for (size_t track_index = 0;
+           track_index < remote_stream->GetVideoTracks().size();
+           ++track_index) {
+        const std::string id =
+            remote_stream->GetVideoTracks()[track_index]->id();
+        if (fake_video_renderers_.find(id) != fake_video_renderers_.end()) {
+          continue;
+        }
+        fake_video_renderers_[id].reset(new webrtc::FakeVideoTrackRenderer(
+            remote_stream->GetVideoTracks()[track_index]));
+      }
+    }
+  }
+
+  void SetVideoConstraints(const webrtc::FakeConstraints& video_constraint) {
+    video_constraints_ = video_constraint;
+  }
+
+  void AddMediaStream(bool audio, bool video) {
+    std::string stream_label =
+        kStreamLabelBase +
+        rtc::ToString<int>(static_cast<int>(pc()->local_streams()->count()));
+    rtc::scoped_refptr<MediaStreamInterface> stream =
+        peer_connection_factory_->CreateLocalMediaStream(stream_label);
+
+    if (audio && can_receive_audio()) {
+      stream->AddTrack(CreateLocalAudioTrack(stream_label));
+    }
+    if (video && can_receive_video()) {
+      stream->AddTrack(CreateLocalVideoTrack(stream_label));
+    }
+
+    EXPECT_TRUE(pc()->AddStream(stream));
+  }
+
+  size_t NumberOfLocalMediaStreams() { return pc()->local_streams()->count(); }
+
+  bool SessionActive() {
+    return pc()->signaling_state() == webrtc::PeerConnectionInterface::kStable;
+  }
+
+  // Automatically add a stream when receiving an offer, if we don't have one.
+  // Defaults to true.
+  void set_auto_add_stream(bool auto_add_stream) {
+    auto_add_stream_ = auto_add_stream;
+  }
+
+  void set_signaling_message_receiver(
+      SignalingMessageReceiver* signaling_message_receiver) {
+    signaling_message_receiver_ = signaling_message_receiver;
+  }
+
+  void EnableVideoDecoderFactory() {
+    video_decoder_factory_enabled_ = true;
+    fake_video_decoder_factory_->AddSupportedVideoCodecType(
+        webrtc::kVideoCodecVP8);
+  }
+
+  void IceRestart() {
+    session_description_constraints_.SetMandatoryIceRestart(true);
+    SetExpectIceRestart(true);
+  }
+
+  void SetExpectIceRestart(bool expect_restart) {
+    expect_ice_restart_ = expect_restart;
+  }
+
+  bool ExpectIceRestart() const { return expect_ice_restart_; }
+
+  void SetReceiveAudioVideo(bool audio, bool video) {
+    SetReceiveAudio(audio);
+    SetReceiveVideo(video);
+    ASSERT_EQ(audio, can_receive_audio());
+    ASSERT_EQ(video, can_receive_video());
+  }
+
+  void SetReceiveAudio(bool audio) {
+    if (audio && can_receive_audio())
+      return;
+    session_description_constraints_.SetMandatoryReceiveAudio(audio);
+  }
+
+  void SetReceiveVideo(bool video) {
+    if (video && can_receive_video())
+      return;
+    session_description_constraints_.SetMandatoryReceiveVideo(video);
+  }
+
+  void RemoveMsidFromReceivedSdp(bool remove) { remove_msid_ = remove; }
+
+  void RemoveSdesCryptoFromReceivedSdp(bool remove) { remove_sdes_ = remove; }
+
+  void RemoveBundleFromReceivedSdp(bool remove) { remove_bundle_ = remove; }
+
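+  // The helpers below mirror how the OfferToReceive{Audio,Video} constraints
+  // are interpreted: when no constraint has been set, the default is to
+  // receive, so they return true until SetReceiveAudio(false) /
+  // SetReceiveVideo(false) installs a mandatory constraint.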
+  bool can_receive_audio() {
+    bool value;
+    if (webrtc::FindConstraint(&session_description_constraints_,
+                               MediaConstraintsInterface::kOfferToReceiveAudio,
+                               &value, nullptr)) {
+      return value;
+    }
+    return true;
+  }
+
+  bool can_receive_video() {
+    bool value;
+    if (webrtc::FindConstraint(&session_description_constraints_,
+                               MediaConstraintsInterface::kOfferToReceiveVideo,
+                               &value, nullptr)) {
+      return value;
+    }
+    return true;
+  }
+
+  void OnDataChannel(DataChannelInterface* data_channel) override {
+    LOG(INFO) << id_ << "OnDataChannel";
+    data_channel_ = data_channel;
+    data_observer_.reset(new MockDataChannelObserver(data_channel));
+  }
+
+  void CreateDataChannel() {
+    data_channel_ = pc()->CreateDataChannel(kDataChannelLabel, nullptr);
+    ASSERT_TRUE(data_channel_.get() != nullptr);
+    data_observer_.reset(new MockDataChannelObserver(data_channel_));
+  }
+
+  rtc::scoped_refptr<webrtc::AudioTrackInterface> CreateLocalAudioTrack(
+      const std::string& stream_label) {
+    FakeConstraints constraints;
+    // Disable highpass filter so that we can get all the test audio frames.
+    constraints.AddMandatory(MediaConstraintsInterface::kHighpassFilter, false);
+    rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
+        peer_connection_factory_->CreateAudioSource(&constraints);
+    // TODO(perkj): Test the audio source once it is implemented. Currently
+    // audio always uses the default input.
+    std::string label = stream_label + kAudioTrackLabelBase;
+    return peer_connection_factory_->CreateAudioTrack(label, source);
+  }
+
+  rtc::scoped_refptr<webrtc::VideoTrackInterface> CreateLocalVideoTrack(
+      const std::string& stream_label) {
+    // Set the max frame rate to 10 fps to reduce the risk of flaky tests.
+    FakeConstraints source_constraints = video_constraints_;
+    source_constraints.SetMandatoryMaxFrameRate(10);
+
+    cricket::FakeVideoCapturer* fake_capturer =
+        new webrtc::FakePeriodicVideoCapturer();
+    video_capturers_.push_back(fake_capturer);
+    rtc::scoped_refptr<webrtc::VideoSourceInterface> source =
+        peer_connection_factory_->CreateVideoSource(fake_capturer,
+                                                    &source_constraints);
+    std::string label = stream_label + kVideoTrackLabelBase;
+    return peer_connection_factory_->CreateVideoTrack(label, source);
+  }
+
+  DataChannelInterface* data_channel() { return data_channel_; }
+  const MockDataChannelObserver* data_observer() const {
+    return data_observer_.get();
+  }
+
+  webrtc::PeerConnectionInterface* pc() { return peer_connection_.get(); }
+
+  void StopVideoCapturers() {
+    for (std::vector<cricket::VideoCapturer*>::iterator it =
+             video_capturers_.begin();
+         it != video_capturers_.end(); ++it) {
+      (*it)->Stop();
+    }
+  }
+
+  bool AudioFramesReceivedCheck(int number_of_frames) const {
+    return number_of_frames <= fake_audio_capture_module_->frames_received();
+  }
+
+  int audio_frames_received() const {
+    return fake_audio_capture_module_->frames_received();
+  }
+
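+  // Returns true once every decoder (or renderer, when no external decoder
+  // factory is in use) has seen at least |number_of_frames|; before any
+  // decoders or renderers exist, only a non-positive expectation can pass.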
+  bool VideoFramesReceivedCheck(int number_of_frames) {
+    if (video_decoder_factory_enabled_) {
+      const std::vector<FakeWebRtcVideoDecoder*>& decoders
+          = fake_video_decoder_factory_->decoders();
+      if (decoders.empty()) {
+        return number_of_frames <= 0;
+      }
+
+      for (FakeWebRtcVideoDecoder* decoder : decoders) {
+        if (number_of_frames > decoder->GetNumFramesReceived()) {
+          return false;
+        }
+      }
+      return true;
+    } else {
+      if (fake_video_renderers_.empty()) {
+        return number_of_frames <= 0;
+      }
+
+      for (const auto& pair : fake_video_renderers_) {
+        if (number_of_frames > pair.second->num_rendered_frames()) {
+          return false;
+        }
+      }
+      return true;
+    }
+  }
+
+  int video_frames_received() const {
+    int total = 0;
+    if (video_decoder_factory_enabled_) {
+      const std::vector<FakeWebRtcVideoDecoder*>& decoders =
+          fake_video_decoder_factory_->decoders();
+      for (const FakeWebRtcVideoDecoder* decoder : decoders) {
+        total += decoder->GetNumFramesReceived();
+      }
+    } else {
+      for (const auto& pair : fake_video_renderers_) {
+        total += pair.second->num_rendered_frames();
+      }
+      for (const auto& renderer : removed_fake_video_renderers_) {
+        total += renderer->num_rendered_frames();
+      }
+    }
+    return total;
+  }
+
+  // Verifies the CreateDtmfSender interface.
+  void VerifyDtmf() {
+    rtc::scoped_ptr<DummyDtmfObserver> observer(new DummyDtmfObserver());
+    rtc::scoped_refptr<DtmfSenderInterface> dtmf_sender;
+
+    // We can't create a DTMF sender with an invalid audio track or with a
+    // non-local track.
+    EXPECT_TRUE(peer_connection_->CreateDtmfSender(nullptr) == nullptr);
+    rtc::scoped_refptr<webrtc::AudioTrackInterface> non_localtrack(
+        peer_connection_factory_->CreateAudioTrack("dummy_track", nullptr));
+    EXPECT_TRUE(peer_connection_->CreateDtmfSender(non_localtrack) == nullptr);
+
+    // We should be able to create a DTMF sender from a local track.
+    webrtc::AudioTrackInterface* localtrack =
+        peer_connection_->local_streams()->at(0)->GetAudioTracks()[0];
+    dtmf_sender = peer_connection_->CreateDtmfSender(localtrack);
+    EXPECT_TRUE(dtmf_sender.get() != nullptr);
+    dtmf_sender->RegisterObserver(observer.get());
+
+    // Test the DtmfSender object just created.
+    EXPECT_TRUE(dtmf_sender->CanInsertDtmf());
+    EXPECT_TRUE(dtmf_sender->InsertDtmf("1a", 100, 50));
+
+    // We don't need to verify that the DTMF tones are actually sent out
+    // because that is already covered by the tests of the lower-level
+    // components.
+
+    EXPECT_TRUE_WAIT(observer->completed(), kMaxWaitMs);
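+    // OnToneChange reports each tone as its playout starts and an empty
+    // string once the last tone has finished, so sending "1a" should produce
+    // the sequence {"1", "a", ""} verified below.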
+    std::vector<std::string> tones;
+    tones.push_back("1");
+    tones.push_back("a");
+    tones.push_back("");
+    observer->Verify(tones);
+
+    dtmf_sender->UnregisterObserver();
+  }
+
+  // Verifies that the SessionDescription has rejected the appropriate media
+  // content.
+  void VerifyRejectedMediaInSessionDescription() {
+    ASSERT_TRUE(peer_connection_->remote_description() != nullptr);
+    ASSERT_TRUE(peer_connection_->local_description() != nullptr);
+    const cricket::SessionDescription* remote_desc =
+        peer_connection_->remote_description()->description();
+    const cricket::SessionDescription* local_desc =
+        peer_connection_->local_description()->description();
+
+    const ContentInfo* remote_audio_content = GetFirstAudioContent(remote_desc);
+    if (remote_audio_content) {
+      const ContentInfo* audio_content =
+          GetFirstAudioContent(local_desc);
+      EXPECT_EQ(can_receive_audio(), !audio_content->rejected);
+    }
+
+    const ContentInfo* remote_video_content = GetFirstVideoContent(remote_desc);
+    if (remote_video_content) {
+      const ContentInfo* video_content =
+          GetFirstVideoContent(local_desc);
+      EXPECT_EQ(can_receive_video(), !video_content->rejected);
+    }
+  }
+
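+  // An ICE restart must generate a new ufrag/pwd pair for each transport
+  // (RFC 5245 section 9.1.1.1), whereas a plain renegotiation must keep the
+  // existing pair; this check enforces that per m-line.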
+  void VerifyLocalIceUfragAndPassword() {
+    ASSERT_TRUE(peer_connection_->local_description() != nullptr);
+    const cricket::SessionDescription* desc =
+        peer_connection_->local_description()->description();
+    const cricket::ContentInfos& contents = desc->contents();
+
+    for (size_t index = 0; index < contents.size(); ++index) {
+      if (contents[index].rejected)
+        continue;
+      const cricket::TransportDescription* transport_desc =
+          desc->GetTransportDescriptionByName(contents[index].name);
+
+      std::map<int, IceUfragPwdPair>::const_iterator ufragpair_it =
+          ice_ufrag_pwd_.find(static_cast<int>(index));
+      if (ufragpair_it == ice_ufrag_pwd_.end()) {
+        ASSERT_FALSE(ExpectIceRestart());
+        ice_ufrag_pwd_[static_cast<int>(index)] =
+            IceUfragPwdPair(transport_desc->ice_ufrag, transport_desc->ice_pwd);
+      } else if (ExpectIceRestart()) {
+        const IceUfragPwdPair& ufrag_pwd = ufragpair_it->second;
+        EXPECT_NE(ufrag_pwd.first, transport_desc->ice_ufrag);
+        EXPECT_NE(ufrag_pwd.second, transport_desc->ice_pwd);
+      } else {
+        const IceUfragPwdPair& ufrag_pwd = ufragpair_it->second;
+        EXPECT_EQ(ufrag_pwd.first, transport_desc->ice_ufrag);
+        EXPECT_EQ(ufrag_pwd.second, transport_desc->ice_pwd);
+      }
+    }
+  }
+
+  int GetAudioOutputLevelStats(webrtc::MediaStreamTrackInterface* track) {
+    rtc::scoped_refptr<MockStatsObserver>
+        observer(new rtc::RefCountedObject<MockStatsObserver>());
+    EXPECT_TRUE(peer_connection_->GetStats(
+        observer, track, PeerConnectionInterface::kStatsOutputLevelStandard));
+    EXPECT_TRUE_WAIT(observer->called(), kMaxWaitMs);
+    EXPECT_NE(0, observer->timestamp());
+    return observer->AudioOutputLevel();
+  }
+
+  int GetAudioInputLevelStats() {
+    rtc::scoped_refptr<MockStatsObserver>
+        observer(new rtc::RefCountedObject<MockStatsObserver>());
+    EXPECT_TRUE(peer_connection_->GetStats(
+        observer, nullptr, PeerConnectionInterface::kStatsOutputLevelStandard));
+    EXPECT_TRUE_WAIT(observer->called(), kMaxWaitMs);
+    EXPECT_NE(0, observer->timestamp());
+    return observer->AudioInputLevel();
+  }
+
+  int GetBytesReceivedStats(webrtc::MediaStreamTrackInterface* track) {
+    rtc::scoped_refptr<MockStatsObserver>
+        observer(new rtc::RefCountedObject<MockStatsObserver>());
+    EXPECT_TRUE(peer_connection_->GetStats(
+        observer, track, PeerConnectionInterface::kStatsOutputLevelStandard));
+    EXPECT_TRUE_WAIT(observer->called(), kMaxWaitMs);
+    EXPECT_NE(0, observer->timestamp());
+    return observer->BytesReceived();
+  }
+
+  int GetBytesSentStats(webrtc::MediaStreamTrackInterface* track) {
+    rtc::scoped_refptr<MockStatsObserver>
+        observer(new rtc::RefCountedObject<MockStatsObserver>());
+    EXPECT_TRUE(peer_connection_->GetStats(
+        observer, track, PeerConnectionInterface::kStatsOutputLevelStandard));
+    EXPECT_TRUE_WAIT(observer->called(), kMaxWaitMs);
+    EXPECT_NE(0, observer->timestamp());
+    return observer->BytesSent();
+  }
+
+  int GetAvailableReceivedBandwidthStats() {
+    rtc::scoped_refptr<MockStatsObserver>
+        observer(new rtc::RefCountedObject<MockStatsObserver>());
+    EXPECT_TRUE(peer_connection_->GetStats(
+        observer, nullptr, PeerConnectionInterface::kStatsOutputLevelStandard));
+    EXPECT_TRUE_WAIT(observer->called(), kMaxWaitMs);
+    EXPECT_NE(0, observer->timestamp());
+    int bw = observer->AvailableReceiveBandwidth();
+    return bw;
+  }
+
+  std::string GetDtlsCipherStats() {
+    rtc::scoped_refptr<MockStatsObserver>
+        observer(new rtc::RefCountedObject<MockStatsObserver>());
+    EXPECT_TRUE(peer_connection_->GetStats(
+        observer, nullptr, PeerConnectionInterface::kStatsOutputLevelStandard));
+    EXPECT_TRUE_WAIT(observer->called(), kMaxWaitMs);
+    EXPECT_NE(0, observer->timestamp());
+    return observer->DtlsCipher();
+  }
+
+  std::string GetSrtpCipherStats() {
+    rtc::scoped_refptr<MockStatsObserver>
+        observer(new rtc::RefCountedObject<MockStatsObserver>());
+    EXPECT_TRUE(peer_connection_->GetStats(
+        observer, nullptr, PeerConnectionInterface::kStatsOutputLevelStandard));
+    EXPECT_TRUE_WAIT(observer->called(), kMaxWaitMs);
+    EXPECT_NE(0, observer->timestamp());
+    return observer->SrtpCipher();
+  }
+
+  int rendered_width() {
+    EXPECT_FALSE(fake_video_renderers_.empty());
+    return fake_video_renderers_.empty() ? 1 :
+        fake_video_renderers_.begin()->second->width();
+  }
+
+  int rendered_height() {
+    EXPECT_FALSE(fake_video_renderers_.empty());
+    return fake_video_renderers_.empty() ? 1 :
+        fake_video_renderers_.begin()->second->height();
+  }
+
+  size_t number_of_remote_streams() {
+    if (!pc())
+      return 0;
+    return pc()->remote_streams()->count();
+  }
+
+  StreamCollectionInterface* remote_streams() {
+    if (!pc()) {
+      ADD_FAILURE();
+      return nullptr;
+    }
+    return pc()->remote_streams();
+  }
+
+  StreamCollectionInterface* local_streams() {
+    if (!pc()) {
+      ADD_FAILURE();
+      return nullptr;
+    }
+    return pc()->local_streams();
+  }
+
+  webrtc::PeerConnectionInterface::SignalingState signaling_state() {
+    return pc()->signaling_state();
+  }
+
+  webrtc::PeerConnectionInterface::IceConnectionState ice_connection_state() {
+    return pc()->ice_connection_state();
+  }
+
+  webrtc::PeerConnectionInterface::IceGatheringState ice_gathering_state() {
+    return pc()->ice_gathering_state();
+  }
+
+ private:
+  class DummyDtmfObserver : public DtmfSenderObserverInterface {
+   public:
+    DummyDtmfObserver() : completed_(false) {}
+
+    // Implements DtmfSenderObserverInterface.
+    void OnToneChange(const std::string& tone) override {
+      tones_.push_back(tone);
+      if (tone.empty()) {
+        completed_ = true;
+      }
+    }
+
+    void Verify(const std::vector<std::string>& tones) const {
+      ASSERT_TRUE(tones_.size() == tones.size());
+      EXPECT_TRUE(std::equal(tones.begin(), tones.end(), tones_.begin()));
+    }
+
+    bool completed() const { return completed_; }
+
+   private:
+    bool completed_;
+    std::vector<std::string> tones_;
+  };
+
+  explicit PeerConnectionTestClient(const std::string& id) : id_(id) {}
+
+  bool Init(
+      const MediaConstraintsInterface* constraints,
+      const PeerConnectionFactory::Options* options,
+      rtc::scoped_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store) {
+    EXPECT_TRUE(!peer_connection_);
+    EXPECT_TRUE(!peer_connection_factory_);
+    rtc::scoped_ptr<cricket::PortAllocator> port_allocator(
+        new cricket::FakePortAllocator(rtc::Thread::Current(), nullptr));
+    fake_audio_capture_module_ = FakeAudioCaptureModule::Create();
+
+    if (fake_audio_capture_module_ == nullptr) {
+      return false;
+    }
+    fake_video_decoder_factory_ = new FakeWebRtcVideoDecoderFactory();
+    fake_video_encoder_factory_ = new FakeWebRtcVideoEncoderFactory();
+    peer_connection_factory_ = webrtc::CreatePeerConnectionFactory(
+        rtc::Thread::Current(), rtc::Thread::Current(),
+        fake_audio_capture_module_, fake_video_encoder_factory_,
+        fake_video_decoder_factory_);
+    if (!peer_connection_factory_) {
+      return false;
+    }
+    if (options) {
+      peer_connection_factory_->SetOptions(*options);
+    }
+    peer_connection_ = CreatePeerConnection(
+        std::move(port_allocator), constraints, std::move(dtls_identity_store));
+    return peer_connection_.get() != nullptr;
+  }
+
+  rtc::scoped_refptr<webrtc::PeerConnectionInterface> CreatePeerConnection(
+      rtc::scoped_ptr<cricket::PortAllocator> port_allocator,
+      const MediaConstraintsInterface* constraints,
+      rtc::scoped_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store) {
+    // CreatePeerConnection with RTCConfiguration.
+    webrtc::PeerConnectionInterface::RTCConfiguration config;
+    webrtc::PeerConnectionInterface::IceServer ice_server;
+    ice_server.uri = "stun:stun.l.google.com:19302";
+    config.servers.push_back(ice_server);
+
+    return peer_connection_factory_->CreatePeerConnection(
+        config, constraints, std::move(port_allocator),
+        std::move(dtls_identity_store), this);
+  }
+
+  void HandleIncomingOffer(const std::string& msg) {
+    LOG(INFO) << id_ << "HandleIncomingOffer ";
+    if (NumberOfLocalMediaStreams() == 0 && auto_add_stream_) {
+      // If we are not sending any streams ourselves it is time to add some.
+      AddMediaStream(true, true);
+    }
+    rtc::scoped_ptr<SessionDescriptionInterface> desc(
+        webrtc::CreateSessionDescription("offer", msg, nullptr));
+    EXPECT_TRUE(DoSetRemoteDescription(desc.release()));
+    rtc::scoped_ptr<SessionDescriptionInterface> answer;
+    EXPECT_TRUE(DoCreateAnswer(answer.use()));
+    std::string sdp;
+    EXPECT_TRUE(answer->ToString(&sdp));
+    EXPECT_TRUE(DoSetLocalDescription(answer.release()));
+    if (signaling_message_receiver_) {
+      signaling_message_receiver_->ReceiveSdpMessage(
+          webrtc::SessionDescriptionInterface::kAnswer, sdp);
+    }
+  }
+
+  void HandleIncomingAnswer(const std::string& msg) {
+    LOG(INFO) << id_ << "HandleIncomingAnswer";
+    rtc::scoped_ptr<SessionDescriptionInterface> desc(
+        webrtc::CreateSessionDescription("answer", msg, nullptr));
+    EXPECT_TRUE(DoSetRemoteDescription(desc.release()));
+  }
+
+  bool DoCreateOfferAnswer(SessionDescriptionInterface** desc,
+                           bool offer) {
+    rtc::scoped_refptr<MockCreateSessionDescriptionObserver>
+        observer(new rtc::RefCountedObject<
+            MockCreateSessionDescriptionObserver>());
+    if (offer) {
+      pc()->CreateOffer(observer, &session_description_constraints_);
+    } else {
+      pc()->CreateAnswer(observer, &session_description_constraints_);
+    }
+    EXPECT_EQ_WAIT(true, observer->called(), kMaxWaitMs);
+    *desc = observer->release_desc();
+    if (observer->result() && ExpectIceRestart()) {
+      EXPECT_EQ(0u, (*desc)->candidates(0)->count());
+    }
+    return observer->result();
+  }
+
+  bool DoCreateOffer(SessionDescriptionInterface** desc) {
+    return DoCreateOfferAnswer(desc, true);
+  }
+
+  bool DoCreateAnswer(SessionDescriptionInterface** desc) {
+    return DoCreateOfferAnswer(desc, false);
+  }
+
+  bool DoSetLocalDescription(SessionDescriptionInterface* desc) {
+    rtc::scoped_refptr<MockSetSessionDescriptionObserver>
+            observer(new rtc::RefCountedObject<
+                MockSetSessionDescriptionObserver>());
+    LOG(INFO) << id_ << "SetLocalDescription ";
+    pc()->SetLocalDescription(observer, desc);
+    // Ignore the observer result. If we waited for the result with
+    // EXPECT_TRUE_WAIT, local ICE candidates might be sent to the remote peer
+    // before the offer, which is an error.
+    // The reason is that EXPECT_TRUE_WAIT uses
+    // rtc::Thread::Current()->ProcessMessages(1);
+    // ProcessMessages waits at least 1 ms but processes all pending messages
+    // before returning. Since this test is synchronous and sends messages to
+    // the remote peer whenever a callback is invoked, this can lead to
+    // messages being delivered to the remote peer in the wrong order.
+    // TODO(perkj): Find a way to check the result without risking that the
+    // order of sent messages is changed, e.g. by posting all messages that
+    // are sent to the remote peer.
+    return true;
+  }
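+
+  // A minimal sketch of what the TODO above suggests (editor's illustration
+  // only, not part of this CL): queue outgoing signaling messages instead of
+  // sending them directly from callbacks, and flush the queue from the test
+  // loop, so waiting on the observer can never reorder them. Hypothetical
+  // helpers:
+  //
+  //   std::deque<std::function<void()>> outgoing_messages_;
+  //   void PostToRemote(std::function<void()> send_fn) {
+  //     outgoing_messages_.push_back(std::move(send_fn));
+  //   }
+  //   void FlushOutgoingMessages() {
+  //     while (!outgoing_messages_.empty()) {
+  //       outgoing_messages_.front()();
+  //       outgoing_messages_.pop_front();
+  //     }
+  //   }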
+
+  bool DoSetRemoteDescription(SessionDescriptionInterface* desc) {
+    rtc::scoped_refptr<MockSetSessionDescriptionObserver>
+        observer(new rtc::RefCountedObject<
+            MockSetSessionDescriptionObserver>());
+    LOG(INFO) << id_ << "SetRemoteDescription ";
+    pc()->SetRemoteDescription(observer, desc);
+    EXPECT_TRUE_WAIT(observer->called(), kMaxWaitMs);
+    return observer->result();
+  }
+
+  // This modifies all received SDP messages before they are processed.
+  void FilterIncomingSdpMessage(std::string* sdp) {
+    if (remove_msid_) {
+      const char kSdpSsrcAttribute[] = "a=ssrc:";
+      RemoveLinesFromSdp(kSdpSsrcAttribute, sdp);
+      const char kSdpMsidSupportedAttribute[] = "a=msid-semantic:";
+      RemoveLinesFromSdp(kSdpMsidSupportedAttribute, sdp);
+    }
+    if (remove_bundle_) {
+      const char kSdpBundleAttribute[] = "a=group:BUNDLE";
+      RemoveLinesFromSdp(kSdpBundleAttribute, sdp);
+    }
+    if (remove_sdes_) {
+      const char kSdpSdesCryptoAttribute[] = "a=crypto";
+      RemoveLinesFromSdp(kSdpSdesCryptoAttribute, sdp);
+    }
+  }
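+
+  // For example, with |remove_sdes_| set, an incoming line such as
+  //   a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:<key-params>
+  // is stripped before the description is parsed, so key agreement must fall
+  // back to DTLS (or negotiation fails if DTLS is disabled as well).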
+
+  std::string id_;
+
+  rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection_;
+  rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
+      peer_connection_factory_;
+
+  bool auto_add_stream_ = true;
+
+  typedef std::pair<std::string, std::string> IceUfragPwdPair;
+  std::map<int, IceUfragPwdPair> ice_ufrag_pwd_;
+  bool expect_ice_restart_ = false;
+
+  // Needed to keep track of the number of frames sent.
+  rtc::scoped_refptr<FakeAudioCaptureModule> fake_audio_capture_module_;
+  // Needed to keep track of the number of frames received.
+  std::map<std::string, rtc::scoped_ptr<webrtc::FakeVideoTrackRenderer>>
+      fake_video_renderers_;
+  // Needed to ensure frames aren't received for removed tracks.
+  std::vector<rtc::scoped_ptr<webrtc::FakeVideoTrackRenderer>>
+      removed_fake_video_renderers_;
+  // Needed to keep track of the number of frames received when an external
+  // decoder is used.
+  FakeWebRtcVideoDecoderFactory* fake_video_decoder_factory_ = nullptr;
+  FakeWebRtcVideoEncoderFactory* fake_video_encoder_factory_ = nullptr;
+  bool video_decoder_factory_enabled_ = false;
+  webrtc::FakeConstraints video_constraints_;
+
+  // For remote peer communication.
+  SignalingMessageReceiver* signaling_message_receiver_ = nullptr;
+
+  // Store references to the video capturers we've created, so that we can
+  // stop them if required.
+  std::vector<cricket::VideoCapturer*> video_capturers_;
+
+  webrtc::FakeConstraints session_description_constraints_;
+  // True if MSID should be removed in received SDP.
+  bool remove_msid_ = false;
+  // True if bundle should be removed in received SDP.
+  bool remove_bundle_ = false;
+  // True if a=crypto should be removed in received SDP.
+  bool remove_sdes_ = false;
+
+  rtc::scoped_refptr<DataChannelInterface> data_channel_;
+  rtc::scoped_ptr<MockDataChannelObserver> data_observer_;
+};
+
+class P2PTestConductor : public testing::Test {
+ public:
+  P2PTestConductor()
+      : pss_(new rtc::PhysicalSocketServer),
+        ss_(new rtc::VirtualSocketServer(pss_.get())),
+        ss_scope_(ss_.get()) {}
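+
+  // Note: |ss_scope_| installs the VirtualSocketServer on the current thread,
+  // so all "network" traffic in these tests stays in-process and
+  // deterministic.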
+
+  bool SessionActive() {
+    return initiating_client_->SessionActive() &&
+           receiving_client_->SessionActive();
+  }
+
+  // Returns true if the provided number of frames has been received, or if
+  // it is known that this will never occur (e.g. no frames will be sent or
+  // captured).
+  bool FramesNotPending(int audio_frames_to_receive,
+                        int video_frames_to_receive) {
+    return VideoFramesReceivedCheck(video_frames_to_receive) &&
+        AudioFramesReceivedCheck(audio_frames_to_receive);
+  }
+  bool AudioFramesReceivedCheck(int frames_received) {
+    return initiating_client_->AudioFramesReceivedCheck(frames_received) &&
+        receiving_client_->AudioFramesReceivedCheck(frames_received);
+  }
+  bool VideoFramesReceivedCheck(int frames_received) {
+    return initiating_client_->VideoFramesReceivedCheck(frames_received) &&
+        receiving_client_->VideoFramesReceivedCheck(frames_received);
+  }
+  void VerifyDtmf() {
+    initiating_client_->VerifyDtmf();
+    receiving_client_->VerifyDtmf();
+  }
+
+  void TestUpdateOfferWithRejectedContent() {
+    // Renegotiate, rejecting the video m-line.
+    initiating_client_->Negotiate(true, false);
+    ASSERT_TRUE_WAIT(SessionActive(), kMaxWaitForActivationMs);
+
+    int pc1_audio_received = initiating_client_->audio_frames_received();
+    int pc1_video_received = initiating_client_->video_frames_received();
+    int pc2_audio_received = receiving_client_->audio_frames_received();
+    int pc2_video_received = receiving_client_->video_frames_received();
+
+    // Wait for some additional audio frames to be received.
+    EXPECT_TRUE_WAIT(initiating_client_->AudioFramesReceivedCheck(
+                         pc1_audio_received + kEndAudioFrameCount) &&
+                         receiving_client_->AudioFramesReceivedCheck(
+                             pc2_audio_received + kEndAudioFrameCount),
+                     kMaxWaitForFramesMs);
+
+    // During this time, we shouldn't have received any additional video frames
+    // for the rejected video tracks.
+    EXPECT_EQ(pc1_video_received, initiating_client_->video_frames_received());
+    EXPECT_EQ(pc2_video_received, receiving_client_->video_frames_received());
+  }
+
+  void VerifyRenderedSize(int width, int height) {
+    EXPECT_EQ(width, receiving_client()->rendered_width());
+    EXPECT_EQ(height, receiving_client()->rendered_height());
+    EXPECT_EQ(width, initializing_client()->rendered_width());
+    EXPECT_EQ(height, initializing_client()->rendered_height());
+  }
+
+  void VerifySessionDescriptions() {
+    initiating_client_->VerifyRejectedMediaInSessionDescription();
+    receiving_client_->VerifyRejectedMediaInSessionDescription();
+    initiating_client_->VerifyLocalIceUfragAndPassword();
+    receiving_client_->VerifyLocalIceUfragAndPassword();
+  }
+
+  ~P2PTestConductor() {
+    if (initiating_client_) {
+      initiating_client_->set_signaling_message_receiver(nullptr);
+    }
+    if (receiving_client_) {
+      receiving_client_->set_signaling_message_receiver(nullptr);
+    }
+  }
+
+  bool CreateTestClients() { return CreateTestClients(nullptr, nullptr); }
+
+  bool CreateTestClients(MediaConstraintsInterface* init_constraints,
+                         MediaConstraintsInterface* recv_constraints) {
+    return CreateTestClients(init_constraints, nullptr, recv_constraints,
+                             nullptr);
+  }
+
+  void SetSignalingReceivers() {
+    initiating_client_->set_signaling_message_receiver(receiving_client_.get());
+    receiving_client_->set_signaling_message_receiver(initiating_client_.get());
+  }
+
+  bool CreateTestClients(MediaConstraintsInterface* init_constraints,
+                         PeerConnectionFactory::Options* init_options,
+                         MediaConstraintsInterface* recv_constraints,
+                         PeerConnectionFactory::Options* recv_options) {
+    initiating_client_.reset(PeerConnectionTestClient::CreateClient(
+        "Caller: ", init_constraints, init_options));
+    receiving_client_.reset(PeerConnectionTestClient::CreateClient(
+        "Callee: ", recv_constraints, recv_options));
+    if (!initiating_client_ || !receiving_client_) {
+      return false;
+    }
+    SetSignalingReceivers();
+    return true;
+  }
+
+  void SetVideoConstraints(const webrtc::FakeConstraints& init_constraints,
+                           const webrtc::FakeConstraints& recv_constraints) {
+    initiating_client_->SetVideoConstraints(init_constraints);
+    receiving_client_->SetVideoConstraints(recv_constraints);
+  }
+
+  void EnableVideoDecoderFactory() {
+    initiating_client_->EnableVideoDecoderFactory();
+    receiving_client_->EnableVideoDecoderFactory();
+  }
+
+  // This test sets up a call between two parties. Both parties send static
+  // frames to each other. Once the test is finished the number of sent frames
+  // is compared to the number of received frames.
+  void LocalP2PTest() {
+    if (initiating_client_->NumberOfLocalMediaStreams() == 0) {
+      initiating_client_->AddMediaStream(true, true);
+    }
+    initiating_client_->Negotiate();
+    // ASSERT is used here since the checks below are guaranteed to fail if
+    // the session never becomes active, and each would eat up 5 seconds.
+    ASSERT_TRUE_WAIT(SessionActive(), kMaxWaitForActivationMs);
+    VerifySessionDescriptions();
+
+    int audio_frame_count = kEndAudioFrameCount;
+    // TODO(ronghuawu): Add test to cover the case of sendonly and recvonly.
+    if (!initiating_client_->can_receive_audio() ||
+        !receiving_client_->can_receive_audio()) {
+      audio_frame_count = -1;
+    }
+    int video_frame_count = kEndVideoFrameCount;
+    if (!initiating_client_->can_receive_video() ||
+        !receiving_client_->can_receive_video()) {
+      video_frame_count = -1;
+    }
+
+    if (audio_frame_count != -1 || video_frame_count != -1) {
+      // Audio or video is expected to flow, so both clients should reach the
+      // Connected state, and the offerer (ICE controller) should proceed to
+      // Completed.
+      // Note: These tests have been observed to fail under heavy load at
+      // shorter timeouts, so they may be flaky.
+      EXPECT_EQ_WAIT(
+          webrtc::PeerConnectionInterface::kIceConnectionCompleted,
+          initiating_client_->ice_connection_state(),
+          kMaxWaitForFramesMs);
+      EXPECT_EQ_WAIT(
+          webrtc::PeerConnectionInterface::kIceConnectionConnected,
+          receiving_client_->ice_connection_state(),
+          kMaxWaitForFramesMs);
+    }
+
+    if (initiating_client_->can_receive_audio() ||
+        initiating_client_->can_receive_video()) {
+      // The initiating client can receive media, so it must produce candidates
+      // that will serve as destinations for that media.
+      // TODO(bemasc): Understand why the state is not already Complete here, as
+      // seems to be the case for the receiving client. This may indicate a bug
+      // in the ICE gathering system.
+      EXPECT_NE(webrtc::PeerConnectionInterface::kIceGatheringNew,
+                initiating_client_->ice_gathering_state());
+    }
+    if (receiving_client_->can_receive_audio() ||
+        receiving_client_->can_receive_video()) {
+      EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceGatheringComplete,
+                     receiving_client_->ice_gathering_state(),
+                     kMaxWaitForFramesMs);
+    }
+
+    EXPECT_TRUE_WAIT(FramesNotPending(audio_frame_count, video_frame_count),
+                     kMaxWaitForFramesMs);
+  }
+
+  void SetupAndVerifyDtlsCall() {
+    MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+    FakeConstraints setup_constraints;
+    setup_constraints.AddMandatory(MediaConstraintsInterface::kEnableDtlsSrtp,
+                                   true);
+    ASSERT_TRUE(CreateTestClients(&setup_constraints, &setup_constraints));
+    LocalP2PTest();
+    VerifyRenderedSize(640, 480);
+  }
+
+  PeerConnectionTestClient* CreateDtlsClientWithAlternateKey() {
+    FakeConstraints setup_constraints;
+    setup_constraints.AddMandatory(MediaConstraintsInterface::kEnableDtlsSrtp,
+                                   true);
+
+    rtc::scoped_ptr<FakeDtlsIdentityStore> dtls_identity_store(
+        rtc::SSLStreamAdapter::HaveDtlsSrtp() ? new FakeDtlsIdentityStore()
+                                              : nullptr);
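+    // Callers are expected to have already skipped the test when DTLS-SRTP is
+    // unavailable; otherwise |dtls_identity_store| would be null here.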
+    dtls_identity_store->use_alternate_key();
+
+    // Make sure the new client is using a different certificate.
+    return PeerConnectionTestClient::CreateClientWithDtlsIdentityStore(
+        "New Peer: ", &setup_constraints, nullptr,
+        std::move(dtls_identity_store));
+  }
+
+  void SendRtpData(webrtc::DataChannelInterface* dc, const std::string& data) {
+    // Messages may get lost on the unreliable DataChannel, so we send multiple
+    // times to avoid test flakiness.
+    static const size_t kSendAttempts = 5;
+
+    for (size_t i = 0; i < kSendAttempts; ++i) {
+      dc->Send(DataBuffer(data));
+    }
+  }
+
+  PeerConnectionTestClient* initializing_client() {
+    return initiating_client_.get();
+  }
+
+  // Set the |initiating_client_| to the |client| passed in and return the
+  // original |initiating_client_|.
+  PeerConnectionTestClient* set_initializing_client(
+      PeerConnectionTestClient* client) {
+    PeerConnectionTestClient* old = initiating_client_.release();
+    initiating_client_.reset(client);
+    return old;
+  }
+
+  PeerConnectionTestClient* receiving_client() {
+    return receiving_client_.get();
+  }
+
+  // Set the |receiving_client_| to the |client| passed in and return the
+  // original |receiving_client_|.
+  PeerConnectionTestClient* set_receiving_client(
+      PeerConnectionTestClient* client) {
+    PeerConnectionTestClient* old = receiving_client_.release();
+    receiving_client_.reset(client);
+    return old;
+  }
+
+ private:
+  rtc::scoped_ptr<rtc::PhysicalSocketServer> pss_;
+  rtc::scoped_ptr<rtc::VirtualSocketServer> ss_;
+  rtc::SocketServerScope ss_scope_;
+  rtc::scoped_ptr<PeerConnectionTestClient> initiating_client_;
+  rtc::scoped_ptr<PeerConnectionTestClient> receiving_client_;
+};
+
+// Disable for TSan v2, see
+// https://code.google.com/p/webrtc/issues/detail?id=1205 for details.
+#if !defined(THREAD_SANITIZER)
+
+// This test sets up a Jsep call between two parties and tests DTMF.
+// TODO(holmer): Disabled due to sometimes crashing on buildbots.
+// See issue webrtc/2378.
+TEST_F(P2PTestConductor, DISABLED_LocalP2PTestDtmf) {
+  ASSERT_TRUE(CreateTestClients());
+  LocalP2PTest();
+  VerifyDtmf();
+}
+
+// This test sets up a Jsep call between two parties and tests that we can get
+// a video aspect ratio of 16:9.
+TEST_F(P2PTestConductor, LocalP2PTest16To9) {
+  ASSERT_TRUE(CreateTestClients());
+  FakeConstraints constraint;
+  double requested_ratio = 640.0 / 360;
+  constraint.SetMandatoryMinAspectRatio(requested_ratio);
+  SetVideoConstraints(constraint, constraint);
+  LocalP2PTest();
+
+  ASSERT_LE(0, initializing_client()->rendered_height());
+  double initiating_video_ratio =
+      static_cast<double>(initializing_client()->rendered_width()) /
+      initializing_client()->rendered_height();
+  EXPECT_LE(requested_ratio, initiating_video_ratio);
+
+  ASSERT_LE(0, receiving_client()->rendered_height());
+  double receiving_video_ratio =
+      static_cast<double>(receiving_client()->rendered_width()) /
+      receiving_client()->rendered_height();
+  EXPECT_LE(requested_ratio, receiving_video_ratio);
+}
+
+// This test sets up a Jsep call between two parties and tests that the
+// received video has a resolution of 1280x720.
+// TODO(mallinath): Enable when
+// http://code.google.com/p/webrtc/issues/detail?id=981 is fixed.
+TEST_F(P2PTestConductor, DISABLED_LocalP2PTest1280By720) {
+  ASSERT_TRUE(CreateTestClients());
+  FakeConstraints constraint;
+  constraint.SetMandatoryMinWidth(1280);
+  constraint.SetMandatoryMinHeight(720);
+  SetVideoConstraints(constraint, constraint);
+  LocalP2PTest();
+  VerifyRenderedSize(1280, 720);
+}
+
+// This test sets up a call between two endpoints that are configured to use
+// DTLS key agreement. As a result, DTLS is negotiated and used for transport.
+TEST_F(P2PTestConductor, LocalP2PTestDtls) {
+  SetupAndVerifyDtlsCall();
+}
+
+// This test sets up an audio call initially and then upgrades to audio/video,
+// using DTLS.
+TEST_F(P2PTestConductor, LocalP2PTestDtlsRenegotiate) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  FakeConstraints setup_constraints;
+  setup_constraints.AddMandatory(MediaConstraintsInterface::kEnableDtlsSrtp,
+                                 true);
+  ASSERT_TRUE(CreateTestClients(&setup_constraints, &setup_constraints));
+  receiving_client()->SetReceiveAudioVideo(true, false);
+  LocalP2PTest();
+  receiving_client()->SetReceiveAudioVideo(true, true);
+  receiving_client()->Negotiate();
+}
+
+// This test sets up a call transfer to a new caller with a different DTLS
+// fingerprint.
+TEST_F(P2PTestConductor, LocalP2PTestDtlsTransferCallee) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  SetupAndVerifyDtlsCall();
+
+  // Keep the original peer around; it will still send packets to the
+  // receiving client. These SRTP packets will be dropped.
+  rtc::scoped_ptr<PeerConnectionTestClient> original_peer(
+      set_initializing_client(CreateDtlsClientWithAlternateKey()));
+  original_peer->pc()->Close();
+
+  SetSignalingReceivers();
+  receiving_client()->SetExpectIceRestart(true);
+  LocalP2PTest();
+  VerifyRenderedSize(640, 480);
+}
+
+// This test sets up a non-bundle call and applies bundle during the ICE
+// restart. When bundle is in effect after the restart, the channel can
+// successfully reset its DTLS-SRTP context.
+TEST_F(P2PTestConductor, LocalP2PTestDtlsBundleInIceRestart) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  FakeConstraints setup_constraints;
+  setup_constraints.AddMandatory(MediaConstraintsInterface::kEnableDtlsSrtp,
+                                 true);
+  ASSERT_TRUE(CreateTestClients(&setup_constraints, &setup_constraints));
+  receiving_client()->RemoveBundleFromReceivedSdp(true);
+  LocalP2PTest();
+  VerifyRenderedSize(640, 480);
+
+  initializing_client()->IceRestart();
+  receiving_client()->SetExpectIceRestart(true);
+  receiving_client()->RemoveBundleFromReceivedSdp(false);
+  LocalP2PTest();
+  VerifyRenderedSize(640, 480);
+}
+
+// This test sets up a call transfer to a new callee with a different DTLS
+// fingerprint.
+TEST_F(P2PTestConductor, LocalP2PTestDtlsTransferCaller) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  SetupAndVerifyDtlsCall();
+
+  // Keep the original peer around; it will still send packets to the
+  // receiving client. These SRTP packets will be dropped.
+  rtc::scoped_ptr<PeerConnectionTestClient> original_peer(
+      set_receiving_client(CreateDtlsClientWithAlternateKey()));
+  original_peer->pc()->Close();
+
+  SetSignalingReceivers();
+  initializing_client()->IceRestart();
+  LocalP2PTest();
+  VerifyRenderedSize(640, 480);
+}
+
+// This test sets up a call between two endpoints that are configured to use
+// DTLS key agreement. The offerer doesn't support SDES. As a result, DTLS is
+// negotiated and used for transport.
+TEST_F(P2PTestConductor, LocalP2PTestOfferDtlsButNotSdes) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  FakeConstraints setup_constraints;
+  setup_constraints.AddMandatory(MediaConstraintsInterface::kEnableDtlsSrtp,
+                                 true);
+  ASSERT_TRUE(CreateTestClients(&setup_constraints, &setup_constraints));
+  receiving_client()->RemoveSdesCryptoFromReceivedSdp(true);
+  LocalP2PTest();
+  VerifyRenderedSize(640, 480);
+}
+
+// This test sets up a Jsep call between two parties, where the callee only
+// accepts receiving video.
+TEST_F(P2PTestConductor, LocalP2PTestAnswerVideo) {
+  ASSERT_TRUE(CreateTestClients());
+  receiving_client()->SetReceiveAudioVideo(false, true);
+  LocalP2PTest();
+}
+
+// This test sets up a Jsep call between two parties, where the callee only
+// accepts receiving audio.
+TEST_F(P2PTestConductor, LocalP2PTestAnswerAudio) {
+  ASSERT_TRUE(CreateTestClients());
+  receiving_client()->SetReceiveAudioVideo(true, false);
+  LocalP2PTest();
+}
+
+// This test sets up a Jsep call between two parties, where the callee rejects
+// both audio and video.
+TEST_F(P2PTestConductor, LocalP2PTestAnswerNone) {
+  ASSERT_TRUE(CreateTestClients());
+  receiving_client()->SetReceiveAudioVideo(false, false);
+  LocalP2PTest();
+}
+
+// This test sets up an audio and video call between two parties. After the call
+// runs for a while (10 frames), the caller sends an update offer with video
+// being rejected. Once the re-negotiation is done, the video flow should stop
+// and the audio flow should continue.
+TEST_F(P2PTestConductor, UpdateOfferWithRejectedContent) {
+  ASSERT_TRUE(CreateTestClients());
+  LocalP2PTest();
+  TestUpdateOfferWithRejectedContent();
+}
+
+// This test sets up a Jsep call between two parties. The MSID is removed from
+// the SDP received from the caller.
+TEST_F(P2PTestConductor, LocalP2PTestWithoutMsid) {
+  ASSERT_TRUE(CreateTestClients());
+  receiving_client()->RemoveMsidFromReceivedSdp(true);
+  // TODO(perkj): Currently there is a bug that causes audio to stop playing
+  // if audio and video are muxed when MSID is disabled. Remove
+  // RemoveBundleFromReceivedSdp once
+  // https://code.google.com/p/webrtc/issues/detail?id=1193 is fixed.
+  receiving_client()->RemoveBundleFromReceivedSdp(true);
+  LocalP2PTest();
+}
+
+// This test sets up a Jsep call between two parties and the initiating peer
+// sends two streams.
+// TODO(perkj): Disabled due to
+// https://code.google.com/p/webrtc/issues/detail?id=1454
+TEST_F(P2PTestConductor, DISABLED_LocalP2PTestTwoStreams) {
+  ASSERT_TRUE(CreateTestClients());
+  // Set an optional max-width constraint of 320 pixels to decrease CPU usage.
+  FakeConstraints constraint;
+  constraint.SetOptionalMaxWidth(320);
+  SetVideoConstraints(constraint, constraint);
+  initializing_client()->AddMediaStream(true, true);
+  initializing_client()->AddMediaStream(false, true);
+  ASSERT_EQ(2u, initializing_client()->NumberOfLocalMediaStreams());
+  LocalP2PTest();
+  EXPECT_EQ(2u, receiving_client()->number_of_remote_streams());
+}
+
+// Test that we can receive the audio output level from a remote audio track.
+TEST_F(P2PTestConductor, GetAudioOutputLevelStats) {
+  ASSERT_TRUE(CreateTestClients());
+  LocalP2PTest();
+
+  StreamCollectionInterface* remote_streams =
+      initializing_client()->remote_streams();
+  ASSERT_GT(remote_streams->count(), 0u);
+  ASSERT_GT(remote_streams->at(0)->GetAudioTracks().size(), 0u);
+  MediaStreamTrackInterface* remote_audio_track =
+      remote_streams->at(0)->GetAudioTracks()[0];
+
+  // Get the audio output level stats. Note that the level is not available
+  // until an RTCP packet has been received.
+  EXPECT_TRUE_WAIT(
+      initializing_client()->GetAudioOutputLevelStats(remote_audio_track) > 0,
+      kMaxWaitForStatsMs);
+}
+
+// Test that an audio input level is reported.
+TEST_F(P2PTestConductor, GetAudioInputLevelStats) {
+  ASSERT_TRUE(CreateTestClients());
+  LocalP2PTest();
+
+  // Get the audio input level stats.  The level should be available very
+  // soon after the test starts.
+  EXPECT_TRUE_WAIT(initializing_client()->GetAudioInputLevelStats() > 0,
+      kMaxWaitForStatsMs);
+}
+
+// Test that we can get incoming byte counts from both audio and video tracks.
+TEST_F(P2PTestConductor, GetBytesReceivedStats) {
+  ASSERT_TRUE(CreateTestClients());
+  LocalP2PTest();
+
+  StreamCollectionInterface* remote_streams =
+      initializing_client()->remote_streams();
+  ASSERT_GT(remote_streams->count(), 0u);
+  ASSERT_GT(remote_streams->at(0)->GetAudioTracks().size(), 0u);
+  MediaStreamTrackInterface* remote_audio_track =
+      remote_streams->at(0)->GetAudioTracks()[0];
+  EXPECT_TRUE_WAIT(
+      initializing_client()->GetBytesReceivedStats(remote_audio_track) > 0,
+      kMaxWaitForStatsMs);
+
+  MediaStreamTrackInterface* remote_video_track =
+      remote_streams->at(0)->GetVideoTracks()[0];
+  EXPECT_TRUE_WAIT(
+      initializing_client()->GetBytesReceivedStats(remote_video_track) > 0,
+      kMaxWaitForStatsMs);
+}
+
+// Test that we can get outgoing byte counts from both audio and video tracks.
+TEST_F(P2PTestConductor, GetBytesSentStats) {
+  ASSERT_TRUE(CreateTestClients());
+  LocalP2PTest();
+
+  StreamCollectionInterface* local_streams =
+      initializing_client()->local_streams();
+  ASSERT_GT(local_streams->count(), 0u);
+  ASSERT_GT(local_streams->at(0)->GetAudioTracks().size(), 0u);
+  MediaStreamTrackInterface* local_audio_track =
+      local_streams->at(0)->GetAudioTracks()[0];
+  EXPECT_TRUE_WAIT(
+      initializing_client()->GetBytesSentStats(local_audio_track) > 0,
+      kMaxWaitForStatsMs);
+
+  MediaStreamTrackInterface* local_video_track =
+      local_streams->at(0)->GetVideoTracks()[0];
+  EXPECT_TRUE_WAIT(
+      initializing_client()->GetBytesSentStats(local_video_track) > 0,
+      kMaxWaitForStatsMs);
+}
+
+// Test that DTLS 1.0 is used if both sides only support DTLS 1.0.
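+// (DTLS settles on the highest protocol version supported by both sides, so
+// capping either side's ssl_max_version at DTLS 1.0 forces a 1.0 session; the
+// four tests below cover each combination.)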
+TEST_F(P2PTestConductor, GetDtls12None) {
+  PeerConnectionFactory::Options init_options;
+  init_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10;
+  PeerConnectionFactory::Options recv_options;
+  recv_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10;
+  ASSERT_TRUE(
+      CreateTestClients(nullptr, &init_options, nullptr, &recv_options));
+  rtc::scoped_refptr<webrtc::FakeMetricsObserver>
+      init_observer = new rtc::RefCountedObject<webrtc::FakeMetricsObserver>();
+  initializing_client()->pc()->RegisterUMAObserver(init_observer);
+  LocalP2PTest();
+
+  EXPECT_EQ_WAIT(rtc::SSLStreamAdapter::SslCipherSuiteToName(
+                     rtc::SSLStreamAdapter::GetDefaultSslCipherForTest(
+                         rtc::SSL_PROTOCOL_DTLS_10, rtc::KT_DEFAULT)),
+                 initializing_client()->GetDtlsCipherStats(),
+                 kMaxWaitForStatsMs);
+  EXPECT_EQ(1, init_observer->GetEnumCounter(
+                   webrtc::kEnumCounterAudioSslCipher,
+                   rtc::SSLStreamAdapter::GetDefaultSslCipherForTest(
+                       rtc::SSL_PROTOCOL_DTLS_10, rtc::KT_DEFAULT)));
+
+  EXPECT_EQ_WAIT(rtc::SrtpCryptoSuiteToName(kDefaultSrtpCryptoSuite),
+                 initializing_client()->GetSrtpCipherStats(),
+                 kMaxWaitForStatsMs);
+  EXPECT_EQ(1,
+            init_observer->GetEnumCounter(webrtc::kEnumCounterAudioSrtpCipher,
+                                          kDefaultSrtpCryptoSuite));
+}
+
+// Test that DTLS 1.2 is used if both ends support it.
+TEST_F(P2PTestConductor, GetDtls12Both) {
+  PeerConnectionFactory::Options init_options;
+  init_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12;
+  PeerConnectionFactory::Options recv_options;
+  recv_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12;
+  ASSERT_TRUE(
+      CreateTestClients(nullptr, &init_options, nullptr, &recv_options));
+  rtc::scoped_refptr<webrtc::FakeMetricsObserver>
+      init_observer = new rtc::RefCountedObject<webrtc::FakeMetricsObserver>();
+  initializing_client()->pc()->RegisterUMAObserver(init_observer);
+  LocalP2PTest();
+
+  EXPECT_EQ_WAIT(rtc::SSLStreamAdapter::SslCipherSuiteToName(
+                     rtc::SSLStreamAdapter::GetDefaultSslCipherForTest(
+                         rtc::SSL_PROTOCOL_DTLS_12, rtc::KT_DEFAULT)),
+                 initializing_client()->GetDtlsCipherStats(),
+                 kMaxWaitForStatsMs);
+  EXPECT_EQ(1, init_observer->GetEnumCounter(
+                   webrtc::kEnumCounterAudioSslCipher,
+                   rtc::SSLStreamAdapter::GetDefaultSslCipherForTest(
+                       rtc::SSL_PROTOCOL_DTLS_12, rtc::KT_DEFAULT)));
+
+  EXPECT_EQ_WAIT(rtc::SrtpCryptoSuiteToName(kDefaultSrtpCryptoSuite),
+                 initializing_client()->GetSrtpCipherStats(),
+                 kMaxWaitForStatsMs);
+  EXPECT_EQ(1,
+            init_observer->GetEnumCounter(webrtc::kEnumCounterAudioSrtpCipher,
+                                          kDefaultSrtpCryptoSuite));
+}
+
+// Test that DTLS 1.0 is used if the initiator supports DTLS 1.2 and the
+// receiver supports 1.0.
+TEST_F(P2PTestConductor, GetDtls12Init) {
+  PeerConnectionFactory::Options init_options;
+  init_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12;
+  PeerConnectionFactory::Options recv_options;
+  recv_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10;
+  ASSERT_TRUE(
+      CreateTestClients(nullptr, &init_options, nullptr, &recv_options));
+  rtc::scoped_refptr<webrtc::FakeMetricsObserver>
+      init_observer = new rtc::RefCountedObject<webrtc::FakeMetricsObserver>();
+  initializing_client()->pc()->RegisterUMAObserver(init_observer);
+  LocalP2PTest();
+
+  EXPECT_EQ_WAIT(rtc::SSLStreamAdapter::SslCipherSuiteToName(
+                     rtc::SSLStreamAdapter::GetDefaultSslCipherForTest(
+                         rtc::SSL_PROTOCOL_DTLS_10, rtc::KT_DEFAULT)),
+                 initializing_client()->GetDtlsCipherStats(),
+                 kMaxWaitForStatsMs);
+  EXPECT_EQ(1, init_observer->GetEnumCounter(
+                   webrtc::kEnumCounterAudioSslCipher,
+                   rtc::SSLStreamAdapter::GetDefaultSslCipherForTest(
+                       rtc::SSL_PROTOCOL_DTLS_10, rtc::KT_DEFAULT)));
+
+  EXPECT_EQ_WAIT(rtc::SrtpCryptoSuiteToName(kDefaultSrtpCryptoSuite),
+                 initializing_client()->GetSrtpCipherStats(),
+                 kMaxWaitForStatsMs);
+  EXPECT_EQ(1,
+            init_observer->GetEnumCounter(webrtc::kEnumCounterAudioSrtpCipher,
+                                          kDefaultSrtpCryptoSuite));
+}
+
+// Test that DTLS 1.0 is used if the initiator supports DTLS 1.0 and the
+// receiver supports 1.2.
+TEST_F(P2PTestConductor, GetDtls12Recv) {
+  PeerConnectionFactory::Options init_options;
+  init_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10;
+  PeerConnectionFactory::Options recv_options;
+  recv_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12;
+  ASSERT_TRUE(
+      CreateTestClients(nullptr, &init_options, nullptr, &recv_options));
+  rtc::scoped_refptr<webrtc::FakeMetricsObserver>
+      init_observer = new rtc::RefCountedObject<webrtc::FakeMetricsObserver>();
+  initializing_client()->pc()->RegisterUMAObserver(init_observer);
+  LocalP2PTest();
+
+  EXPECT_EQ_WAIT(rtc::SSLStreamAdapter::SslCipherSuiteToName(
+                     rtc::SSLStreamAdapter::GetDefaultSslCipherForTest(
+                         rtc::SSL_PROTOCOL_DTLS_10, rtc::KT_DEFAULT)),
+                 initializing_client()->GetDtlsCipherStats(),
+                 kMaxWaitForStatsMs);
+  EXPECT_EQ(1, init_observer->GetEnumCounter(
+                   webrtc::kEnumCounterAudioSslCipher,
+                   rtc::SSLStreamAdapter::GetDefaultSslCipherForTest(
+                       rtc::SSL_PROTOCOL_DTLS_10, rtc::KT_DEFAULT)));
+
+  EXPECT_EQ_WAIT(rtc::SrtpCryptoSuiteToName(kDefaultSrtpCryptoSuite),
+                 initializing_client()->GetSrtpCipherStats(),
+                 kMaxWaitForStatsMs);
+  EXPECT_EQ(1,
+            init_observer->GetEnumCounter(webrtc::kEnumCounterAudioSrtpCipher,
+                                          kDefaultSrtpCryptoSuite));
+}
+
+// This test sets up a call between two parties with audio, video and an RTP
+// data channel.
+TEST_F(P2PTestConductor, LocalP2PTestRtpDataChannel) {
+  FakeConstraints setup_constraints;
+  setup_constraints.SetAllowRtpDataChannels();
+  ASSERT_TRUE(CreateTestClients(&setup_constraints, &setup_constraints));
+  initializing_client()->CreateDataChannel();
+  LocalP2PTest();
+  ASSERT_TRUE(initializing_client()->data_channel() != nullptr);
+  ASSERT_TRUE(receiving_client()->data_channel() != nullptr);
+  EXPECT_TRUE_WAIT(initializing_client()->data_observer()->IsOpen(),
+                   kMaxWaitMs);
+  EXPECT_TRUE_WAIT(receiving_client()->data_observer()->IsOpen(),
+                   kMaxWaitMs);
+
+  std::string data = "hello world";
+
+  SendRtpData(initializing_client()->data_channel(), data);
+  EXPECT_EQ_WAIT(data, receiving_client()->data_observer()->last_message(),
+                 kMaxWaitMs);
+
+  SendRtpData(receiving_client()->data_channel(), data);
+  EXPECT_EQ_WAIT(data, initializing_client()->data_observer()->last_message(),
+                 kMaxWaitMs);
+
+  receiving_client()->data_channel()->Close();
+  // Send new offer and answer.
+  receiving_client()->Negotiate();
+  EXPECT_FALSE(initializing_client()->data_observer()->IsOpen());
+  EXPECT_FALSE(receiving_client()->data_observer()->IsOpen());
+}
+
+// This test sets up a call between two parties with audio, video and an SCTP
+// data channel.
+TEST_F(P2PTestConductor, LocalP2PTestSctpDataChannel) {
+  ASSERT_TRUE(CreateTestClients());
+  initializing_client()->CreateDataChannel();
+  LocalP2PTest();
+  ASSERT_TRUE(initializing_client()->data_channel() != nullptr);
+  EXPECT_TRUE_WAIT(receiving_client()->data_channel() != nullptr, kMaxWaitMs);
+  EXPECT_TRUE_WAIT(initializing_client()->data_observer()->IsOpen(),
+                   kMaxWaitMs);
+  EXPECT_TRUE_WAIT(receiving_client()->data_observer()->IsOpen(), kMaxWaitMs);
+
+  std::string data = "hello world";
+
+  initializing_client()->data_channel()->Send(DataBuffer(data));
+  EXPECT_EQ_WAIT(data, receiving_client()->data_observer()->last_message(),
+                 kMaxWaitMs);
+
+  receiving_client()->data_channel()->Send(DataBuffer(data));
+  EXPECT_EQ_WAIT(data, initializing_client()->data_observer()->last_message(),
+                 kMaxWaitMs);
+
+  receiving_client()->data_channel()->Close();
+  EXPECT_TRUE_WAIT(!initializing_client()->data_observer()->IsOpen(),
+                   kMaxWaitMs);
+  EXPECT_TRUE_WAIT(!receiving_client()->data_observer()->IsOpen(), kMaxWaitMs);
+}
+
+// This test sets up a call between two parties and creates a data channel.
+// It verifies that received data is buffered unless an observer has been
+// registered.
+// RTP data channels can receive data before the underlying transport has
+// detected that a channel is writable, and thus data can be received before
+// the data channel state changes to open. That is hard to test directly, but
+// the same buffering is used in that case.
+TEST_F(P2PTestConductor, RegisterDataChannelObserver) {
+  FakeConstraints setup_constraints;
+  setup_constraints.SetAllowRtpDataChannels();
+  ASSERT_TRUE(CreateTestClients(&setup_constraints, &setup_constraints));
+  initializing_client()->CreateDataChannel();
+  initializing_client()->Negotiate();
+
+  ASSERT_TRUE(initializing_client()->data_channel() != nullptr);
+  ASSERT_TRUE(receiving_client()->data_channel() != nullptr);
+  EXPECT_TRUE_WAIT(initializing_client()->data_observer()->IsOpen(),
+                   kMaxWaitMs);
+  EXPECT_EQ_WAIT(DataChannelInterface::kOpen,
+                 receiving_client()->data_channel()->state(), kMaxWaitMs);
+
+  // Unregister the existing observer.
+  receiving_client()->data_channel()->UnregisterObserver();
+
+  std::string data = "hello world";
+  SendRtpData(initializing_client()->data_channel(), data);
+
+  // Wait a while to allow the sent data to arrive before an observer is
+  // registered.
+  rtc::Thread::Current()->ProcessMessages(100);
+
+  MockDataChannelObserver new_observer(receiving_client()->data_channel());
+  EXPECT_EQ_WAIT(data, new_observer.last_message(), kMaxWaitMs);
+}
+
+// This test sets up a call between two parties with audio and video, but only
+// the initiating client supports data.
+TEST_F(P2PTestConductor, LocalP2PTestReceiverDoesntSupportData) {
+  FakeConstraints setup_constraints_1;
+  setup_constraints_1.SetAllowRtpDataChannels();
+  // Must disable DTLS to make negotiation succeed.
+  setup_constraints_1.SetMandatory(
+      MediaConstraintsInterface::kEnableDtlsSrtp, false);
+  FakeConstraints setup_constraints_2;
+  setup_constraints_2.SetMandatory(
+      MediaConstraintsInterface::kEnableDtlsSrtp, false);
+  ASSERT_TRUE(CreateTestClients(&setup_constraints_1, &setup_constraints_2));
+  initializing_client()->CreateDataChannel();
+  LocalP2PTest();
+  EXPECT_TRUE(initializing_client()->data_channel() != nullptr);
+  EXPECT_FALSE(receiving_client()->data_channel());
+  EXPECT_FALSE(initializing_client()->data_observer()->IsOpen());
+}
+
+// This test sets up a call between two parties with audio and video. Once
+// audio and video are set up and flowing, a data channel is negotiated.
+TEST_F(P2PTestConductor, AddDataChannelAfterRenegotiation) {
+  FakeConstraints setup_constraints;
+  setup_constraints.SetAllowRtpDataChannels();
+  ASSERT_TRUE(CreateTestClients(&setup_constraints, &setup_constraints));
+  LocalP2PTest();
+  initializing_client()->CreateDataChannel();
+  // Send new offer and answer.
+  initializing_client()->Negotiate();
+  ASSERT_TRUE(initializing_client()->data_channel() != nullptr);
+  ASSERT_TRUE(receiving_client()->data_channel() != nullptr);
+  EXPECT_TRUE_WAIT(initializing_client()->data_observer()->IsOpen(),
+                   kMaxWaitMs);
+  EXPECT_TRUE_WAIT(receiving_client()->data_observer()->IsOpen(),
+                   kMaxWaitMs);
+}
+
+// This test sets up a Jsep call with an SCTP DataChannel and verifies that
+// the negotiation is completed without error.
+#ifdef HAVE_SCTP
+TEST_F(P2PTestConductor, CreateOfferWithSctpDataChannel) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  FakeConstraints constraints;
+  constraints.SetMandatory(
+      MediaConstraintsInterface::kEnableDtlsSrtp, true);
+  ASSERT_TRUE(CreateTestClients(&constraints, &constraints));
+  initializing_client()->CreateDataChannel();
+  initializing_client()->Negotiate(false, false);
+}
+#endif
+
+// This test sets up a call between two parties with audio and video.
+// During the call, the initializing side restarts ICE and the test verifies
+// that new ICE candidates are generated and that audio and video can still
+// flow.
+TEST_F(P2PTestConductor, IceRestart) {
+  ASSERT_TRUE(CreateTestClients());
+
+  // Negotiate, wait for ICE completion, and make sure audio and video play.
+  LocalP2PTest();
+
+  // Create an SDP string of the first audio candidate for both clients.
+  const webrtc::IceCandidateCollection* audio_candidates_initiator =
+      initializing_client()->pc()->local_description()->candidates(0);
+  const webrtc::IceCandidateCollection* audio_candidates_receiver =
+      receiving_client()->pc()->local_description()->candidates(0);
+  ASSERT_GT(audio_candidates_initiator->count(), 0u);
+  ASSERT_GT(audio_candidates_receiver->count(), 0u);
+  std::string initiator_candidate;
+  EXPECT_TRUE(
+      audio_candidates_initiator->at(0)->ToString(&initiator_candidate));
+  std::string receiver_candidate;
+  EXPECT_TRUE(audio_candidates_receiver->at(0)->ToString(&receiver_candidate));
+
+  // Restart ICE on the initializing client.
+  receiving_client()->SetExpectIceRestart(true);
+  initializing_client()->IceRestart();
+
+  // Negotiate, wait for ICE completion again, and make sure audio and video
+  // play.
+  LocalP2PTest();
+
+  // Create an SDP string of the first audio candidate for both clients again.
+  const webrtc::IceCandidateCollection* audio_candidates_initiator_restart =
+      initializing_client()->pc()->local_description()->candidates(0);
+  const webrtc::IceCandidateCollection* audio_candidates_receiver_restart =
+      receiving_client()->pc()->local_description()->candidates(0);
+  ASSERT_GT(audio_candidates_initiator_restart->count(), 0u);
+  ASSERT_GT(audio_candidates_receiver_restart->count(), 0u);
+  std::string initiator_candidate_restart;
+  EXPECT_TRUE(audio_candidates_initiator_restart->at(0)->ToString(
+      &initiator_candidate_restart));
+  std::string receiver_candidate_restart;
+  EXPECT_TRUE(audio_candidates_receiver_restart->at(0)->ToString(
+      &receiver_candidate_restart));
+
+  // Verify that the first candidates in the local session descriptions have
+  // changed.
+  EXPECT_NE(initiator_candidate, initiator_candidate_restart);
+  EXPECT_NE(receiver_candidate, receiver_candidate_restart);
+}
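The IceRestart() call above is a test-wrapper helper; at the API level an ICE
restart is requested through the next offer. A minimal sketch of what such a
helper presumably does, using the constraint-based API of this era (|pc| and
|offer_observer| are assumed to exist):

  // Mark the next offer as an ICE-restarting offer, then renegotiate as
  // usual. The new offer carries fresh ICE ufrag/pwd, so both sides generate
  // new candidates -- which is what the EXPECT_NE checks above verify.
  FakeConstraints offer_constraints;
  offer_constraints.SetMandatory(MediaConstraintsInterface::kIceRestart, true);
  pc->CreateOffer(offer_observer, &offer_constraints);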
+
+// This test sets up a call between two parties with audio and video.
+// It then renegotiates setting the video m-line to "port 0", then later
+// renegotiates again, enabling video.
+TEST_F(P2PTestConductor, LocalP2PTestVideoDisableEnable) {
+  ASSERT_TRUE(CreateTestClients());
+
+  // Do initial negotiation. Will result in video and audio sendonly m-lines.
+  receiving_client()->set_auto_add_stream(false);
+  initializing_client()->AddMediaStream(true, true);
+  initializing_client()->Negotiate();
+
+  // Negotiate again, disabling the video m-line (receiving client will
+  // set port to 0 due to mandatory "OfferToReceiveVideo: false" constraint).
+  receiving_client()->SetReceiveVideo(false);
+  initializing_client()->Negotiate();
+
+  // Enable video and do negotiation again, making sure video is received
+  // end-to-end.
+  receiving_client()->SetReceiveVideo(true);
+  receiving_client()->AddMediaStream(true, true);
+  LocalP2PTest();
+}
+
+// This test sets up a Jsep call between two parties with an external
+// VideoDecoderFactory.
+// TODO(holmer): Disabled due to sometimes crashing on buildbots.
+// See issue webrtc/2378.
+TEST_F(P2PTestConductor, DISABLED_LocalP2PTestWithVideoDecoderFactory) {
+  ASSERT_TRUE(CreateTestClients());
+  EnableVideoDecoderFactory();
+  LocalP2PTest();
+}
+
+// This tests that if we negotiate after calling CreateSender but before we
+// have a track, then set a track later, frames from the newly-set track are
+// received end-to-end.
+TEST_F(P2PTestConductor, EarlyWarmupTest) {
+  ASSERT_TRUE(CreateTestClients());
+  auto audio_sender =
+      initializing_client()->pc()->CreateSender("audio", "stream_id");
+  auto video_sender =
+      initializing_client()->pc()->CreateSender("video", "stream_id");
+  initializing_client()->Negotiate();
+  // Wait for ICE connection to complete, without any tracks.
+  // Note that the receiving client WILL (in HandleIncomingOffer) create
+  // tracks, so it's only the initiator here that's doing early warmup.
+  ASSERT_TRUE_WAIT(SessionActive(), kMaxWaitForActivationMs);
+  VerifySessionDescriptions();
+  EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted,
+                 initializing_client()->ice_connection_state(),
+                 kMaxWaitForFramesMs);
+  EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected,
+                 receiving_client()->ice_connection_state(),
+                 kMaxWaitForFramesMs);
+  // Now set the tracks, and expect frames to immediately start flowing.
+  EXPECT_TRUE(
+      audio_sender->SetTrack(initializing_client()->CreateLocalAudioTrack("")));
+  EXPECT_TRUE(
+      video_sender->SetTrack(initializing_client()->CreateLocalVideoTrack("")));
+  EXPECT_TRUE_WAIT(FramesNotPending(kEndAudioFrameCount, kEndVideoFrameCount),
+                   kMaxWaitForFramesMs);
+}
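The warmup pattern exercised here is available directly to applications:
create senders and negotiate before any capture source exists, then attach
tracks once they are ready. A minimal sketch, assuming a PeerConnection |pc|
and a track |audio_track| created later:

  // Reserve the m-line and start ICE/DTLS before any media exists.
  rtc::scoped_refptr<webrtc::RtpSenderInterface> sender =
      pc->CreateSender("audio", "stream_id");
  // ... run offer/answer and wait for the ICE connection here ...
  // Attaching the track later starts media flowing without renegotiation.
  sender->SetTrack(audio_track.get());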
+
+class IceServerParsingTest : public testing::Test {
+ public:
+  // Convenience for parsing a single URL.
+  bool ParseUrl(const std::string& url) {
+    return ParseUrl(url, std::string(), std::string());
+  }
+
+  bool ParseUrl(const std::string& url,
+                const std::string& username,
+                const std::string& password) {
+    PeerConnectionInterface::IceServers servers;
+    PeerConnectionInterface::IceServer server;
+    server.urls.push_back(url);
+    server.username = username;
+    server.password = password;
+    servers.push_back(server);
+    return webrtc::ParseIceServers(servers, &stun_servers_, &turn_servers_);
+  }
+
+ protected:
+  cricket::ServerAddresses stun_servers_;
+  std::vector<cricket::RelayServerConfig> turn_servers_;
+};
+
+// Make sure all STUN/TURN prefixes are parsed correctly.
+TEST_F(IceServerParsingTest, ParseStunPrefixes) {
+  EXPECT_TRUE(ParseUrl("stun:hostname"));
+  EXPECT_EQ(1U, stun_servers_.size());
+  EXPECT_EQ(0U, turn_servers_.size());
+  stun_servers_.clear();
+
+  EXPECT_TRUE(ParseUrl("stuns:hostname"));
+  EXPECT_EQ(1U, stun_servers_.size());
+  EXPECT_EQ(0U, turn_servers_.size());
+  stun_servers_.clear();
+
+  EXPECT_TRUE(ParseUrl("turn:hostname"));
+  EXPECT_EQ(0U, stun_servers_.size());
+  EXPECT_EQ(1U, turn_servers_.size());
+  EXPECT_FALSE(turn_servers_[0].ports[0].secure);
+  turn_servers_.clear();
+
+  EXPECT_TRUE(ParseUrl("turns:hostname"));
+  EXPECT_EQ(0U, stun_servers_.size());
+  EXPECT_EQ(1U, turn_servers_.size());
+  EXPECT_TRUE(turn_servers_[0].ports[0].secure);
+  turn_servers_.clear();
+
+  // Invalid prefixes.
+  EXPECT_FALSE(ParseUrl("stunn:hostname"));
+  EXPECT_FALSE(ParseUrl(":hostname"));
+  EXPECT_FALSE(ParseUrl(":"));
+  EXPECT_FALSE(ParseUrl(""));
+}
+
+TEST_F(IceServerParsingTest, VerifyDefaults) {
+  // TURNS defaults
+  EXPECT_TRUE(ParseUrl("turns:hostname"));
+  EXPECT_EQ(1U, turn_servers_.size());
+  EXPECT_EQ(5349, turn_servers_[0].ports[0].address.port());
+  EXPECT_EQ(cricket::PROTO_TCP, turn_servers_[0].ports[0].proto);
+  turn_servers_.clear();
+
+  // TURN defaults
+  EXPECT_TRUE(ParseUrl("turn:hostname"));
+  EXPECT_EQ(1U, turn_servers_.size());
+  EXPECT_EQ(3478, turn_servers_[0].ports[0].address.port());
+  EXPECT_EQ(cricket::PROTO_UDP, turn_servers_[0].ports[0].proto);
+  turn_servers_.clear();
+
+  // STUN defaults
+  EXPECT_TRUE(ParseUrl("stun:hostname"));
+  EXPECT_EQ(1U, stun_servers_.size());
+  EXPECT_EQ(3478, stun_servers_.begin()->port());
+  stun_servers_.clear();
+}
+
+// Check that the six combinations of IPv4/IPv6/hostname, with and without
+// port, are parsed correctly.
+TEST_F(IceServerParsingTest, ParseHostnameAndPort) {
+  EXPECT_TRUE(ParseUrl("stun:1.2.3.4:1234"));
+  EXPECT_EQ(1U, stun_servers_.size());
+  EXPECT_EQ("1.2.3.4", stun_servers_.begin()->hostname());
+  EXPECT_EQ(1234, stun_servers_.begin()->port());
+  stun_servers_.clear();
+
+  EXPECT_TRUE(ParseUrl("stun:[1:2:3:4:5:6:7:8]:4321"));
+  EXPECT_EQ(1U, stun_servers_.size());
+  EXPECT_EQ("1:2:3:4:5:6:7:8", stun_servers_.begin()->hostname());
+  EXPECT_EQ(4321, stun_servers_.begin()->port());
+  stun_servers_.clear();
+
+  EXPECT_TRUE(ParseUrl("stun:hostname:9999"));
+  EXPECT_EQ(1U, stun_servers_.size());
+  EXPECT_EQ("hostname", stun_servers_.begin()->hostname());
+  EXPECT_EQ(9999, stun_servers_.begin()->port());
+  stun_servers_.clear();
+
+  EXPECT_TRUE(ParseUrl("stun:1.2.3.4"));
+  EXPECT_EQ(1U, stun_servers_.size());
+  EXPECT_EQ("1.2.3.4", stun_servers_.begin()->hostname());
+  EXPECT_EQ(3478, stun_servers_.begin()->port());
+  stun_servers_.clear();
+
+  EXPECT_TRUE(ParseUrl("stun:[1:2:3:4:5:6:7:8]"));
+  EXPECT_EQ(1U, stun_servers_.size());
+  EXPECT_EQ("1:2:3:4:5:6:7:8", stun_servers_.begin()->hostname());
+  EXPECT_EQ(3478, stun_servers_.begin()->port());
+  stun_servers_.clear();
+
+  EXPECT_TRUE(ParseUrl("stun:hostname"));
+  EXPECT_EQ(1U, stun_servers_.size());
+  EXPECT_EQ("hostname", stun_servers_.begin()->hostname());
+  EXPECT_EQ(3478, stun_servers_.begin()->port());
+  stun_servers_.clear();
+
+  // Try some invalid hostname:port strings.
+  EXPECT_FALSE(ParseUrl("stun:hostname:99a99"));
+  EXPECT_FALSE(ParseUrl("stun:hostname:-1"));
+  EXPECT_FALSE(ParseUrl("stun:hostname:port:more"));
+  EXPECT_FALSE(ParseUrl("stun:hostname:port more"));
+  EXPECT_FALSE(ParseUrl("stun:hostname:"));
+  EXPECT_FALSE(ParseUrl("stun:[1:2:3:4:5:6:7:8]junk:1000"));
+  EXPECT_FALSE(ParseUrl("stun::5555"));
+  EXPECT_FALSE(ParseUrl("stun:"));
+}
+
+// Test parsing the "?transport=xxx" part of the URL.
+TEST_F(IceServerParsingTest, ParseTransport) {
+  EXPECT_TRUE(ParseUrl("turn:hostname:1234?transport=tcp"));
+  EXPECT_EQ(1U, turn_servers_.size());
+  EXPECT_EQ(cricket::PROTO_TCP, turn_servers_[0].ports[0].proto);
+  turn_servers_.clear();
+
+  EXPECT_TRUE(ParseUrl("turn:hostname?transport=udp"));
+  EXPECT_EQ(1U, turn_servers_.size());
+  EXPECT_EQ(cricket::PROTO_UDP, turn_servers_[0].ports[0].proto);
+  turn_servers_.clear();
+
+  EXPECT_FALSE(ParseUrl("turn:hostname?transport=invalid"));
+}
+
+// Test parsing ICE username contained in URL.
+TEST_F(IceServerParsingTest, ParseUsername) {
+  EXPECT_TRUE(ParseUrl("turn:user@hostname"));
+  EXPECT_EQ(1U, turn_servers_.size());
+  EXPECT_EQ("user", turn_servers_[0].credentials.username);
+  turn_servers_.clear();
+
+  EXPECT_FALSE(ParseUrl("turn:@hostname"));
+  EXPECT_FALSE(ParseUrl("turn:username@"));
+  EXPECT_FALSE(ParseUrl("turn:@"));
+  EXPECT_FALSE(ParseUrl("turn:user@name@hostname"));
+}
+
+// Test that username and password from IceServer is copied into the resulting
+// RelayServerConfig.
+TEST_F(IceServerParsingTest, CopyUsernameAndPasswordFromIceServer) {
+  EXPECT_TRUE(ParseUrl("turn:hostname", "username", "password"));
+  EXPECT_EQ(1U, turn_servers_.size());
+  EXPECT_EQ("username", turn_servers_[0].credentials.username);
+  EXPECT_EQ("password", turn_servers_[0].credentials.password);
+}
+
+// Ensure that if a server has multiple URLs, each one is parsed.
+TEST_F(IceServerParsingTest, ParseMultipleUrls) {
+  PeerConnectionInterface::IceServers servers;
+  PeerConnectionInterface::IceServer server;
+  server.urls.push_back("stun:hostname");
+  server.urls.push_back("turn:hostname");
+  servers.push_back(server);
+  EXPECT_TRUE(webrtc::ParseIceServers(servers, &stun_servers_, &turn_servers_));
+  EXPECT_EQ(1U, stun_servers_.size());
+  EXPECT_EQ(1U, turn_servers_.size());
+}
+
+// Ensure that TURN servers are given unique priorities,
+// so that their resulting candidates have unique priorities.
+TEST_F(IceServerParsingTest, TurnServerPrioritiesUnique) {
+  PeerConnectionInterface::IceServers servers;
+  PeerConnectionInterface::IceServer server;
+  server.urls.push_back("turn:hostname");
+  server.urls.push_back("turn:hostname2");
+  servers.push_back(server);
+  EXPECT_TRUE(webrtc::ParseIceServers(servers, &stun_servers_, &turn_servers_));
+  EXPECT_EQ(2U, turn_servers_.size());
+  EXPECT_NE(turn_servers_[0].priority, turn_servers_[1].priority);
+}
+
+#endif  // !defined(THREAD_SANITIZER)
diff --git a/webrtc/api/peerconnection_unittests.isolate b/webrtc/api/peerconnection_unittests.isolate
new file mode 100644
index 0000000..bc7db90
--- /dev/null
+++ b/webrtc/api/peerconnection_unittests.isolate
@@ -0,0 +1,40 @@
+#
+# libjingle
+# Copyright 2013 Google Inc.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+#  1. Redistributions of source code must retain the above copyright notice,
+#     this list of conditions and the following disclaimer.
+#  2. Redistributions in binary form must reproduce the above copyright notice,
+#     this list of conditions and the following disclaimer in the documentation
+#     and/or other materials provided with the distribution.
+#  3. The name of the author may not be used to endorse or promote products
+#     derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+# EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+{
+  'conditions': [
+    ['OS=="linux" or OS=="mac" or OS=="win"', {
+      'variables': {
+        'command': [
+          '<(PRODUCT_DIR)/peerconnection_unittests<(EXECUTABLE_SUFFIX)',
+        ],
+        'files': [
+          '<(PRODUCT_DIR)/peerconnection_unittests<(EXECUTABLE_SUFFIX)',
+        ],
+      },
+    }],
+  ],
+}
diff --git a/webrtc/api/peerconnectionendtoend_unittest.cc b/webrtc/api/peerconnectionendtoend_unittest.cc
new file mode 100644
index 0000000..adcfe57
--- /dev/null
+++ b/webrtc/api/peerconnectionendtoend_unittest.cc
@@ -0,0 +1,387 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/test/peerconnectiontestwrapper.h"
+// Notice that mockpeerconnectionobservers.h must be included after the above!
+#include "webrtc/api/test/mockpeerconnectionobservers.h"
+#ifdef WEBRTC_ANDROID
+#include "webrtc/api/test/androidtestinitializer.h"
+#endif
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/ssladapter.h"
+#include "webrtc/base/sslstreamadapter.h"
+#include "webrtc/base/stringencode.h"
+#include "webrtc/base/stringutils.h"
+
+#define MAYBE_SKIP_TEST(feature)                    \
+  if (!(feature())) {                               \
+    LOG(LS_INFO) << "Feature disabled... skipping"; \
+    return;                                         \
+  }
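The skip logic is a macro rather than a helper function so that the return
statement exits the calling test body itself. A call such as
MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp) therefore expands, in
effect, to:

  // Effective expansion inside a TEST_F body:
  if (!(rtc::SSLStreamAdapter::HaveDtlsSrtp())) {
    LOG(LS_INFO) << "Feature disabled... skipping";
    return;  // Leaves the test body, skipping the remaining assertions.
  }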
+
+using webrtc::DataChannelInterface;
+using webrtc::FakeConstraints;
+using webrtc::MediaConstraintsInterface;
+using webrtc::MediaStreamInterface;
+using webrtc::PeerConnectionInterface;
+
+namespace {
+
+const size_t kMaxWait = 10000;
+
+}  // namespace
+
+class PeerConnectionEndToEndTest
+    : public sigslot::has_slots<>,
+      public testing::Test {
+ public:
+  typedef std::vector<rtc::scoped_refptr<DataChannelInterface> >
+      DataChannelList;
+
+  PeerConnectionEndToEndTest()
+      : caller_(new rtc::RefCountedObject<PeerConnectionTestWrapper>(
+                    "caller")),
+        callee_(new rtc::RefCountedObject<PeerConnectionTestWrapper>(
+                    "callee")) {
+#ifdef WEBRTC_ANDROID
+    webrtc::InitializeAndroidObjects();
+#endif
+  }
+
+  void CreatePcs() {
+    CreatePcs(NULL);
+  }
+
+  void CreatePcs(const MediaConstraintsInterface* pc_constraints) {
+    EXPECT_TRUE(caller_->CreatePc(pc_constraints));
+    EXPECT_TRUE(callee_->CreatePc(pc_constraints));
+    PeerConnectionTestWrapper::Connect(caller_.get(), callee_.get());
+
+    caller_->SignalOnDataChannel.connect(
+        this, &PeerConnectionEndToEndTest::OnCallerAddedDataChannel);
+    callee_->SignalOnDataChannel.connect(
+        this, &PeerConnectionEndToEndTest::OnCalleeAddedDataChannel);
+  }
+
+  void GetAndAddUserMedia() {
+    FakeConstraints audio_constraints;
+    FakeConstraints video_constraints;
+    GetAndAddUserMedia(true, audio_constraints, true, video_constraints);
+  }
+
+  void GetAndAddUserMedia(bool audio, FakeConstraints audio_constraints,
+                          bool video, FakeConstraints video_constraints) {
+    caller_->GetAndAddUserMedia(audio, audio_constraints,
+                                video, video_constraints);
+    callee_->GetAndAddUserMedia(audio, audio_constraints,
+                                video, video_constraints);
+  }
+
+  void Negotiate() {
+    caller_->CreateOffer(NULL);
+  }
+
+  void WaitForCallEstablished() {
+    caller_->WaitForCallEstablished();
+    callee_->WaitForCallEstablished();
+  }
+
+  void WaitForConnection() {
+    caller_->WaitForConnection();
+    callee_->WaitForConnection();
+  }
+
+  void OnCallerAddedDataChannel(DataChannelInterface* dc) {
+    caller_signaled_data_channels_.push_back(dc);
+  }
+
+  void OnCalleeAddedDataChannel(DataChannelInterface* dc) {
+    callee_signaled_data_channels_.push_back(dc);
+  }
+
+  // Tests that |dc1| and |dc2| can send to and receive from each other.
+  void TestDataChannelSendAndReceive(
+      DataChannelInterface* dc1, DataChannelInterface* dc2) {
+    rtc::scoped_ptr<webrtc::MockDataChannelObserver> dc1_observer(
+        new webrtc::MockDataChannelObserver(dc1));
+
+    rtc::scoped_ptr<webrtc::MockDataChannelObserver> dc2_observer(
+        new webrtc::MockDataChannelObserver(dc2));
+
+    static const std::string kDummyData = "abcdefg";
+    webrtc::DataBuffer buffer(kDummyData);
+    EXPECT_TRUE(dc1->Send(buffer));
+    EXPECT_EQ_WAIT(kDummyData, dc2_observer->last_message(), kMaxWait);
+
+    EXPECT_TRUE(dc2->Send(buffer));
+    EXPECT_EQ_WAIT(kDummyData, dc1_observer->last_message(), kMaxWait);
+
+    EXPECT_EQ(1U, dc1_observer->received_message_count());
+    EXPECT_EQ(1U, dc2_observer->received_message_count());
+  }
+
+  void WaitForDataChannelsToOpen(DataChannelInterface* local_dc,
+                                 const DataChannelList& remote_dc_list,
+                                 size_t remote_dc_index) {
+    EXPECT_EQ_WAIT(DataChannelInterface::kOpen, local_dc->state(), kMaxWait);
+
+    EXPECT_TRUE_WAIT(remote_dc_list.size() > remote_dc_index, kMaxWait);
+    EXPECT_EQ_WAIT(DataChannelInterface::kOpen,
+                   remote_dc_list[remote_dc_index]->state(),
+                   kMaxWait);
+    EXPECT_EQ(local_dc->id(), remote_dc_list[remote_dc_index]->id());
+  }
+
+  void CloseDataChannels(DataChannelInterface* local_dc,
+                         const DataChannelList& remote_dc_list,
+                         size_t remote_dc_index) {
+    local_dc->Close();
+    EXPECT_EQ_WAIT(DataChannelInterface::kClosed, local_dc->state(), kMaxWait);
+    EXPECT_EQ_WAIT(DataChannelInterface::kClosed,
+                   remote_dc_list[remote_dc_index]->state(),
+                   kMaxWait);
+  }
+
+ protected:
+  rtc::scoped_refptr<PeerConnectionTestWrapper> caller_;
+  rtc::scoped_refptr<PeerConnectionTestWrapper> callee_;
+  DataChannelList caller_signaled_data_channels_;
+  DataChannelList callee_signaled_data_channels_;
+};
+
+// Disabled for TSan v2, see
+// https://bugs.chromium.org/p/webrtc/issues/detail?id=4719 for details.
+// Disabled for Mac, see
+// https://bugs.chromium.org/p/webrtc/issues/detail?id=5231 for details.
+#if !defined(THREAD_SANITIZER) && !defined(WEBRTC_MAC)
+TEST_F(PeerConnectionEndToEndTest, Call) {
+  CreatePcs();
+  GetAndAddUserMedia();
+  Negotiate();
+  WaitForCallEstablished();
+}
+#endif  // !defined(THREAD_SANITIZER) && !defined(WEBRTC_MAC)
+
+TEST_F(PeerConnectionEndToEndTest, CallWithLegacySdp) {
+  FakeConstraints pc_constraints;
+  pc_constraints.AddMandatory(MediaConstraintsInterface::kEnableDtlsSrtp,
+                              false);
+  CreatePcs(&pc_constraints);
+  GetAndAddUserMedia();
+  Negotiate();
+  WaitForCallEstablished();
+}
+
+// Verifies that a DataChannel created before the negotiation can transition to
+// "OPEN" and transfer data.
+TEST_F(PeerConnectionEndToEndTest, CreateDataChannelBeforeNegotiate) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+
+  CreatePcs();
+
+  webrtc::DataChannelInit init;
+  rtc::scoped_refptr<DataChannelInterface> caller_dc(
+      caller_->CreateDataChannel("data", init));
+  rtc::scoped_refptr<DataChannelInterface> callee_dc(
+      callee_->CreateDataChannel("data", init));
+
+  Negotiate();
+  WaitForConnection();
+
+  WaitForDataChannelsToOpen(caller_dc, callee_signaled_data_channels_, 0);
+  WaitForDataChannelsToOpen(callee_dc, caller_signaled_data_channels_, 0);
+
+  TestDataChannelSendAndReceive(caller_dc, callee_signaled_data_channels_[0]);
+  TestDataChannelSendAndReceive(callee_dc, caller_signaled_data_channels_[0]);
+
+  CloseDataChannels(caller_dc, callee_signaled_data_channels_, 0);
+  CloseDataChannels(callee_dc, caller_signaled_data_channels_, 0);
+}
+
+// Verifies that a DataChannel created after the negotiation can transition to
+// "OPEN" and transfer data.
+#if defined(MEMORY_SANITIZER)
+// Fails under MemorySanitizer:
+// See https://code.google.com/p/webrtc/issues/detail?id=3980.
+#define MAYBE_CreateDataChannelAfterNegotiate DISABLED_CreateDataChannelAfterNegotiate
+#else
+#define MAYBE_CreateDataChannelAfterNegotiate CreateDataChannelAfterNegotiate
+#endif
+TEST_F(PeerConnectionEndToEndTest, MAYBE_CreateDataChannelAfterNegotiate) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+
+  CreatePcs();
+
+  webrtc::DataChannelInit init;
+
+  // This DataChannel is for creating the data content in the negotiation.
+  rtc::scoped_refptr<DataChannelInterface> dummy(
+      caller_->CreateDataChannel("data", init));
+  Negotiate();
+  WaitForConnection();
+
+  // Creates new DataChannels after the negotiation and verifies their states.
+  rtc::scoped_refptr<DataChannelInterface> caller_dc(
+      caller_->CreateDataChannel("hello", init));
+  rtc::scoped_refptr<DataChannelInterface> callee_dc(
+      callee_->CreateDataChannel("hello", init));
+
+  WaitForDataChannelsToOpen(caller_dc, callee_signaled_data_channels_, 1);
+  WaitForDataChannelsToOpen(callee_dc, caller_signaled_data_channels_, 0);
+
+  TestDataChannelSendAndReceive(caller_dc, callee_signaled_data_channels_[1]);
+  TestDataChannelSendAndReceive(callee_dc, caller_signaled_data_channels_[0]);
+
+  CloseDataChannels(caller_dc, callee_signaled_data_channels_, 1);
+  CloseDataChannels(callee_dc, caller_signaled_data_channels_, 0);
+}
+
+// Verifies that DataChannel IDs are even/odd based on the DTLS roles.
+TEST_F(PeerConnectionEndToEndTest, DataChannelIdAssignment) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+
+  CreatePcs();
+
+  webrtc::DataChannelInit init;
+  rtc::scoped_refptr<DataChannelInterface> caller_dc_1(
+      caller_->CreateDataChannel("data", init));
+  rtc::scoped_refptr<DataChannelInterface> callee_dc_1(
+      callee_->CreateDataChannel("data", init));
+
+  Negotiate();
+  WaitForConnection();
+
+  EXPECT_EQ(1U, caller_dc_1->id() % 2);
+  EXPECT_EQ(0U, callee_dc_1->id() % 2);
+
+  rtc::scoped_refptr<DataChannelInterface> caller_dc_2(
+      caller_->CreateDataChannel("data", init));
+  rtc::scoped_refptr<DataChannelInterface> callee_dc_2(
+      callee_->CreateDataChannel("data", init));
+
+  EXPECT_EQ(1U, caller_dc_2->id() % 2);
+  EXPECT_EQ(0U, callee_dc_2->id() % 2);
+}
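The even/odd split follows the SCTP data channel convention: the endpoint in
the DTLS client role allocates even stream IDs and the one in the DTLS server
role allocates odd ones, so the two sides can open channels without colliding.
Here the caller ends up with odd IDs, implying the callee took the DTLS client
role in this negotiation. A sketch of the allocation rule (hypothetical
helper, not a WebRTC API):

  // Returns the next SCTP stream ID for one endpoint. The DTLS client owns
  // the even ID space and the DTLS server the odd one; stepping by two keeps
  // the two ranges disjoint.
  int NextStreamId(bool is_dtls_client, int last_id) {
    if (last_id < 0)
      return is_dtls_client ? 0 : 1;  // First channel on this endpoint.
    return last_id + 2;
  }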
+
+// Verifies that the message is received by the right remote DataChannel when
+// there are multiple DataChannels.
+TEST_F(PeerConnectionEndToEndTest,
+       MessageTransferBetweenTwoPairsOfDataChannels) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+
+  CreatePcs();
+
+  webrtc::DataChannelInit init;
+
+  rtc::scoped_refptr<DataChannelInterface> caller_dc_1(
+      caller_->CreateDataChannel("data", init));
+  rtc::scoped_refptr<DataChannelInterface> caller_dc_2(
+      caller_->CreateDataChannel("data", init));
+
+  Negotiate();
+  WaitForConnection();
+  WaitForDataChannelsToOpen(caller_dc_1, callee_signaled_data_channels_, 0);
+  WaitForDataChannelsToOpen(caller_dc_2, callee_signaled_data_channels_, 1);
+
+  rtc::scoped_ptr<webrtc::MockDataChannelObserver> dc_1_observer(
+      new webrtc::MockDataChannelObserver(callee_signaled_data_channels_[0]));
+
+  rtc::scoped_ptr<webrtc::MockDataChannelObserver> dc_2_observer(
+      new webrtc::MockDataChannelObserver(callee_signaled_data_channels_[1]));
+
+  const std::string message_1 = "hello 1";
+  const std::string message_2 = "hello 2";
+
+  caller_dc_1->Send(webrtc::DataBuffer(message_1));
+  EXPECT_EQ_WAIT(message_1, dc_1_observer->last_message(), kMaxWait);
+
+  caller_dc_2->Send(webrtc::DataBuffer(message_2));
+  EXPECT_EQ_WAIT(message_2, dc_2_observer->last_message(), kMaxWait);
+
+  EXPECT_EQ(1U, dc_1_observer->received_message_count());
+  EXPECT_EQ(1U, dc_2_observer->received_message_count());
+}
+
+// Verifies that a DataChannel added from an OPEN message functions after
+// a channel has been previously closed (webrtc issue 3778).
+// This previously failed because the new channel reuses the ID of the closed
+// channel, and the closed channel was incorrectly still assigned to that ID.
+// TODO(deadbeef): This is disabled because there's currently a race condition
+// caused by the fact that a data channel signals that it's closed before it
+// really is. Re-enable this test once that's fixed.
+TEST_F(PeerConnectionEndToEndTest,
+       DISABLED_DataChannelFromOpenWorksAfterClose) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+
+  CreatePcs();
+
+  webrtc::DataChannelInit init;
+  rtc::scoped_refptr<DataChannelInterface> caller_dc(
+      caller_->CreateDataChannel("data", init));
+
+  Negotiate();
+  WaitForConnection();
+
+  WaitForDataChannelsToOpen(caller_dc, callee_signaled_data_channels_, 0);
+  CloseDataChannels(caller_dc, callee_signaled_data_channels_, 0);
+
+  // Create a new channel and ensure it works after closing the previous one.
+  caller_dc = caller_->CreateDataChannel("data2", init);
+
+  WaitForDataChannelsToOpen(caller_dc, callee_signaled_data_channels_, 1);
+  TestDataChannelSendAndReceive(caller_dc, callee_signaled_data_channels_[1]);
+
+  CloseDataChannels(caller_dc, callee_signaled_data_channels_, 1);
+}
+
+// This tests that if a data channel is closed remotely while not referenced
+// by the application (meaning only the PeerConnection contributes to its
+// reference count), no memory access violation will occur.
+// See: https://code.google.com/p/chromium/issues/detail?id=565048
+TEST_F(PeerConnectionEndToEndTest, CloseDataChannelRemotelyWhileNotReferenced) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+
+  CreatePcs();
+
+  webrtc::DataChannelInit init;
+  rtc::scoped_refptr<DataChannelInterface> caller_dc(
+      caller_->CreateDataChannel("data", init));
+
+  Negotiate();
+  WaitForConnection();
+
+  WaitForDataChannelsToOpen(caller_dc, callee_signaled_data_channels_, 0);
+  // This removes the reference to the remote data channel that we hold.
+  callee_signaled_data_channels_.clear();
+  caller_dc->Close();
+  EXPECT_EQ_WAIT(DataChannelInterface::kClosed, caller_dc->state(), kMaxWait);
+
+  // Wait for a bit longer so the remote data channel will receive the
+  // close message and be destroyed.
+  rtc::Thread::Current()->ProcessMessages(100);
+}
diff --git a/webrtc/api/peerconnectionfactory.cc b/webrtc/api/peerconnectionfactory.cc
new file mode 100644
index 0000000..66545930
--- /dev/null
+++ b/webrtc/api/peerconnectionfactory.cc
@@ -0,0 +1,331 @@
+/*
+ * libjingle
+ * Copyright 2004 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/peerconnectionfactory.h"
+
+#include <utility>
+
+#include "webrtc/api/audiotrack.h"
+#include "webrtc/api/localaudiosource.h"
+#include "webrtc/api/mediastream.h"
+#include "webrtc/api/mediastreamproxy.h"
+#include "webrtc/api/mediastreamtrackproxy.h"
+#include "webrtc/api/peerconnection.h"
+#include "webrtc/api/peerconnectionfactoryproxy.h"
+#include "webrtc/api/peerconnectionproxy.h"
+#include "webrtc/api/videosource.h"
+#include "webrtc/api/videosourceproxy.h"
+#include "webrtc/api/videotrack.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/media/webrtc/webrtcmediaengine.h"
+#include "webrtc/media/webrtc/webrtcvideodecoderfactory.h"
+#include "webrtc/media/webrtc/webrtcvideoencoderfactory.h"
+#include "webrtc/modules/audio_device/include/audio_device.h"
+#include "webrtc/p2p/base/basicpacketsocketfactory.h"
+#include "webrtc/p2p/client/basicportallocator.h"
+
+namespace webrtc {
+
+namespace {
+
+// Passes down the calls to |store_|. See usage in CreatePeerConnection.
+class DtlsIdentityStoreWrapper : public DtlsIdentityStoreInterface {
+ public:
+  DtlsIdentityStoreWrapper(
+      const rtc::scoped_refptr<RefCountedDtlsIdentityStore>& store)
+      : store_(store) {
+    RTC_DCHECK(store_);
+  }
+
+  void RequestIdentity(
+      rtc::KeyType key_type,
+      const rtc::scoped_refptr<webrtc::DtlsIdentityRequestObserver>&
+          observer) override {
+    store_->RequestIdentity(key_type, observer);
+  }
+
+ private:
+  rtc::scoped_refptr<RefCountedDtlsIdentityStore> store_;
+};
+
+}  // anonymous namespace
+
+rtc::scoped_refptr<PeerConnectionFactoryInterface>
+CreatePeerConnectionFactory() {
+  rtc::scoped_refptr<PeerConnectionFactory> pc_factory(
+      new rtc::RefCountedObject<PeerConnectionFactory>());
+
+  // Call Initialize synchronously, but make sure it's executed on
+  // |signaling_thread|.
+  MethodCall0<PeerConnectionFactory, bool> call(
+      pc_factory.get(),
+      &PeerConnectionFactory::Initialize);
+  bool result = call.Marshal(pc_factory->signaling_thread());
+
+  if (!result) {
+    return NULL;
+  }
+  return PeerConnectionFactoryProxy::Create(pc_factory->signaling_thread(),
+                                            pc_factory);
+}
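MethodCall0 with Marshal() performs a synchronous cross-thread call: it posts
the bound method to |signaling_thread| and blocks until the result is
available, which is why Initialize() can safely RTC_DCHECK that it runs on the
signaling thread. The same effect could be sketched with the Thread::Invoke
pattern used later in this file:

  // Equivalent in spirit (sketch only): run Initialize() on the signaling
  // thread and wait for its boolean result.
  bool result = pc_factory->signaling_thread()->Invoke<bool>(
      rtc::Bind(&PeerConnectionFactory::Initialize, pc_factory.get()));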
+
+rtc::scoped_refptr<PeerConnectionFactoryInterface>
+CreatePeerConnectionFactory(
+    rtc::Thread* worker_thread,
+    rtc::Thread* signaling_thread,
+    AudioDeviceModule* default_adm,
+    cricket::WebRtcVideoEncoderFactory* encoder_factory,
+    cricket::WebRtcVideoDecoderFactory* decoder_factory) {
+  rtc::scoped_refptr<PeerConnectionFactory> pc_factory(
+      new rtc::RefCountedObject<PeerConnectionFactory>(worker_thread,
+                                                       signaling_thread,
+                                                       default_adm,
+                                                       encoder_factory,
+                                                       decoder_factory));
+
+  // Call Initialize synchronously, but make sure it's executed on
+  // |signaling_thread|.
+  MethodCall0<PeerConnectionFactory, bool> call(
+      pc_factory.get(),
+      &PeerConnectionFactory::Initialize);
+  bool result = call.Marshal(signaling_thread);
+
+  if (!result) {
+    return NULL;
+  }
+  return PeerConnectionFactoryProxy::Create(signaling_thread, pc_factory);
+}
+
+PeerConnectionFactory::PeerConnectionFactory()
+    : owns_ptrs_(true),
+      wraps_current_thread_(false),
+      signaling_thread_(rtc::ThreadManager::Instance()->CurrentThread()),
+      worker_thread_(new rtc::Thread) {
+  if (!signaling_thread_) {
+    signaling_thread_ = rtc::ThreadManager::Instance()->WrapCurrentThread();
+    wraps_current_thread_ = true;
+  }
+  worker_thread_->Start();
+}
+
+PeerConnectionFactory::PeerConnectionFactory(
+    rtc::Thread* worker_thread,
+    rtc::Thread* signaling_thread,
+    AudioDeviceModule* default_adm,
+    cricket::WebRtcVideoEncoderFactory* video_encoder_factory,
+    cricket::WebRtcVideoDecoderFactory* video_decoder_factory)
+    : owns_ptrs_(false),
+      wraps_current_thread_(false),
+      signaling_thread_(signaling_thread),
+      worker_thread_(worker_thread),
+      default_adm_(default_adm),
+      video_encoder_factory_(video_encoder_factory),
+      video_decoder_factory_(video_decoder_factory) {
+  ASSERT(worker_thread != NULL);
+  ASSERT(signaling_thread != NULL);
+  // TODO: Currently there is no way of creating an external ADM in the
+  // libjingle source tree, so we can't currently assert that this is
+  // non-NULL.
+  // ASSERT(default_adm != NULL);
+}
+
+PeerConnectionFactory::~PeerConnectionFactory() {
+  RTC_DCHECK(signaling_thread_->IsCurrent());
+  channel_manager_.reset(nullptr);
+
+  // Make sure |worker_thread_| and |signaling_thread_| outlive
+  // |dtls_identity_store_|, |default_socket_factory_| and
+  // |default_network_manager_|.
+  dtls_identity_store_ = nullptr;
+  default_socket_factory_ = nullptr;
+  default_network_manager_ = nullptr;
+
+  if (owns_ptrs_) {
+    if (wraps_current_thread_)
+      rtc::ThreadManager::Instance()->UnwrapCurrentThread();
+    delete worker_thread_;
+  }
+}
+
+bool PeerConnectionFactory::Initialize() {
+  RTC_DCHECK(signaling_thread_->IsCurrent());
+  rtc::InitRandom(rtc::Time());
+
+  default_network_manager_.reset(new rtc::BasicNetworkManager());
+  if (!default_network_manager_) {
+    return false;
+  }
+
+  default_socket_factory_.reset(
+      new rtc::BasicPacketSocketFactory(worker_thread_));
+  if (!default_socket_factory_) {
+    return false;
+  }
+
+  // TODO: Need to make sure only one VoE is created inside
+  // WebRtcMediaEngine.
+  cricket::MediaEngineInterface* media_engine =
+      worker_thread_->Invoke<cricket::MediaEngineInterface*>(
+          rtc::Bind(&PeerConnectionFactory::CreateMediaEngine_w, this));
+
+  channel_manager_.reset(
+      new cricket::ChannelManager(media_engine, worker_thread_));
+
+  channel_manager_->SetVideoRtxEnabled(true);
+  if (!channel_manager_->Init()) {
+    return false;
+  }
+
+  dtls_identity_store_ = new RefCountedDtlsIdentityStore(
+      signaling_thread_, worker_thread_);
+
+  return true;
+}
+
+rtc::scoped_refptr<AudioSourceInterface>
+PeerConnectionFactory::CreateAudioSource(
+    const MediaConstraintsInterface* constraints) {
+  RTC_DCHECK(signaling_thread_->IsCurrent());
+  rtc::scoped_refptr<LocalAudioSource> source(
+      LocalAudioSource::Create(options_, constraints));
+  return source;
+}
+
+rtc::scoped_refptr<VideoSourceInterface>
+PeerConnectionFactory::CreateVideoSource(
+    cricket::VideoCapturer* capturer,
+    const MediaConstraintsInterface* constraints) {
+  RTC_DCHECK(signaling_thread_->IsCurrent());
+  rtc::scoped_refptr<VideoSource> source(VideoSource::Create(
+      channel_manager_.get(), capturer, constraints, false));
+  return VideoSourceProxy::Create(signaling_thread_, source);
+}
+
+bool PeerConnectionFactory::StartAecDump(rtc::PlatformFile file,
+                                         int64_t max_size_bytes) {
+  RTC_DCHECK(signaling_thread_->IsCurrent());
+  return channel_manager_->StartAecDump(file, max_size_bytes);
+}
+
+void PeerConnectionFactory::StopAecDump() {
+  RTC_DCHECK(signaling_thread_->IsCurrent());
+  channel_manager_->StopAecDump();
+}
+
+bool PeerConnectionFactory::StartRtcEventLog(rtc::PlatformFile file) {
+  RTC_DCHECK(signaling_thread_->IsCurrent());
+  return channel_manager_->StartRtcEventLog(file);
+}
+
+void PeerConnectionFactory::StopRtcEventLog() {
+  RTC_DCHECK(signaling_thread_->IsCurrent());
+  channel_manager_->StopRtcEventLog();
+}
+
+rtc::scoped_refptr<PeerConnectionInterface>
+PeerConnectionFactory::CreatePeerConnection(
+    const PeerConnectionInterface::RTCConfiguration& configuration,
+    const MediaConstraintsInterface* constraints,
+    rtc::scoped_ptr<cricket::PortAllocator> allocator,
+    rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+    PeerConnectionObserver* observer) {
+  RTC_DCHECK(signaling_thread_->IsCurrent());
+
+  if (!dtls_identity_store.get()) {
+    // Because |pc|->Initialize() takes ownership of the store, we need a new
+    // wrapper object that can be deleted without deleting the underlying
+    // |dtls_identity_store_|, protecting it from being deleted multiple times.
+    dtls_identity_store.reset(
+        new DtlsIdentityStoreWrapper(dtls_identity_store_));
+  }
+
+  if (!allocator) {
+    allocator.reset(new cricket::BasicPortAllocator(
+        default_network_manager_.get(), default_socket_factory_.get()));
+  }
+  allocator->SetNetworkIgnoreMask(options_.network_ignore_mask);
+
+  rtc::scoped_refptr<PeerConnection> pc(
+      new rtc::RefCountedObject<PeerConnection>(this));
+  if (!pc->Initialize(configuration, constraints, std::move(allocator),
+                      std::move(dtls_identity_store), observer)) {
+    return nullptr;
+  }
+  return PeerConnectionProxy::Create(signaling_thread(), pc);
+}
+
+rtc::scoped_refptr<MediaStreamInterface>
+PeerConnectionFactory::CreateLocalMediaStream(const std::string& label) {
+  RTC_DCHECK(signaling_thread_->IsCurrent());
+  return MediaStreamProxy::Create(signaling_thread_,
+                                  MediaStream::Create(label));
+}
+
+rtc::scoped_refptr<VideoTrackInterface>
+PeerConnectionFactory::CreateVideoTrack(
+    const std::string& id,
+    VideoSourceInterface* source) {
+  RTC_DCHECK(signaling_thread_->IsCurrent());
+  rtc::scoped_refptr<VideoTrackInterface> track(
+      VideoTrack::Create(id, source));
+  return VideoTrackProxy::Create(signaling_thread_, track);
+}
+
+rtc::scoped_refptr<AudioTrackInterface>
+PeerConnectionFactory::CreateAudioTrack(const std::string& id,
+                                        AudioSourceInterface* source) {
+  RTC_DCHECK(signaling_thread_->IsCurrent());
+  rtc::scoped_refptr<AudioTrackInterface> track(AudioTrack::Create(id, source));
+  return AudioTrackProxy::Create(signaling_thread_, track);
+}
+
+webrtc::MediaControllerInterface* PeerConnectionFactory::CreateMediaController()
+    const {
+  RTC_DCHECK(signaling_thread_->IsCurrent());
+  return MediaControllerInterface::Create(worker_thread_,
+                                          channel_manager_.get());
+}
+
+rtc::Thread* PeerConnectionFactory::signaling_thread() {
+  // This method can be called on a different thread when the factory is
+  // created in CreatePeerConnectionFactory().
+  return signaling_thread_;
+}
+
+rtc::Thread* PeerConnectionFactory::worker_thread() {
+  RTC_DCHECK(signaling_thread_->IsCurrent());
+  return worker_thread_;
+}
+
+cricket::MediaEngineInterface* PeerConnectionFactory::CreateMediaEngine_w() {
+  ASSERT(worker_thread_ == rtc::Thread::Current());
+  return cricket::WebRtcMediaEngineFactory::Create(
+      default_adm_.get(), video_encoder_factory_.get(),
+      video_decoder_factory_.get());
+}
+
+}  // namespace webrtc
diff --git a/webrtc/api/peerconnectionfactory.h b/webrtc/api/peerconnectionfactory.h
new file mode 100644
index 0000000..7011736
--- /dev/null
+++ b/webrtc/api/peerconnectionfactory.h
@@ -0,0 +1,132 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_PEERCONNECTIONFACTORY_H_
+#define WEBRTC_API_PEERCONNECTIONFACTORY_H_
+
+#include <string>
+
+#include "talk/session/media/channelmanager.h"
+#include "webrtc/api/dtlsidentitystore.h"
+#include "webrtc/api/mediacontroller.h"
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/api/peerconnectioninterface.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+class BasicNetworkManager;
+class BasicPacketSocketFactory;
+}
+
+namespace webrtc {
+
+typedef rtc::RefCountedObject<DtlsIdentityStoreImpl>
+    RefCountedDtlsIdentityStore;
+
+class PeerConnectionFactory : public PeerConnectionFactoryInterface {
+ public:
+  virtual void SetOptions(const Options& options) {
+    options_ = options;
+  }
+
+  rtc::scoped_refptr<PeerConnectionInterface> CreatePeerConnection(
+      const PeerConnectionInterface::RTCConfiguration& configuration,
+      const MediaConstraintsInterface* constraints,
+      rtc::scoped_ptr<cricket::PortAllocator> allocator,
+      rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+      PeerConnectionObserver* observer) override;
+
+  bool Initialize();
+
+  rtc::scoped_refptr<MediaStreamInterface>
+      CreateLocalMediaStream(const std::string& label) override;
+
+  rtc::scoped_refptr<AudioSourceInterface> CreateAudioSource(
+      const MediaConstraintsInterface* constraints) override;
+
+  rtc::scoped_refptr<VideoSourceInterface> CreateVideoSource(
+      cricket::VideoCapturer* capturer,
+      const MediaConstraintsInterface* constraints) override;
+
+  rtc::scoped_refptr<VideoTrackInterface>
+      CreateVideoTrack(const std::string& id,
+                       VideoSourceInterface* video_source) override;
+
+  rtc::scoped_refptr<AudioTrackInterface>
+      CreateAudioTrack(const std::string& id,
+                       AudioSourceInterface* audio_source) override;
+
+  bool StartAecDump(rtc::PlatformFile file, int64_t max_size_bytes) override;
+  void StopAecDump() override;
+  bool StartRtcEventLog(rtc::PlatformFile file) override;
+  void StopRtcEventLog() override;
+
+  virtual webrtc::MediaControllerInterface* CreateMediaController() const;
+  virtual rtc::Thread* signaling_thread();
+  virtual rtc::Thread* worker_thread();
+  const Options& options() const { return options_; }
+
+ protected:
+  PeerConnectionFactory();
+  PeerConnectionFactory(
+      rtc::Thread* worker_thread,
+      rtc::Thread* signaling_thread,
+      AudioDeviceModule* default_adm,
+      cricket::WebRtcVideoEncoderFactory* video_encoder_factory,
+      cricket::WebRtcVideoDecoderFactory* video_decoder_factory);
+  virtual ~PeerConnectionFactory();
+
+ private:
+  cricket::MediaEngineInterface* CreateMediaEngine_w();
+
+  bool owns_ptrs_;
+  bool wraps_current_thread_;
+  rtc::Thread* signaling_thread_;
+  rtc::Thread* worker_thread_;
+  Options options_;
+  // External Audio device used for audio playback.
+  rtc::scoped_refptr<AudioDeviceModule> default_adm_;
+  rtc::scoped_ptr<cricket::ChannelManager> channel_manager_;
+  // External Video encoder factory. This can be NULL if the client has not
+  // injected any. In that case, video engine will use the internal SW encoder.
+  rtc::scoped_ptr<cricket::WebRtcVideoEncoderFactory>
+      video_encoder_factory_;
+  // External Video decoder factory. This can be NULL if the client has not
+  // injected any. In that case, video engine will use the internal SW decoder.
+  rtc::scoped_ptr<cricket::WebRtcVideoDecoderFactory>
+      video_decoder_factory_;
+  rtc::scoped_ptr<rtc::BasicNetworkManager> default_network_manager_;
+  rtc::scoped_ptr<rtc::BasicPacketSocketFactory> default_socket_factory_;
+
+  rtc::scoped_refptr<RefCountedDtlsIdentityStore> dtls_identity_store_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_PEERCONNECTIONFACTORY_H_
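For reference, a minimal sketch of how the factory surface above is typically
driven (error handling elided; the label and track id are illustrative):

  // Create the factory, then build a local stream carrying one audio track
  // from the default (constraint-free) audio source.
  rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory =
      webrtc::CreatePeerConnectionFactory();
  rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track =
      factory->CreateAudioTrack("audio", factory->CreateAudioSource(nullptr));
  rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
      factory->CreateLocalMediaStream("stream_label");
  stream->AddTrack(audio_track);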
diff --git a/webrtc/api/peerconnectionfactory_unittest.cc b/webrtc/api/peerconnectionfactory_unittest.cc
new file mode 100644
index 0000000..a526ea5
--- /dev/null
+++ b/webrtc/api/peerconnectionfactory_unittest.cc
@@ -0,0 +1,374 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <string>
+#include <utility>
+
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/api/peerconnectionfactory.h"
+#ifdef WEBRTC_ANDROID
+#include "webrtc/api/test/androidtestinitializer.h"
+#endif
+#include "webrtc/api/test/fakedtlsidentitystore.h"
+#include "webrtc/api/test/fakevideotrackrenderer.h"
+#include "webrtc/api/videosourceinterface.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/media/base/fakevideocapturer.h"
+#include "webrtc/media/webrtc/webrtccommon.h"
+#include "webrtc/media/webrtc/webrtcvoe.h"
+#include "webrtc/p2p/client/fakeportallocator.h"
+
+using webrtc::DataChannelInterface;
+using webrtc::DtlsIdentityStoreInterface;
+using webrtc::FakeVideoTrackRenderer;
+using webrtc::MediaStreamInterface;
+using webrtc::PeerConnectionFactoryInterface;
+using webrtc::PeerConnectionInterface;
+using webrtc::PeerConnectionObserver;
+using webrtc::VideoSourceInterface;
+using webrtc::VideoTrackInterface;
+
+namespace {
+
+static const char kStunIceServer[] = "stun:stun.l.google.com:19302";
+static const char kTurnIceServer[] = "turn:test%40hello.com@test.com:1234";
+static const char kTurnIceServerWithTransport[] =
+    "turn:test@hello.com?transport=tcp";
+static const char kSecureTurnIceServer[] =
+    "turns:test@hello.com?transport=tcp";
+static const char kSecureTurnIceServerWithoutTransportParam[] =
+    "turns:test_no_transport@hello.com:443";
+static const char kSecureTurnIceServerWithoutTransportAndPortParam[] =
+    "turns:test_no_transport@hello.com";
+static const char kTurnIceServerWithNoUsernameInUri[] =
+    "turn:test.com:1234";
+static const char kTurnPassword[] = "turnpassword";
+static const int kDefaultStunPort = 3478;
+static const int kDefaultStunTlsPort = 5349;
+static const char kTurnUsername[] = "test";
+static const char kStunIceServerWithIPv4Address[] = "stun:1.2.3.4:1234";
+static const char kStunIceServerWithIPv4AddressWithoutPort[] = "stun:1.2.3.4";
+static const char kStunIceServerWithIPv6Address[] = "stun:[2401:fa00:4::]:1234";
+static const char kStunIceServerWithIPv6AddressWithoutPort[] =
+    "stun:[2401:fa00:4::]";
+static const char kTurnIceServerWithIPv6Address[] =
+    "turn:test@[2401:fa00:4::]:1234";
+
+class NullPeerConnectionObserver : public PeerConnectionObserver {
+ public:
+  virtual void OnMessage(const std::string& msg) {}
+  virtual void OnSignalingMessage(const std::string& msg) {}
+  virtual void OnSignalingChange(
+      PeerConnectionInterface::SignalingState new_state) {}
+  virtual void OnAddStream(MediaStreamInterface* stream) {}
+  virtual void OnRemoveStream(MediaStreamInterface* stream) {}
+  virtual void OnDataChannel(DataChannelInterface* data_channel) {}
+  virtual void OnRenegotiationNeeded() {}
+  virtual void OnIceConnectionChange(
+      PeerConnectionInterface::IceConnectionState new_state) {}
+  virtual void OnIceGatheringChange(
+      PeerConnectionInterface::IceGatheringState new_state) {}
+  virtual void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) {}
+};
+
+}  // namespace
+
+class PeerConnectionFactoryTest : public testing::Test {
+  void SetUp() {
+#ifdef WEBRTC_ANDROID
+    webrtc::InitializeAndroidObjects();
+#endif
+    factory_ = webrtc::CreatePeerConnectionFactory(rtc::Thread::Current(),
+                                                   rtc::Thread::Current(),
+                                                   NULL,
+                                                   NULL,
+                                                   NULL);
+
+    ASSERT_TRUE(factory_.get() != NULL);
+    port_allocator_.reset(
+        new cricket::FakePortAllocator(rtc::Thread::Current(), nullptr));
+    raw_port_allocator_ = port_allocator_.get();
+  }
+
+ protected:
+  void VerifyStunServers(cricket::ServerAddresses stun_servers) {
+    EXPECT_EQ(stun_servers, raw_port_allocator_->stun_servers());
+  }
+
+  void VerifyTurnServers(std::vector<cricket::RelayServerConfig> turn_servers) {
+    EXPECT_EQ(turn_servers.size(), raw_port_allocator_->turn_servers().size());
+    for (size_t i = 0; i < turn_servers.size(); ++i) {
+      ASSERT_EQ(1u, turn_servers[i].ports.size());
+      EXPECT_EQ(1u, raw_port_allocator_->turn_servers()[i].ports.size());
+      EXPECT_EQ(
+          turn_servers[i].ports[0].address.ToString(),
+          raw_port_allocator_->turn_servers()[i].ports[0].address.ToString());
+      EXPECT_EQ(turn_servers[i].ports[0].proto,
+                raw_port_allocator_->turn_servers()[i].ports[0].proto);
+      EXPECT_EQ(turn_servers[i].credentials.username,
+                raw_port_allocator_->turn_servers()[i].credentials.username);
+      EXPECT_EQ(turn_servers[i].credentials.password,
+                raw_port_allocator_->turn_servers()[i].credentials.password);
+    }
+  }
+
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory_;
+  NullPeerConnectionObserver observer_;
+  rtc::scoped_ptr<cricket::FakePortAllocator> port_allocator_;
+  // Since the PC owns the port allocator after it's been initialized,
+  // this should only be used when known to be safe.
+  cricket::FakePortAllocator* raw_port_allocator_;
+};
+
+// Verify creation of PeerConnection using internal ADM, video factory and
+// internal libjingle threads.
+TEST(PeerConnectionFactoryTestInternal, CreatePCUsingInternalModules) {
+#ifdef WEBRTC_ANDROID
+  webrtc::InitializeAndroidObjects();
+#endif
+
+  rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      webrtc::CreatePeerConnectionFactory());
+
+  NullPeerConnectionObserver observer;
+  webrtc::PeerConnectionInterface::RTCConfiguration config;
+
+  rtc::scoped_ptr<FakeDtlsIdentityStore> dtls_identity_store(
+      new FakeDtlsIdentityStore());
+  rtc::scoped_refptr<PeerConnectionInterface> pc(factory->CreatePeerConnection(
+      config, nullptr, nullptr, std::move(dtls_identity_store), &observer));
+
+  EXPECT_TRUE(pc.get() != nullptr);
+}
+
+// This test verifies creation of a PeerConnection with a valid STUN and TURN
+// configuration. It also verifies that the URLs are parsed as expected.
+TEST_F(PeerConnectionFactoryTest, CreatePCUsingIceServers) {
+  PeerConnectionInterface::RTCConfiguration config;
+  webrtc::PeerConnectionInterface::IceServer ice_server;
+  ice_server.uri = kStunIceServer;
+  config.servers.push_back(ice_server);
+  ice_server.uri = kTurnIceServer;
+  ice_server.password = kTurnPassword;
+  config.servers.push_back(ice_server);
+  ice_server.uri = kTurnIceServerWithTransport;
+  ice_server.password = kTurnPassword;
+  config.servers.push_back(ice_server);
+  rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
+      new FakeDtlsIdentityStore());
+  rtc::scoped_refptr<PeerConnectionInterface> pc(factory_->CreatePeerConnection(
+      config, nullptr, std::move(port_allocator_),
+      std::move(dtls_identity_store), &observer_));
+  ASSERT_TRUE(pc.get() != NULL);
+  cricket::ServerAddresses stun_servers;
+  rtc::SocketAddress stun1("stun.l.google.com", 19302);
+  stun_servers.insert(stun1);
+  VerifyStunServers(stun_servers);
+  std::vector<cricket::RelayServerConfig> turn_servers;
+  cricket::RelayServerConfig turn1("test.com", 1234, "test@hello.com",
+                                   kTurnPassword, cricket::PROTO_UDP, false);
+  turn_servers.push_back(turn1);
+  cricket::RelayServerConfig turn2("hello.com", kDefaultStunPort, "test",
+                                   kTurnPassword, cricket::PROTO_TCP, false);
+  turn_servers.push_back(turn2);
+  VerifyTurnServers(turn_servers);
+}
+
+// This test verifies creation of a PeerConnection with a valid STUN and TURN
+// configuration. It also verifies that the list of URLs is parsed as expected.
+TEST_F(PeerConnectionFactoryTest, CreatePCUsingIceServersUrls) {
+  PeerConnectionInterface::RTCConfiguration config;
+  webrtc::PeerConnectionInterface::IceServer ice_server;
+  ice_server.urls.push_back(kStunIceServer);
+  ice_server.urls.push_back(kTurnIceServer);
+  ice_server.urls.push_back(kTurnIceServerWithTransport);
+  ice_server.password = kTurnPassword;
+  config.servers.push_back(ice_server);
+  rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
+      new FakeDtlsIdentityStore());
+  rtc::scoped_refptr<PeerConnectionInterface> pc(factory_->CreatePeerConnection(
+      config, nullptr, std::move(port_allocator_),
+      std::move(dtls_identity_store), &observer_));
+  ASSERT_TRUE(pc.get() != NULL);
+  cricket::ServerAddresses stun_servers;
+  rtc::SocketAddress stun1("stun.l.google.com", 19302);
+  stun_servers.insert(stun1);
+  VerifyStunServers(stun_servers);
+  std::vector<cricket::RelayServerConfig> turn_servers;
+  cricket::RelayServerConfig turn1("test.com", 1234, "test@hello.com",
+                                   kTurnPassword, cricket::PROTO_UDP, false);
+  turn_servers.push_back(turn1);
+  cricket::RelayServerConfig turn2("hello.com", kDefaultStunPort, "test",
+                                   kTurnPassword, cricket::PROTO_TCP, false);
+  turn_servers.push_back(turn2);
+  VerifyTurnServers(turn_servers);
+}
+
+TEST_F(PeerConnectionFactoryTest, CreatePCUsingNoUsernameInUri) {
+  PeerConnectionInterface::RTCConfiguration config;
+  webrtc::PeerConnectionInterface::IceServer ice_server;
+  ice_server.uri = kStunIceServer;
+  config.servers.push_back(ice_server);
+  ice_server.uri = kTurnIceServerWithNoUsernameInUri;
+  ice_server.username = kTurnUsername;
+  ice_server.password = kTurnPassword;
+  config.servers.push_back(ice_server);
+  rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
+      new FakeDtlsIdentityStore());
+  rtc::scoped_refptr<PeerConnectionInterface> pc(factory_->CreatePeerConnection(
+      config, nullptr, std::move(port_allocator_),
+      std::move(dtls_identity_store), &observer_));
+  ASSERT_TRUE(pc.get() != NULL);
+  std::vector<cricket::RelayServerConfig> turn_servers;
+  cricket::RelayServerConfig turn("test.com", 1234, kTurnUsername,
+                                  kTurnPassword, cricket::PROTO_UDP, false);
+  turn_servers.push_back(turn);
+  VerifyTurnServers(turn_servers);
+}
+
+// This test verifies that the PeerConnection is created properly with a TURN
+// URL that has a transport parameter in it.
+TEST_F(PeerConnectionFactoryTest, CreatePCUsingTurnUrlWithTransportParam) {
+  PeerConnectionInterface::RTCConfiguration config;
+  webrtc::PeerConnectionInterface::IceServer ice_server;
+  ice_server.uri = kTurnIceServerWithTransport;
+  ice_server.password = kTurnPassword;
+  config.servers.push_back(ice_server);
+  rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
+      new FakeDtlsIdentityStore());
+  rtc::scoped_refptr<PeerConnectionInterface> pc(factory_->CreatePeerConnection(
+      config, nullptr, std::move(port_allocator_),
+      std::move(dtls_identity_store), &observer_));
+  ASSERT_TRUE(pc.get() != NULL);
+  std::vector<cricket::RelayServerConfig> turn_servers;
+  cricket::RelayServerConfig turn("hello.com", kDefaultStunPort, "test",
+                                  kTurnPassword, cricket::PROTO_TCP, false);
+  turn_servers.push_back(turn);
+  VerifyTurnServers(turn_servers);
+}
+
+TEST_F(PeerConnectionFactoryTest, CreatePCUsingSecureTurnUrl) {
+  PeerConnectionInterface::RTCConfiguration config;
+  webrtc::PeerConnectionInterface::IceServer ice_server;
+  ice_server.uri = kSecureTurnIceServer;
+  ice_server.password = kTurnPassword;
+  config.servers.push_back(ice_server);
+  ice_server.uri = kSecureTurnIceServerWithoutTransportParam;
+  ice_server.password = kTurnPassword;
+  config.servers.push_back(ice_server);
+  ice_server.uri = kSecureTurnIceServerWithoutTransportAndPortParam;
+  ice_server.password = kTurnPassword;
+  config.servers.push_back(ice_server);
+  rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
+      new FakeDtlsIdentityStore());
+  rtc::scoped_refptr<PeerConnectionInterface> pc(factory_->CreatePeerConnection(
+      config, nullptr, std::move(port_allocator_),
+      std::move(dtls_identity_store), &observer_));
+  ASSERT_TRUE(pc.get() != NULL);
+  std::vector<cricket::RelayServerConfig> turn_servers;
+  cricket::RelayServerConfig turn1("hello.com", kDefaultStunTlsPort, "test",
+                                   kTurnPassword, cricket::PROTO_TCP, true);
+  turn_servers.push_back(turn1);
+  // A TURNS URI without a transport param should default to tcp.
+  cricket::RelayServerConfig turn2("hello.com", 443, "test_no_transport",
+                                   kTurnPassword, cricket::PROTO_TCP, true);
+  turn_servers.push_back(turn2);
+  cricket::RelayServerConfig turn3("hello.com", kDefaultStunTlsPort,
+                                   "test_no_transport", kTurnPassword,
+                                   cricket::PROTO_TCP, true);
+  turn_servers.push_back(turn3);
+  VerifyTurnServers(turn_servers);
+}
+
+TEST_F(PeerConnectionFactoryTest, CreatePCUsingIPLiteralAddress) {
+  PeerConnectionInterface::RTCConfiguration config;
+  webrtc::PeerConnectionInterface::IceServer ice_server;
+  ice_server.uri = kStunIceServerWithIPv4Address;
+  config.servers.push_back(ice_server);
+  ice_server.uri = kStunIceServerWithIPv4AddressWithoutPort;
+  config.servers.push_back(ice_server);
+  ice_server.uri = kStunIceServerWithIPv6Address;
+  config.servers.push_back(ice_server);
+  ice_server.uri = kStunIceServerWithIPv6AddressWithoutPort;
+  config.servers.push_back(ice_server);
+  ice_server.uri = kTurnIceServerWithIPv6Address;
+  ice_server.password = kTurnPassword;
+  config.servers.push_back(ice_server);
+  rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
+      new FakeDtlsIdentityStore());
+  rtc::scoped_refptr<PeerConnectionInterface> pc(factory_->CreatePeerConnection(
+      config, nullptr, std::move(port_allocator_),
+      std::move(dtls_identity_store), &observer_));
+  ASSERT_TRUE(pc.get() != NULL);
+  cricket::ServerAddresses stun_servers;
+  rtc::SocketAddress stun1("1.2.3.4", 1234);
+  stun_servers.insert(stun1);
+  rtc::SocketAddress stun2("1.2.3.4", 3478);
+  stun_servers.insert(stun2);  // Default port
+  rtc::SocketAddress stun3("2401:fa00:4::", 1234);
+  stun_servers.insert(stun3);
+  rtc::SocketAddress stun4("2401:fa00:4::", 3478);
+  stun_servers.insert(stun4);  // Default port
+  VerifyStunServers(stun_servers);
+
+  std::vector<cricket::RelayServerConfig> turn_servers;
+  cricket::RelayServerConfig turn1("2401:fa00:4::", 1234, "test", kTurnPassword,
+                                   cricket::PROTO_UDP, false);
+  turn_servers.push_back(turn1);
+  VerifyTurnServers(turn_servers);
+}
+
+// This test verifies that the captured stream is rendered locally using a
+// local video track.
+TEST_F(PeerConnectionFactoryTest, LocalRendering) {
+  cricket::FakeVideoCapturer* capturer = new cricket::FakeVideoCapturer();
+  // The source takes ownership of |capturer|.
+  rtc::scoped_refptr<VideoSourceInterface> source(
+      factory_->CreateVideoSource(capturer, NULL));
+  ASSERT_TRUE(source.get() != NULL);
+  rtc::scoped_refptr<VideoTrackInterface> track(
+      factory_->CreateVideoTrack("testlabel", source));
+  ASSERT_TRUE(track.get() != NULL);
+  FakeVideoTrackRenderer local_renderer(track);
+
+  EXPECT_EQ(0, local_renderer.num_rendered_frames());
+  EXPECT_TRUE(capturer->CaptureFrame());
+  EXPECT_EQ(1, local_renderer.num_rendered_frames());
+  EXPECT_FALSE(local_renderer.black_frame());
+
+  track->set_enabled(false);
+  EXPECT_TRUE(capturer->CaptureFrame());
+  EXPECT_EQ(2, local_renderer.num_rendered_frames());
+  EXPECT_TRUE(local_renderer.black_frame());
+
+  track->set_enabled(true);
+  EXPECT_TRUE(capturer->CaptureFrame());
+  EXPECT_EQ(3, local_renderer.num_rendered_frames());
+  EXPECT_FALSE(local_renderer.black_frame());
+}
diff --git a/webrtc/api/peerconnectionfactoryproxy.h b/webrtc/api/peerconnectionfactoryproxy.h
new file mode 100644
index 0000000..65f0969
--- /dev/null
+++ b/webrtc/api/peerconnectionfactoryproxy.h
@@ -0,0 +1,86 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_PEERCONNECTIONFACTORYPROXY_H_
+#define WEBRTC_API_PEERCONNECTIONFACTORYPROXY_H_
+
+#include <string>
+#include <utility>
+
+#include "webrtc/api/peerconnectioninterface.h"
+#include "webrtc/api/proxy.h"
+#include "webrtc/base/bind.h"
+
+namespace webrtc {
+
+BEGIN_PROXY_MAP(PeerConnectionFactory)
+  PROXY_METHOD1(void, SetOptions, const Options&)
+  // Can't use PROXY_METHOD5 because scoped_ptr must be moved.
+  // TODO(tommi,hbos): Use of templates to support scoped_ptr?
+  rtc::scoped_refptr<PeerConnectionInterface> CreatePeerConnection(
+      const PeerConnectionInterface::RTCConfiguration& a1,
+      const MediaConstraintsInterface* a2,
+      rtc::scoped_ptr<cricket::PortAllocator> a3,
+      rtc::scoped_ptr<DtlsIdentityStoreInterface> a4,
+      PeerConnectionObserver* a5) override {
+    return owner_thread_->Invoke<rtc::scoped_refptr<PeerConnectionInterface>>(
+        rtc::Bind(&PeerConnectionFactoryProxy::CreatePeerConnection_ot, this,
+                  a1, a2, a3.release(), a4.release(), a5));
+  }
+  PROXY_METHOD1(rtc::scoped_refptr<MediaStreamInterface>,
+                CreateLocalMediaStream, const std::string&)
+  PROXY_METHOD1(rtc::scoped_refptr<AudioSourceInterface>,
+                CreateAudioSource, const MediaConstraintsInterface*)
+  PROXY_METHOD2(rtc::scoped_refptr<VideoSourceInterface>,
+                CreateVideoSource, cricket::VideoCapturer*,
+                const MediaConstraintsInterface*)
+  PROXY_METHOD2(rtc::scoped_refptr<VideoTrackInterface>,
+                CreateVideoTrack, const std::string&, VideoSourceInterface*)
+  PROXY_METHOD2(rtc::scoped_refptr<AudioTrackInterface>,
+                CreateAudioTrack, const std::string&, AudioSourceInterface*)
+  PROXY_METHOD2(bool, StartAecDump, rtc::PlatformFile, int64_t)
+  PROXY_METHOD0(void, StopAecDump)
+  PROXY_METHOD1(bool, StartRtcEventLog, rtc::PlatformFile)
+  PROXY_METHOD0(void, StopRtcEventLog)
+
+ private:
+  rtc::scoped_refptr<PeerConnectionInterface> CreatePeerConnection_ot(
+      const PeerConnectionInterface::RTCConfiguration& a1,
+      const MediaConstraintsInterface* a2,
+      cricket::PortAllocator* a3,
+      DtlsIdentityStoreInterface* a4,
+      PeerConnectionObserver* a5) {
+    rtc::scoped_ptr<cricket::PortAllocator> ptr_a3(a3);
+    rtc::scoped_ptr<DtlsIdentityStoreInterface> ptr_a4(a4);
+    return c_->CreatePeerConnection(a1, a2, std::move(ptr_a3),
+                                    std::move(ptr_a4), a5);
+  }
+END_PROXY()
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_PEERCONNECTIONFACTORYPROXY_H_
diff --git a/webrtc/api/peerconnectioninterface.h b/webrtc/api/peerconnectioninterface.h
new file mode 100644
index 0000000..5cdb097
--- /dev/null
+++ b/webrtc/api/peerconnectioninterface.h
@@ -0,0 +1,622 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains the PeerConnection interface as defined in
+// http://dev.w3.org/2011/webrtc/editor/webrtc.html#peer-to-peer-connections.
+// Applications must use this interface to implement peer connection
+// functionality. The PeerConnectionFactory class provides factory methods to
+// create PeerConnection, MediaStream and media track objects.
+//
+// The following steps are needed to set up a typical call using JSEP:
+// 1. Create a PeerConnectionFactoryInterface. Check constructors for more
+// information about input parameters.
+// 2. Create a PeerConnection object. Provide a configuration that points to
+// STUN or TURN servers used to generate ICE candidates, and provide an object
+// that implements the PeerConnectionObserver interface.
+// 3. Create local MediaStreams and MediaTracks using the PeerConnectionFactory
+// and add them to the PeerConnection by calling AddStream.
+// 4. Create an offer, serialize it, and send it to the remote peer.
+// 5. Once an ICE candidate has been found, PeerConnection will call the
+// observer function OnIceCandidate. The candidates must also be serialized and
+// sent to the remote peer.
+// 6. Once an answer is received from the remote peer, call
+// SetLocalDescription with the offer and SetRemoteDescription with the
+// remote answer.
+// 7. Once a remote candidate is received from the remote peer, provide it to
+// the PeerConnection by calling AddIceCandidate.
+
+// The receiver of a call can decide to accept or reject it; this decision is
+// made by the application, not the PeerConnection.
+// If the application decides to accept the call:
+// 1. Create a PeerConnectionFactoryInterface if it doesn't already exist.
+// 2. Create a new PeerConnection.
+// 3. Provide the remote offer to the new PeerConnection object by calling
+// SetRemoteDescription.
+// 4. Generate an answer to the remote offer by calling CreateAnswer and send
+// it back to the remote peer.
+// 5. Provide the local answer to the new PeerConnection by calling
+// SetLocalDescription with the answer.
+// 6. Provide the remote ICE candidates by calling AddIceCandidate.
+// 7. Once a candidate has been found, PeerConnection will call the observer
+// function OnIceCandidate. Send these candidates to the remote peer.
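+//
+// A minimal sketch of the caller flow above (illustrative only; MyObserver
+// and offer_observer are hypothetical application-side implementations of
+// PeerConnectionObserver and CreateSessionDescriptionObserver):
+//
+//   rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory(
+//       webrtc::CreatePeerConnectionFactory());
+//   webrtc::PeerConnectionInterface::RTCConfiguration config;
+//   webrtc::PeerConnectionInterface::IceServer server;
+//   server.urls.push_back("stun:stun.l.google.com:19302");
+//   config.servers.push_back(server);
+//   MyObserver observer;
+//   rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc(
+//       factory->CreatePeerConnection(config, nullptr, nullptr, nullptr,
+//                                     &observer));
+//   pc->CreateOffer(offer_observer, nullptr);
+//   // Serialize the description delivered to |offer_observer| and the
+//   // candidates delivered to OnIceCandidate, and send them over the
+//   // application's signaling channel.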
+
+#ifndef WEBRTC_API_PEERCONNECTIONINTERFACE_H_
+#define WEBRTC_API_PEERCONNECTIONINTERFACE_H_
+
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "webrtc/api/datachannelinterface.h"
+#include "webrtc/api/dtlsidentitystore.h"
+#include "webrtc/api/dtlsidentitystore.h"
+#include "webrtc/api/dtmfsenderinterface.h"
+#include "webrtc/api/jsep.h"
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/api/rtpreceiverinterface.h"
+#include "webrtc/api/rtpsenderinterface.h"
+#include "webrtc/api/statstypes.h"
+#include "webrtc/api/umametrics.h"
+#include "webrtc/base/fileutils.h"
+#include "webrtc/base/network.h"
+#include "webrtc/base/rtccertificate.h"
+#include "webrtc/base/socketaddress.h"
+#include "webrtc/base/sslstreamadapter.h"
+#include "webrtc/p2p/base/portallocator.h"
+
+namespace rtc {
+class SSLIdentity;
+class Thread;
+}
+
+namespace cricket {
+class WebRtcVideoDecoderFactory;
+class WebRtcVideoEncoderFactory;
+}
+
+namespace webrtc {
+class AudioDeviceModule;
+class MediaConstraintsInterface;
+
+// MediaStream container interface.
+class StreamCollectionInterface : public rtc::RefCountInterface {
+ public:
+  // TODO(ronghuawu): Update the function names to c++ style, e.g. find -> Find.
+  virtual size_t count() = 0;
+  virtual MediaStreamInterface* at(size_t index) = 0;
+  virtual MediaStreamInterface* find(const std::string& label) = 0;
+  virtual MediaStreamTrackInterface* FindAudioTrack(
+      const std::string& id) = 0;
+  virtual MediaStreamTrackInterface* FindVideoTrack(
+      const std::string& id) = 0;
+
+ protected:
+  // Dtor protected as objects shouldn't be deleted via this interface.
+  ~StreamCollectionInterface() {}
+};
+
+class StatsObserver : public rtc::RefCountInterface {
+ public:
+  virtual void OnComplete(const StatsReports& reports) = 0;
+
+ protected:
+  virtual ~StatsObserver() {}
+};
+
+class MetricsObserverInterface : public rtc::RefCountInterface {
+ public:
+  // |type| is the type of the enum counter to be incremented. |counter|
+  // is the particular counter in that type. |counter_max| is the next sequence
+  // number after the highest counter.
+  virtual void IncrementEnumCounter(PeerConnectionEnumCounterType type,
+                                    int counter,
+                                    int counter_max) {}
+
+  // This is used to handle sparse counters like SSL cipher suites.
+  // TODO(guoweis): Remove the implementation once the dependency's interface
+  // definition is updated.
+  virtual void IncrementSparseEnumCounter(PeerConnectionEnumCounterType type,
+                                          int counter) {
+    IncrementEnumCounter(type, counter, 0 /* Ignored */);
+  }
+
+  virtual void AddHistogramSample(PeerConnectionMetricsName type,
+                                  int value) = 0;
+
+ protected:
+  virtual ~MetricsObserverInterface() {}
+};
+
+typedef MetricsObserverInterface UMAObserver;
+
+class PeerConnectionInterface : public rtc::RefCountInterface {
+ public:
+  // See http://dev.w3.org/2011/webrtc/editor/webrtc.html#state-definitions .
+  enum SignalingState {
+    kStable,
+    kHaveLocalOffer,
+    kHaveLocalPrAnswer,
+    kHaveRemoteOffer,
+    kHaveRemotePrAnswer,
+    kClosed,
+  };
+
+  // TODO(bemasc): Remove IceState when callers are changed to
+  // IceConnection/GatheringState.
+  enum IceState {
+    kIceNew,
+    kIceGathering,
+    kIceWaiting,
+    kIceChecking,
+    kIceConnected,
+    kIceCompleted,
+    kIceFailed,
+    kIceClosed,
+  };
+
+  enum IceGatheringState {
+    kIceGatheringNew,
+    kIceGatheringGathering,
+    kIceGatheringComplete
+  };
+
+  enum IceConnectionState {
+    kIceConnectionNew,
+    kIceConnectionChecking,
+    kIceConnectionConnected,
+    kIceConnectionCompleted,
+    kIceConnectionFailed,
+    kIceConnectionDisconnected,
+    kIceConnectionClosed,
+    kIceConnectionMax,
+  };
+
+  struct IceServer {
+    // TODO(jbauch): Remove uri when all code using it has switched to urls.
+    std::string uri;
+    std::vector<std::string> urls;
+    std::string username;
+    std::string password;
+  };
+  typedef std::vector<IceServer> IceServers;
+
+  enum IceTransportsType {
+    // TODO(pthatcher): Rename these kTransportTypeXXX, but update
+    // Chromium at the same time.
+    kNone,
+    kRelay,
+    kNoHost,
+    kAll
+  };
+
+  // https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-08#section-4.1.1
+  enum BundlePolicy {
+    kBundlePolicyBalanced,
+    kBundlePolicyMaxBundle,
+    kBundlePolicyMaxCompat
+  };
+
+  // https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-09#section-4.1.1
+  enum RtcpMuxPolicy {
+    kRtcpMuxPolicyNegotiate,
+    kRtcpMuxPolicyRequire,
+  };
+
+  enum TcpCandidatePolicy {
+    kTcpCandidatePolicyEnabled,
+    kTcpCandidatePolicyDisabled
+  };
+
+  enum ContinualGatheringPolicy {
+    GATHER_ONCE,
+    GATHER_CONTINUALLY
+  };
+
+  // TODO(hbos): Change into class with private data and public getters.
+  struct RTCConfiguration {
+    static const int kUndefined = -1;
+    // Default maximum number of packets in the audio jitter buffer.
+    static const int kAudioJitterBufferMaxPackets = 50;
+    // TODO(pthatcher): Rename this ice_transport_type, but update
+    // Chromium at the same time.
+    IceTransportsType type;
+    // TODO(pthatcher): Rename this ice_servers, but update Chromium
+    // at the same time.
+    IceServers servers;
+    BundlePolicy bundle_policy;
+    RtcpMuxPolicy rtcp_mux_policy;
+    TcpCandidatePolicy tcp_candidate_policy;
+    int audio_jitter_buffer_max_packets;
+    bool audio_jitter_buffer_fast_accelerate;
+    int ice_connection_receiving_timeout;         // ms
+    int ice_backup_candidate_pair_ping_interval;  // ms
+    ContinualGatheringPolicy continual_gathering_policy;
+    std::vector<rtc::scoped_refptr<rtc::RTCCertificate>> certificates;
+    bool disable_prerenderer_smoothing;
+    RTCConfiguration()
+        : type(kAll),
+          bundle_policy(kBundlePolicyBalanced),
+          rtcp_mux_policy(kRtcpMuxPolicyNegotiate),
+          tcp_candidate_policy(kTcpCandidatePolicyEnabled),
+          audio_jitter_buffer_max_packets(kAudioJitterBufferMaxPackets),
+          audio_jitter_buffer_fast_accelerate(false),
+          ice_connection_receiving_timeout(kUndefined),
+          ice_backup_candidate_pair_ping_interval(kUndefined),
+          continual_gathering_policy(GATHER_ONCE),
+          disable_prerenderer_smoothing(false) {}
+  };
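+
+  // A minimal sketch of populating an RTCConfiguration (the server URLs and
+  // credentials below are placeholders):
+  //
+  //   PeerConnectionInterface::RTCConfiguration config;
+  //   PeerConnectionInterface::IceServer stun;
+  //   stun.urls.push_back("stun:stun.example.org:3478");
+  //   config.servers.push_back(stun);
+  //   PeerConnectionInterface::IceServer turn;
+  //   turn.urls.push_back("turn:turn.example.org:3478?transport=udp");
+  //   turn.username = "user";
+  //   turn.password = "secret";
+  //   config.servers.push_back(turn);
+  //   config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle;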
+
+  struct RTCOfferAnswerOptions {
+    static const int kUndefined = -1;
+    static const int kMaxOfferToReceiveMedia = 1;
+
+    // The default value for constraint offerToReceiveX:true.
+    static const int kOfferToReceiveMediaTrue = 1;
+
+    int offer_to_receive_video;
+    int offer_to_receive_audio;
+    bool voice_activity_detection;
+    bool ice_restart;
+    bool use_rtp_mux;
+
+    RTCOfferAnswerOptions()
+        : offer_to_receive_video(kUndefined),
+          offer_to_receive_audio(kUndefined),
+          voice_activity_detection(true),
+          ice_restart(false),
+          use_rtp_mux(true) {}
+
+    RTCOfferAnswerOptions(int offer_to_receive_video,
+                          int offer_to_receive_audio,
+                          bool voice_activity_detection,
+                          bool ice_restart,
+                          bool use_rtp_mux)
+        : offer_to_receive_video(offer_to_receive_video),
+          offer_to_receive_audio(offer_to_receive_audio),
+          voice_activity_detection(voice_activity_detection),
+          ice_restart(ice_restart),
+          use_rtp_mux(use_rtp_mux) {}
+  };
+
+  // Used by GetStats to decide which stats to include in the stats reports.
+  // |kStatsOutputLevelStandard| includes the standard stats for the
+  // JavaScript API; |kStatsOutputLevelDebug| includes both the standard stats
+  // and additional stats for debugging purposes.
+  enum StatsOutputLevel {
+    kStatsOutputLevelStandard,
+    kStatsOutputLevelDebug,
+  };
+
+  // Accessor methods to active local streams.
+  virtual rtc::scoped_refptr<StreamCollectionInterface>
+      local_streams() = 0;
+
+  // Accessor methods to remote streams.
+  virtual rtc::scoped_refptr<StreamCollectionInterface>
+      remote_streams() = 0;
+
+  // Add a new MediaStream to be sent on this PeerConnection.
+  // Note that a SessionDescription negotiation is needed before the
+  // remote peer can receive the stream.
+  virtual bool AddStream(MediaStreamInterface* stream) = 0;
+
+  // Remove a MediaStream from this PeerConnection.
+  // Note that a SessionDescription negotiation is needed before the
+  // remote peer is notified.
+  virtual void RemoveStream(MediaStreamInterface* stream) = 0;
+
+  // TODO(deadbeef): Make the following two methods pure virtual once
+  // implemented by all subclasses of PeerConnectionInterface.
+  // Add a new MediaStreamTrack to be sent on this PeerConnection.
+  // |streams| indicates which stream labels the track should be associated
+  // with.
+  virtual rtc::scoped_refptr<RtpSenderInterface> AddTrack(
+      MediaStreamTrackInterface* track,
+      std::vector<MediaStreamInterface*> streams) {
+    return nullptr;
+  }
+
+  // Remove an RtpSender from this PeerConnection.
+  // Returns true on success.
+  virtual bool RemoveTrack(RtpSenderInterface* sender) {
+    return false;
+  }
+
+  // Returns a pointer to the created DtmfSender on success.
+  // Otherwise returns NULL.
+  virtual rtc::scoped_refptr<DtmfSenderInterface> CreateDtmfSender(
+      AudioTrackInterface* track) = 0;
+
+  // TODO(deadbeef): Make these pure virtual once all subclasses implement them.
+  // |kind| must be "audio" or "video".
+  // |stream_id| is used to populate the msid attribute; if empty, one will
+  // be generated automatically.
+  virtual rtc::scoped_refptr<RtpSenderInterface> CreateSender(
+      const std::string& kind,
+      const std::string& stream_id) {
+    return rtc::scoped_refptr<RtpSenderInterface>();
+  }
+
+  virtual std::vector<rtc::scoped_refptr<RtpSenderInterface>> GetSenders()
+      const {
+    return std::vector<rtc::scoped_refptr<RtpSenderInterface>>();
+  }
+
+  virtual std::vector<rtc::scoped_refptr<RtpReceiverInterface>> GetReceivers()
+      const {
+    return std::vector<rtc::scoped_refptr<RtpReceiverInterface>>();
+  }
+
+  virtual bool GetStats(StatsObserver* observer,
+                        MediaStreamTrackInterface* track,
+                        StatsOutputLevel level) = 0;
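+  // A minimal sketch of requesting stats (MyStatsObserver is a hypothetical
+  // application subclass of StatsObserver that implements OnComplete; a null
+  // track requests stats for the whole session):
+  //
+  //   rtc::scoped_refptr<MyStatsObserver> stats_observer(
+  //       new rtc::RefCountedObject<MyStatsObserver>());
+  //   pc->GetStats(stats_observer, nullptr,
+  //                PeerConnectionInterface::kStatsOutputLevelStandard);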
+
+  virtual rtc::scoped_refptr<DataChannelInterface> CreateDataChannel(
+      const std::string& label,
+      const DataChannelInit* config) = 0;
+
+  virtual const SessionDescriptionInterface* local_description() const = 0;
+  virtual const SessionDescriptionInterface* remote_description() const = 0;
+
+  // Create a new offer.
+  // The CreateSessionDescriptionObserver callback will be called when done.
+  virtual void CreateOffer(CreateSessionDescriptionObserver* observer,
+                           const MediaConstraintsInterface* constraints) {}
+
+  // TODO(jiayl): remove the default impl and the old interface when chromium
+  // code is updated.
+  virtual void CreateOffer(CreateSessionDescriptionObserver* observer,
+                           const RTCOfferAnswerOptions& options) {}
+
+  // Create an answer to an offer.
+  // The CreateSessionDescriptionObserver callback will be called when done.
+  virtual void CreateAnswer(CreateSessionDescriptionObserver* observer,
+                            const MediaConstraintsInterface* constraints) = 0;
+  // Sets the local session description.
+  // JsepInterface takes the ownership of |desc| even if it fails.
+  // The |observer| callback will be called when done.
+  virtual void SetLocalDescription(SetSessionDescriptionObserver* observer,
+                                   SessionDescriptionInterface* desc) = 0;
+  // Sets the remote session description.
+  // JsepInterface takes the ownership of |desc| even if it fails.
+  // The |observer| callback will be called when done.
+  virtual void SetRemoteDescription(SetSessionDescriptionObserver* observer,
+                                    SessionDescriptionInterface* desc) = 0;
+  // Restarts or updates the ICE Agent process of gathering local candidates
+  // and pinging remote candidates.
+  // TODO(deadbeef): Remove once Chrome is moved over to SetConfiguration.
+  virtual bool UpdateIce(const IceServers& configuration,
+                         const MediaConstraintsInterface* constraints) {
+    return false;
+  }
+  // Sets the PeerConnection's global configuration to |config|.
+  // Any changes to STUN/TURN servers or ICE candidate policy will affect the
+  // next gathering phase, and cause the next call to createOffer to generate
+  // new ICE credentials. Note that the BUNDLE and RTCP-multiplexing policies
+  // cannot be changed with this method.
+  // TODO(deadbeef): Make this pure virtual once all Chrome subclasses of
+  // PeerConnectionInterface implement it.
+  virtual bool SetConfiguration(
+      const PeerConnectionInterface::RTCConfiguration& config) {
+    return false;
+  }
+  // Provides a remote candidate to the ICE Agent.
+  // A copy of |candidate| will be created and added to the remote
+  // description, so the caller of this method retains ownership of
+  // |candidate|.
+  // TODO(ronghuawu): Consider changing this so that AddIceCandidate takes
+  // ownership of |candidate|.
+  virtual bool AddIceCandidate(const IceCandidateInterface* candidate) = 0;
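+  // A minimal sketch of handling a candidate received over the signaling
+  // channel (sdp_mid, sdp_mline_index and sdp are assumed to come from the
+  // remote peer's serialized OnIceCandidate output):
+  //
+  //   webrtc::SdpParseError error;
+  //   rtc::scoped_ptr<webrtc::IceCandidateInterface> candidate(
+  //       webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, sdp, &error));
+  //   if (candidate)
+  //     pc->AddIceCandidate(candidate.get());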
+
+  virtual void RegisterUMAObserver(UMAObserver* observer) = 0;
+
+  // Returns the current SignalingState.
+  virtual SignalingState signaling_state() = 0;
+
+  // TODO(bemasc): Remove ice_state when callers are changed to
+  // IceConnection/GatheringState.
+  // Returns the current IceState.
+  virtual IceState ice_state() = 0;
+  virtual IceConnectionState ice_connection_state() = 0;
+  virtual IceGatheringState ice_gathering_state() = 0;
+
+  // Terminates all media and closes the transport.
+  virtual void Close() = 0;
+
+ protected:
+  // Dtor protected as objects shouldn't be deleted via this interface.
+  ~PeerConnectionInterface() {}
+};
+
+// PeerConnection callback interface. Application should implement these
+// methods.
+class PeerConnectionObserver {
+ public:
+  enum StateType {
+    kSignalingState,
+    kIceState,
+  };
+
+  // Triggered when the SignalingState changes.
+  virtual void OnSignalingChange(
+      PeerConnectionInterface::SignalingState new_state) = 0;
+
+  // Triggered when media is received on a new stream from remote peer.
+  virtual void OnAddStream(MediaStreamInterface* stream) = 0;
+
+  // Triggered when a remote peer closes a stream.
+  virtual void OnRemoveStream(MediaStreamInterface* stream) = 0;
+
+  // Triggered when a remote peer opens a data channel.
+  virtual void OnDataChannel(DataChannelInterface* data_channel) = 0;
+
+  // Triggered when renegotiation is needed, for example when ICE restarts.
+  virtual void OnRenegotiationNeeded() = 0;
+
+  // Called any time the IceConnectionState changes
+  virtual void OnIceConnectionChange(
+      PeerConnectionInterface::IceConnectionState new_state) = 0;
+
+  // Called any time the IceGatheringState changes
+  virtual void OnIceGatheringChange(
+      PeerConnectionInterface::IceGatheringState new_state) = 0;
+
+  // A new ICE candidate has been found.
+  virtual void OnIceCandidate(const IceCandidateInterface* candidate) = 0;
+
+  // Called when the ICE connection receiving status changes.
+  virtual void OnIceConnectionReceivingChange(bool receiving) {}
+
+ protected:
+  // Dtor protected as objects shouldn't be deleted via this interface.
+  ~PeerConnectionObserver() {}
+};
+
+// PeerConnectionFactoryInterface is the factory interface used for creating
+// PeerConnection, MediaStream and media track objects.
+// PeerConnectionFactoryInterface will create the required libjingle threads,
+// socket and network manager factory classes for networking.
+// If an application decides to provide its own threads and its own network
+// implementation of these classes, it should use the alternate
+// CreatePeerConnectionFactory method which accepts threads as input, and use
+// the CreatePeerConnection version that takes a PortAllocator as an argument.
+class PeerConnectionFactoryInterface : public rtc::RefCountInterface {
+ public:
+  class Options {
+   public:
+    Options()
+        : disable_encryption(false),
+          disable_sctp_data_channels(false),
+          disable_network_monitor(false),
+          network_ignore_mask(rtc::kDefaultNetworkIgnoreMask),
+          ssl_max_version(rtc::SSL_PROTOCOL_DTLS_12) {}
+    bool disable_encryption;
+    bool disable_sctp_data_channels;
+    bool disable_network_monitor;
+
+    // Sets the network types to ignore. For instance, calling this with
+    // ADAPTER_TYPE_ETHERNET | ADAPTER_TYPE_LOOPBACK will ignore Ethernet and
+    // loopback interfaces.
+    int network_ignore_mask;
+
+    // Sets the maximum supported protocol version. The highest version
+    // supported by both ends will be used for the connection, i.e. if one
+    // party supports DTLS 1.0 and the other DTLS 1.2, DTLS 1.0 will be used.
+    rtc::SSLProtocolVersion ssl_max_version;
+  };
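+
+  // A minimal sketch of adjusting factory options (the values below are
+  // examples only):
+  //
+  //   PeerConnectionFactoryInterface::Options options;
+  //   options.network_ignore_mask =
+  //       rtc::ADAPTER_TYPE_ETHERNET | rtc::ADAPTER_TYPE_LOOPBACK;
+  //   options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10;
+  //   factory->SetOptions(options);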
+
+  virtual void SetOptions(const Options& options) = 0;
+
+  virtual rtc::scoped_refptr<PeerConnectionInterface> CreatePeerConnection(
+      const PeerConnectionInterface::RTCConfiguration& configuration,
+      const MediaConstraintsInterface* constraints,
+      rtc::scoped_ptr<cricket::PortAllocator> allocator,
+      rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+      PeerConnectionObserver* observer) = 0;
+
+  virtual rtc::scoped_refptr<MediaStreamInterface>
+      CreateLocalMediaStream(const std::string& label) = 0;
+
+  // Creates an AudioSourceInterface.
+  // |constraints| decides audio processing settings but can be NULL.
+  virtual rtc::scoped_refptr<AudioSourceInterface> CreateAudioSource(
+      const MediaConstraintsInterface* constraints) = 0;
+
+  // Creates a VideoSourceInterface. The new source takes ownership of
+  // |capturer|. |constraints| decides video resolution and frame rate but can
+  // be NULL.
+  virtual rtc::scoped_refptr<VideoSourceInterface> CreateVideoSource(
+      cricket::VideoCapturer* capturer,
+      const MediaConstraintsInterface* constraints) = 0;
+
+  // Creates a new local VideoTrack. The same |source| can be used in several
+  // tracks.
+  virtual rtc::scoped_refptr<VideoTrackInterface>
+      CreateVideoTrack(const std::string& label,
+                       VideoSourceInterface* source) = 0;
+
+  // Creates a new AudioTrack. At the moment |source| can be NULL.
+  virtual rtc::scoped_refptr<AudioTrackInterface>
+      CreateAudioTrack(const std::string& label,
+                       AudioSourceInterface* source) = 0;
+
+  // Starts AEC dump using an existing file. Takes ownership of |file| and
+  // passes it on to VoiceEngine (via other objects) immediately, which will
+  // take ownership. If the operation fails, the file will be closed.
+  // A maximum file size in bytes can be specified. When the file size limit
+  // is reached, logging is stopped automatically. If max_size_bytes is set to
+  // a value <= 0, no limit will be used, and logging will continue until the
+  // StopAecDump function is called.
+  virtual bool StartAecDump(rtc::PlatformFile file, int64_t max_size_bytes) = 0;
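+  // A minimal sketch (|file| is a platform file handle that the application
+  // has already opened for writing; the 10 MB cap is an example):
+  //
+  //   if (!factory->StartAecDump(file, 10 * 1024 * 1024)) {
+  //     // The operation failed and |file| has been closed.
+  //   }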
+
+  // Stops logging the AEC dump.
+  virtual void StopAecDump() = 0;
+
+  // Starts RtcEventLog using an existing file. Takes ownership of |file| and
+  // passes it on to VoiceEngine, which will take ownership. If the operation
+  // fails, the file will be closed. The logging will stop automatically after
+  // 10 minutes have passed, or when the StopRtcEventLog function is called.
+  // This function, as well as StopRtcEventLog, doesn't really belong on this
+  // interface; this is a temporary solution until we move the logging object
+  // from inside voice engine to webrtc::Call, which will happen when the VoE
+  // restructuring effort is further along.
+  // TODO(ivoc): Move this into being:
+  //             PeerConnection => MediaController => webrtc::Call.
+  virtual bool StartRtcEventLog(rtc::PlatformFile file) = 0;
+
+  // Stops logging the RtcEventLog.
+  virtual void StopRtcEventLog() = 0;
+
+ protected:
+  // Dtor and ctor protected as objects shouldn't be created or deleted via
+  // this interface.
+  PeerConnectionFactoryInterface() {}
+  ~PeerConnectionFactoryInterface() {}  // NOLINT
+};
+
+// Create a new instance of PeerConnectionFactoryInterface.
+rtc::scoped_refptr<PeerConnectionFactoryInterface>
+CreatePeerConnectionFactory();
+
+// Create a new instance of PeerConnectionFactoryInterface.
+// Ownership of |default_adm|, and optionally |encoder_factory| and
+// |decoder_factory|, is transferred to the returned factory.
+rtc::scoped_refptr<PeerConnectionFactoryInterface>
+CreatePeerConnectionFactory(
+    rtc::Thread* worker_thread,
+    rtc::Thread* signaling_thread,
+    AudioDeviceModule* default_adm,
+    cricket::WebRtcVideoEncoderFactory* encoder_factory,
+    cricket::WebRtcVideoDecoderFactory* decoder_factory);
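+
+// A minimal sketch of the two creation paths (worker_thread and
+// signaling_thread are application-owned rtc::Thread objects; null arguments
+// select the internal defaults, as exercised by the unit tests):
+//
+//   // Simple form: the factory creates and owns its own threads.
+//   rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> f1(
+//       webrtc::CreatePeerConnectionFactory());
+//
+//   // Advanced form: reuse application threads and optionally inject an
+//   // audio device module and video codec factories.
+//   rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> f2(
+//       webrtc::CreatePeerConnectionFactory(worker_thread, signaling_thread,
+//                                           nullptr, nullptr, nullptr));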
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_PEERCONNECTIONINTERFACE_H_
diff --git a/webrtc/api/peerconnectioninterface_unittest.cc b/webrtc/api/peerconnectioninterface_unittest.cc
new file mode 100644
index 0000000..b93cd77
--- /dev/null
+++ b/webrtc/api/peerconnectioninterface_unittest.cc
@@ -0,0 +1,2515 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <string>
+#include <utility>
+
+#include "talk/session/media/mediasession.h"
+#include "webrtc/api/audiotrack.h"
+#include "webrtc/api/jsepsessiondescription.h"
+#include "webrtc/api/mediastream.h"
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/api/peerconnection.h"
+#include "webrtc/api/peerconnectioninterface.h"
+#include "webrtc/api/rtpreceiverinterface.h"
+#include "webrtc/api/rtpsenderinterface.h"
+#include "webrtc/api/streamcollection.h"
+#ifdef WEBRTC_ANDROID
+#include "webrtc/api/test/androidtestinitializer.h"
+#endif
+#include "webrtc/api/test/fakeconstraints.h"
+#include "webrtc/api/test/fakedtlsidentitystore.h"
+#include "webrtc/api/test/mockpeerconnectionobservers.h"
+#include "webrtc/api/test/testsdpstrings.h"
+#include "webrtc/api/videosource.h"
+#include "webrtc/api/videotrack.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/ssladapter.h"
+#include "webrtc/base/sslstreamadapter.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/media/base/fakevideocapturer.h"
+#include "webrtc/media/sctp/sctpdataengine.h"
+#include "webrtc/p2p/client/fakeportallocator.h"
+
+static const char kStreamLabel1[] = "local_stream_1";
+static const char kStreamLabel2[] = "local_stream_2";
+static const char kStreamLabel3[] = "local_stream_3";
+static const int kDefaultStunPort = 3478;
+static const char kStunAddressOnly[] = "stun:address";
+static const char kStunInvalidPort[] = "stun:address:-1";
+static const char kStunAddressPortAndMore1[] = "stun:address:port:more";
+static const char kStunAddressPortAndMore2[] = "stun:address:port more";
+static const char kTurnIceServerUri[] = "turn:user@turn.example.org";
+static const char kTurnUsername[] = "user";
+static const char kTurnPassword[] = "password";
+static const char kTurnHostname[] = "turn.example.org";
+static const uint32_t kTimeout = 10000U;
+
+static const char kStreams[][8] = {"stream1", "stream2"};
+static const char kAudioTracks[][32] = {"audiotrack0", "audiotrack1"};
+static const char kVideoTracks[][32] = {"videotrack0", "videotrack1"};
+
+static const char kRecvonly[] = "recvonly";
+static const char kSendrecv[] = "sendrecv";
+
+// Reference SDP with a MediaStream with label "stream1" and audio track with
+// id "audio_1" and a video track with id "video_1;
+static const char kSdpStringWithStream1[] =
+    "v=0\r\n"
+    "o=- 0 0 IN IP4 127.0.0.1\r\n"
+    "s=-\r\n"
+    "t=0 0\r\n"
+    "a=ice-ufrag:e5785931\r\n"
+    "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+    "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+    "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+    "m=audio 1 RTP/AVPF 103\r\n"
+    "a=mid:audio\r\n"
+    "a=sendrecv\r\n"
+    "a=rtpmap:103 ISAC/16000\r\n"
+    "a=ssrc:1 cname:stream1\r\n"
+    "a=ssrc:1 mslabel:stream1\r\n"
+    "a=ssrc:1 label:audiotrack0\r\n"
+    "m=video 1 RTP/AVPF 120\r\n"
+    "a=mid:video\r\n"
+    "a=sendrecv\r\n"
+    "a=rtpmap:120 VP8/90000\r\n"
+    "a=ssrc:2 cname:stream1\r\n"
+    "a=ssrc:2 mslabel:stream1\r\n"
+    "a=ssrc:2 label:videotrack0\r\n";
+
+// Reference SDP with two MediaStreams with label "stream1" and "stream2. Each
+// MediaStreams have one audio track and one video track.
+// This uses MSID.
+static const char kSdpStringWithStream1And2[] =
+    "v=0\r\n"
+    "o=- 0 0 IN IP4 127.0.0.1\r\n"
+    "s=-\r\n"
+    "t=0 0\r\n"
+    "a=ice-ufrag:e5785931\r\n"
+    "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+    "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+    "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+    "a=msid-semantic: WMS stream1 stream2\r\n"
+    "m=audio 1 RTP/AVPF 103\r\n"
+    "a=mid:audio\r\n"
+    "a=sendrecv\r\n"
+    "a=rtpmap:103 ISAC/16000\r\n"
+    "a=ssrc:1 cname:stream1\r\n"
+    "a=ssrc:1 msid:stream1 audiotrack0\r\n"
+    "a=ssrc:3 cname:stream2\r\n"
+    "a=ssrc:3 msid:stream2 audiotrack1\r\n"
+    "m=video 1 RTP/AVPF 120\r\n"
+    "a=mid:video\r\n"
+    "a=sendrecv\r\n"
+    "a=rtpmap:120 VP8/0\r\n"
+    "a=ssrc:2 cname:stream1\r\n"
+    "a=ssrc:2 msid:stream1 videotrack0\r\n"
+    "a=ssrc:4 cname:stream2\r\n"
+    "a=ssrc:4 msid:stream2 videotrack1\r\n";
+
+// Reference SDP without MediaStreams. Msid is not supported.
+static const char kSdpStringWithoutStreams[] =
+    "v=0\r\n"
+    "o=- 0 0 IN IP4 127.0.0.1\r\n"
+    "s=-\r\n"
+    "t=0 0\r\n"
+    "a=ice-ufrag:e5785931\r\n"
+    "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+    "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+    "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+    "m=audio 1 RTP/AVPF 103\r\n"
+    "a=mid:audio\r\n"
+    "a=sendrecv\r\n"
+    "a=rtpmap:103 ISAC/16000\r\n"
+    "m=video 1 RTP/AVPF 120\r\n"
+    "a=mid:video\r\n"
+    "a=sendrecv\r\n"
+    "a=rtpmap:120 VP8/90000\r\n";
+
+// Reference SDP without MediaStreams. Msid is supported.
+static const char kSdpStringWithMsidWithoutStreams[] =
+    "v=0\r\n"
+    "o=- 0 0 IN IP4 127.0.0.1\r\n"
+    "s=-\r\n"
+    "t=0 0\r\n"
+    "a=ice-ufrag:e5785931\r\n"
+    "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+    "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+    "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+    "a=msid-semantic: WMS\r\n"
+    "m=audio 1 RTP/AVPF 103\r\n"
+    "a=mid:audio\r\n"
+    "a=sendrecv\r\n"
+    "a=rtpmap:103 ISAC/16000\r\n"
+    "m=video 1 RTP/AVPF 120\r\n"
+    "a=mid:video\r\n"
+    "a=sendrecv\r\n"
+    "a=rtpmap:120 VP8/90000\r\n";
+
+// Reference SDP without MediaStreams and audio only.
+static const char kSdpStringWithoutStreamsAudioOnly[] =
+    "v=0\r\n"
+    "o=- 0 0 IN IP4 127.0.0.1\r\n"
+    "s=-\r\n"
+    "t=0 0\r\n"
+    "a=ice-ufrag:e5785931\r\n"
+    "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+    "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+    "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+    "m=audio 1 RTP/AVPF 103\r\n"
+    "a=mid:audio\r\n"
+    "a=sendrecv\r\n"
+    "a=rtpmap:103 ISAC/16000\r\n";
+
+// Reference SENDONLY SDP without MediaStreams. Msid is not supported.
+static const char kSdpStringSendOnlyWithoutStreams[] =
+    "v=0\r\n"
+    "o=- 0 0 IN IP4 127.0.0.1\r\n"
+    "s=-\r\n"
+    "t=0 0\r\n"
+    "a=ice-ufrag:e5785931\r\n"
+    "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+    "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+    "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+    "m=audio 1 RTP/AVPF 103\r\n"
+    "a=mid:audio\r\n"
+    "a=sendrecv\r\n"
+    "a=sendonly\r\n"
+    "a=rtpmap:103 ISAC/16000\r\n"
+    "m=video 1 RTP/AVPF 120\r\n"
+    "a=mid:video\r\n"
+    "a=sendrecv\r\n"
+    "a=sendonly\r\n"
+    "a=rtpmap:120 VP8/90000\r\n";
+
+static const char kSdpStringInit[] =
+    "v=0\r\n"
+    "o=- 0 0 IN IP4 127.0.0.1\r\n"
+    "s=-\r\n"
+    "t=0 0\r\n"
+    "a=ice-ufrag:e5785931\r\n"
+    "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+    "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+    "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+    "a=msid-semantic: WMS\r\n";
+
+static const char kSdpStringAudio[] =
+    "m=audio 1 RTP/AVPF 103\r\n"
+    "a=mid:audio\r\n"
+    "a=sendrecv\r\n"
+    "a=rtpmap:103 ISAC/16000\r\n";
+
+static const char kSdpStringVideo[] =
+    "m=video 1 RTP/AVPF 120\r\n"
+    "a=mid:video\r\n"
+    "a=sendrecv\r\n"
+    "a=rtpmap:120 VP8/90000\r\n";
+
+static const char kSdpStringMs1Audio0[] =
+    "a=ssrc:1 cname:stream1\r\n"
+    "a=ssrc:1 msid:stream1 audiotrack0\r\n";
+
+static const char kSdpStringMs1Video0[] =
+    "a=ssrc:2 cname:stream1\r\n"
+    "a=ssrc:2 msid:stream1 videotrack0\r\n";
+
+static const char kSdpStringMs1Audio1[] =
+    "a=ssrc:3 cname:stream1\r\n"
+    "a=ssrc:3 msid:stream1 audiotrack1\r\n";
+
+static const char kSdpStringMs1Video1[] =
+    "a=ssrc:4 cname:stream1\r\n"
+    "a=ssrc:4 msid:stream1 videotrack1\r\n";
+
+#define MAYBE_SKIP_TEST(feature)                    \
+  if (!(feature())) {                               \
+    LOG(LS_INFO) << "Feature disabled... skipping"; \
+    return;                                         \
+  }
+
+using rtc::scoped_ptr;
+using rtc::scoped_refptr;
+using webrtc::AudioSourceInterface;
+using webrtc::AudioTrack;
+using webrtc::AudioTrackInterface;
+using webrtc::DataBuffer;
+using webrtc::DataChannelInterface;
+using webrtc::FakeConstraints;
+using webrtc::IceCandidateInterface;
+using webrtc::MediaConstraintsInterface;
+using webrtc::MediaStream;
+using webrtc::MediaStreamInterface;
+using webrtc::MediaStreamTrackInterface;
+using webrtc::MockCreateSessionDescriptionObserver;
+using webrtc::MockDataChannelObserver;
+using webrtc::MockSetSessionDescriptionObserver;
+using webrtc::MockStatsObserver;
+using webrtc::PeerConnectionInterface;
+using webrtc::PeerConnectionObserver;
+using webrtc::RtpReceiverInterface;
+using webrtc::RtpSenderInterface;
+using webrtc::SdpParseError;
+using webrtc::SessionDescriptionInterface;
+using webrtc::StreamCollection;
+using webrtc::StreamCollectionInterface;
+using webrtc::VideoSourceInterface;
+using webrtc::VideoTrack;
+using webrtc::VideoTrackInterface;
+
+typedef PeerConnectionInterface::RTCOfferAnswerOptions RTCOfferAnswerOptions;
+
+namespace {
+
+// Gets the first ssrc of the given content type from the ContentInfo.
+bool GetFirstSsrc(const cricket::ContentInfo* content_info, int* ssrc) {
+  if (!content_info || !ssrc) {
+    return false;
+  }
+  const cricket::MediaContentDescription* media_desc =
+      static_cast<const cricket::MediaContentDescription*>(
+          content_info->description);
+  if (!media_desc || media_desc->streams().empty()) {
+    return false;
+  }
+  *ssrc = media_desc->streams().begin()->first_ssrc();
+  return true;
+}
+
+void SetSsrcToZero(std::string* sdp) {
+  const char kSdpSsrcAttribute[] = "a=ssrc:";
+  const char kSdpSsrcAttributeZero[] = "a=ssrc:0";
+  size_t ssrc_pos = 0;
+  while ((ssrc_pos = sdp->find(kSdpSsrcAttribute, ssrc_pos)) !=
+      std::string::npos) {
+    size_t end_ssrc = sdp->find(" ", ssrc_pos);
+    sdp->replace(ssrc_pos, end_ssrc - ssrc_pos, kSdpSsrcAttributeZero);
+    ssrc_pos = end_ssrc;
+  }
+}
+
+// Check if |streams| contains the specified track.
+bool ContainsTrack(const std::vector<cricket::StreamParams>& streams,
+                   const std::string& stream_label,
+                   const std::string& track_id) {
+  for (const cricket::StreamParams& params : streams) {
+    if (params.sync_label == stream_label && params.id == track_id) {
+      return true;
+    }
+  }
+  return false;
+}
+
+// Check if |senders| contains the specified sender, by id.
+bool ContainsSender(
+    const std::vector<rtc::scoped_refptr<RtpSenderInterface>>& senders,
+    const std::string& id) {
+  for (const auto& sender : senders) {
+    if (sender->id() == id) {
+      return true;
+    }
+  }
+  return false;
+}
+
+// Create a collection of streams.
+// CreateStreamCollection(1) creates a collection that corresponds to
+// kSdpStringWithStream1.
+// CreateStreamCollection(2) corresponds to kSdpStringWithStream1And2.
+rtc::scoped_refptr<StreamCollection> CreateStreamCollection(
+    int number_of_streams) {
+  rtc::scoped_refptr<StreamCollection> local_collection(
+      StreamCollection::Create());
+
+  for (int i = 0; i < number_of_streams; ++i) {
+    rtc::scoped_refptr<webrtc::MediaStreamInterface> stream(
+        webrtc::MediaStream::Create(kStreams[i]));
+
+    // Add a local audio track.
+    rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
+        webrtc::AudioTrack::Create(kAudioTracks[i], nullptr));
+    stream->AddTrack(audio_track);
+
+    // Add a local video track.
+    rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
+        webrtc::VideoTrack::Create(kVideoTracks[i], nullptr));
+    stream->AddTrack(video_track);
+
+    local_collection->AddStream(stream);
+  }
+  return local_collection;
+}
+
+// Check equality of StreamCollections.
+bool CompareStreamCollections(StreamCollectionInterface* s1,
+                              StreamCollectionInterface* s2) {
+  if (s1 == nullptr || s2 == nullptr || s1->count() != s2->count()) {
+    return false;
+  }
+
+  for (size_t i = 0; i != s1->count(); ++i) {
+    if (s1->at(i)->label() != s2->at(i)->label()) {
+      return false;
+    }
+    webrtc::AudioTrackVector audio_tracks1 = s1->at(i)->GetAudioTracks();
+    webrtc::AudioTrackVector audio_tracks2 = s2->at(i)->GetAudioTracks();
+    webrtc::VideoTrackVector video_tracks1 = s1->at(i)->GetVideoTracks();
+    webrtc::VideoTrackVector video_tracks2 = s2->at(i)->GetVideoTracks();
+
+    if (audio_tracks1.size() != audio_tracks2.size()) {
+      return false;
+    }
+    for (size_t j = 0; j != audio_tracks1.size(); ++j) {
+      if (audio_tracks1[j]->id() != audio_tracks2[j]->id()) {
+        return false;
+      }
+    }
+    if (video_tracks1.size() != video_tracks2.size()) {
+      return false;
+    }
+    for (size_t j = 0; j != video_tracks1.size(); ++j) {
+      if (video_tracks1[j]->id() != video_tracks2[j]->id()) {
+        return false;
+      }
+    }
+  }
+  return true;
+}
+
+class MockPeerConnectionObserver : public PeerConnectionObserver {
+ public:
+  MockPeerConnectionObserver() : remote_streams_(StreamCollection::Create()) {}
+  ~MockPeerConnectionObserver() {
+  }
+  void SetPeerConnectionInterface(PeerConnectionInterface* pc) {
+    pc_ = pc;
+    if (pc) {
+      state_ = pc_->signaling_state();
+    }
+  }
+  virtual void OnSignalingChange(
+      PeerConnectionInterface::SignalingState new_state) {
+    EXPECT_EQ(pc_->signaling_state(), new_state);
+    state_ = new_state;
+  }
+  // TODO(bemasc): Remove this once callers transition to OnIceGatheringChange.
+  virtual void OnStateChange(StateType state_changed) {
+    if (pc_.get() == NULL)
+      return;
+    switch (state_changed) {
+      case kSignalingState:
+        // OnSignalingChange and OnStateChange(kSignalingState) should always
+        // be called approximately simultaneously.  To ease testing, we require
+        // that they always be called in that order.  This check verifies
+        // that OnSignalingChange has just been called.
+        EXPECT_EQ(pc_->signaling_state(), state_);
+        break;
+      case kIceState:
+        ADD_FAILURE();
+        break;
+      default:
+        ADD_FAILURE();
+        break;
+    }
+  }
+
+  MediaStreamInterface* RemoteStream(const std::string& label) {
+    return remote_streams_->find(label);
+  }
+  StreamCollectionInterface* remote_streams() const { return remote_streams_; }
+  void OnAddStream(MediaStreamInterface* stream) override {
+    last_added_stream_ = stream;
+    remote_streams_->AddStream(stream);
+  }
+  void OnRemoveStream(MediaStreamInterface* stream) override {
+    last_removed_stream_ = stream;
+    remote_streams_->RemoveStream(stream);
+  }
+  void OnRenegotiationNeeded() override { renegotiation_needed_ = true; }
+  void OnDataChannel(DataChannelInterface* data_channel) override {
+    last_datachannel_ = data_channel;
+  }
+
+  void OnIceConnectionChange(
+      PeerConnectionInterface::IceConnectionState new_state) override {
+    EXPECT_EQ(pc_->ice_connection_state(), new_state);
+  }
+  void OnIceGatheringChange(
+      PeerConnectionInterface::IceGatheringState new_state) override {
+    EXPECT_EQ(pc_->ice_gathering_state(), new_state);
+    ice_complete_ = new_state == PeerConnectionInterface::kIceGatheringComplete;
+  }
+  void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override {
+    EXPECT_NE(PeerConnectionInterface::kIceGatheringNew,
+              pc_->ice_gathering_state());
+
+    std::string sdp;
+    EXPECT_TRUE(candidate->ToString(&sdp));
+    EXPECT_LT(0u, sdp.size());
+    last_candidate_.reset(webrtc::CreateIceCandidate(candidate->sdp_mid(),
+        candidate->sdp_mline_index(), sdp, NULL));
+    EXPECT_TRUE(last_candidate_.get() != NULL);
+  }
+
+  // Returns the label of the last added stream.
+  // Empty string if no stream has been added.
+  std::string GetLastAddedStreamLabel() {
+    if (last_added_stream_.get())
+      return last_added_stream_->label();
+    return "";
+  }
+  std::string GetLastRemovedStreamLabel() {
+    if (last_removed_stream_.get())
+      return last_removed_stream_->label();
+    return "";
+  }
+
+  scoped_refptr<PeerConnectionInterface> pc_;
+  PeerConnectionInterface::SignalingState state_;
+  scoped_ptr<IceCandidateInterface> last_candidate_;
+  scoped_refptr<DataChannelInterface> last_datachannel_;
+  rtc::scoped_refptr<StreamCollection> remote_streams_;
+  bool renegotiation_needed_ = false;
+  bool ice_complete_ = false;
+
+ private:
+  scoped_refptr<MediaStreamInterface> last_added_stream_;
+  scoped_refptr<MediaStreamInterface> last_removed_stream_;
+};
+
+}  // namespace
+
+class PeerConnectionInterfaceTest : public testing::Test {
+ protected:
+  PeerConnectionInterfaceTest() {
+#ifdef WEBRTC_ANDROID
+    webrtc::InitializeAndroidObjects();
+#endif
+  }
+
+  virtual void SetUp() {
+    pc_factory_ = webrtc::CreatePeerConnectionFactory(
+        rtc::Thread::Current(), rtc::Thread::Current(), NULL, NULL,
+        NULL);
+    ASSERT_TRUE(pc_factory_.get() != NULL);
+  }
+
+  void CreatePeerConnection() {
+    CreatePeerConnection("", "", NULL);
+  }
+
+  void CreatePeerConnection(webrtc::MediaConstraintsInterface* constraints) {
+    CreatePeerConnection("", "", constraints);
+  }
+
+  void CreatePeerConnection(const std::string& uri,
+                            const std::string& password,
+                            webrtc::MediaConstraintsInterface* constraints) {
+    PeerConnectionInterface::RTCConfiguration config;
+    PeerConnectionInterface::IceServer server;
+    if (!uri.empty()) {
+      server.uri = uri;
+      server.password = password;
+      config.servers.push_back(server);
+    }
+
+    rtc::scoped_ptr<cricket::FakePortAllocator> port_allocator(
+        new cricket::FakePortAllocator(rtc::Thread::Current(), nullptr));
+    port_allocator_ = port_allocator.get();
+
+    // DTLS does not work in a loopback call, so it is disabled for most of
+    // the tests in this file. We only create a FakeDtlsIdentityStore if the
+    // test explicitly sets the constraint.
+    FakeConstraints default_constraints;
+    if (!constraints) {
+      constraints = &default_constraints;
+
+      default_constraints.AddMandatory(
+          webrtc::MediaConstraintsInterface::kEnableDtlsSrtp, false);
+    }
+
+    scoped_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store;
+    bool dtls;
+    if (FindConstraint(constraints,
+                       webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                       &dtls,
+                       nullptr) && dtls) {
+      dtls_identity_store.reset(new FakeDtlsIdentityStore());
+    }
+    pc_ = pc_factory_->CreatePeerConnection(
+        config, constraints, std::move(port_allocator),
+        std::move(dtls_identity_store), &observer_);
+    ASSERT_TRUE(pc_.get() != NULL);
+    observer_.SetPeerConnectionInterface(pc_.get());
+    EXPECT_EQ(PeerConnectionInterface::kStable, observer_.state_);
+  }
+
+  void CreatePeerConnectionExpectFail(const std::string& uri) {
+    PeerConnectionInterface::RTCConfiguration config;
+    PeerConnectionInterface::IceServer server;
+    server.uri = uri;
+    config.servers.push_back(server);
+
+    scoped_refptr<PeerConnectionInterface> pc;
+    pc = pc_factory_->CreatePeerConnection(config, nullptr, nullptr, nullptr,
+                                           &observer_);
+    EXPECT_EQ(nullptr, pc);
+  }
+
+  void CreatePeerConnectionWithDifferentConfigurations() {
+    CreatePeerConnection(kStunAddressOnly, "", NULL);
+    EXPECT_EQ(1u, port_allocator_->stun_servers().size());
+    EXPECT_EQ(0u, port_allocator_->turn_servers().size());
+    EXPECT_EQ("address", port_allocator_->stun_servers().begin()->hostname());
+    EXPECT_EQ(kDefaultStunPort,
+              port_allocator_->stun_servers().begin()->port());
+
+    CreatePeerConnectionExpectFail(kStunInvalidPort);
+    CreatePeerConnectionExpectFail(kStunAddressPortAndMore1);
+    CreatePeerConnectionExpectFail(kStunAddressPortAndMore2);
+
+    CreatePeerConnection(kTurnIceServerUri, kTurnPassword, NULL);
+    EXPECT_EQ(0u, port_allocator_->stun_servers().size());
+    EXPECT_EQ(1u, port_allocator_->turn_servers().size());
+    EXPECT_EQ(kTurnUsername,
+              port_allocator_->turn_servers()[0].credentials.username);
+    EXPECT_EQ(kTurnPassword,
+              port_allocator_->turn_servers()[0].credentials.password);
+    EXPECT_EQ(kTurnHostname,
+              port_allocator_->turn_servers()[0].ports[0].address.hostname());
+  }
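+
+  // For reference, the kStun*/kTurn* constants exercised above are assumed to
+  // follow the standard ICE server URI forms (RFC 7064/7065), e.g.
+  // "stun:address:3478" or "turn:user@hostname:3478?transport=udp", with the
+  // malformed variants expected to make CreatePeerConnection fail.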
+
+  void ReleasePeerConnection() {
+    pc_ = NULL;
+    observer_.SetPeerConnectionInterface(NULL);
+  }
+
+  void AddVideoStream(const std::string& label) {
+    // Create a local stream.
+    scoped_refptr<MediaStreamInterface> stream(
+        pc_factory_->CreateLocalMediaStream(label));
+    scoped_refptr<VideoSourceInterface> video_source(
+        pc_factory_->CreateVideoSource(new cricket::FakeVideoCapturer(), NULL));
+    scoped_refptr<VideoTrackInterface> video_track(
+        pc_factory_->CreateVideoTrack(label + "v0", video_source));
+    stream->AddTrack(video_track.get());
+    EXPECT_TRUE(pc_->AddStream(stream));
+    EXPECT_TRUE_WAIT(observer_.renegotiation_needed_, kTimeout);
+    observer_.renegotiation_needed_ = false;
+  }
+
+  void AddVoiceStream(const std::string& label) {
+    // Create a local stream.
+    scoped_refptr<MediaStreamInterface> stream(
+        pc_factory_->CreateLocalMediaStream(label));
+    scoped_refptr<AudioTrackInterface> audio_track(
+        pc_factory_->CreateAudioTrack(label + "a0", NULL));
+    stream->AddTrack(audio_track.get());
+    EXPECT_TRUE(pc_->AddStream(stream));
+    EXPECT_TRUE_WAIT(observer_.renegotiation_needed_, kTimeout);
+    observer_.renegotiation_needed_ = false;
+  }
+
+  void AddAudioVideoStream(const std::string& stream_label,
+                           const std::string& audio_track_label,
+                           const std::string& video_track_label) {
+    // Create a local stream.
+    scoped_refptr<MediaStreamInterface> stream(
+        pc_factory_->CreateLocalMediaStream(stream_label));
+    scoped_refptr<AudioTrackInterface> audio_track(
+        pc_factory_->CreateAudioTrack(
+            audio_track_label, static_cast<AudioSourceInterface*>(NULL)));
+    stream->AddTrack(audio_track.get());
+    scoped_refptr<VideoTrackInterface> video_track(
+        pc_factory_->CreateVideoTrack(video_track_label, NULL));
+    stream->AddTrack(video_track.get());
+    EXPECT_TRUE(pc_->AddStream(stream));
+    EXPECT_TRUE_WAIT(observer_.renegotiation_needed_, kTimeout);
+    observer_.renegotiation_needed_ = false;
+  }
+
+  bool DoCreateOfferAnswer(SessionDescriptionInterface** desc,
+                           bool offer,
+                           MediaConstraintsInterface* constraints) {
+    rtc::scoped_refptr<MockCreateSessionDescriptionObserver>
+        observer(new rtc::RefCountedObject<
+            MockCreateSessionDescriptionObserver>());
+    if (offer) {
+      pc_->CreateOffer(observer, constraints);
+    } else {
+      pc_->CreateAnswer(observer, constraints);
+    }
+    EXPECT_EQ_WAIT(true, observer->called(), kTimeout);
+    *desc = observer->release_desc();
+    return observer->result();
+  }
+
+  bool DoCreateOffer(SessionDescriptionInterface** desc,
+                     MediaConstraintsInterface* constraints) {
+    return DoCreateOfferAnswer(desc, true, constraints);
+  }
+
+  bool DoCreateAnswer(SessionDescriptionInterface** desc,
+                      MediaConstraintsInterface* constraints) {
+    return DoCreateOfferAnswer(desc, false, constraints);
+  }
+
+  bool DoSetSessionDescription(SessionDescriptionInterface* desc, bool local) {
+    rtc::scoped_refptr<MockSetSessionDescriptionObserver>
+        observer(new rtc::RefCountedObject<
+            MockSetSessionDescriptionObserver>());
+    if (local) {
+      pc_->SetLocalDescription(observer, desc);
+    } else {
+      pc_->SetRemoteDescription(observer, desc);
+    }
+    EXPECT_EQ_WAIT(true, observer->called(), kTimeout);
+    return observer->result();
+  }
+
+  bool DoSetLocalDescription(SessionDescriptionInterface* desc) {
+    return DoSetSessionDescription(desc, true);
+  }
+
+  bool DoSetRemoteDescription(SessionDescriptionInterface* desc) {
+    return DoSetSessionDescription(desc, false);
+  }
+
+  // Calls PeerConnection::GetStats and checks the return value.
+  // It does not verify the values in the StatsReports since an RTCP packet
+  // might be required.
+  bool DoGetStats(MediaStreamTrackInterface* track) {
+    rtc::scoped_refptr<MockStatsObserver> observer(
+        new rtc::RefCountedObject<MockStatsObserver>());
+    if (!pc_->GetStats(
+        observer, track, PeerConnectionInterface::kStatsOutputLevelStandard))
+      return false;
+    EXPECT_TRUE_WAIT(observer->called(), kTimeout);
+    return observer->called();
+  }
+
+  void InitiateCall() {
+    CreatePeerConnection();
+    // Create a local stream with audio&video tracks.
+    AddAudioVideoStream(kStreamLabel1, "audio_label", "video_label");
+    CreateOfferReceiveAnswer();
+  }
+
+  // Verify that RTP header extensions have been negotiated for audio and
+  // video.
+  void VerifyRemoteRtpHeaderExtensions() {
+    const cricket::MediaContentDescription* desc =
+        cricket::GetFirstAudioContentDescription(
+            pc_->remote_description()->description());
+    ASSERT_TRUE(desc != NULL);
+    EXPECT_GT(desc->rtp_header_extensions().size(), 0u);
+
+    desc = cricket::GetFirstVideoContentDescription(
+        pc_->remote_description()->description());
+    ASSERT_TRUE(desc != NULL);
+    EXPECT_GT(desc->rtp_header_extensions().size(), 0u);
+  }
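+
+  // For reference, negotiated header extensions show up in the SDP as
+  // a=extmap attributes (RFC 5285), e.g. (hypothetical id):
+  //   a=extmap:1 urn:ietf:params:rtp-hdrext:ssrc-audio-level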
+
+  void CreateOfferAsRemoteDescription() {
+    rtc::scoped_ptr<SessionDescriptionInterface> offer;
+    ASSERT_TRUE(DoCreateOffer(offer.use(), nullptr));
+    std::string sdp;
+    EXPECT_TRUE(offer->ToString(&sdp));
+    SessionDescriptionInterface* remote_offer =
+        webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
+                                         sdp, NULL);
+    EXPECT_TRUE(DoSetRemoteDescription(remote_offer));
+    EXPECT_EQ(PeerConnectionInterface::kHaveRemoteOffer, observer_.state_);
+  }
+
+  void CreateAndSetRemoteOffer(const std::string& sdp) {
+    SessionDescriptionInterface* remote_offer =
+        webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
+                                         sdp, nullptr);
+    EXPECT_TRUE(DoSetRemoteDescription(remote_offer));
+    EXPECT_EQ(PeerConnectionInterface::kHaveRemoteOffer, observer_.state_);
+  }
+
+  void CreateAnswerAsLocalDescription() {
+    scoped_ptr<SessionDescriptionInterface> answer;
+    ASSERT_TRUE(DoCreateAnswer(answer.use(), nullptr));
+
+    // TODO(perkj): Currently SetLocalDescription fails if any parameters in an
+    // audio codec change, even if the parameter has nothing to do with
+    // receiving. Not all parameters are serialized to SDP.
+    // Since CreatePrAnswerAsLocalDescription serializes/deserializes
+    // the SessionDescription, it is necessary to do that here too in order to
+    // get ReceiveOfferCreatePrAnswerAndAnswer and RenegotiateAudioOnly to pass.
+    // https://code.google.com/p/webrtc/issues/detail?id=1356
+    std::string sdp;
+    EXPECT_TRUE(answer->ToString(&sdp));
+    SessionDescriptionInterface* new_answer =
+        webrtc::CreateSessionDescription(SessionDescriptionInterface::kAnswer,
+                                         sdp, NULL);
+    EXPECT_TRUE(DoSetLocalDescription(new_answer));
+    EXPECT_EQ(PeerConnectionInterface::kStable, observer_.state_);
+  }
+
+  void CreatePrAnswerAsLocalDescription() {
+    scoped_ptr<SessionDescriptionInterface> answer;
+    ASSERT_TRUE(DoCreateAnswer(answer.use(), nullptr));
+
+    std::string sdp;
+    EXPECT_TRUE(answer->ToString(&sdp));
+    SessionDescriptionInterface* pr_answer =
+        webrtc::CreateSessionDescription(SessionDescriptionInterface::kPrAnswer,
+                                         sdp, NULL);
+    EXPECT_TRUE(DoSetLocalDescription(pr_answer));
+    EXPECT_EQ(PeerConnectionInterface::kHaveLocalPrAnswer, observer_.state_);
+  }
+
+  void CreateOfferReceiveAnswer() {
+    CreateOfferAsLocalDescription();
+    std::string sdp;
+    EXPECT_TRUE(pc_->local_description()->ToString(&sdp));
+    CreateAnswerAsRemoteDescription(sdp);
+  }
+
+  void CreateOfferAsLocalDescription() {
+    rtc::scoped_ptr<SessionDescriptionInterface> offer;
+    ASSERT_TRUE(DoCreateOffer(offer.use(), nullptr));
+    // TODO(perkj): Currently SetLocalDescription fails if any parameters in an
+    // audio codec change, even if the parameter has nothing to do with
+    // receiving. Not all parameters are serialized to SDP.
+    // Since CreatePrAnswerAsLocalDescription serializes/deserializes
+    // the SessionDescription, it is necessary to do that here too in order to
+    // get ReceiveOfferCreatePrAnswerAndAnswer and RenegotiateAudioOnly to pass.
+    // https://code.google.com/p/webrtc/issues/detail?id=1356
+    std::string sdp;
+    EXPECT_TRUE(offer->ToString(&sdp));
+    SessionDescriptionInterface* new_offer =
+        webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
+                                         sdp, NULL);
+
+    EXPECT_TRUE(DoSetLocalDescription(new_offer));
+    EXPECT_EQ(PeerConnectionInterface::kHaveLocalOffer, observer_.state_);
+    // Wait for the ice_complete message, so that SDP will have candidates.
+    EXPECT_TRUE_WAIT(observer_.ice_complete_, kTimeout);
+  }
+
+  void CreateAnswerAsRemoteDescription(const std::string& sdp) {
+    webrtc::JsepSessionDescription* answer = new webrtc::JsepSessionDescription(
+        SessionDescriptionInterface::kAnswer);
+    EXPECT_TRUE(answer->Initialize(sdp, NULL));
+    EXPECT_TRUE(DoSetRemoteDescription(answer));
+    EXPECT_EQ(PeerConnectionInterface::kStable, observer_.state_);
+  }
+
+  void CreatePrAnswerAndAnswerAsRemoteDescription(const std::string& sdp) {
+    webrtc::JsepSessionDescription* pr_answer =
+        new webrtc::JsepSessionDescription(
+            SessionDescriptionInterface::kPrAnswer);
+    EXPECT_TRUE(pr_answer->Initialize(sdp, NULL));
+    EXPECT_TRUE(DoSetRemoteDescription(pr_answer));
+    EXPECT_EQ(PeerConnectionInterface::kHaveRemotePrAnswer, observer_.state_);
+    webrtc::JsepSessionDescription* answer =
+        new webrtc::JsepSessionDescription(
+            SessionDescriptionInterface::kAnswer);
+    EXPECT_TRUE(answer->Initialize(sdp, NULL));
+    EXPECT_TRUE(DoSetRemoteDescription(answer));
+    EXPECT_EQ(PeerConnectionInterface::kStable, observer_.state_);
+  }
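+
+  // Note: per JSEP, kPrAnswer is a provisional answer; applying it only moves
+  // signaling to kHaveRemotePrAnswer, and a final kAnswer (here carrying the
+  // same SDP) is still required to reach kStable.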
+
+  // Helper function used for waiting until the last signaled remote stream has
+  // the same label as |stream_label|. In a few of the tests in this file we
+  // answer with the same session description as we offer, and thus we can
+  // check if OnAddStream has been called with the same stream as we offer to
+  // send.
+  void WaitAndVerifyOnAddStream(const std::string& stream_label) {
+    EXPECT_EQ_WAIT(stream_label, observer_.GetLastAddedStreamLabel(), kTimeout);
+  }
+
+  // Creates an offer and applies it as a local session description.
+  // Creates an answer with the same SDP as the offer, but with every SSRC
+  // set to zero (via SetSsrcToZero).
+  void CreateOfferReceiveAnswerWithoutSsrc() {
+    CreateOfferAsLocalDescription();
+    std::string sdp;
+    EXPECT_TRUE(pc_->local_description()->ToString(&sdp));
+    SetSsrcToZero(&sdp);
+    CreateAnswerAsRemoteDescription(sdp);
+  }
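+
+  // For reference, SetSsrcToZero (defined earlier in this file) is assumed to
+  // rewrite media-level SSRC attributes (RFC 5576), e.g. turning
+  //   a=ssrc:2 cname:stream_1_cname
+  // into
+  //   a=ssrc:0 cname:stream_1_cname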
+
+  // This function creates a MediaStream with label kStreams[0], containing
+  // |number_of_audio_tracks| audio tracks and |number_of_video_tracks| video
+  // tracks, as well as the corresponding SessionDescriptionInterface. The
+  // SessionDescriptionInterface is returned in |desc| and the MediaStream is
+  // stored in |reference_collection_|.
+  void CreateSessionDescriptionAndReference(
+      size_t number_of_audio_tracks,
+      size_t number_of_video_tracks,
+      SessionDescriptionInterface** desc) {
+    ASSERT_TRUE(desc != nullptr);
+    ASSERT_LE(number_of_audio_tracks, 2u);
+    ASSERT_LE(number_of_video_tracks, 2u);
+
+    reference_collection_ = StreamCollection::Create();
+    std::string sdp_ms1 = std::string(kSdpStringInit);
+
+    std::string mediastream_label = kStreams[0];
+
+    rtc::scoped_refptr<webrtc::MediaStreamInterface> stream(
+        webrtc::MediaStream::Create(mediastream_label));
+    reference_collection_->AddStream(stream);
+
+    if (number_of_audio_tracks > 0) {
+      sdp_ms1 += std::string(kSdpStringAudio);
+      sdp_ms1 += std::string(kSdpStringMs1Audio0);
+      AddAudioTrack(kAudioTracks[0], stream);
+    }
+    if (number_of_audio_tracks > 1) {
+      sdp_ms1 += kSdpStringMs1Audio1;
+      AddAudioTrack(kAudioTracks[1], stream);
+    }
+
+    if (number_of_video_tracks > 0) {
+      sdp_ms1 += std::string(kSdpStringVideo);
+      sdp_ms1 += std::string(kSdpStringMs1Video0);
+      AddVideoTrack(kVideoTracks[0], stream);
+    }
+    if (number_of_video_tracks > 1) {
+      sdp_ms1 += kSdpStringMs1Video1;
+      AddVideoTrack(kVideoTracks[1], stream);
+    }
+
+    *desc = webrtc::CreateSessionDescription(
+        SessionDescriptionInterface::kOffer, sdp_ms1, nullptr);
+  }
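+
+  // The kSdpString* fragments concatenated above are assumed to be SDP
+  // snippets: a session-level header (kSdpStringInit), audio/video m-sections
+  // (kSdpStringAudio/kSdpStringVideo), and per-track ssrc/msid attribute lines
+  // (kSdpStringMs1*), which together form one complete offer.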
+
+  void AddAudioTrack(const std::string& track_id,
+                     MediaStreamInterface* stream) {
+    rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
+        webrtc::AudioTrack::Create(track_id, nullptr));
+    ASSERT_TRUE(stream->AddTrack(audio_track));
+  }
+
+  void AddVideoTrack(const std::string& track_id,
+                     MediaStreamInterface* stream) {
+    rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
+        webrtc::VideoTrack::Create(track_id, nullptr));
+    ASSERT_TRUE(stream->AddTrack(video_track));
+  }
+
+  cricket::FakePortAllocator* port_allocator_ = nullptr;
+  scoped_refptr<webrtc::PeerConnectionFactoryInterface> pc_factory_;
+  scoped_refptr<PeerConnectionInterface> pc_;
+  MockPeerConnectionObserver observer_;
+  rtc::scoped_refptr<StreamCollection> reference_collection_;
+};
+
+TEST_F(PeerConnectionInterfaceTest,
+       CreatePeerConnectionWithDifferentConfigurations) {
+  CreatePeerConnectionWithDifferentConfigurations();
+}
+
+TEST_F(PeerConnectionInterfaceTest, AddStreams) {
+  CreatePeerConnection();
+  AddVideoStream(kStreamLabel1);
+  AddVoiceStream(kStreamLabel2);
+  ASSERT_EQ(2u, pc_->local_streams()->count());
+
+  // Test we can add multiple local streams to one peerconnection.
+  scoped_refptr<MediaStreamInterface> stream(
+      pc_factory_->CreateLocalMediaStream(kStreamLabel3));
+  scoped_refptr<AudioTrackInterface> audio_track(
+      pc_factory_->CreateAudioTrack(
+          kStreamLabel3, static_cast<AudioSourceInterface*>(NULL)));
+  stream->AddTrack(audio_track.get());
+  EXPECT_TRUE(pc_->AddStream(stream));
+  EXPECT_EQ(3u, pc_->local_streams()->count());
+
+  // Remove the third stream.
+  pc_->RemoveStream(pc_->local_streams()->at(2));
+  EXPECT_EQ(2u, pc_->local_streams()->count());
+
+  // Remove the second stream.
+  pc_->RemoveStream(pc_->local_streams()->at(1));
+  EXPECT_EQ(1u, pc_->local_streams()->count());
+
+  // Remove the first stream.
+  pc_->RemoveStream(pc_->local_streams()->at(0));
+  EXPECT_EQ(0u, pc_->local_streams()->count());
+}
+
+// Test that the created offer includes streams we added.
+TEST_F(PeerConnectionInterfaceTest, AddedStreamsPresentInOffer) {
+  CreatePeerConnection();
+  AddAudioVideoStream(kStreamLabel1, "audio_track", "video_track");
+  scoped_ptr<SessionDescriptionInterface> offer;
+  ASSERT_TRUE(DoCreateOffer(offer.accept(), nullptr));
+
+  const cricket::ContentInfo* audio_content =
+      cricket::GetFirstAudioContent(offer->description());
+  const cricket::AudioContentDescription* audio_desc =
+      static_cast<const cricket::AudioContentDescription*>(
+          audio_content->description);
+  EXPECT_TRUE(
+      ContainsTrack(audio_desc->streams(), kStreamLabel1, "audio_track"));
+
+  const cricket::ContentInfo* video_content =
+      cricket::GetFirstVideoContent(offer->description());
+  const cricket::VideoContentDescription* video_desc =
+      static_cast<const cricket::VideoContentDescription*>(
+          video_content->description);
+  EXPECT_TRUE(
+      ContainsTrack(video_desc->streams(), kStreamLabel1, "video_track"));
+
+  // Add another stream and ensure the offer includes both the old and new
+  // streams.
+  AddAudioVideoStream(kStreamLabel2, "audio_track2", "video_track2");
+  ASSERT_TRUE(DoCreateOffer(offer.accept(), nullptr));
+
+  audio_content = cricket::GetFirstAudioContent(offer->description());
+  audio_desc = static_cast<const cricket::AudioContentDescription*>(
+      audio_content->description);
+  EXPECT_TRUE(
+      ContainsTrack(audio_desc->streams(), kStreamLabel1, "audio_track"));
+  EXPECT_TRUE(
+      ContainsTrack(audio_desc->streams(), kStreamLabel2, "audio_track2"));
+
+  video_content = cricket::GetFirstVideoContent(offer->description());
+  video_desc = static_cast<const cricket::VideoContentDescription*>(
+      video_content->description);
+  EXPECT_TRUE(
+      ContainsTrack(video_desc->streams(), kStreamLabel1, "video_track"));
+  EXPECT_TRUE(
+      ContainsTrack(video_desc->streams(), kStreamLabel2, "video_track2"));
+}
+
+TEST_F(PeerConnectionInterfaceTest, RemoveStream) {
+  CreatePeerConnection();
+  AddVideoStream(kStreamLabel1);
+  ASSERT_EQ(1u, pc_->local_streams()->count());
+  pc_->RemoveStream(pc_->local_streams()->at(0));
+  EXPECT_EQ(0u, pc_->local_streams()->count());
+}
+
+// Test for AddTrack and RemoveTrack methods.
+// Tests that the created offer includes tracks we added,
+// and that the RtpSenders are created correctly.
+// Also tests that RemoveTrack removes the tracks from subsequent offers.
+TEST_F(PeerConnectionInterfaceTest, AddTrackRemoveTrack) {
+  CreatePeerConnection();
+  // Create a dummy stream, so tracks share a stream label.
+  scoped_refptr<MediaStreamInterface> stream(
+      pc_factory_->CreateLocalMediaStream(kStreamLabel1));
+  std::vector<MediaStreamInterface*> stream_list;
+  stream_list.push_back(stream.get());
+  scoped_refptr<AudioTrackInterface> audio_track(
+      pc_factory_->CreateAudioTrack("audio_track", nullptr));
+  scoped_refptr<VideoTrackInterface> video_track(
+      pc_factory_->CreateVideoTrack("video_track", nullptr));
+  auto audio_sender = pc_->AddTrack(audio_track, stream_list);
+  auto video_sender = pc_->AddTrack(video_track, stream_list);
+  EXPECT_EQ(kStreamLabel1, audio_sender->stream_id());
+  EXPECT_EQ("audio_track", audio_sender->id());
+  EXPECT_EQ(audio_track, audio_sender->track());
+  EXPECT_EQ(kStreamLabel1, video_sender->stream_id());
+  EXPECT_EQ("video_track", video_sender->id());
+  EXPECT_EQ(video_track, video_sender->track());
+
+  // Now create an offer and check for the senders.
+  scoped_ptr<SessionDescriptionInterface> offer;
+  ASSERT_TRUE(DoCreateOffer(offer.accept(), nullptr));
+
+  const cricket::ContentInfo* audio_content =
+      cricket::GetFirstAudioContent(offer->description());
+  const cricket::AudioContentDescription* audio_desc =
+      static_cast<const cricket::AudioContentDescription*>(
+          audio_content->description);
+  EXPECT_TRUE(
+      ContainsTrack(audio_desc->streams(), kStreamLabel1, "audio_track"));
+
+  const cricket::ContentInfo* video_content =
+      cricket::GetFirstVideoContent(offer->description());
+  const cricket::VideoContentDescription* video_desc =
+      static_cast<const cricket::VideoContentDescription*>(
+          video_content->description);
+  EXPECT_TRUE(
+      ContainsTrack(video_desc->streams(), kStreamLabel1, "video_track"));
+
+  EXPECT_TRUE(DoSetLocalDescription(offer.release()));
+
+  // Now try removing the tracks.
+  EXPECT_TRUE(pc_->RemoveTrack(audio_sender));
+  EXPECT_TRUE(pc_->RemoveTrack(video_sender));
+
+  // Create a new offer and ensure it doesn't contain the removed senders.
+  ASSERT_TRUE(DoCreateOffer(offer.accept(), nullptr));
+
+  audio_content = cricket::GetFirstAudioContent(offer->description());
+  audio_desc = static_cast<const cricket::AudioContentDescription*>(
+      audio_content->description);
+  EXPECT_FALSE(
+      ContainsTrack(audio_desc->streams(), kStreamLabel1, "audio_track"));
+
+  video_content = cricket::GetFirstVideoContent(offer->description());
+  video_desc = static_cast<const cricket::VideoContentDescription*>(
+      video_content->description);
+  EXPECT_FALSE(
+      ContainsTrack(video_desc->streams(), kStreamLabel1, "video_track"));
+
+  EXPECT_TRUE(DoSetLocalDescription(offer.release()));
+
+  // Calling RemoveTrack on a sender no longer attached to a PeerConnection
+  // should return false.
+  EXPECT_FALSE(pc_->RemoveTrack(audio_sender));
+  EXPECT_FALSE(pc_->RemoveTrack(video_sender));
+}
+
+// Test creating senders without a stream specified,
+// expecting a random stream ID to be generated.
+TEST_F(PeerConnectionInterfaceTest, AddTrackWithoutStream) {
+  CreatePeerConnection();
+  // No stream is created; AddTrack should generate a random stream ID itself.
+  scoped_refptr<AudioTrackInterface> audio_track(
+      pc_factory_->CreateAudioTrack("audio_track", nullptr));
+  scoped_refptr<VideoTrackInterface> video_track(
+      pc_factory_->CreateVideoTrack("video_track", nullptr));
+  auto audio_sender =
+      pc_->AddTrack(audio_track, std::vector<MediaStreamInterface*>());
+  auto video_sender =
+      pc_->AddTrack(video_track, std::vector<MediaStreamInterface*>());
+  EXPECT_EQ("audio_track", audio_sender->id());
+  EXPECT_EQ(audio_track, audio_sender->track());
+  EXPECT_EQ("video_track", video_sender->id());
+  EXPECT_EQ(video_track, video_sender->track());
+  // If the IDs are truly random GUIDs, it should be vanishingly unlikely that
+  // they are the same.
+  EXPECT_NE(video_sender->stream_id(), audio_sender->stream_id());
+}
+
+TEST_F(PeerConnectionInterfaceTest, CreateOfferReceiveAnswer) {
+  InitiateCall();
+  WaitAndVerifyOnAddStream(kStreamLabel1);
+  VerifyRemoteRtpHeaderExtensions();
+}
+
+TEST_F(PeerConnectionInterfaceTest, CreateOfferReceivePrAnswerAndAnswer) {
+  CreatePeerConnection();
+  AddVideoStream(kStreamLabel1);
+  CreateOfferAsLocalDescription();
+  std::string offer;
+  EXPECT_TRUE(pc_->local_description()->ToString(&offer));
+  CreatePrAnswerAndAnswerAsRemoteDescription(offer);
+  WaitAndVerifyOnAddStream(kStreamLabel1);
+}
+
+TEST_F(PeerConnectionInterfaceTest, ReceiveOfferCreateAnswer) {
+  CreatePeerConnection();
+  AddVideoStream(kStreamLabel1);
+
+  CreateOfferAsRemoteDescription();
+  CreateAnswerAsLocalDescription();
+
+  WaitAndVerifyOnAddStream(kStreamLabel1);
+}
+
+TEST_F(PeerConnectionInterfaceTest, ReceiveOfferCreatePrAnswerAndAnswer) {
+  CreatePeerConnection();
+  AddVideoStream(kStreamLabel1);
+
+  CreateOfferAsRemoteDescription();
+  CreatePrAnswerAsLocalDescription();
+  CreateAnswerAsLocalDescription();
+
+  WaitAndVerifyOnAddStream(kStreamLabel1);
+}
+
+TEST_F(PeerConnectionInterfaceTest, Renegotiate) {
+  InitiateCall();
+  ASSERT_EQ(1u, pc_->remote_streams()->count());
+  pc_->RemoveStream(pc_->local_streams()->at(0));
+  CreateOfferReceiveAnswer();
+  EXPECT_EQ(0u, pc_->remote_streams()->count());
+  AddVideoStream(kStreamLabel1);
+  CreateOfferReceiveAnswer();
+}
+
+// Tests that after negotiating an audio only call, the respondent can perform a
+// renegotiation that removes the audio stream.
+TEST_F(PeerConnectionInterfaceTest, RenegotiateAudioOnly) {
+  CreatePeerConnection();
+  AddVoiceStream(kStreamLabel1);
+  CreateOfferAsRemoteDescription();
+  CreateAnswerAsLocalDescription();
+
+  ASSERT_EQ(1u, pc_->remote_streams()->count());
+  pc_->RemoveStream(pc_->local_streams()->at(0));
+  CreateOfferReceiveAnswer();
+  EXPECT_EQ(0u, pc_->remote_streams()->count());
+}
+
+// Test that candidates are generated and that we can parse our own candidates.
+TEST_F(PeerConnectionInterfaceTest, IceCandidates) {
+  CreatePeerConnection();
+
+  EXPECT_FALSE(pc_->AddIceCandidate(observer_.last_candidate_.get()));
+  // SetRemoteDescription takes ownership of offer.
+  SessionDescriptionInterface* offer = NULL;
+  AddVideoStream(kStreamLabel1);
+  EXPECT_TRUE(DoCreateOffer(&offer, nullptr));
+  EXPECT_TRUE(DoSetRemoteDescription(offer));
+
+  // SetLocalDescription takes ownership of answer.
+  SessionDescriptionInterface* answer = NULL;
+  EXPECT_TRUE(DoCreateAnswer(&answer, nullptr));
+  EXPECT_TRUE(DoSetLocalDescription(answer));
+
+  EXPECT_TRUE_WAIT(observer_.last_candidate_.get() != NULL, kTimeout);
+  EXPECT_TRUE_WAIT(observer_.ice_complete_, kTimeout);
+
+  EXPECT_TRUE(pc_->AddIceCandidate(observer_.last_candidate_.get()));
+}
+
+// Test that CreateOffer and CreateAnswer will fail if the track labels are
+// not unique.
+TEST_F(PeerConnectionInterfaceTest, CreateOfferAnswerWithInvalidStream) {
+  CreatePeerConnection();
+  // Create a regular offer for the CreateAnswer test later.
+  SessionDescriptionInterface* offer = NULL;
+  EXPECT_TRUE(DoCreateOffer(&offer, nullptr));
+  EXPECT_TRUE(offer != NULL);
+  delete offer;
+  offer = NULL;
+
+  // Create a local stream with audio&video tracks having the same label.
+  AddAudioVideoStream(kStreamLabel1, "track_label", "track_label");
+
+  // Test CreateOffer
+  EXPECT_FALSE(DoCreateOffer(&offer, nullptr));
+
+  // Test CreateAnswer
+  SessionDescriptionInterface* answer = NULL;
+  EXPECT_FALSE(DoCreateAnswer(&answer, nullptr));
+}
+
+// Test that we will get different SSRCs for each track in the offer and answer
+// we created.
+TEST_F(PeerConnectionInterfaceTest, SsrcInOfferAnswer) {
+  CreatePeerConnection();
+  // Create a local stream with audio&video tracks having different labels.
+  AddAudioVideoStream(kStreamLabel1, "audio_label", "video_label");
+
+  // Test CreateOffer
+  scoped_ptr<SessionDescriptionInterface> offer;
+  ASSERT_TRUE(DoCreateOffer(offer.use(), nullptr));
+  int audio_ssrc = 0;
+  int video_ssrc = 0;
+  EXPECT_TRUE(GetFirstSsrc(GetFirstAudioContent(offer->description()),
+                           &audio_ssrc));
+  EXPECT_TRUE(GetFirstSsrc(GetFirstVideoContent(offer->description()),
+                           &video_ssrc));
+  EXPECT_NE(audio_ssrc, video_ssrc);
+
+  // Test CreateAnswer
+  EXPECT_TRUE(DoSetRemoteDescription(offer.release()));
+  scoped_ptr<SessionDescriptionInterface> answer;
+  ASSERT_TRUE(DoCreateAnswer(answer.use(), nullptr));
+  audio_ssrc = 0;
+  video_ssrc = 0;
+  EXPECT_TRUE(GetFirstSsrc(GetFirstAudioContent(answer->description()),
+                           &audio_ssrc));
+  EXPECT_TRUE(GetFirstSsrc(GetFirstVideoContent(answer->description()),
+                           &video_ssrc));
+  EXPECT_NE(audio_ssrc, video_ssrc);
+}
+
+// Test that it's possible to call AddTrack on a MediaStream after adding
+// the stream to a PeerConnection.
+// TODO(deadbeef): Remove this test once this behavior is no longer supported.
+TEST_F(PeerConnectionInterfaceTest, AddTrackAfterAddStream) {
+  CreatePeerConnection();
+  // Create audio stream and add to PeerConnection.
+  AddVoiceStream(kStreamLabel1);
+  MediaStreamInterface* stream = pc_->local_streams()->at(0);
+
+  // Add video track to the audio-only stream.
+  scoped_refptr<VideoTrackInterface> video_track(
+      pc_factory_->CreateVideoTrack("video_label", nullptr));
+  stream->AddTrack(video_track.get());
+
+  scoped_ptr<SessionDescriptionInterface> offer;
+  ASSERT_TRUE(DoCreateOffer(offer.use(), nullptr));
+
+  const cricket::MediaContentDescription* video_desc =
+      cricket::GetFirstVideoContentDescription(offer->description());
+  EXPECT_TRUE(video_desc != nullptr);
+}
+
+// Test that it's possible to call RemoveTrack on a MediaStream after adding
+// the stream to a PeerConnection.
+// TODO(deadbeef): Remove this test once this behavior is no longer supported.
+TEST_F(PeerConnectionInterfaceTest, RemoveTrackAfterAddStream) {
+  CreatePeerConnection();
+  // Create audio/video stream and add to PeerConnection.
+  AddAudioVideoStream(kStreamLabel1, "audio_label", "video_label");
+  MediaStreamInterface* stream = pc_->local_streams()->at(0);
+
+  // Remove the video track.
+  stream->RemoveTrack(stream->GetVideoTracks()[0]);
+
+  scoped_ptr<SessionDescriptionInterface> offer;
+  ASSERT_TRUE(DoCreateOffer(offer.use(), nullptr));
+
+  const cricket::MediaContentDescription* video_desc =
+      cricket::GetFirstVideoContentDescription(offer->description());
+  EXPECT_TRUE(video_desc == nullptr);
+}
+
+// Test creating a sender with a stream ID, and ensure the ID is populated
+// in the offer.
+TEST_F(PeerConnectionInterfaceTest, CreateSenderWithStream) {
+  CreatePeerConnection();
+  pc_->CreateSender("video", kStreamLabel1);
+
+  scoped_ptr<SessionDescriptionInterface> offer;
+  ASSERT_TRUE(DoCreateOffer(offer.use(), nullptr));
+
+  const cricket::MediaContentDescription* video_desc =
+      cricket::GetFirstVideoContentDescription(offer->description());
+  ASSERT_TRUE(video_desc != nullptr);
+  ASSERT_EQ(1u, video_desc->streams().size());
+  EXPECT_EQ(kStreamLabel1, video_desc->streams()[0].sync_label);
+}
+
+// Test that we can specify a certain track that we want statistics about.
+TEST_F(PeerConnectionInterfaceTest, GetStatsForSpecificTrack) {
+  InitiateCall();
+  ASSERT_LT(0u, pc_->remote_streams()->count());
+  ASSERT_LT(0u, pc_->remote_streams()->at(0)->GetAudioTracks().size());
+  scoped_refptr<MediaStreamTrackInterface> remote_audio =
+      pc_->remote_streams()->at(0)->GetAudioTracks()[0];
+  EXPECT_TRUE(DoGetStats(remote_audio));
+
+  // Remove the stream. Since we are sending to ourselves, the local and the
+  // remote stream are the same.
+  pc_->RemoveStream(pc_->local_streams()->at(0));
+  // Do a re-negotiation.
+  CreateOfferReceiveAnswer();
+
+  ASSERT_EQ(0u, pc_->remote_streams()->count());
+
+  // Test that we can still get statistics for the old track, even though it is
+  // no longer sent.
+  EXPECT_TRUE(DoGetStats(remote_audio));
+}
+
+// Test that we can get stats on a video track.
+TEST_F(PeerConnectionInterfaceTest, GetStatsForVideoTrack) {
+  InitiateCall();
+  ASSERT_LT(0u, pc_->remote_streams()->count());
+  ASSERT_LT(0u, pc_->remote_streams()->at(0)->GetVideoTracks().size());
+  scoped_refptr<MediaStreamTrackInterface> remote_video =
+      pc_->remote_streams()->at(0)->GetVideoTracks()[0];
+  EXPECT_TRUE(DoGetStats(remote_video));
+}
+
+// Test that we don't get statistics for an invalid track.
+// TODO(tommi): Fix this test.  DoGetStats will return true
+// for the unknown track (since GetStats is async), but no
+// data is returned for the track.
+TEST_F(PeerConnectionInterfaceTest, DISABLED_GetStatsForInvalidTrack) {
+  InitiateCall();
+  scoped_refptr<AudioTrackInterface> unknown_audio_track(
+      pc_factory_->CreateAudioTrack("unknown track", NULL));
+  EXPECT_FALSE(DoGetStats(unknown_audio_track));
+}
+
+// This test sets up two RTP data channels in loopback.
+TEST_F(PeerConnectionInterfaceTest, TestDataChannel) {
+  FakeConstraints constraints;
+  constraints.SetAllowRtpDataChannels();
+  CreatePeerConnection(&constraints);
+  scoped_refptr<DataChannelInterface> data1 =
+      pc_->CreateDataChannel("test1", NULL);
+  scoped_refptr<DataChannelInterface> data2 =
+      pc_->CreateDataChannel("test2", NULL);
+  ASSERT_TRUE(data1 != NULL);
+  rtc::scoped_ptr<MockDataChannelObserver> observer1(
+      new MockDataChannelObserver(data1));
+  rtc::scoped_ptr<MockDataChannelObserver> observer2(
+      new MockDataChannelObserver(data2));
+
+  EXPECT_EQ(DataChannelInterface::kConnecting, data1->state());
+  EXPECT_EQ(DataChannelInterface::kConnecting, data2->state());
+  std::string data_to_send1 = "testing testing";
+  std::string data_to_send2 = "testing something else";
+  EXPECT_FALSE(data1->Send(DataBuffer(data_to_send1)));
+
+  CreateOfferReceiveAnswer();
+  EXPECT_TRUE_WAIT(observer1->IsOpen(), kTimeout);
+  EXPECT_TRUE_WAIT(observer2->IsOpen(), kTimeout);
+
+  EXPECT_EQ(DataChannelInterface::kOpen, data1->state());
+  EXPECT_EQ(DataChannelInterface::kOpen, data2->state());
+  EXPECT_TRUE(data1->Send(DataBuffer(data_to_send1)));
+  EXPECT_TRUE(data2->Send(DataBuffer(data_to_send2)));
+
+  EXPECT_EQ_WAIT(data_to_send1, observer1->last_message(), kTimeout);
+  EXPECT_EQ_WAIT(data_to_send2, observer2->last_message(), kTimeout);
+
+  data1->Close();
+  EXPECT_EQ(DataChannelInterface::kClosing, data1->state());
+  CreateOfferReceiveAnswer();
+  EXPECT_FALSE(observer1->IsOpen());
+  EXPECT_EQ(DataChannelInterface::kClosed, data1->state());
+  EXPECT_TRUE(observer2->IsOpen());
+
+  data_to_send2 = "testing something else again";
+  EXPECT_TRUE(data2->Send(DataBuffer(data_to_send2)));
+
+  EXPECT_EQ_WAIT(data_to_send2, observer2->last_message(), kTimeout);
+}
+
+// This test verifies that sending binary data over RTP data channels fails.
+TEST_F(PeerConnectionInterfaceTest, TestSendBinaryOnRtpDataChannel) {
+  FakeConstraints constraints;
+  constraints.SetAllowRtpDataChannels();
+  CreatePeerConnection(&constraints);
+  scoped_refptr<DataChannelInterface> data1 =
+      pc_->CreateDataChannel("test1", NULL);
+  scoped_refptr<DataChannelInterface> data2 =
+      pc_->CreateDataChannel("test2", NULL);
+  ASSERT_TRUE(data1 != NULL);
+  rtc::scoped_ptr<MockDataChannelObserver> observer1(
+      new MockDataChannelObserver(data1));
+  rtc::scoped_ptr<MockDataChannelObserver> observer2(
+      new MockDataChannelObserver(data2));
+
+  EXPECT_EQ(DataChannelInterface::kConnecting, data1->state());
+  EXPECT_EQ(DataChannelInterface::kConnecting, data2->state());
+
+  CreateOfferReceiveAnswer();
+  EXPECT_TRUE_WAIT(observer1->IsOpen(), kTimeout);
+  EXPECT_TRUE_WAIT(observer2->IsOpen(), kTimeout);
+
+  EXPECT_EQ(DataChannelInterface::kOpen, data1->state());
+  EXPECT_EQ(DataChannelInterface::kOpen, data2->state());
+
+  rtc::Buffer buffer("test", 4);
+  EXPECT_FALSE(data1->Send(DataBuffer(buffer, true)));
+}
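+
+// Note: DataBuffer(buffer, true) above constructs a binary message; RTP data
+// channels only carry text, so Send() is expected to fail.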
+
+// This test sets up an RTP data channel in loopback and tests that the channel
+// is opened even if the remote end answers with a zero SSRC.
+TEST_F(PeerConnectionInterfaceTest, TestSendOnlyDataChannel) {
+  FakeConstraints constraints;
+  constraints.SetAllowRtpDataChannels();
+  CreatePeerConnection(&constraints);
+  scoped_refptr<DataChannelInterface> data1 =
+      pc_->CreateDataChannel("test1", NULL);
+  rtc::scoped_ptr<MockDataChannelObserver> observer1(
+      new MockDataChannelObserver(data1));
+
+  CreateOfferReceiveAnswerWithoutSsrc();
+
+  EXPECT_TRUE_WAIT(observer1->IsOpen(), kTimeout);
+
+  data1->Close();
+  EXPECT_EQ(DataChannelInterface::kClosing, data1->state());
+  CreateOfferReceiveAnswerWithoutSsrc();
+  EXPECT_EQ(DataChannelInterface::kClosed, data1->state());
+  EXPECT_FALSE(observer1->IsOpen());
+}
+
+// This tests that if a data channel is added in an answer, a receive-only
+// channel is created.
+TEST_F(PeerConnectionInterfaceTest, TestReceiveOnlyDataChannel) {
+  FakeConstraints constraints;
+  constraints.SetAllowRtpDataChannels();
+  CreatePeerConnection(&constraints);
+
+  std::string offer_label = "offer_channel";
+  scoped_refptr<DataChannelInterface> offer_channel =
+      pc_->CreateDataChannel(offer_label, NULL);
+
+  CreateOfferAsLocalDescription();
+
+  // Replace the data channel label in the offer and apply it as an answer.
+  std::string receive_label = "answer_channel";
+  std::string sdp;
+  EXPECT_TRUE(pc_->local_description()->ToString(&sdp));
+  rtc::replace_substrs(offer_label.c_str(), offer_label.length(),
+                       receive_label.c_str(), receive_label.length(), &sdp);
+  CreateAnswerAsRemoteDescription(sdp);
+
+  // Verify that a new incoming data channel has been created and that
+  // it is open but can't be written to.
+  ASSERT_TRUE(observer_.last_datachannel_ != NULL);
+  DataChannelInterface* received_channel = observer_.last_datachannel_;
+  EXPECT_EQ(DataChannelInterface::kConnecting, received_channel->state());
+  EXPECT_EQ(receive_label, received_channel->label());
+  EXPECT_FALSE(received_channel->Send(DataBuffer("something")));
+
+  // Verify that the channel we initially offered has been rejected.
+  EXPECT_EQ(DataChannelInterface::kClosed, offer_channel->state());
+
+  // Do another offer / answer exchange and verify that the data channel is
+  // opened.
+  CreateOfferReceiveAnswer();
+  EXPECT_EQ_WAIT(DataChannelInterface::kOpen, received_channel->state(),
+                 kTimeout);
+}
+
+// This tests that no data channel is returned if a reliable channel is
+// requested.
+// TODO(perkj): Remove this test once reliable channels are implemented.
+TEST_F(PeerConnectionInterfaceTest, CreateReliableRtpDataChannelShouldFail) {
+  FakeConstraints constraints;
+  constraints.SetAllowRtpDataChannels();
+  CreatePeerConnection(&constraints);
+
+  std::string label = "test";
+  webrtc::DataChannelInit config;
+  config.reliable = true;
+  scoped_refptr<DataChannelInterface> channel =
+      pc_->CreateDataChannel(label, &config);
+  EXPECT_TRUE(channel == NULL);
+}
+
+// Verifies that a duplicated label is not allowed for RTP data channels.
+TEST_F(PeerConnectionInterfaceTest, RtpDuplicatedLabelNotAllowed) {
+  FakeConstraints constraints;
+  constraints.SetAllowRtpDataChannels();
+  CreatePeerConnection(&constraints);
+
+  std::string label = "test";
+  scoped_refptr<DataChannelInterface> channel =
+      pc_->CreateDataChannel(label, nullptr);
+  EXPECT_NE(channel, nullptr);
+
+  scoped_refptr<DataChannelInterface> dup_channel =
+      pc_->CreateDataChannel(label, nullptr);
+  EXPECT_EQ(dup_channel, nullptr);
+}
+
+// This tests that an SCTP data channel is returned for various
+// DataChannelInit configurations.
+TEST_F(PeerConnectionInterfaceTest, CreateSctpDataChannel) {
+  FakeConstraints constraints;
+  constraints.SetAllowDtlsSctpDataChannels();
+  CreatePeerConnection(&constraints);
+
+  webrtc::DataChannelInit config;
+
+  scoped_refptr<DataChannelInterface> channel =
+      pc_->CreateDataChannel("1", &config);
+  EXPECT_TRUE(channel != NULL);
+  EXPECT_TRUE(channel->reliable());
+  EXPECT_TRUE(observer_.renegotiation_needed_);
+  observer_.renegotiation_needed_ = false;
+
+  config.ordered = false;
+  channel = pc_->CreateDataChannel("2", &config);
+  EXPECT_TRUE(channel != NULL);
+  EXPECT_TRUE(channel->reliable());
+  EXPECT_FALSE(observer_.renegotiation_needed_);
+
+  config.ordered = true;
+  config.maxRetransmits = 0;
+  channel = pc_->CreateDataChannel("3", &config);
+  EXPECT_TRUE(channel != NULL);
+  EXPECT_FALSE(channel->reliable());
+  EXPECT_FALSE(observer_.renegotiation_needed_);
+
+  config.maxRetransmits = -1;
+  config.maxRetransmitTime = 0;
+  channel = pc_->CreateDataChannel("4", &config);
+  EXPECT_TRUE(channel != NULL);
+  EXPECT_FALSE(channel->reliable());
+  EXPECT_FALSE(observer_.renegotiation_needed_);
+}
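+
+// As exercised above, reliable() is reported as true only when neither
+// maxRetransmits nor maxRetransmitTime is set; configuring either bound makes
+// the channel unreliable. Only the first channel fires OnRenegotiationNeeded,
+// presumably because a renegotiation is already pending for the later ones.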
+
+// This tests that no data channel is returned if both maxRetransmits and
+// maxRetransmitTime are set for SCTP data channels.
+TEST_F(PeerConnectionInterfaceTest,
+       CreateSctpDataChannelShouldFailForInvalidConfig) {
+  FakeConstraints constraints;
+  constraints.SetAllowDtlsSctpDataChannels();
+  CreatePeerConnection(&constraints);
+
+  std::string label = "test";
+  webrtc::DataChannelInit config;
+  config.maxRetransmits = 0;
+  config.maxRetransmitTime = 0;
+
+  scoped_refptr<DataChannelInterface> channel =
+      pc_->CreateDataChannel(label, &config);
+  EXPECT_TRUE(channel == NULL);
+}
+
+// The test verifies that creating an SCTP data channel with an id already in
+// use or out of range fails.
+TEST_F(PeerConnectionInterfaceTest,
+       CreateSctpDataChannelWithInvalidIdShouldFail) {
+  FakeConstraints constraints;
+  constraints.SetAllowDtlsSctpDataChannels();
+  CreatePeerConnection(&constraints);
+
+  webrtc::DataChannelInit config;
+  scoped_refptr<DataChannelInterface> channel;
+
+  config.id = 1;
+  channel = pc_->CreateDataChannel("1", &config);
+  EXPECT_TRUE(channel != NULL);
+  EXPECT_EQ(1, channel->id());
+
+  channel = pc_->CreateDataChannel("x", &config);
+  EXPECT_TRUE(channel == NULL);
+
+  config.id = cricket::kMaxSctpSid;
+  channel = pc_->CreateDataChannel("max", &config);
+  EXPECT_TRUE(channel != NULL);
+  EXPECT_EQ(config.id, channel->id());
+
+  config.id = cricket::kMaxSctpSid + 1;
+  channel = pc_->CreateDataChannel("x", &config);
+  EXPECT_TRUE(channel == NULL);
+}
+
+// Verifies that a duplicated label is allowed for SCTP data channels.
+TEST_F(PeerConnectionInterfaceTest, SctpDuplicatedLabelAllowed) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+
+  std::string label = "test";
+  scoped_refptr<DataChannelInterface> channel =
+      pc_->CreateDataChannel(label, nullptr);
+  EXPECT_NE(channel, nullptr);
+
+  scoped_refptr<DataChannelInterface> dup_channel =
+      pc_->CreateDataChannel(label, nullptr);
+  EXPECT_NE(dup_channel, nullptr);
+}
+
+// This test verifies that OnRenegotiationNeeded is fired for every new RTP
+// DataChannel.
+TEST_F(PeerConnectionInterfaceTest, RenegotiationNeededForNewRtpDataChannel) {
+  FakeConstraints constraints;
+  constraints.SetAllowRtpDataChannels();
+  CreatePeerConnection(&constraints);
+
+  scoped_refptr<DataChannelInterface> dc1 =
+      pc_->CreateDataChannel("test1", NULL);
+  EXPECT_TRUE(observer_.renegotiation_needed_);
+  observer_.renegotiation_needed_ = false;
+
+  scoped_refptr<DataChannelInterface> dc2 =
+      pc_->CreateDataChannel("test2", NULL);
+  EXPECT_TRUE(observer_.renegotiation_needed_);
+}
+
+// This tests that a data channel closes when a PeerConnection is
+// deleted/closed.
+TEST_F(PeerConnectionInterfaceTest, DataChannelCloseWhenPeerConnectionClose) {
+  FakeConstraints constraints;
+  constraints.SetAllowRtpDataChannels();
+  CreatePeerConnection(&constraints);
+
+  scoped_refptr<DataChannelInterface> data1 =
+      pc_->CreateDataChannel("test1", NULL);
+  scoped_refptr<DataChannelInterface> data2 =
+      pc_->CreateDataChannel("test2", NULL);
+  ASSERT_TRUE(data1 != NULL);
+  rtc::scoped_ptr<MockDataChannelObserver> observer1(
+      new MockDataChannelObserver(data1));
+  rtc::scoped_ptr<MockDataChannelObserver> observer2(
+      new MockDataChannelObserver(data2));
+
+  CreateOfferReceiveAnswer();
+  EXPECT_TRUE_WAIT(observer1->IsOpen(), kTimeout);
+  EXPECT_TRUE_WAIT(observer2->IsOpen(), kTimeout);
+
+  ReleasePeerConnection();
+  EXPECT_EQ(DataChannelInterface::kClosed, data1->state());
+  EXPECT_EQ(DataChannelInterface::kClosed, data2->state());
+}
+
+// This tests that data channels can be rejected in an answer.
+TEST_F(PeerConnectionInterfaceTest, TestRejectDataChannelInAnswer) {
+  FakeConstraints constraints;
+  constraints.SetAllowRtpDataChannels();
+  CreatePeerConnection(&constraints);
+
+  scoped_refptr<DataChannelInterface> offer_channel(
+      pc_->CreateDataChannel("offer_channel", NULL));
+
+  CreateOfferAsLocalDescription();
+
+  // Create an answer where the m-line for data channels is rejected.
+  std::string sdp;
+  EXPECT_TRUE(pc_->local_description()->ToString(&sdp));
+  webrtc::JsepSessionDescription* answer = new webrtc::JsepSessionDescription(
+      SessionDescriptionInterface::kAnswer);
+  EXPECT_TRUE(answer->Initialize(sdp, NULL));
+  cricket::ContentInfo* data_info =
+      answer->description()->GetContentByName("data");
+  data_info->rejected = true;
+
+  DoSetRemoteDescription(answer);
+  EXPECT_EQ(DataChannelInterface::kClosed, offer_channel->state());
+}
+
+// Test that we can create a session description from an SDP string from
+// Firefox, use it as a remote session description, generate an answer, and use
+// the answer as a local description.
+TEST_F(PeerConnectionInterfaceTest, ReceiveFireFoxOffer) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  AddAudioVideoStream(kStreamLabel1, "audio_label", "video_label");
+  SessionDescriptionInterface* desc =
+      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
+                                       webrtc::kFireFoxSdpOffer, nullptr);
+  EXPECT_TRUE(DoSetSessionDescription(desc, false));
+  CreateAnswerAsLocalDescription();
+  ASSERT_TRUE(pc_->local_description() != NULL);
+  ASSERT_TRUE(pc_->remote_description() != NULL);
+
+  const cricket::ContentInfo* content =
+      cricket::GetFirstAudioContent(pc_->local_description()->description());
+  ASSERT_TRUE(content != NULL);
+  EXPECT_FALSE(content->rejected);
+
+  content =
+      cricket::GetFirstVideoContent(pc_->local_description()->description());
+  ASSERT_TRUE(content != NULL);
+  EXPECT_FALSE(content->rejected);
+#ifdef HAVE_SCTP
+  content =
+      cricket::GetFirstDataContent(pc_->local_description()->description());
+  ASSERT_TRUE(content != NULL);
+  EXPECT_TRUE(content->rejected);
+#endif
+}
+
+// Test that we can create an audio-only offer, receive an answer with a
+// limited set of audio codecs, and then receive an updated offer with more
+// audio codecs, where the added codecs are not supported.
+TEST_F(PeerConnectionInterfaceTest, ReceiveUpdatedAudioOfferWithBadCodecs) {
+  CreatePeerConnection();
+  AddVoiceStream("audio_label");
+  CreateOfferAsLocalDescription();
+
+  SessionDescriptionInterface* answer =
+      webrtc::CreateSessionDescription(SessionDescriptionInterface::kAnswer,
+                                       webrtc::kAudioSdp, nullptr);
+  EXPECT_TRUE(DoSetSessionDescription(answer, false));
+
+  SessionDescriptionInterface* updated_offer =
+      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
+                                       webrtc::kAudioSdpWithUnsupportedCodecs,
+                                       nullptr);
+  EXPECT_TRUE(DoSetSessionDescription(updated_offer, false));
+  CreateAnswerAsLocalDescription();
+}
+
+// Test that if we're receiving (but not sending) a track, subsequent offers
+// will have m-lines with a=recvonly.
+TEST_F(PeerConnectionInterfaceTest, CreateSubsequentRecvOnlyOffer) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  CreateAndSetRemoteOffer(kSdpStringWithStream1);
+  CreateAnswerAsLocalDescription();
+
+  // At this point we should be receiving stream 1, but not sending anything.
+  // A new offer should be recvonly.
+  SessionDescriptionInterface* offer;
+  DoCreateOffer(&offer, nullptr);
+
+  const cricket::ContentInfo* video_content =
+      cricket::GetFirstVideoContent(offer->description());
+  const cricket::VideoContentDescription* video_desc =
+      static_cast<const cricket::VideoContentDescription*>(
+          video_content->description);
+  ASSERT_EQ(cricket::MD_RECVONLY, video_desc->direction());
+
+  const cricket::ContentInfo* audio_content =
+      cricket::GetFirstAudioContent(offer->description());
+  const cricket::AudioContentDescription* audio_desc =
+      static_cast<const cricket::AudioContentDescription*>(
+          audio_content->description);
+  ASSERT_EQ(cricket::MD_RECVONLY, audio_desc->direction());
+}
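+
+// In SDP terms, cricket::MD_RECVONLY above corresponds to an a=recvonly
+// attribute on the m-line, and cricket::MD_INACTIVE in the next test to
+// a=inactive (RFC 3264).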
+
+// Test that if we're receiving (but not sending) a track, and the
+// offerToReceiveVideo/offerToReceiveAudio constraints are explicitly set to
+// false, the generated m-lines will be a=inactive.
+TEST_F(PeerConnectionInterfaceTest, CreateSubsequentInactiveOffer) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  CreateAndSetRemoteOffer(kSdpStringWithStream1);
+  CreateAnswerAsLocalDescription();
+
+  // At this point we should be receiving stream 1, but not sending anything.
+  // A new offer would be recvonly, but we'll set the "no receive" constraints
+  // to make it inactive.
+  SessionDescriptionInterface* offer;
+  FakeConstraints offer_constraints;
+  offer_constraints.AddMandatory(
+      webrtc::MediaConstraintsInterface::kOfferToReceiveVideo, false);
+  offer_constraints.AddMandatory(
+      webrtc::MediaConstraintsInterface::kOfferToReceiveAudio, false);
+  DoCreateOffer(&offer, &offer_constraints);
+
+  const cricket::ContentInfo* video_content =
+      cricket::GetFirstVideoContent(offer->description());
+  const cricket::VideoContentDescription* video_desc =
+      static_cast<const cricket::VideoContentDescription*>(
+          video_content->description);
+  ASSERT_EQ(cricket::MD_INACTIVE, video_desc->direction());
+
+  const cricket::ContentInfo* audio_content =
+      cricket::GetFirstAudioContent(offer->description());
+  const cricket::AudioContentDescription* audio_desc =
+      static_cast<const cricket::AudioContentDescription*>(
+          audio_content->description);
+  ASSERT_EQ(cricket::MD_INACTIVE, audio_desc->direction());
+}
+
+// Test that we can use SetConfiguration to change the ICE servers of the
+// PortAllocator.
+TEST_F(PeerConnectionInterfaceTest, SetConfigurationChangesIceServers) {
+  CreatePeerConnection();
+
+  PeerConnectionInterface::RTCConfiguration config;
+  PeerConnectionInterface::IceServer server;
+  server.uri = "stun:test_hostname";
+  config.servers.push_back(server);
+  EXPECT_TRUE(pc_->SetConfiguration(config));
+
+  EXPECT_EQ(1u, port_allocator_->stun_servers().size());
+  EXPECT_EQ("test_hostname",
+            port_allocator_->stun_servers().begin()->hostname());
+}
+
+// Test that PeerConnection::Close changes the states to closed and all remote
+// tracks change state to ended.
+TEST_F(PeerConnectionInterfaceTest, CloseAndTestStreamsAndStates) {
+  // Initialize a PeerConnection and negotiate local and remote session
+  // description.
+  InitiateCall();
+  ASSERT_EQ(1u, pc_->local_streams()->count());
+  ASSERT_EQ(1u, pc_->remote_streams()->count());
+
+  pc_->Close();
+
+  EXPECT_EQ(PeerConnectionInterface::kClosed, pc_->signaling_state());
+  EXPECT_EQ(PeerConnectionInterface::kIceConnectionClosed,
+            pc_->ice_connection_state());
+  EXPECT_EQ(PeerConnectionInterface::kIceGatheringComplete,
+            pc_->ice_gathering_state());
+
+  EXPECT_EQ(1u, pc_->local_streams()->count());
+  EXPECT_EQ(1u, pc_->remote_streams()->count());
+
+  scoped_refptr<MediaStreamInterface> remote_stream =
+      pc_->remote_streams()->at(0);
+  EXPECT_EQ(MediaStreamTrackInterface::kEnded,
+            remote_stream->GetVideoTracks()[0]->state());
+  EXPECT_EQ(MediaStreamTrackInterface::kEnded,
+            remote_stream->GetAudioTracks()[0]->state());
+}
+
+// Test that PeerConnection methods fail gracefully after
+// PeerConnection::Close has been called.
+TEST_F(PeerConnectionInterfaceTest, CloseAndTestMethods) {
+  CreatePeerConnection();
+  AddAudioVideoStream(kStreamLabel1, "audio_label", "video_label");
+  CreateOfferAsRemoteDescription();
+  CreateAnswerAsLocalDescription();
+
+  ASSERT_EQ(1u, pc_->local_streams()->count());
+  scoped_refptr<MediaStreamInterface> local_stream =
+      pc_->local_streams()->at(0);
+
+  pc_->Close();
+
+  pc_->RemoveStream(local_stream);
+  EXPECT_FALSE(pc_->AddStream(local_stream));
+
+  ASSERT_FALSE(local_stream->GetAudioTracks().empty());
+  rtc::scoped_refptr<webrtc::DtmfSenderInterface> dtmf_sender(
+      pc_->CreateDtmfSender(local_stream->GetAudioTracks()[0]));
+  EXPECT_TRUE(NULL == dtmf_sender);  // local stream has been removed.
+
+  EXPECT_TRUE(pc_->CreateDataChannel("test", NULL) == NULL);
+
+  EXPECT_TRUE(pc_->local_description() != NULL);
+  EXPECT_TRUE(pc_->remote_description() != NULL);
+
+  rtc::scoped_ptr<SessionDescriptionInterface> offer;
+  EXPECT_TRUE(DoCreateOffer(offer.use(), nullptr));
+  rtc::scoped_ptr<SessionDescriptionInterface> answer;
+  EXPECT_TRUE(DoCreateAnswer(answer.use(), nullptr));
+
+  std::string sdp;
+  ASSERT_TRUE(pc_->remote_description()->ToString(&sdp));
+  SessionDescriptionInterface* remote_offer =
+      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
+                                       sdp, NULL);
+  EXPECT_FALSE(DoSetRemoteDescription(remote_offer));
+
+  ASSERT_TRUE(pc_->local_description()->ToString(&sdp));
+  SessionDescriptionInterface* local_offer =
+      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
+                                       sdp, NULL);
+  EXPECT_FALSE(DoSetLocalDescription(local_offer));
+}
+
+// Test that GetStats can still be called after PeerConnection::Close.
+TEST_F(PeerConnectionInterfaceTest, CloseAndGetStats) {
+  InitiateCall();
+  pc_->Close();
+  DoGetStats(NULL);
+}
+
+// NOTE: The series of tests below come from what used to be
+// mediastreamsignaling_unittest.cc, and are mostly aimed at testing that
+// setting a remote or local description has the expected effects.
+
+// This test verifies that the remote MediaStreams corresponding to a received
+// SDP string are created. In this test, two separate MediaStreams are
+// signaled.
+TEST_F(PeerConnectionInterfaceTest, UpdateRemoteStreams) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  CreateAndSetRemoteOffer(kSdpStringWithStream1);
+
+  rtc::scoped_refptr<StreamCollection> reference(CreateStreamCollection(1));
+  EXPECT_TRUE(
+      CompareStreamCollections(observer_.remote_streams(), reference.get()));
+  MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+  EXPECT_TRUE(remote_stream->GetVideoTracks()[0]->GetSource() != nullptr);
+
+  // Create a session description based on another SDP with another
+  // MediaStream.
+  CreateAndSetRemoteOffer(kSdpStringWithStream1And2);
+
+  rtc::scoped_refptr<StreamCollection> reference2(CreateStreamCollection(2));
+  EXPECT_TRUE(
+      CompareStreamCollections(observer_.remote_streams(), reference2.get()));
+}
+
+// This test verifies that when remote tracks are added/removed from SDP, the
+// created remote streams are updated appropriately.
+TEST_F(PeerConnectionInterfaceTest,
+       AddRemoveTrackFromExistingRemoteMediaStream) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  rtc::scoped_ptr<SessionDescriptionInterface> desc_ms1;
+  CreateSessionDescriptionAndReference(1, 1, desc_ms1.accept());
+  EXPECT_TRUE(DoSetRemoteDescription(desc_ms1.release()));
+  EXPECT_TRUE(CompareStreamCollections(observer_.remote_streams(),
+                                       reference_collection_));
+
+  // Add extra audio and video tracks to the same MediaStream.
+  rtc::scoped_ptr<SessionDescriptionInterface> desc_ms1_two_tracks;
+  CreateSessionDescriptionAndReference(2, 2, desc_ms1_two_tracks.accept());
+  EXPECT_TRUE(DoSetRemoteDescription(desc_ms1_two_tracks.release()));
+  EXPECT_TRUE(CompareStreamCollections(observer_.remote_streams(),
+                                       reference_collection_));
+
+  // Remove the extra audio and video tracks.
+  rtc::scoped_ptr<SessionDescriptionInterface> desc_ms2;
+  CreateSessionDescriptionAndReference(1, 1, desc_ms2.accept());
+  EXPECT_TRUE(DoSetRemoteDescription(desc_ms2.release()));
+  EXPECT_TRUE(CompareStreamCollections(observer_.remote_streams(),
+                                       reference_collection_));
+}
+
+// This tests that remote tracks are ended if a local session description is set
+// that rejects the media content type.
+TEST_F(PeerConnectionInterfaceTest, RejectMediaContent) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  // First create and set a remote offer, then reject its video content in our
+  // answer.
+  CreateAndSetRemoteOffer(kSdpStringWithStream1);
+  ASSERT_EQ(1u, observer_.remote_streams()->count());
+  MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+  ASSERT_EQ(1u, remote_stream->GetVideoTracks().size());
+  ASSERT_EQ(1u, remote_stream->GetAudioTracks().size());
+
+  rtc::scoped_refptr<webrtc::VideoTrackInterface> remote_video =
+      remote_stream->GetVideoTracks()[0];
+  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_video->state());
+  rtc::scoped_refptr<webrtc::AudioTrackInterface> remote_audio =
+      remote_stream->GetAudioTracks()[0];
+  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_audio->state());
+
+  rtc::scoped_ptr<SessionDescriptionInterface> local_answer;
+  EXPECT_TRUE(DoCreateAnswer(local_answer.accept(), nullptr));
+  cricket::ContentInfo* video_info =
+      local_answer->description()->GetContentByName("video");
+  video_info->rejected = true;
+  EXPECT_TRUE(DoSetLocalDescription(local_answer.release()));
+  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, remote_video->state());
+  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_audio->state());
+
+  // Now create an offer where we reject both video and audio.
+  rtc::scoped_ptr<SessionDescriptionInterface> local_offer;
+  EXPECT_TRUE(DoCreateOffer(local_offer.accept(), nullptr));
+  video_info = local_offer->description()->GetContentByName("video");
+  ASSERT_TRUE(video_info != nullptr);
+  video_info->rejected = true;
+  cricket::ContentInfo* audio_info =
+      local_offer->description()->GetContentByName("audio");
+  ASSERT_TRUE(audio_info != nullptr);
+  audio_info->rejected = true;
+  EXPECT_TRUE(DoSetLocalDescription(local_offer.release()));
+  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, remote_video->state());
+  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, remote_audio->state());
+}
+
+// This tests that we won't crash if the remote track has been removed outside
+// of PeerConnection and then PeerConnection tries to reject the track.
+TEST_F(PeerConnectionInterfaceTest, RemoveTrackThenRejectMediaContent) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  CreateAndSetRemoteOffer(kSdpStringWithStream1);
+  MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+  remote_stream->RemoveTrack(remote_stream->GetVideoTracks()[0]);
+  remote_stream->RemoveTrack(remote_stream->GetAudioTracks()[0]);
+
+  rtc::scoped_ptr<SessionDescriptionInterface> local_answer(
+      webrtc::CreateSessionDescription(SessionDescriptionInterface::kAnswer,
+                                       kSdpStringWithStream1, nullptr));
+  cricket::ContentInfo* video_info =
+      local_answer->description()->GetContentByName("video");
+  video_info->rejected = true;
+  cricket::ContentInfo* audio_info =
+      local_answer->description()->GetContentByName("audio");
+  audio_info->rejected = true;
+  EXPECT_TRUE(DoSetLocalDescription(local_answer.release()));
+
+  // No crash is a pass.
+}
+
+// This tests that if a recvonly remote description is set, no remote streams
+// will be created, even if the description contains SSRCs/MSIDs.
+// See: https://code.google.com/p/webrtc/issues/detail?id=5054
+TEST_F(PeerConnectionInterfaceTest, RecvonlyDescriptionDoesntCreateStream) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+
+  std::string recvonly_offer = kSdpStringWithStream1;
+  rtc::replace_substrs(kSendrecv, strlen(kSendrecv), kRecvonly,
+                       strlen(kRecvonly), &recvonly_offer);
+  CreateAndSetRemoteOffer(recvonly_offer);
+
+  EXPECT_EQ(0u, observer_.remote_streams()->count());
+}
+
+// This tests that a default MediaStream is created if a remote session
+// description doesn't contain any streams and doesn't indicate MSID support.
+// It also tests that the default stream is updated if a video m-line is added
+// in a subsequent session description.
+TEST_F(PeerConnectionInterfaceTest, SdpWithoutMsidCreatesDefaultStream) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  CreateAndSetRemoteOffer(kSdpStringWithoutStreamsAudioOnly);
+
+  ASSERT_EQ(1u, observer_.remote_streams()->count());
+  MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+
+  EXPECT_EQ(1u, remote_stream->GetAudioTracks().size());
+  EXPECT_EQ(0u, remote_stream->GetVideoTracks().size());
+  EXPECT_EQ("default", remote_stream->label());
+
+  CreateAndSetRemoteOffer(kSdpStringWithoutStreams);
+  ASSERT_EQ(1u, observer_.remote_streams()->count());
+  ASSERT_EQ(1u, remote_stream->GetAudioTracks().size());
+  EXPECT_EQ("defaulta0", remote_stream->GetAudioTracks()[0]->id());
+  EXPECT_EQ(MediaStreamTrackInterface::kLive,
+            remote_stream->GetAudioTracks()[0]->state());
+  ASSERT_EQ(1u, remote_stream->GetVideoTracks().size());
+  EXPECT_EQ("defaultv0", remote_stream->GetVideoTracks()[0]->id());
+  EXPECT_EQ(MediaStreamTrackInterface::kLive,
+            remote_stream->GetVideoTracks()[0]->state());
+}
+
+// This tests that a default MediaStream is created if a remote session
+// description doesn't contain any streams and media direction is send only.
+TEST_F(PeerConnectionInterfaceTest,
+       SendOnlySdpWithoutMsidCreatesDefaultStream) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  CreateAndSetRemoteOffer(kSdpStringSendOnlyWithoutStreams);
+
+  ASSERT_EQ(1u, observer_.remote_streams()->count());
+  MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+
+  EXPECT_EQ(1u, remote_stream->GetAudioTracks().size());
+  EXPECT_EQ(1u, remote_stream->GetVideoTracks().size());
+  EXPECT_EQ("default", remote_stream->label());
+}
+
+// This tests that PeerConnection won't crash when it tries to remove
+// a remote track that has already been removed from the MediaStream.
+TEST_F(PeerConnectionInterfaceTest, RemoveAlreadyGoneRemoteStream) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  CreateAndSetRemoteOffer(kSdpStringWithStream1);
+  MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+  remote_stream->RemoveTrack(remote_stream->GetAudioTracks()[0]);
+  remote_stream->RemoveTrack(remote_stream->GetVideoTracks()[0]);
+
+  CreateAndSetRemoteOffer(kSdpStringWithoutStreams);
+
+  // No crash is a pass.
+}
+
+// This tests that a default MediaStream is created if the remote session
+// description doesn't contain any streams and gives no indication of whether
+// MSID is supported.
+TEST_F(PeerConnectionInterfaceTest,
+       SdpWithoutMsidAndStreamsCreatesDefaultStream) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  CreateAndSetRemoteOffer(kSdpStringWithoutStreams);
+
+  ASSERT_EQ(1u, observer_.remote_streams()->count());
+  MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+  EXPECT_EQ(1u, remote_stream->GetAudioTracks().size());
+  EXPECT_EQ(1u, remote_stream->GetVideoTracks().size());
+}
+
+// This tests that a default MediaStream is not created if the remote session
+// description doesn't contain any streams but does support MSID.
+TEST_F(PeerConnectionInterfaceTest, SdpWithMsidDontCreatesDefaultStream) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  CreateAndSetRemoteOffer(kSdpStringWithMsidWithoutStreams);
+  EXPECT_EQ(0u, observer_.remote_streams()->count());
+}
+
+// This tests that when setting a new description, the old default tracks are
+// not destroyed and recreated.
+// See: https://bugs.chromium.org/p/webrtc/issues/detail?id=5250
+TEST_F(PeerConnectionInterfaceTest, DefaultTracksNotDestroyedAndRecreated) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  CreateAndSetRemoteOffer(kSdpStringWithoutStreamsAudioOnly);
+
+  ASSERT_EQ(1u, observer_.remote_streams()->count());
+  MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+  ASSERT_EQ(1u, remote_stream->GetAudioTracks().size());
+
+  // Set the track to "disabled", then set a new description and ensure the
+  // track is still disabled, which ensures it hasn't been recreated.
+  remote_stream->GetAudioTracks()[0]->set_enabled(false);
+  CreateAndSetRemoteOffer(kSdpStringWithoutStreamsAudioOnly);
+  ASSERT_EQ(1u, remote_stream->GetAudioTracks().size());
+  EXPECT_FALSE(remote_stream->GetAudioTracks()[0]->enabled());
+}
+
+// This tests that a default MediaStream is not created if a remote session
+// description is updated to not have any MediaStreams.
+TEST_F(PeerConnectionInterfaceTest, VerifyDefaultStreamIsNotCreated) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  CreateAndSetRemoteOffer(kSdpStringWithStream1);
+  rtc::scoped_refptr<StreamCollection> reference(CreateStreamCollection(1));
+  EXPECT_TRUE(
+      CompareStreamCollections(observer_.remote_streams(), reference.get()));
+
+  CreateAndSetRemoteOffer(kSdpStringWithoutStreams);
+  EXPECT_EQ(0u, observer_.remote_streams()->count());
+}
+
+// This tests that an RtpSender is created when the local description is set
+// after adding a local stream.
+// TODO(deadbeef): This test and the one below it need to be updated when
+// an RtpSender's lifetime isn't determined by when a local description is set.
+TEST_F(PeerConnectionInterfaceTest, LocalDescriptionChanged) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  // Create an offer just to ensure we have an identity before we manually
+  // call SetLocalDescription.
+  rtc::scoped_ptr<SessionDescriptionInterface> throwaway;
+  ASSERT_TRUE(DoCreateOffer(throwaway.accept(), nullptr));
+
+  rtc::scoped_ptr<SessionDescriptionInterface> desc_1;
+  CreateSessionDescriptionAndReference(2, 2, desc_1.accept());
+
+  pc_->AddStream(reference_collection_->at(0));
+  EXPECT_TRUE(DoSetLocalDescription(desc_1.release()));
+  auto senders = pc_->GetSenders();
+  EXPECT_EQ(4u, senders.size());
+  EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
+  EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+  EXPECT_TRUE(ContainsSender(senders, kAudioTracks[1]));
+  EXPECT_TRUE(ContainsSender(senders, kVideoTracks[1]));
+
+  // Remove an audio and video track.
+  pc_->RemoveStream(reference_collection_->at(0));
+  rtc::scoped_ptr<SessionDescriptionInterface> desc_2;
+  CreateSessionDescriptionAndReference(1, 1, desc_2.accept());
+  pc_->AddStream(reference_collection_->at(0));
+  EXPECT_TRUE(DoSetLocalDescription(desc_2.release()));
+  senders = pc_->GetSenders();
+  EXPECT_EQ(2u, senders.size());
+  EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
+  EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+  EXPECT_FALSE(ContainsSender(senders, kAudioTracks[1]));
+  EXPECT_FALSE(ContainsSender(senders, kVideoTracks[1]));
+}
+
+// This tests that an RtpSender is created when the local description is set
+// before adding a local stream.
+TEST_F(PeerConnectionInterfaceTest,
+       AddLocalStreamAfterLocalDescriptionChanged) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  // Create an offer just to ensure we have an identity before we manually
+  // call SetLocalDescription.
+  rtc::scoped_ptr<SessionDescriptionInterface> throwaway;
+  ASSERT_TRUE(DoCreateOffer(throwaway.accept(), nullptr));
+
+  rtc::scoped_ptr<SessionDescriptionInterface> desc_1;
+  CreateSessionDescriptionAndReference(2, 2, desc_1.accept());
+
+  EXPECT_TRUE(DoSetLocalDescription(desc_1.release()));
+  auto senders = pc_->GetSenders();
+  EXPECT_EQ(0u, senders.size());
+
+  pc_->AddStream(reference_collection_->at(0));
+  senders = pc_->GetSenders();
+  EXPECT_EQ(4u, senders.size());
+  EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
+  EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+  EXPECT_TRUE(ContainsSender(senders, kAudioTracks[1]));
+  EXPECT_TRUE(ContainsSender(senders, kVideoTracks[1]));
+}
+
+// This tests that the expected behavior occurs if the SSRC on a local track is
+// changed when SetLocalDescription is called.
+TEST_F(PeerConnectionInterfaceTest,
+       ChangeSsrcOnTrackInLocalSessionDescription) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  // Create an offer just to ensure we have an identity before we manually
+  // call SetLocalDescription.
+  rtc::scoped_ptr<SessionDescriptionInterface> throwaway;
+  ASSERT_TRUE(DoCreateOffer(throwaway.accept(), nullptr));
+
+  rtc::scoped_ptr<SessionDescriptionInterface> desc;
+  CreateSessionDescriptionAndReference(1, 1, desc.accept());
+  std::string sdp;
+  desc->ToString(&sdp);
+
+  pc_->AddStream(reference_collection_->at(0));
+  EXPECT_TRUE(DoSetLocalDescription(desc.release()));
+  auto senders = pc_->GetSenders();
+  EXPECT_EQ(2u, senders.size());
+  EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
+  EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+
+  // Change the SSRCs of the audio and video tracks.
+  std::string ssrc_org = "a=ssrc:1";
+  std::string ssrc_to = "a=ssrc:97";
+  rtc::replace_substrs(ssrc_org.c_str(), ssrc_org.length(), ssrc_to.c_str(),
+                       ssrc_to.length(), &sdp);
+  ssrc_org = "a=ssrc:2";
+  ssrc_to = "a=ssrc:98";
+  rtc::replace_substrs(ssrc_org.c_str(), ssrc_org.length(), ssrc_to.c_str(),
+                       ssrc_to.length(), &sdp);
+  rtc::scoped_ptr<SessionDescriptionInterface> updated_desc(
+      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, sdp,
+                                       nullptr));
+
+  EXPECT_TRUE(DoSetLocalDescription(updated_desc.release()));
+  senders = pc_->GetSenders();
+  EXPECT_EQ(2u, senders.size());
+  EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
+  EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+  // TODO(deadbeef): Once RtpSenders expose parameters, check that the SSRC
+  // changed.
+}
+
+// This tests that the expected behavior occurs if a new session description is
+// set with the same tracks, but on a different MediaStream.
+TEST_F(PeerConnectionInterfaceTest, SignalSameTracksInSeparateMediaStream) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  // Create an offer just to ensure we have an identity before we manually
+  // call SetLocalDescription.
+  rtc::scoped_ptr<SessionDescriptionInterface> throwaway;
+  ASSERT_TRUE(DoCreateOffer(throwaway.accept(), nullptr));
+
+  rtc::scoped_ptr<SessionDescriptionInterface> desc;
+  CreateSessionDescriptionAndReference(1, 1, desc.accept());
+  std::string sdp;
+  desc->ToString(&sdp);
+
+  pc_->AddStream(reference_collection_->at(0));
+  EXPECT_TRUE(DoSetLocalDescription(desc.release()));
+  auto senders = pc_->GetSenders();
+  EXPECT_EQ(2u, senders.size());
+  EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
+  EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+
+  // Add a new MediaStream but with the same tracks as in the first stream.
+  rtc::scoped_refptr<webrtc::MediaStreamInterface> stream_1(
+      webrtc::MediaStream::Create(kStreams[1]));
+  stream_1->AddTrack(reference_collection_->at(0)->GetVideoTracks()[0]);
+  stream_1->AddTrack(reference_collection_->at(0)->GetAudioTracks()[0]);
+  pc_->AddStream(stream_1);
+
+  // Replace the MSID in the original SDP.
+  rtc::replace_substrs(kStreams[0], strlen(kStreams[0]), kStreams[1],
+                       strlen(kStreams[1]), &sdp);
+
+  rtc::scoped_ptr<SessionDescriptionInterface> updated_desc(
+      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, sdp,
+                                       nullptr));
+
+  EXPECT_TRUE(DoSetLocalDescription(updated_desc.release()));
+  senders = pc_->GetSenders();
+  EXPECT_EQ(2u, senders.size());
+  EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
+  EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+}
+
+// The following tests verify that session options are created correctly.
+// TODO(deadbeef): Convert these tests to be more end-to-end. Instead of
+// "verify options are converted correctly", should be "pass options into
+// CreateOffer and verify the correct offer is produced."
+
+TEST(CreateSessionOptionsTest, GetOptionsForOfferWithInvalidAudioOption) {
+  RTCOfferAnswerOptions rtc_options;
+  rtc_options.offer_to_receive_audio = RTCOfferAnswerOptions::kUndefined - 1;
+
+  cricket::MediaSessionOptions options;
+  EXPECT_FALSE(ConvertRtcOptionsForOffer(rtc_options, &options));
+
+  rtc_options.offer_to_receive_audio =
+      RTCOfferAnswerOptions::kMaxOfferToReceiveMedia + 1;
+  EXPECT_FALSE(ConvertRtcOptionsForOffer(rtc_options, &options));
+}
+
+TEST(CreateSessionOptionsTest, GetOptionsForOfferWithInvalidVideoOption) {
+  RTCOfferAnswerOptions rtc_options;
+  rtc_options.offer_to_receive_video = RTCOfferAnswerOptions::kUndefined - 1;
+
+  cricket::MediaSessionOptions options;
+  EXPECT_FALSE(ConvertRtcOptionsForOffer(rtc_options, &options));
+
+  rtc_options.offer_to_receive_video =
+      RTCOfferAnswerOptions::kMaxOfferToReceiveMedia + 1;
+  EXPECT_FALSE(ConvertRtcOptionsForOffer(rtc_options, &options));
+}
+
+// Test that a MediaSessionOptions is created for an offer if
+// OfferToReceiveAudio and OfferToReceiveVideo options are set.
+TEST(CreateSessionOptionsTest, GetMediaSessionOptionsForOfferWithAudioVideo) {
+  RTCOfferAnswerOptions rtc_options;
+  rtc_options.offer_to_receive_audio = 1;
+  rtc_options.offer_to_receive_video = 1;
+
+  cricket::MediaSessionOptions options;
+  EXPECT_TRUE(ConvertRtcOptionsForOffer(rtc_options, &options));
+  EXPECT_TRUE(options.has_audio());
+  EXPECT_TRUE(options.has_video());
+  EXPECT_TRUE(options.bundle_enabled);
+}
+
+// Test that a correct MediaSessionOptions is created for an offer if
+// OfferToReceiveAudio is set.
+TEST(CreateSessionOptionsTest, GetMediaSessionOptionsForOfferWithAudio) {
+  RTCOfferAnswerOptions rtc_options;
+  rtc_options.offer_to_receive_audio = 1;
+
+  cricket::MediaSessionOptions options;
+  EXPECT_TRUE(ConvertRtcOptionsForOffer(rtc_options, &options));
+  EXPECT_TRUE(options.has_audio());
+  EXPECT_FALSE(options.has_video());
+  EXPECT_TRUE(options.bundle_enabled);
+}
+
+// Test that a correct MediaSessionOptions is created for an offer if
+// the default OfferOptions are used.
+TEST(CreateSessionOptionsTest, GetDefaultMediaSessionOptionsForOffer) {
+  RTCOfferAnswerOptions rtc_options;
+
+  cricket::MediaSessionOptions options;
+  EXPECT_TRUE(ConvertRtcOptionsForOffer(rtc_options, &options));
+  EXPECT_TRUE(options.has_audio());
+  EXPECT_FALSE(options.has_video());
+  EXPECT_TRUE(options.bundle_enabled);
+  EXPECT_TRUE(options.vad_enabled);
+  EXPECT_FALSE(options.audio_transport_options.ice_restart);
+  EXPECT_FALSE(options.video_transport_options.ice_restart);
+  EXPECT_FALSE(options.data_transport_options.ice_restart);
+}
+
+// Test that a correct MediaSessionOptions is created for an offer if
+// OfferToReceiveVideo is set.
+TEST(CreateSessionOptionsTest, GetMediaSessionOptionsForOfferWithVideo) {
+  RTCOfferAnswerOptions rtc_options;
+  rtc_options.offer_to_receive_audio = 0;
+  rtc_options.offer_to_receive_video = 1;
+
+  cricket::MediaSessionOptions options;
+  EXPECT_TRUE(ConvertRtcOptionsForOffer(rtc_options, &options));
+  EXPECT_FALSE(options.has_audio());
+  EXPECT_TRUE(options.has_video());
+  EXPECT_TRUE(options.bundle_enabled);
+}
+
+// Test that a correct MediaSessionOptions is created for an offer if
+// UseRtpMux is set to false.
+TEST(CreateSessionOptionsTest,
+     GetMediaSessionOptionsForOfferWithBundleDisabled) {
+  RTCOfferAnswerOptions rtc_options;
+  rtc_options.offer_to_receive_audio = 1;
+  rtc_options.offer_to_receive_video = 1;
+  rtc_options.use_rtp_mux = false;
+
+  cricket::MediaSessionOptions options;
+  EXPECT_TRUE(ConvertRtcOptionsForOffer(rtc_options, &options));
+  EXPECT_TRUE(options.has_audio());
+  EXPECT_TRUE(options.has_video());
+  EXPECT_FALSE(options.bundle_enabled);
+}
+
+// Test that a correct MediaSessionOptions is created to restart ICE if
+// IceRestart is set. It also tests that subsequent MediaSessionOptions don't
+// have |audio_transport_options.ice_restart| etc. set.
+TEST(CreateSessionOptionsTest, GetMediaSessionOptionsForOfferWithIceRestart) {
+  RTCOfferAnswerOptions rtc_options;
+  rtc_options.ice_restart = true;
+
+  cricket::MediaSessionOptions options;
+  EXPECT_TRUE(ConvertRtcOptionsForOffer(rtc_options, &options));
+  EXPECT_TRUE(options.audio_transport_options.ice_restart);
+  EXPECT_TRUE(options.video_transport_options.ice_restart);
+  EXPECT_TRUE(options.data_transport_options.ice_restart);
+
+  rtc_options = RTCOfferAnswerOptions();
+  EXPECT_TRUE(ConvertRtcOptionsForOffer(rtc_options, &options));
+  EXPECT_FALSE(options.audio_transport_options.ice_restart);
+  EXPECT_FALSE(options.video_transport_options.ice_restart);
+  EXPECT_FALSE(options.data_transport_options.ice_restart);
+}
+
+// Test that the MediaConstraints in an answer don't affect whether audio and
+// video are offered in an offer, but that if the kOfferToReceiveAudio or
+// kOfferToReceiveVideo constraints are true in an offer, the media type will
+// be included in subsequent answers.
+TEST(CreateSessionOptionsTest, MediaConstraintsInAnswer) {
+  FakeConstraints answer_c;
+  answer_c.SetMandatoryReceiveAudio(true);
+  answer_c.SetMandatoryReceiveVideo(true);
+
+  cricket::MediaSessionOptions answer_options;
+  EXPECT_TRUE(ParseConstraintsForAnswer(&answer_c, &answer_options));
+  EXPECT_TRUE(answer_options.has_audio());
+  EXPECT_TRUE(answer_options.has_video());
+
+  RTCOfferAnswerOptions rtc_offer_options;
+
+  cricket::MediaSessionOptions offer_options;
+  EXPECT_TRUE(ConvertRtcOptionsForOffer(rtc_offer_options, &offer_options));
+  EXPECT_TRUE(offer_options.has_audio());
+  EXPECT_FALSE(offer_options.has_video());
+
+  RTCOfferAnswerOptions updated_rtc_offer_options;
+  updated_rtc_offer_options.offer_to_receive_audio = 1;
+  updated_rtc_offer_options.offer_to_receive_video = 1;
+
+  cricket::MediaSessionOptions updated_offer_options;
+  EXPECT_TRUE(ConvertRtcOptionsForOffer(updated_rtc_offer_options,
+                                        &updated_offer_options));
+  EXPECT_TRUE(updated_offer_options.has_audio());
+  EXPECT_TRUE(updated_offer_options.has_video());
+
+  // Since an offer has been created with both audio and video, subsequent
+  // offers and answers should contain both audio and video.
+  // Answers will only contain the media types that exist in the offer
+  // regardless of the value of |updated_answer_options.has_audio| and
+  // |updated_answer_options.has_video|.
+  FakeConstraints updated_answer_c;
+  updated_answer_c.SetMandatoryReceiveAudio(false);
+  updated_answer_c.SetMandatoryReceiveVideo(false);
+
+  cricket::MediaSessionOptions updated_answer_options;
+  EXPECT_TRUE(
+      ParseConstraintsForAnswer(&updated_answer_c, &updated_answer_options));
+  EXPECT_TRUE(updated_answer_options.has_audio());
+  EXPECT_TRUE(updated_answer_options.has_video());
+}
diff --git a/webrtc/api/peerconnectionproxy.h b/webrtc/api/peerconnectionproxy.h
new file mode 100644
index 0000000..9faf014
--- /dev/null
+++ b/webrtc/api/peerconnectionproxy.h
@@ -0,0 +1,88 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_PEERCONNECTIONPROXY_H_
+#define WEBRTC_API_PEERCONNECTIONPROXY_H_
+
+#include "webrtc/api/peerconnectioninterface.h"
+#include "webrtc/api/proxy.h"
+
+namespace webrtc {
+
+// Define proxy for PeerConnectionInterface.
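+// It is created with the macro-generated PeerConnectionProxy::Create(thread,
+// pc) and marshals every call below to |thread| before forwarding it to the
+// wrapped PeerConnection.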
+BEGIN_PROXY_MAP(PeerConnection)
+  PROXY_METHOD0(rtc::scoped_refptr<StreamCollectionInterface>,
+                local_streams)
+  PROXY_METHOD0(rtc::scoped_refptr<StreamCollectionInterface>,
+                remote_streams)
+  PROXY_METHOD1(bool, AddStream, MediaStreamInterface*)
+  PROXY_METHOD1(void, RemoveStream, MediaStreamInterface*)
+  PROXY_METHOD2(rtc::scoped_refptr<RtpSenderInterface>,
+                AddTrack,
+                MediaStreamTrackInterface*,
+                std::vector<MediaStreamInterface*>)
+  PROXY_METHOD1(bool, RemoveTrack, RtpSenderInterface*)
+  PROXY_METHOD1(rtc::scoped_refptr<DtmfSenderInterface>,
+                CreateDtmfSender, AudioTrackInterface*)
+  PROXY_METHOD2(rtc::scoped_refptr<RtpSenderInterface>,
+                CreateSender,
+                const std::string&,
+                const std::string&)
+  PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<RtpSenderInterface>>,
+                     GetSenders)
+  PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<RtpReceiverInterface>>,
+                     GetReceivers)
+  PROXY_METHOD3(bool, GetStats, StatsObserver*,
+                MediaStreamTrackInterface*,
+                StatsOutputLevel)
+  PROXY_METHOD2(rtc::scoped_refptr<DataChannelInterface>,
+                CreateDataChannel, const std::string&, const DataChannelInit*)
+  PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, local_description)
+  PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, remote_description)
+  PROXY_METHOD2(void, CreateOffer, CreateSessionDescriptionObserver*,
+                const MediaConstraintsInterface*)
+  PROXY_METHOD2(void, CreateAnswer, CreateSessionDescriptionObserver*,
+                const MediaConstraintsInterface*)
+  PROXY_METHOD2(void, SetLocalDescription, SetSessionDescriptionObserver*,
+                SessionDescriptionInterface*)
+  PROXY_METHOD2(void, SetRemoteDescription, SetSessionDescriptionObserver*,
+                SessionDescriptionInterface*)
+  PROXY_METHOD1(bool,
+                SetConfiguration,
+                const PeerConnectionInterface::RTCConfiguration&)
+  PROXY_METHOD1(bool, AddIceCandidate, const IceCandidateInterface*)
+  PROXY_METHOD1(void, RegisterUMAObserver, UMAObserver*)
+  PROXY_METHOD0(SignalingState, signaling_state)
+  PROXY_METHOD0(IceState, ice_state)
+  PROXY_METHOD0(IceConnectionState, ice_connection_state)
+  PROXY_METHOD0(IceGatheringState, ice_gathering_state)
+  PROXY_METHOD0(void, Close)
+END_PROXY()
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_PEERCONNECTIONPROXY_H_
diff --git a/webrtc/api/portallocatorfactory.cc b/webrtc/api/portallocatorfactory.cc
new file mode 100644
index 0000000..a5a98b0
--- /dev/null
+++ b/webrtc/api/portallocatorfactory.cc
@@ -0,0 +1,30 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+// TODO(deadbeef): Remove this file once chromium build files no longer
+// reference it.
+
+#include "webrtc/api/portallocatorfactory.h"
diff --git a/webrtc/api/portallocatorfactory.h b/webrtc/api/portallocatorfactory.h
new file mode 100644
index 0000000..bce7131
--- /dev/null
+++ b/webrtc/api/portallocatorfactory.h
@@ -0,0 +1,33 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+// TODO(deadbeef): Remove this file once chromium build files no longer
+// reference it.
+
+#ifndef WEBRTC_API_PORTALLOCATORFACTORY_H_
+#define WEBRTC_API_PORTALLOCATORFACTORY_H_
+
+#endif  // WEBRTC_API_PORTALLOCATORFACTORY_H_
diff --git a/webrtc/api/proxy.h b/webrtc/api/proxy.h
new file mode 100644
index 0000000..384e189
--- /dev/null
+++ b/webrtc/api/proxy.h
@@ -0,0 +1,391 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains macros for creating proxies for webrtc MediaStream and
+// PeerConnection classes.
+
+//
+// Example usage:
+//
+// class TestInterface : public rtc::RefCountInterface {
+//  public:
+//   virtual std::string FooA() = 0;
+//   virtual std::string FooB(bool arg1) const = 0;
+//   virtual std::string FooC(bool arg1) = 0;
+//  };
+//
+// Note that return types cannot be const references.
+//
+// class Test : public TestInterface {
+// ... implementation of the interface.
+// };
+//
+// BEGIN_PROXY_MAP(Test)
+//   PROXY_METHOD0(std::string, FooA)
+//   PROXY_CONSTMETHOD1(std::string, FooB, bool)
+//   PROXY_METHOD1(std::string, FooC, bool)
+// END_PROXY()
+//
+// The proxy can be created using TestProxy::Create(Thread*, TestInterface*).
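+//
+// A minimal usage sketch (the thread setup here is illustrative only):
+//
+//   rtc::Thread* signaling_thread = ...;  // thread that owns the object
+//   rtc::scoped_refptr<TestInterface> test(
+//       new rtc::RefCountedObject<Test>());
+//   rtc::scoped_refptr<TestInterface> proxy =
+//       TestProxy::Create(signaling_thread, test.get());
+//   proxy->FooA();  // Marshalled to |signaling_thread| before invoking Test.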
+
+#ifndef WEBRTC_API_PROXY_H_
+#define WEBRTC_API_PROXY_H_
+
+#include "webrtc/base/event.h"
+#include "webrtc/base/thread.h"
+
+namespace webrtc {
+
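+// Captures the return value of a member-function invocation so it can be
+// handed back to the calling thread; the void specialization below simply
+// performs the call and returns nothing.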
+template <typename R>
+class ReturnType {
+ public:
+  template<typename C, typename M>
+  void Invoke(C* c, M m) { r_ = (c->*m)(); }
+  template<typename C, typename M, typename T1>
+  void Invoke(C* c, M m, T1 a1) { r_ = (c->*m)(a1); }
+  template<typename C, typename M, typename T1, typename T2>
+  void Invoke(C* c, M m, T1 a1, T2 a2) { r_ = (c->*m)(a1, a2); }
+  template<typename C, typename M, typename T1, typename T2, typename T3>
+  void Invoke(C* c, M m, T1 a1, T2 a2, T3 a3) { r_ = (c->*m)(a1, a2, a3); }
+  template<typename C, typename M, typename T1, typename T2, typename T3,
+      typename T4>
+  void Invoke(C* c, M m, T1 a1, T2 a2, T3 a3, T4 a4) {
+    r_ = (c->*m)(a1, a2, a3, a4);
+  }
+  template<typename C, typename M, typename T1, typename T2, typename T3,
+      typename T4, typename T5>
+  void Invoke(C* c, M m, T1 a1, T2 a2, T3 a3, T4 a4, T5 a5) {
+    r_ = (c->*m)(a1, a2, a3, a4, a5);
+  }
+
+  R value() { return r_; }
+
+ private:
+  R r_;
+};
+
+template <>
+class ReturnType<void> {
+ public:
+  template<typename C, typename M>
+  void Invoke(C* c, M m) { (c->*m)(); }
+  template<typename C, typename M, typename T1>
+  void Invoke(C* c, M m, T1 a1) { (c->*m)(a1); }
+  template<typename C, typename M, typename T1, typename T2>
+  void Invoke(C* c, M m, T1 a1, T2 a2) { (c->*m)(a1, a2); }
+  template<typename C, typename M, typename T1, typename T2, typename T3>
+  void Invoke(C* c, M m, T1 a1, T2 a2, T3 a3) { (c->*m)(a1, a2, a3); }
+
+  void value() {}
+};
+
+namespace internal {
+
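+// Performs a blocking, synchronous method call: if invoked on the target
+// thread the handler runs inline, otherwise the call is posted to |t| and
+// the calling thread waits on an event until the handler has run.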
+class SynchronousMethodCall
+    : public rtc::MessageData,
+      public rtc::MessageHandler {
+ public:
+  explicit SynchronousMethodCall(rtc::MessageHandler* proxy)
+      : e_(), proxy_(proxy) {}
+  ~SynchronousMethodCall() {}
+
+  void Invoke(rtc::Thread* t) {
+    if (t->IsCurrent()) {
+      proxy_->OnMessage(NULL);
+    } else {
+      e_.reset(new rtc::Event(false, false));
+      t->Post(this, 0);
+      e_->Wait(rtc::Event::kForever);
+    }
+  }
+
+ private:
+  void OnMessage(rtc::Message*) { proxy_->OnMessage(NULL); e_->Set(); }
+  rtc::scoped_ptr<rtc::Event> e_;
+  rtc::MessageHandler* proxy_;
+};
+
+}  // namespace internal
+
+template <typename C, typename R>
+class MethodCall0 : public rtc::Message,
+                    public rtc::MessageHandler {
+ public:
+  typedef R (C::*Method)();
+  MethodCall0(C* c, Method m) : c_(c), m_(m) {}
+
+  R Marshal(rtc::Thread* t) {
+    internal::SynchronousMethodCall(this).Invoke(t);
+    return r_.value();
+  }
+
+ private:
+  void OnMessage(rtc::Message*) { r_.Invoke(c_, m_); }
+
+  C* c_;
+  Method m_;
+  ReturnType<R> r_;
+};
+
+template <typename C, typename R>
+class ConstMethodCall0 : public rtc::Message,
+                         public rtc::MessageHandler {
+ public:
+  typedef R (C::*Method)() const;
+  ConstMethodCall0(C* c, Method m) : c_(c), m_(m) {}
+
+  R Marshal(rtc::Thread* t) {
+    internal::SynchronousMethodCall(this).Invoke(t);
+    return r_.value();
+  }
+
+ private:
+  void OnMessage(rtc::Message*) { r_.Invoke(c_, m_); }
+
+  C* c_;
+  Method m_;
+  ReturnType<R> r_;
+};
+
+template <typename C, typename R, typename T1>
+class MethodCall1 : public rtc::Message,
+                    public rtc::MessageHandler {
+ public:
+  typedef R (C::*Method)(T1 a1);
+  MethodCall1(C* c, Method m, T1 a1) : c_(c), m_(m), a1_(a1) {}
+
+  R Marshal(rtc::Thread* t) {
+    internal::SynchronousMethodCall(this).Invoke(t);
+    return r_.value();
+  }
+
+ private:
+  void OnMessage(rtc::Message*) { r_.Invoke(c_, m_, a1_); }
+
+  C* c_;
+  Method m_;
+  ReturnType<R> r_;
+  T1 a1_;
+};
+
+template <typename C, typename R, typename T1>
+class ConstMethodCall1 : public rtc::Message,
+                         public rtc::MessageHandler {
+ public:
+  typedef R (C::*Method)(T1 a1) const;
+  ConstMethodCall1(C* c, Method m, T1 a1) : c_(c), m_(m), a1_(a1) {}
+
+  R Marshal(rtc::Thread* t) {
+    internal::SynchronousMethodCall(this).Invoke(t);
+    return r_.value();
+  }
+
+ private:
+  void OnMessage(rtc::Message*) { r_.Invoke(c_, m_, a1_); }
+
+  C* c_;
+  Method m_;
+  ReturnType<R> r_;
+  T1 a1_;
+};
+
+template <typename C, typename R, typename T1, typename T2>
+class MethodCall2 : public rtc::Message,
+                    public rtc::MessageHandler {
+ public:
+  typedef R (C::*Method)(T1 a1, T2 a2);
+  MethodCall2(C* c, Method m, T1 a1, T2 a2) : c_(c), m_(m), a1_(a1), a2_(a2) {}
+
+  R Marshal(rtc::Thread* t) {
+    internal::SynchronousMethodCall(this).Invoke(t);
+    return r_.value();
+  }
+
+ private:
+  void OnMessage(rtc::Message*) { r_.Invoke(c_, m_, a1_, a2_); }
+
+  C* c_;
+  Method m_;
+  ReturnType<R> r_;
+  T1 a1_;
+  T2 a2_;
+};
+
+template <typename C, typename R, typename T1, typename T2, typename T3>
+class MethodCall3 : public rtc::Message,
+                    public rtc::MessageHandler {
+ public:
+  typedef R (C::*Method)(T1 a1, T2 a2, T3 a3);
+  MethodCall3(C* c, Method m, T1 a1, T2 a2, T3 a3)
+      : c_(c), m_(m), a1_(a1), a2_(a2), a3_(a3) {}
+
+  R Marshal(rtc::Thread* t) {
+    internal::SynchronousMethodCall(this).Invoke(t);
+    return r_.value();
+  }
+
+ private:
+  void OnMessage(rtc::Message*) { r_.Invoke(c_, m_, a1_, a2_, a3_); }
+
+  C* c_;
+  Method m_;
+  ReturnType<R> r_;
+  T1 a1_;
+  T2 a2_;
+  T3 a3_;
+};
+
+template <typename C, typename R, typename T1, typename T2, typename T3,
+    typename T4>
+class MethodCall4 : public rtc::Message,
+                    public rtc::MessageHandler {
+ public:
+  typedef R (C::*Method)(T1 a1, T2 a2, T3 a3, T4 a4);
+  MethodCall4(C* c, Method m, T1 a1, T2 a2, T3 a3, T4 a4)
+      : c_(c), m_(m), a1_(a1), a2_(a2), a3_(a3), a4_(a4) {}
+
+  R Marshal(rtc::Thread* t) {
+    internal::SynchronousMethodCall(this).Invoke(t);
+    return r_.value();
+  }
+
+ private:
+  void OnMessage(rtc::Message*) { r_.Invoke(c_, m_, a1_, a2_, a3_, a4_); }
+
+  C* c_;
+  Method m_;
+  ReturnType<R> r_;
+  T1 a1_;
+  T2 a2_;
+  T3 a3_;
+  T4 a4_;
+};
+
+template <typename C, typename R, typename T1, typename T2, typename T3,
+    typename T4, typename T5>
+class MethodCall5 : public rtc::Message,
+                    public rtc::MessageHandler {
+ public:
+  typedef R (C::*Method)(T1 a1, T2 a2, T3 a3, T4 a4, T5 a5);
+  MethodCall5(C* c, Method m, T1 a1, T2 a2, T3 a3, T4 a4, T5 a5)
+      : c_(c), m_(m), a1_(a1), a2_(a2), a3_(a3), a4_(a4), a5_(a5) {}
+
+  R Marshal(rtc::Thread* t) {
+    internal::SynchronousMethodCall(this).Invoke(t);
+    return r_.value();
+  }
+
+ private:
+  void OnMessage(rtc::Message*) { r_.Invoke(c_, m_, a1_, a2_, a3_, a4_, a5_); }
+
+  C* c_;
+  Method m_;
+  ReturnType<R> r_;
+  T1 a1_;
+  T2 a2_;
+  T3 a3_;
+  T4 a4_;
+  T5 a5_;
+};
+
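+// The generated proxy holds a reference to the wrapped object and, in its
+// destructor, marshals Release_s() to |owner_thread_| so that the last
+// reference is always dropped on the thread that owns the object.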
+#define BEGIN_PROXY_MAP(c)                                                \
+  class c##Proxy : public c##Interface {                                  \
+   protected:                                                             \
+    typedef c##Interface C;                                               \
+    c##Proxy(rtc::Thread* thread, C* c) : owner_thread_(thread), c_(c) {} \
+    ~c##Proxy() {                                                         \
+      MethodCall0<c##Proxy, void> call(this, &c##Proxy::Release_s);       \
+      call.Marshal(owner_thread_);                                        \
+    }                                                                     \
+                                                                          \
+   public:                                                                \
+    static rtc::scoped_refptr<C> Create(rtc::Thread* thread, C* c) {      \
+      return new rtc::RefCountedObject<c##Proxy>(thread, c);              \
+    }
+
+#define PROXY_METHOD0(r, method)                  \
+  r method() override {                           \
+    MethodCall0<C, r> call(c_.get(), &C::method); \
+    return call.Marshal(owner_thread_);           \
+  }
+
+#define PROXY_CONSTMETHOD0(r, method)                  \
+  r method() const override {                          \
+    ConstMethodCall0<C, r> call(c_.get(), &C::method); \
+    return call.Marshal(owner_thread_);                \
+  }
+
+#define PROXY_METHOD1(r, method, t1)                      \
+  r method(t1 a1) override {                              \
+    MethodCall1<C, r, t1> call(c_.get(), &C::method, a1); \
+    return call.Marshal(owner_thread_);                   \
+  }
+
+#define PROXY_CONSTMETHOD1(r, method, t1)                      \
+  r method(t1 a1) const override {                             \
+    ConstMethodCall1<C, r, t1> call(c_.get(), &C::method, a1); \
+    return call.Marshal(owner_thread_);                        \
+  }
+
+#define PROXY_METHOD2(r, method, t1, t2)                          \
+  r method(t1 a1, t2 a2) override {                               \
+    MethodCall2<C, r, t1, t2> call(c_.get(), &C::method, a1, a2); \
+    return call.Marshal(owner_thread_);                           \
+  }
+
+#define PROXY_METHOD3(r, method, t1, t2, t3)                              \
+  r method(t1 a1, t2 a2, t3 a3) override {                                \
+    MethodCall3<C, r, t1, t2, t3> call(c_.get(), &C::method, a1, a2, a3); \
+    return call.Marshal(owner_thread_);                                   \
+  }
+
+#define PROXY_METHOD4(r, method, t1, t2, t3, t4)                             \
+  r method(t1 a1, t2 a2, t3 a3, t4 a4) override {                            \
+    MethodCall4<C, r, t1, t2, t3, t4> call(c_.get(), &C::method, a1, a2, a3, \
+                                           a4);                              \
+    return call.Marshal(owner_thread_);                                      \
+  }
+
+#define PROXY_METHOD5(r, method, t1, t2, t3, t4, t5)                         \
+  r method(t1 a1, t2 a2, t3 a3, t4 a4, t5 a5) override {                     \
+    MethodCall5<C, r, t1, t2, t3, t4, t5> call(c_.get(), &C::method, a1, a2, \
+                                               a3, a4, a5);                  \
+    return call.Marshal(owner_thread_);                                      \
+  }
+
+#define END_PROXY() \
+   private:\
+    void Release_s() {\
+      c_ = NULL;\
+    }\
+    mutable rtc::Thread* owner_thread_;\
+    rtc::scoped_refptr<C> c_;\
+  };\
+
+}  // namespace webrtc
+
+#endif  //  WEBRTC_API_PROXY_H_
diff --git a/webrtc/api/proxy_unittest.cc b/webrtc/api/proxy_unittest.cc
new file mode 100644
index 0000000..8fa7363
--- /dev/null
+++ b/webrtc/api/proxy_unittest.cc
@@ -0,0 +1,170 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/proxy.h"
+
+#include <string>
+
+#include "testing/base/public/gmock.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/thread.h"
+
+using ::testing::_;
+using ::testing::DoAll;
+using ::testing::Exactly;
+using ::testing::InvokeWithoutArgs;
+using ::testing::Return;
+
+namespace webrtc {
+
+// Interface used for testing here.
+class FakeInterface : public rtc::RefCountInterface {
+ public:
+  virtual void VoidMethod0() = 0;
+  virtual std::string Method0() = 0;
+  virtual std::string ConstMethod0() const = 0;
+  virtual std::string Method1(std::string s) = 0;
+  virtual std::string ConstMethod1(std::string s) const = 0;
+  virtual std::string Method2(std::string s1, std::string s2) = 0;
+
+ protected:
+  ~FakeInterface() {}
+};
+
+// Proxy for the test interface.
+BEGIN_PROXY_MAP(Fake)
+  PROXY_METHOD0(void, VoidMethod0)
+  PROXY_METHOD0(std::string, Method0)
+  PROXY_CONSTMETHOD0(std::string, ConstMethod0)
+  PROXY_METHOD1(std::string, Method1, std::string)
+  PROXY_CONSTMETHOD1(std::string, ConstMethod1, std::string)
+  PROXY_METHOD2(std::string, Method2, std::string, std::string)
+END_PROXY()
+
+// Implementation of the test interface.
+class Fake : public FakeInterface {
+ public:
+  static rtc::scoped_refptr<Fake> Create() {
+    return new rtc::RefCountedObject<Fake>();
+  }
+
+  MOCK_METHOD0(VoidMethod0, void());
+  MOCK_METHOD0(Method0, std::string());
+  MOCK_CONST_METHOD0(ConstMethod0, std::string());
+
+  MOCK_METHOD1(Method1, std::string(std::string));
+  MOCK_CONST_METHOD1(ConstMethod1, std::string(std::string));
+
+  MOCK_METHOD2(Method2, std::string(std::string, std::string));
+
+ protected:
+  Fake() {}
+  ~Fake() {}
+};
+
+class ProxyTest : public testing::Test {
+ public:
+  // Checks that the function is called on the |signaling_thread_|.
+  void CheckThread() {
+    EXPECT_EQ(rtc::Thread::Current(), signaling_thread_.get());
+  }
+
+ protected:
+  virtual void SetUp() {
+    signaling_thread_.reset(new rtc::Thread());
+    ASSERT_TRUE(signaling_thread_->Start());
+    fake_ = Fake::Create();
+    fake_proxy_ = FakeProxy::Create(signaling_thread_.get(), fake_.get());
+  }
+
+  rtc::scoped_ptr<rtc::Thread> signaling_thread_;
+  rtc::scoped_refptr<FakeInterface> fake_proxy_;
+  rtc::scoped_refptr<Fake> fake_;
+};
+
+TEST_F(ProxyTest, VoidMethod0) {
+  EXPECT_CALL(*fake_, VoidMethod0())
+            .Times(Exactly(1))
+            .WillOnce(InvokeWithoutArgs(this, &ProxyTest::CheckThread));
+  fake_proxy_->VoidMethod0();
+}
+
+TEST_F(ProxyTest, Method0) {
+  EXPECT_CALL(*fake_, Method0())
+            .Times(Exactly(1))
+            .WillOnce(
+                DoAll(InvokeWithoutArgs(this, &ProxyTest::CheckThread),
+                      Return("Method0")));
+  EXPECT_EQ("Method0",
+            fake_proxy_->Method0());
+}
+
+TEST_F(ProxyTest, ConstMethod0) {
+  EXPECT_CALL(*fake_, ConstMethod0())
+            .Times(Exactly(1))
+            .WillOnce(
+                DoAll(InvokeWithoutArgs(this, &ProxyTest::CheckThread),
+                      Return("ConstMethod0")));
+  EXPECT_EQ("ConstMethod0",
+            fake_proxy_->ConstMethod0());
+}
+
+TEST_F(ProxyTest, Method1) {
+  const std::string arg1 = "arg1";
+  EXPECT_CALL(*fake_, Method1(arg1))
+            .Times(Exactly(1))
+            .WillOnce(
+                DoAll(InvokeWithoutArgs(this, &ProxyTest::CheckThread),
+                      Return("Method1")));
+  EXPECT_EQ("Method1", fake_proxy_->Method1(arg1));
+}
+
+TEST_F(ProxyTest, ConstMethod1) {
+  const std::string arg1 = "arg1";
+  EXPECT_CALL(*fake_, ConstMethod1(arg1))
+            .Times(Exactly(1))
+            .WillOnce(
+                DoAll(InvokeWithoutArgs(this, &ProxyTest::CheckThread),
+                      Return("ConstMethod1")));
+  EXPECT_EQ("ConstMethod1", fake_proxy_->ConstMethod1(arg1));
+}
+
+TEST_F(ProxyTest, Method2) {
+  const std::string arg1 = "arg1";
+  const std::string arg2 = "arg2";
+  EXPECT_CALL(*fake_, Method2(arg1, arg2))
+            .Times(Exactly(1))
+            .WillOnce(
+                DoAll(InvokeWithoutArgs(this, &ProxyTest::CheckThread),
+                      Return("Method2")));
+  EXPECT_EQ("Method2", fake_proxy_->Method2(arg1, arg2));
+}
+
+}  // namespace webrtc
diff --git a/webrtc/api/remoteaudiosource.cc b/webrtc/api/remoteaudiosource.cc
new file mode 100644
index 0000000..9a0900d
--- /dev/null
+++ b/webrtc/api/remoteaudiosource.cc
@@ -0,0 +1,176 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/remoteaudiosource.h"
+
+#include <algorithm>
+#include <functional>
+#include <utility>
+
+#include "webrtc/api/mediastreamprovider.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/thread.h"
+
+namespace webrtc {
+
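+// One-shot message handler posted to the main thread; it keeps the source
+// alive through a scoped_refptr and deletes itself once the message has
+// been delivered.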
+class RemoteAudioSource::MessageHandler : public rtc::MessageHandler {
+ public:
+  explicit MessageHandler(RemoteAudioSource* source) : source_(source) {}
+
+ private:
+  ~MessageHandler() override {}
+
+  void OnMessage(rtc::Message* msg) override {
+    source_->OnMessage(msg);
+    delete this;
+  }
+
+  const rtc::scoped_refptr<RemoteAudioSource> source_;
+  RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(MessageHandler);
+};
+
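+// Audio sink handed to the provider. The provider owns it, so its
+// destruction doubles as the signal that the audio provider has gone away.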
+class RemoteAudioSource::Sink : public AudioSinkInterface {
+ public:
+  explicit Sink(RemoteAudioSource* source) : source_(source) {}
+  ~Sink() override { source_->OnAudioProviderGone(); }
+
+ private:
+  void OnData(const AudioSinkInterface::Data& audio) override {
+    if (source_)
+      source_->OnData(audio);
+  }
+
+  const rtc::scoped_refptr<RemoteAudioSource> source_;
+  RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(Sink);
+};
+
+rtc::scoped_refptr<RemoteAudioSource> RemoteAudioSource::Create(
+    uint32_t ssrc,
+    AudioProviderInterface* provider) {
+  rtc::scoped_refptr<RemoteAudioSource> ret(
+      new rtc::RefCountedObject<RemoteAudioSource>());
+  ret->Initialize(ssrc, provider);
+  return ret;
+}
+
+RemoteAudioSource::RemoteAudioSource()
+    : main_thread_(rtc::Thread::Current()),
+      state_(MediaSourceInterface::kLive) {
+  RTC_DCHECK(main_thread_);
+}
+
+RemoteAudioSource::~RemoteAudioSource() {
+  RTC_DCHECK(main_thread_->IsCurrent());
+  RTC_DCHECK(audio_observers_.empty());
+  RTC_DCHECK(sinks_.empty());
+}
+
+void RemoteAudioSource::Initialize(uint32_t ssrc,
+                                   AudioProviderInterface* provider) {
+  RTC_DCHECK(main_thread_->IsCurrent());
+  // To make sure we always get notified when the provider goes out of scope,
+  // we register for callbacks here and not on demand in AddSink.
+  if (provider) {  // May be null in tests.
+    provider->SetRawAudioSink(
+        ssrc, rtc::scoped_ptr<AudioSinkInterface>(new Sink(this)));
+  }
+}
+
+MediaSourceInterface::SourceState RemoteAudioSource::state() const {
+  RTC_DCHECK(main_thread_->IsCurrent());
+  return state_;
+}
+
+bool RemoteAudioSource::remote() const {
+  RTC_DCHECK(main_thread_->IsCurrent());
+  return true;
+}
+
+void RemoteAudioSource::SetVolume(double volume) {
+  RTC_DCHECK(volume >= 0 && volume <= 10);
+  for (auto* observer : audio_observers_)
+    observer->OnSetVolume(volume);
+}
+
+void RemoteAudioSource::RegisterAudioObserver(AudioObserver* observer) {
+  RTC_DCHECK(observer != NULL);
+  RTC_DCHECK(std::find(audio_observers_.begin(), audio_observers_.end(),
+                       observer) == audio_observers_.end());
+  audio_observers_.push_back(observer);
+}
+
+void RemoteAudioSource::UnregisterAudioObserver(AudioObserver* observer) {
+  RTC_DCHECK(observer != NULL);
+  audio_observers_.remove(observer);
+}
+
+void RemoteAudioSource::AddSink(AudioTrackSinkInterface* sink) {
+  RTC_DCHECK(main_thread_->IsCurrent());
+  RTC_DCHECK(sink);
+
+  if (state_ != MediaSourceInterface::kLive) {
+    LOG(LS_ERROR) << "Can't register sink as the source isn't live.";
+    return;
+  }
+
+  rtc::CritScope lock(&sink_lock_);
+  RTC_DCHECK(std::find(sinks_.begin(), sinks_.end(), sink) == sinks_.end());
+  sinks_.push_back(sink);
+}
+
+void RemoteAudioSource::RemoveSink(AudioTrackSinkInterface* sink) {
+  RTC_DCHECK(main_thread_->IsCurrent());
+  RTC_DCHECK(sink);
+
+  rtc::CritScope lock(&sink_lock_);
+  sinks_.remove(sink);
+}
+
+void RemoteAudioSource::OnData(const AudioSinkInterface::Data& audio) {
+  // Called on the externally-owned audio callback thread, from within WebRTC.
+  rtc::CritScope lock(&sink_lock_);
+  for (auto* sink : sinks_) {
+    sink->OnData(audio.data, 16, audio.sample_rate, audio.channels,
+                 audio.samples_per_channel);
+  }
+}
+
+void RemoteAudioSource::OnAudioProviderGone() {
+  // Called when the data provider is deleted. This may happen on libjingle's
+  // worker thread or on a different worker thread.
+  main_thread_->Post(new MessageHandler(this));
+}
+
+void RemoteAudioSource::OnMessage(rtc::Message* msg) {
+  RTC_DCHECK(main_thread_->IsCurrent());
+  sinks_.clear();
+  state_ = MediaSourceInterface::kEnded;
+  FireOnChanged();
+}
+
+}  // namespace webrtc
diff --git a/webrtc/api/remoteaudiosource.h b/webrtc/api/remoteaudiosource.h
new file mode 100644
index 0000000..a46b130
--- /dev/null
+++ b/webrtc/api/remoteaudiosource.h
@@ -0,0 +1,96 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_REMOTEAUDIOSOURCE_H_
+#define WEBRTC_API_REMOTEAUDIOSOURCE_H_
+
+#include <list>
+#include <string>
+
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/api/notifier.h"
+#include "webrtc/audio/audio_sink.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/media/base/audiorenderer.h"
+
+namespace rtc {
+struct Message;
+class Thread;
+}  // namespace rtc
+
+namespace webrtc {
+
+class AudioProviderInterface;
+
+// This class implements the audio source used by the remote audio track.
+class RemoteAudioSource : public Notifier<AudioSourceInterface> {
+ public:
+  // Creates an instance of RemoteAudioSource.
+  static rtc::scoped_refptr<RemoteAudioSource> Create(
+      uint32_t ssrc,
+      AudioProviderInterface* provider);
+
+  // MediaSourceInterface implementation.
+  MediaSourceInterface::SourceState state() const override;
+  bool remote() const override;
+
+  void AddSink(AudioTrackSinkInterface* sink) override;
+  void RemoveSink(AudioTrackSinkInterface* sink) override;
+
+ protected:
+  RemoteAudioSource();
+  ~RemoteAudioSource() override;
+
+  // Post-construction initialization, where we can do things like save a
+  // reference to ourselves (we need to be fully constructed first).
+  void Initialize(uint32_t ssrc, AudioProviderInterface* provider);
+
+ private:
+  typedef std::list<AudioObserver*> AudioObserverList;
+
+  // AudioSourceInterface implementation.
+  void SetVolume(double volume) override;
+  void RegisterAudioObserver(AudioObserver* observer) override;
+  void UnregisterAudioObserver(AudioObserver* observer) override;
+
+  class Sink;
+  void OnData(const AudioSinkInterface::Data& audio);
+  void OnAudioProviderGone();
+
+  class MessageHandler;
+  void OnMessage(rtc::Message* msg);
+
+  AudioObserverList audio_observers_;
+  rtc::CriticalSection sink_lock_;
+  std::list<AudioTrackSinkInterface*> sinks_;
+  rtc::Thread* const main_thread_;
+  SourceState state_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_REMOTEAUDIOSOURCE_H_
diff --git a/webrtc/api/remoteaudiotrack.cc b/webrtc/api/remoteaudiotrack.cc
new file mode 100644
index 0000000..5f0b23e
--- /dev/null
+++ b/webrtc/api/remoteaudiotrack.cc
@@ -0,0 +1,28 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// TODO(tommi): Delete this file when removed from build files in Chromium.
diff --git a/webrtc/api/remoteaudiotrack.h b/webrtc/api/remoteaudiotrack.h
new file mode 100644
index 0000000..5f0b23e
--- /dev/null
+++ b/webrtc/api/remoteaudiotrack.h
@@ -0,0 +1,28 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// TODO(tommi): Delete this file when removed from build files in Chromium.
diff --git a/webrtc/api/remotevideocapturer.cc b/webrtc/api/remotevideocapturer.cc
new file mode 100644
index 0000000..b7be8f8
--- /dev/null
+++ b/webrtc/api/remotevideocapturer.cc
@@ -0,0 +1,95 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/remotevideocapturer.h"
+
+#include "webrtc/base/logging.h"
+#include "webrtc/media/base/videoframe.h"
+
+namespace webrtc {
+
+RemoteVideoCapturer::RemoteVideoCapturer() {}
+
+RemoteVideoCapturer::~RemoteVideoCapturer() {}
+
+cricket::CaptureState RemoteVideoCapturer::Start(
+    const cricket::VideoFormat& capture_format) {
+  if (capture_state() == cricket::CS_RUNNING) {
+    LOG(LS_WARNING)
+        << "RemoteVideoCapturer::Start called when it's already started.";
+    return capture_state();
+  }
+
+  LOG(LS_INFO) << "RemoteVideoCapturer::Start";
+  SetCaptureFormat(&capture_format);
+  return cricket::CS_RUNNING;
+}
+
+void RemoteVideoCapturer::Stop() {
+  if (capture_state() == cricket::CS_STOPPED) {
+    LOG(LS_WARNING)
+        << "RemoteVideoCapturer::Stop called when it's already stopped.";
+    return;
+  }
+
+  LOG(LS_INFO) << "RemoteVideoCapturer::Stop";
+  SetCaptureFormat(NULL);
+  SetCaptureState(cricket::CS_STOPPED);
+}
+
+bool RemoteVideoCapturer::IsRunning() {
+  return capture_state() == cricket::CS_RUNNING;
+}
+
+bool RemoteVideoCapturer::GetPreferredFourccs(std::vector<uint32_t>* fourccs) {
+  if (!fourccs)
+    return false;
+  fourccs->push_back(cricket::FOURCC_I420);
+  return true;
+}
+
+bool RemoteVideoCapturer::GetBestCaptureFormat(
+    const cricket::VideoFormat& desired, cricket::VideoFormat* best_format) {
+  if (!best_format) {
+    return false;
+  }
+
+  // RemoteVideoCapturer does not support capability enumeration.
+  // Use the desired format as the best format.
+  best_format->width = desired.width;
+  best_format->height = desired.height;
+  best_format->fourcc = cricket::FOURCC_I420;
+  best_format->interval = desired.interval;
+  return true;
+}
+
+bool RemoteVideoCapturer::IsScreencast() const {
+  // TODO(ronghuawu): What about remote screencast streams?
+  return false;
+}
+
+}  // namespace webrtc
diff --git a/webrtc/api/remotevideocapturer.h b/webrtc/api/remotevideocapturer.h
new file mode 100644
index 0000000..15c1134
--- /dev/null
+++ b/webrtc/api/remotevideocapturer.h
@@ -0,0 +1,65 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_REMOTEVIDEOCAPTURER_H_
+#define WEBRTC_API_REMOTEVIDEOCAPTURER_H_
+
+#include <vector>
+
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/media/base/videocapturer.h"
+#include "webrtc/media/base/videorenderer.h"
+
+namespace webrtc {
+
+// RemoteVideoCapturer implements a simple cricket::VideoCapturer that
+// receives decoded remote video frames from the media channel. It serves as
+// the remote video source's VideoCapturer, so that the remote video can be
+// used as a cricket::VideoCapturer and, in that way, a remote video stream
+// can implement the MediaStreamSourceInterface.
+class RemoteVideoCapturer : public cricket::VideoCapturer {
+ public:
+  RemoteVideoCapturer();
+  virtual ~RemoteVideoCapturer();
+
+  // cricket::VideoCapturer implementation.
+  cricket::CaptureState Start(
+      const cricket::VideoFormat& capture_format) override;
+  void Stop() override;
+  bool IsRunning() override;
+  bool GetPreferredFourccs(std::vector<uint32_t>* fourccs) override;
+  bool GetBestCaptureFormat(const cricket::VideoFormat& desired,
+                            cricket::VideoFormat* best_format) override;
+  bool IsScreencast() const override;
+
+ private:
+  RTC_DISALLOW_COPY_AND_ASSIGN(RemoteVideoCapturer);
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_REMOTEVIDEOCAPTURER_H_
diff --git a/webrtc/api/remotevideocapturer_unittest.cc b/webrtc/api/remotevideocapturer_unittest.cc
new file mode 100644
index 0000000..f8906e3
--- /dev/null
+++ b/webrtc/api/remotevideocapturer_unittest.cc
@@ -0,0 +1,132 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <string>
+
+#include "webrtc/api/remotevideocapturer.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/media/webrtc/webrtcvideoframe.h"
+
+using cricket::CaptureState;
+using cricket::VideoCapturer;
+using cricket::VideoFormat;
+using cricket::VideoFormatPod;
+using cricket::VideoFrame;
+
+static const int kMaxWaitMs = 1000;
+static const VideoFormatPod kTestFormat =
+    {640, 480, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY};
+
+class RemoteVideoCapturerTest : public testing::Test,
+                                public sigslot::has_slots<> {
+ protected:
+  RemoteVideoCapturerTest()
+      : captured_frame_num_(0),
+        capture_state_(cricket::CS_STOPPED) {}
+
+  virtual void SetUp() {
+    capturer_.SignalStateChange.connect(
+        this, &RemoteVideoCapturerTest::OnStateChange);
+    capturer_.SignalVideoFrame.connect(
+        this, &RemoteVideoCapturerTest::OnVideoFrame);
+  }
+
+  ~RemoteVideoCapturerTest() {
+    capturer_.SignalStateChange.disconnect(this);
+    capturer_.SignalVideoFrame.disconnect(this);
+  }
+
+  int captured_frame_num() const {
+    return captured_frame_num_;
+  }
+
+  CaptureState capture_state() const {
+    return capture_state_;
+  }
+
+  webrtc::RemoteVideoCapturer capturer_;
+
+ private:
+  void OnStateChange(VideoCapturer* capturer,
+                     CaptureState capture_state) {
+    EXPECT_EQ(&capturer_, capturer);
+    capture_state_ = capture_state;
+  }
+
+  void OnVideoFrame(VideoCapturer* capturer, const VideoFrame* frame) {
+    EXPECT_EQ(&capturer_, capturer);
+    ++captured_frame_num_;
+  }
+
+  int captured_frame_num_;
+  CaptureState capture_state_;
+};
+
+TEST_F(RemoteVideoCapturerTest, StartStop) {
+  // Start
+  EXPECT_TRUE(
+      capturer_.StartCapturing(VideoFormat(kTestFormat)));
+  EXPECT_TRUE_WAIT((cricket::CS_RUNNING == capture_state()), kMaxWaitMs);
+  EXPECT_EQ(VideoFormat(kTestFormat),
+            *capturer_.GetCaptureFormat());
+  EXPECT_TRUE(capturer_.IsRunning());
+
+  // Stop
+  capturer_.Stop();
+  EXPECT_TRUE_WAIT((cricket::CS_STOPPED == capture_state()), kMaxWaitMs);
+  EXPECT_TRUE(NULL == capturer_.GetCaptureFormat());
+}
+
+TEST_F(RemoteVideoCapturerTest, GetPreferredFourccs) {
+  EXPECT_FALSE(capturer_.GetPreferredFourccs(NULL));
+
+  std::vector<uint32_t> fourccs;
+  EXPECT_TRUE(capturer_.GetPreferredFourccs(&fourccs));
+  EXPECT_EQ(1u, fourccs.size());
+  EXPECT_EQ(cricket::FOURCC_I420, fourccs.at(0));
+}
+
+TEST_F(RemoteVideoCapturerTest, GetBestCaptureFormat) {
+  VideoFormat desired = VideoFormat(kTestFormat);
+  EXPECT_FALSE(capturer_.GetBestCaptureFormat(desired, NULL));
+
+  VideoFormat expected_format = VideoFormat(kTestFormat);
+  expected_format.fourcc = cricket::FOURCC_I420;
+  VideoFormat best_format;
+  EXPECT_TRUE(capturer_.GetBestCaptureFormat(desired, &best_format));
+  EXPECT_EQ(expected_format, best_format);
+}
+
+TEST_F(RemoteVideoCapturerTest, InputFrame) {
+  EXPECT_EQ(0, captured_frame_num());
+
+  cricket::WebRtcVideoFrame test_frame;
+  capturer_.SignalVideoFrame(&capturer_, &test_frame);
+  EXPECT_EQ(1, captured_frame_num());
+  capturer_.SignalVideoFrame(&capturer_, &test_frame);
+  EXPECT_EQ(2, captured_frame_num());
+}
diff --git a/webrtc/api/rtpreceiver.cc b/webrtc/api/rtpreceiver.cc
new file mode 100644
index 0000000..11d074a
--- /dev/null
+++ b/webrtc/api/rtpreceiver.cc
@@ -0,0 +1,107 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/rtpreceiver.h"
+
+#include "webrtc/api/videosourceinterface.h"
+
+namespace webrtc {
+
+AudioRtpReceiver::AudioRtpReceiver(AudioTrackInterface* track,
+                                   uint32_t ssrc,
+                                   AudioProviderInterface* provider)
+    : id_(track->id()),
+      track_(track),
+      ssrc_(ssrc),
+      provider_(provider),
+      cached_track_enabled_(track->enabled()) {
+  RTC_DCHECK(track_->GetSource()->remote());
+  track_->RegisterObserver(this);
+  track_->GetSource()->RegisterAudioObserver(this);
+  Reconfigure();
+}
+
+AudioRtpReceiver::~AudioRtpReceiver() {
+  track_->GetSource()->UnregisterAudioObserver(this);
+  track_->UnregisterObserver(this);
+  Stop();
+}
+
+void AudioRtpReceiver::OnChanged() {
+  if (cached_track_enabled_ != track_->enabled()) {
+    cached_track_enabled_ = track_->enabled();
+    Reconfigure();
+  }
+}
+
+void AudioRtpReceiver::OnSetVolume(double volume) {
+  // When the track is disabled, the volume of the source (i.e. the
+  // corresponding WebRtc Voice Engine channel) will be 0. So we do not allow
+  // setting the volume on the source while the track is disabled.
+  if (provider_ && track_->enabled())
+    provider_->SetAudioPlayoutVolume(ssrc_, volume);
+}
+
+void AudioRtpReceiver::Stop() {
+  // TODO(deadbeef): Need to do more here to fully stop receiving packets.
+  if (!provider_) {
+    return;
+  }
+  provider_->SetAudioPlayout(ssrc_, false);
+  provider_ = nullptr;
+}
+
+void AudioRtpReceiver::Reconfigure() {
+  if (!provider_) {
+    return;
+  }
+  provider_->SetAudioPlayout(ssrc_, track_->enabled());
+}
+
+VideoRtpReceiver::VideoRtpReceiver(VideoTrackInterface* track,
+                                   uint32_t ssrc,
+                                   VideoProviderInterface* provider)
+    : id_(track->id()), track_(track), ssrc_(ssrc), provider_(provider) {
+  provider_->SetVideoPlayout(ssrc_, true, track_->GetSink());
+}
+
+VideoRtpReceiver::~VideoRtpReceiver() {
+  // Since cricket::VideoRenderer is not reference counted,
+  // we need to remove it from the provider before we are deleted.
+  Stop();
+}
+
+void VideoRtpReceiver::Stop() {
+  // TODO(deadbeef): Need to do more here to fully stop receiving packets.
+  if (!provider_) {
+    return;
+  }
+  provider_->SetVideoPlayout(ssrc_, false, nullptr);
+  provider_ = nullptr;
+}
+
+}  // namespace webrtc
diff --git a/webrtc/api/rtpreceiver.h b/webrtc/api/rtpreceiver.h
new file mode 100644
index 0000000..016ec6a
--- /dev/null
+++ b/webrtc/api/rtpreceiver.h
@@ -0,0 +1,104 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains classes that implement RtpReceiverInterface.
+// An RtpReceiver associates a MediaStreamTrackInterface with an underlying
+// transport (provided by AudioProviderInterface/VideoProviderInterface).
+
+#ifndef WEBRTC_API_RTPRECEIVER_H_
+#define WEBRTC_API_RTPRECEIVER_H_
+
+#include <string>
+
+#include "webrtc/api/mediastreamprovider.h"
+#include "webrtc/api/rtpreceiverinterface.h"
+#include "webrtc/base/basictypes.h"
+
+namespace webrtc {
+
+class AudioRtpReceiver : public ObserverInterface,
+                         public AudioSourceInterface::AudioObserver,
+                         public rtc::RefCountedObject<RtpReceiverInterface> {
+ public:
+  AudioRtpReceiver(AudioTrackInterface* track,
+                   uint32_t ssrc,
+                   AudioProviderInterface* provider);
+
+  virtual ~AudioRtpReceiver();
+
+  // ObserverInterface implementation
+  void OnChanged() override;
+
+  // AudioSourceInterface::AudioObserver implementation
+  void OnSetVolume(double volume) override;
+
+  // RtpReceiverInterface implementation
+  rtc::scoped_refptr<MediaStreamTrackInterface> track() const override {
+    return track_.get();
+  }
+
+  std::string id() const override { return id_; }
+
+  void Stop() override;
+
+ private:
+  void Reconfigure();
+
+  const std::string id_;
+  const rtc::scoped_refptr<AudioTrackInterface> track_;
+  const uint32_t ssrc_;
+  AudioProviderInterface* provider_;  // Set to null in Stop().
+  bool cached_track_enabled_;
+};
+
+class VideoRtpReceiver : public rtc::RefCountedObject<RtpReceiverInterface> {
+ public:
+  VideoRtpReceiver(VideoTrackInterface* track,
+                   uint32_t ssrc,
+                   VideoProviderInterface* provider);
+
+  virtual ~VideoRtpReceiver();
+
+  // RtpReceiverInterface implementation
+  rtc::scoped_refptr<MediaStreamTrackInterface> track() const override {
+    return track_.get();
+  }
+
+  std::string id() const override { return id_; }
+
+  void Stop() override;
+
+ private:
+  std::string id_;
+  rtc::scoped_refptr<VideoTrackInterface> track_;
+  uint32_t ssrc_;
+  VideoProviderInterface* provider_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_RTPRECEIVER_H_
diff --git a/webrtc/api/rtpreceiverinterface.h b/webrtc/api/rtpreceiverinterface.h
new file mode 100644
index 0000000..961d869
--- /dev/null
+++ b/webrtc/api/rtpreceiverinterface.h
@@ -0,0 +1,66 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains interfaces for RtpReceivers
+// http://w3c.github.io/webrtc-pc/#rtcrtpreceiver-interface
+
+#ifndef WEBRTC_API_RTPRECEIVERINTERFACE_H_
+#define WEBRTC_API_RTPRECEIVERINTERFACE_H_
+
+#include <string>
+
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/api/proxy.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+
+namespace webrtc {
+
+class RtpReceiverInterface : public rtc::RefCountInterface {
+ public:
+  virtual rtc::scoped_refptr<MediaStreamTrackInterface> track() const = 0;
+
+  // Not to be confused with "mid", this is a field we can temporarily use
+  // to uniquely identify a receiver until we implement Unified Plan SDP.
+  virtual std::string id() const = 0;
+
+  virtual void Stop() = 0;
+
+ protected:
+  virtual ~RtpReceiverInterface() {}
+};
+
+// Define proxy for RtpReceiverInterface.
+BEGIN_PROXY_MAP(RtpReceiver)
+PROXY_CONSTMETHOD0(rtc::scoped_refptr<MediaStreamTrackInterface>, track)
+PROXY_CONSTMETHOD0(std::string, id)
+PROXY_METHOD0(void, Stop)
+END_PROXY()
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_RTPRECEIVERINTERFACE_H_
diff --git a/webrtc/api/rtpsender.cc b/webrtc/api/rtpsender.cc
new file mode 100644
index 0000000..f20f464
--- /dev/null
+++ b/webrtc/api/rtpsender.cc
@@ -0,0 +1,348 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/rtpsender.h"
+
+#include "webrtc/api/localaudiosource.h"
+#include "webrtc/api/videosourceinterface.h"
+#include "webrtc/base/helpers.h"
+
+namespace webrtc {
+
+LocalAudioSinkAdapter::LocalAudioSinkAdapter() : sink_(nullptr) {}
+
+LocalAudioSinkAdapter::~LocalAudioSinkAdapter() {
+  rtc::CritScope lock(&lock_);
+  if (sink_)
+    sink_->OnClose();
+}
+
+void LocalAudioSinkAdapter::OnData(const void* audio_data,
+                                   int bits_per_sample,
+                                   int sample_rate,
+                                   size_t number_of_channels,
+                                   size_t number_of_frames) {
+  rtc::CritScope lock(&lock_);
+  if (sink_) {
+    sink_->OnData(audio_data, bits_per_sample, sample_rate, number_of_channels,
+                  number_of_frames);
+  }
+}
+
+void LocalAudioSinkAdapter::SetSink(cricket::AudioRenderer::Sink* sink) {
+  rtc::CritScope lock(&lock_);
+  ASSERT(!sink || !sink_);
+  sink_ = sink;
+}
+
+AudioRtpSender::AudioRtpSender(AudioTrackInterface* track,
+                               const std::string& stream_id,
+                               AudioProviderInterface* provider,
+                               StatsCollector* stats)
+    : id_(track->id()),
+      stream_id_(stream_id),
+      provider_(provider),
+      stats_(stats),
+      track_(track),
+      cached_track_enabled_(track->enabled()),
+      sink_adapter_(new LocalAudioSinkAdapter()) {
+  RTC_DCHECK(provider != nullptr);
+  track_->RegisterObserver(this);
+  track_->AddSink(sink_adapter_.get());
+}
+
+AudioRtpSender::AudioRtpSender(AudioTrackInterface* track,
+                               AudioProviderInterface* provider,
+                               StatsCollector* stats)
+    : id_(track->id()),
+      stream_id_(rtc::CreateRandomUuid()),
+      provider_(provider),
+      stats_(stats),
+      track_(track),
+      cached_track_enabled_(track->enabled()),
+      sink_adapter_(new LocalAudioSinkAdapter()) {
+  RTC_DCHECK(provider != nullptr);
+  track_->RegisterObserver(this);
+  track_->AddSink(sink_adapter_.get());
+}
+
+AudioRtpSender::AudioRtpSender(AudioProviderInterface* provider,
+                               StatsCollector* stats)
+    : id_(rtc::CreateRandomUuid()),
+      stream_id_(rtc::CreateRandomUuid()),
+      provider_(provider),
+      stats_(stats),
+      sink_adapter_(new LocalAudioSinkAdapter()) {}
+
+AudioRtpSender::~AudioRtpSender() {
+  Stop();
+}
+
+void AudioRtpSender::OnChanged() {
+  RTC_DCHECK(!stopped_);
+  if (cached_track_enabled_ != track_->enabled()) {
+    cached_track_enabled_ = track_->enabled();
+    if (can_send_track()) {
+      SetAudioSend();
+    }
+  }
+}
+
+bool AudioRtpSender::SetTrack(MediaStreamTrackInterface* track) {
+  if (stopped_) {
+    LOG(LS_ERROR) << "SetTrack can't be called on a stopped RtpSender.";
+    return false;
+  }
+  if (track && track->kind() != MediaStreamTrackInterface::kAudioKind) {
+    LOG(LS_ERROR) << "SetTrack called on audio RtpSender with " << track->kind()
+                  << " track.";
+    return false;
+  }
+  AudioTrackInterface* audio_track = static_cast<AudioTrackInterface*>(track);
+
+  // Detach from old track.
+  if (track_) {
+    track_->RemoveSink(sink_adapter_.get());
+    track_->UnregisterObserver(this);
+  }
+
+  if (can_send_track() && stats_) {
+    stats_->RemoveLocalAudioTrack(track_.get(), ssrc_);
+  }
+
+  // Attach to new track.
+  bool prev_can_send_track = can_send_track();
+  track_ = audio_track;
+  if (track_) {
+    cached_track_enabled_ = track_->enabled();
+    track_->RegisterObserver(this);
+    track_->AddSink(sink_adapter_.get());
+  }
+
+  // Update audio provider.
+  if (can_send_track()) {
+    SetAudioSend();
+    if (stats_) {
+      stats_->AddLocalAudioTrack(track_.get(), ssrc_);
+    }
+  } else if (prev_can_send_track) {
+    cricket::AudioOptions options;
+    provider_->SetAudioSend(ssrc_, false, options, nullptr);
+  }
+  return true;
+}
+
+void AudioRtpSender::SetSsrc(uint32_t ssrc) {
+  if (stopped_ || ssrc == ssrc_) {
+    return;
+  }
+  // If we are already sending with a particular SSRC, stop sending.
+  if (can_send_track()) {
+    cricket::AudioOptions options;
+    provider_->SetAudioSend(ssrc_, false, options, nullptr);
+    if (stats_) {
+      stats_->RemoveLocalAudioTrack(track_.get(), ssrc_);
+    }
+  }
+  ssrc_ = ssrc;
+  if (can_send_track()) {
+    SetAudioSend();
+    if (stats_) {
+      stats_->AddLocalAudioTrack(track_.get(), ssrc_);
+    }
+  }
+}
+
+void AudioRtpSender::Stop() {
+  // TODO(deadbeef): Need to do more here to fully stop sending packets.
+  if (stopped_) {
+    return;
+  }
+  if (track_) {
+    track_->RemoveSink(sink_adapter_.get());
+    track_->UnregisterObserver(this);
+  }
+  if (can_send_track()) {
+    cricket::AudioOptions options;
+    provider_->SetAudioSend(ssrc_, false, options, nullptr);
+    if (stats_) {
+      stats_->RemoveLocalAudioTrack(track_.get(), ssrc_);
+    }
+  }
+  stopped_ = true;
+}
+
+void AudioRtpSender::SetAudioSend() {
+  RTC_DCHECK(!stopped_ && can_send_track());
+  cricket::AudioOptions options;
+#if !defined(WEBRTC_CHROMIUM_BUILD)
+  // TODO(tommi): Remove this hack when we move CreateAudioSource out of
+  // PeerConnection.  This is a bit of a strange way to apply local audio
+  // options since it is also applied to all streams/channels, local or remote.
+  if (track_->enabled() && track_->GetSource() &&
+      !track_->GetSource()->remote()) {
+    // TODO(xians): Remove this static_cast since we should be able to connect
+    // a remote audio track to a peer connection.
+    options = static_cast<LocalAudioSource*>(track_->GetSource())->options();
+  }
+#endif
+
+  cricket::AudioRenderer* renderer = sink_adapter_.get();
+  ASSERT(renderer != nullptr);
+  provider_->SetAudioSend(ssrc_, track_->enabled(), options, renderer);
+}
+
+VideoRtpSender::VideoRtpSender(VideoTrackInterface* track,
+                               const std::string& stream_id,
+                               VideoProviderInterface* provider)
+    : id_(track->id()),
+      stream_id_(stream_id),
+      provider_(provider),
+      track_(track),
+      cached_track_enabled_(track->enabled()) {
+  RTC_DCHECK(provider != nullptr);
+  track_->RegisterObserver(this);
+}
+
+VideoRtpSender::VideoRtpSender(VideoTrackInterface* track,
+                               VideoProviderInterface* provider)
+    : id_(track->id()),
+      stream_id_(rtc::CreateRandomUuid()),
+      provider_(provider),
+      track_(track),
+      cached_track_enabled_(track->enabled()) {
+  RTC_DCHECK(provider != nullptr);
+  track_->RegisterObserver(this);
+}
+
+VideoRtpSender::VideoRtpSender(VideoProviderInterface* provider)
+    : id_(rtc::CreateRandomUuid()),
+      stream_id_(rtc::CreateRandomUuid()),
+      provider_(provider) {}
+
+VideoRtpSender::~VideoRtpSender() {
+  Stop();
+}
+
+void VideoRtpSender::OnChanged() {
+  RTC_DCHECK(!stopped_);
+  if (cached_track_enabled_ != track_->enabled()) {
+    cached_track_enabled_ = track_->enabled();
+    if (can_send_track()) {
+      SetVideoSend();
+    }
+  }
+}
+
+bool VideoRtpSender::SetTrack(MediaStreamTrackInterface* track) {
+  if (stopped_) {
+    LOG(LS_ERROR) << "SetTrack can't be called on a stopped RtpSender.";
+    return false;
+  }
+  if (track && track->kind() != MediaStreamTrackInterface::kVideoKind) {
+    LOG(LS_ERROR) << "SetTrack called on video RtpSender with " << track->kind()
+                  << " track.";
+    return false;
+  }
+  VideoTrackInterface* video_track = static_cast<VideoTrackInterface*>(track);
+
+  // Detach from old track.
+  if (track_) {
+    track_->UnregisterObserver(this);
+  }
+
+  // Attach to new track.
+  bool prev_can_send_track = can_send_track();
+  track_ = video_track;
+  if (track_) {
+    cached_track_enabled_ = track_->enabled();
+    track_->RegisterObserver(this);
+  }
+
+  // Update video provider.
+  if (can_send_track()) {
+    VideoSourceInterface* source = track_->GetSource();
+    // TODO(deadbeef): If SetTrack is called with a disabled track, and the
+    // previous track was enabled, this could cause a frame from the new track
+    // to slip out. Really, what we need is for SetCaptureDevice and
+    // SetVideoSend to be combined into one atomic operation, all the way
+    // down to WebRtcVideoSendStream.
+    provider_->SetCaptureDevice(ssrc_,
+                                source ? source->GetVideoCapturer() : nullptr);
+    SetVideoSend();
+  } else if (prev_can_send_track) {
+    provider_->SetCaptureDevice(ssrc_, nullptr);
+    provider_->SetVideoSend(ssrc_, false, nullptr);
+  }
+  return true;
+}
+
+void VideoRtpSender::SetSsrc(uint32_t ssrc) {
+  if (stopped_ || ssrc == ssrc_) {
+    return;
+  }
+  // If we are already sending with a particular SSRC, stop sending.
+  if (can_send_track()) {
+    provider_->SetCaptureDevice(ssrc_, nullptr);
+    provider_->SetVideoSend(ssrc_, false, nullptr);
+  }
+  ssrc_ = ssrc;
+  if (can_send_track()) {
+    VideoSourceInterface* source = track_->GetSource();
+    provider_->SetCaptureDevice(ssrc_,
+                                source ? source->GetVideoCapturer() : nullptr);
+    SetVideoSend();
+  }
+}
+
+void VideoRtpSender::Stop() {
+  // TODO(deadbeef): Need to do more here to fully stop sending packets.
+  if (stopped_) {
+    return;
+  }
+  if (track_) {
+    track_->UnregisterObserver(this);
+  }
+  if (can_send_track()) {
+    provider_->SetCaptureDevice(ssrc_, nullptr);
+    provider_->SetVideoSend(ssrc_, false, nullptr);
+  }
+  stopped_ = true;
+}
+
+void VideoRtpSender::SetVideoSend() {
+  RTC_DCHECK(!stopped_ && can_send_track());
+  const cricket::VideoOptions* options = nullptr;
+  VideoSourceInterface* source = track_->GetSource();
+  if (track_->enabled() && source) {
+    options = source->options();
+  }
+  provider_->SetVideoSend(ssrc_, track_->enabled(), options);
+}
+
+}  // namespace webrtc
diff --git a/webrtc/api/rtpsender.h b/webrtc/api/rtpsender.h
new file mode 100644
index 0000000..45b765d
--- /dev/null
+++ b/webrtc/api/rtpsender.h
@@ -0,0 +1,195 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains classes that implement RtpSenderInterface.
+// An RtpSender associates a MediaStreamTrackInterface with an underlying
+// transport (provided by AudioProviderInterface/VideoProviderInterface).
+
+#ifndef WEBRTC_API_RTPSENDER_H_
+#define WEBRTC_API_RTPSENDER_H_
+
+#include <string>
+
+#include "webrtc/api/mediastreamprovider.h"
+#include "webrtc/api/rtpsenderinterface.h"
+#include "webrtc/api/statscollector.h"
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/media/base/audiorenderer.h"
+
+namespace webrtc {
+
+// LocalAudioSinkAdapter receives data callbacks as a sink of the local
+// AudioTrack, and passes the data on to the sink of the AudioRenderer.
+class LocalAudioSinkAdapter : public AudioTrackSinkInterface,
+                              public cricket::AudioRenderer {
+ public:
+  LocalAudioSinkAdapter();
+  virtual ~LocalAudioSinkAdapter();
+
+ private:
+  // AudioSinkInterface implementation.
+  void OnData(const void* audio_data,
+              int bits_per_sample,
+              int sample_rate,
+              size_t number_of_channels,
+              size_t number_of_frames) override;
+
+  // cricket::AudioRenderer implementation.
+  void SetSink(cricket::AudioRenderer::Sink* sink) override;
+
+  cricket::AudioRenderer::Sink* sink_;
+  // Critical section protecting |sink_|.
+  rtc::CriticalSection lock_;
+};
+
+class AudioRtpSender : public ObserverInterface,
+                       public rtc::RefCountedObject<RtpSenderInterface> {
+ public:
+  // StatsCollector provided so that Add/RemoveLocalAudioTrack can be called
+  // at the appropriate times.
+  AudioRtpSender(AudioTrackInterface* track,
+                 const std::string& stream_id,
+                 AudioProviderInterface* provider,
+                 StatsCollector* stats);
+
+  // Randomly generates stream_id.
+  AudioRtpSender(AudioTrackInterface* track,
+                 AudioProviderInterface* provider,
+                 StatsCollector* stats);
+
+  // Randomly generates id and stream_id.
+  AudioRtpSender(AudioProviderInterface* provider, StatsCollector* stats);
+
+  virtual ~AudioRtpSender();
+
+  // ObserverInterface implementation
+  void OnChanged() override;
+
+  // RtpSenderInterface implementation
+  bool SetTrack(MediaStreamTrackInterface* track) override;
+  rtc::scoped_refptr<MediaStreamTrackInterface> track() const override {
+    return track_.get();
+  }
+
+  void SetSsrc(uint32_t ssrc) override;
+
+  uint32_t ssrc() const override { return ssrc_; }
+
+  cricket::MediaType media_type() const override {
+    return cricket::MEDIA_TYPE_AUDIO;
+  }
+
+  std::string id() const override { return id_; }
+
+  void set_stream_id(const std::string& stream_id) override {
+    stream_id_ = stream_id;
+  }
+  std::string stream_id() const override { return stream_id_; }
+
+  void Stop() override;
+
+ private:
+  bool can_send_track() const { return track_ && ssrc_; }
+  // Helper function to construct options for
+  // AudioProviderInterface::SetAudioSend.
+  void SetAudioSend();
+
+  std::string id_;
+  std::string stream_id_;
+  AudioProviderInterface* provider_;
+  StatsCollector* stats_;
+  rtc::scoped_refptr<AudioTrackInterface> track_;
+  uint32_t ssrc_ = 0;
+  bool cached_track_enabled_ = false;
+  bool stopped_ = false;
+
+  // Used to pass the data callback from the |track_| to the other end of
+  // cricket::AudioRenderer.
+  rtc::scoped_ptr<LocalAudioSinkAdapter> sink_adapter_;
+};
+
+class VideoRtpSender : public ObserverInterface,
+                       public rtc::RefCountedObject<RtpSenderInterface> {
+ public:
+  VideoRtpSender(VideoTrackInterface* track,
+                 const std::string& stream_id,
+                 VideoProviderInterface* provider);
+
+  // Randomly generates stream_id.
+  VideoRtpSender(VideoTrackInterface* track, VideoProviderInterface* provider);
+
+  // Randomly generates id and stream_id.
+  explicit VideoRtpSender(VideoProviderInterface* provider);
+
+  virtual ~VideoRtpSender();
+
+  // ObserverInterface implementation
+  void OnChanged() override;
+
+  // RtpSenderInterface implementation
+  bool SetTrack(MediaStreamTrackInterface* track) override;
+  rtc::scoped_refptr<MediaStreamTrackInterface> track() const override {
+    return track_.get();
+  }
+
+  void SetSsrc(uint32_t ssrc) override;
+
+  uint32_t ssrc() const override { return ssrc_; }
+
+  cricket::MediaType media_type() const override {
+    return cricket::MEDIA_TYPE_VIDEO;
+  }
+
+  std::string id() const override { return id_; }
+
+  void set_stream_id(const std::string& stream_id) override {
+    stream_id_ = stream_id;
+  }
+  std::string stream_id() const override { return stream_id_; }
+
+  void Stop() override;
+
+ private:
+  bool can_send_track() const { return track_ && ssrc_; }
+  // Helper function to construct options for
+  // VideoProviderInterface::SetVideoSend.
+  void SetVideoSend();
+
+  std::string id_;
+  std::string stream_id_;
+  VideoProviderInterface* provider_;
+  rtc::scoped_refptr<VideoTrackInterface> track_;
+  uint32_t ssrc_ = 0;
+  bool cached_track_enabled_ = false;
+  bool stopped_ = false;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_RTPSENDER_H_
diff --git a/webrtc/api/rtpsenderinterface.h b/webrtc/api/rtpsenderinterface.h
new file mode 100644
index 0000000..740e985
--- /dev/null
+++ b/webrtc/api/rtpsenderinterface.h
@@ -0,0 +1,90 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains interfaces for RtpSenders
+// http://w3c.github.io/webrtc-pc/#rtcrtpsender-interface
+
+#ifndef WEBRTC_API_RTPSENDERINTERFACE_H_
+#define WEBRTC_API_RTPSENDERINTERFACE_H_
+
+#include <string>
+
+#include "talk/session/media/mediasession.h"
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/api/proxy.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+
+namespace webrtc {
+
+class RtpSenderInterface : public rtc::RefCountInterface {
+ public:
+  // Returns true if successful in setting the track.
+  // Fails if an audio track is set on a video RtpSender, or vice-versa.
+  virtual bool SetTrack(MediaStreamTrackInterface* track) = 0;
+  virtual rtc::scoped_refptr<MediaStreamTrackInterface> track() const = 0;
+
+  // Used to set the SSRC of the sender, once a local description has been set.
+  // If |ssrc| is 0, this indicates that the sender should disconnect from the
+  // underlying transport (this occurs if the sender isn't seen in a local
+  // description).
+  virtual void SetSsrc(uint32_t ssrc) = 0;
+  virtual uint32_t ssrc() const = 0;
+
+  // Audio or video sender?
+  virtual cricket::MediaType media_type() const = 0;
+
+  // Not to be confused with "mid", this is a field we can temporarily use
+  // to uniquely identify a sender until we implement Unified Plan SDP.
+  virtual std::string id() const = 0;
+
+  // TODO(deadbeef): Support one sender having multiple stream ids.
+  virtual void set_stream_id(const std::string& stream_id) = 0;
+  virtual std::string stream_id() const = 0;
+
+  virtual void Stop() = 0;
+
+ protected:
+  virtual ~RtpSenderInterface() {}
+};
+
+// Define proxy for RtpSenderInterface.
+BEGIN_PROXY_MAP(RtpSender)
+PROXY_METHOD1(bool, SetTrack, MediaStreamTrackInterface*)
+PROXY_CONSTMETHOD0(rtc::scoped_refptr<MediaStreamTrackInterface>, track)
+PROXY_METHOD1(void, SetSsrc, uint32_t)
+PROXY_CONSTMETHOD0(uint32_t, ssrc)
+PROXY_CONSTMETHOD0(cricket::MediaType, media_type)
+PROXY_CONSTMETHOD0(std::string, id)
+PROXY_METHOD1(void, set_stream_id, const std::string&)
+PROXY_CONSTMETHOD0(std::string, stream_id)
+PROXY_METHOD0(void, Stop)
+END_PROXY()
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_RTPSENDERINTERFACE_H_
diff --git a/webrtc/api/rtpsenderreceiver_unittest.cc b/webrtc/api/rtpsenderreceiver_unittest.cc
new file mode 100644
index 0000000..faca657
--- /dev/null
+++ b/webrtc/api/rtpsenderreceiver_unittest.cc
@@ -0,0 +1,515 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <string>
+#include <utility>
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/api/audiotrack.h"
+#include "webrtc/api/mediastream.h"
+#include "webrtc/api/remoteaudiosource.h"
+#include "webrtc/api/rtpreceiver.h"
+#include "webrtc/api/rtpsender.h"
+#include "webrtc/api/streamcollection.h"
+#include "webrtc/api/videosource.h"
+#include "webrtc/api/videotrack.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/media/base/fakevideocapturer.h"
+#include "webrtc/media/base/mediachannel.h"
+
+using ::testing::_;
+using ::testing::Exactly;
+
+static const char kStreamLabel1[] = "local_stream_1";
+static const char kVideoTrackId[] = "video_1";
+static const char kAudioTrackId[] = "audio_1";
+static const uint32_t kVideoSsrc = 98;
+static const uint32_t kVideoSsrc2 = 100;
+static const uint32_t kAudioSsrc = 99;
+static const uint32_t kAudioSsrc2 = 101;
+
+namespace webrtc {
+
+// Helper class to test RtpSender/RtpReceiver.
+class MockAudioProvider : public AudioProviderInterface {
+ public:
+  ~MockAudioProvider() override {}
+
+  MOCK_METHOD2(SetAudioPlayout,
+               void(uint32_t ssrc,
+                    bool enable));
+  MOCK_METHOD4(SetAudioSend,
+               void(uint32_t ssrc,
+                    bool enable,
+                    const cricket::AudioOptions& options,
+                    cricket::AudioRenderer* renderer));
+  MOCK_METHOD2(SetAudioPlayoutVolume, void(uint32_t ssrc, double volume));
+
+  void SetRawAudioSink(uint32_t,
+                       rtc::scoped_ptr<AudioSinkInterface> sink) override {
+    sink_ = std::move(sink);
+  }
+
+ private:
+  rtc::scoped_ptr<AudioSinkInterface> sink_;
+};
+
+// Helper class to test RtpSender/RtpReceiver.
+class MockVideoProvider : public VideoProviderInterface {
+ public:
+  virtual ~MockVideoProvider() {}
+  MOCK_METHOD2(SetCaptureDevice,
+               bool(uint32_t ssrc, cricket::VideoCapturer* camera));
+  MOCK_METHOD3(SetVideoPlayout,
+               void(uint32_t ssrc,
+                    bool enable,
+                    rtc::VideoSinkInterface<cricket::VideoFrame>* sink));
+  MOCK_METHOD3(SetVideoSend,
+               void(uint32_t ssrc,
+                    bool enable,
+                    const cricket::VideoOptions* options));
+};
+
+class FakeVideoSource : public Notifier<VideoSourceInterface> {
+ public:
+  static rtc::scoped_refptr<FakeVideoSource> Create(bool remote) {
+    return new rtc::RefCountedObject<FakeVideoSource>(remote);
+  }
+  virtual cricket::VideoCapturer* GetVideoCapturer() { return &fake_capturer_; }
+  virtual void Stop() {}
+  virtual void Restart() {}
+  virtual void AddSink(rtc::VideoSinkInterface<cricket::VideoFrame>* output) {}
+  virtual void RemoveSink(
+      rtc::VideoSinkInterface<cricket::VideoFrame>* output) {}
+  virtual SourceState state() const { return state_; }
+  virtual bool remote() const { return remote_; }
+  virtual const cricket::VideoOptions* options() const { return &options_; }
+  virtual cricket::VideoRenderer* FrameInput() { return NULL; }
+
+ protected:
+  explicit FakeVideoSource(bool remote) : state_(kLive), remote_(remote) {}
+  ~FakeVideoSource() {}
+
+ private:
+  cricket::FakeVideoCapturer fake_capturer_;
+  SourceState state_;
+  bool remote_;
+  cricket::VideoOptions options_;
+};
+
+class RtpSenderReceiverTest : public testing::Test {
+ public:
+  virtual void SetUp() {
+    stream_ = MediaStream::Create(kStreamLabel1);
+  }
+
+  void AddVideoTrack(bool remote) {
+    rtc::scoped_refptr<VideoSourceInterface> source(
+        FakeVideoSource::Create(remote));
+    video_track_ = VideoTrack::Create(kVideoTrackId, source);
+    EXPECT_TRUE(stream_->AddTrack(video_track_));
+  }
+
+  void CreateAudioRtpSender() {
+    audio_track_ = AudioTrack::Create(kAudioTrackId, NULL);
+    EXPECT_TRUE(stream_->AddTrack(audio_track_));
+    EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
+    audio_rtp_sender_ =
+        new AudioRtpSender(stream_->GetAudioTracks()[0], stream_->label(),
+                           &audio_provider_, nullptr);
+    audio_rtp_sender_->SetSsrc(kAudioSsrc);
+  }
+
+  void CreateVideoRtpSender() {
+    AddVideoTrack(false);
+    EXPECT_CALL(video_provider_,
+                SetCaptureDevice(
+                    kVideoSsrc, video_track_->GetSource()->GetVideoCapturer()));
+    EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
+    video_rtp_sender_ = new VideoRtpSender(stream_->GetVideoTracks()[0],
+                                           stream_->label(), &video_provider_);
+    video_rtp_sender_->SetSsrc(kVideoSsrc);
+  }
+
+  void DestroyAudioRtpSender() {
+    EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _))
+        .Times(1);
+    audio_rtp_sender_ = nullptr;
+  }
+
+  void DestroyVideoRtpSender() {
+    EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, NULL)).Times(1);
+    EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _)).Times(1);
+    video_rtp_sender_ = nullptr;
+  }
+
+  void CreateAudioRtpReceiver() {
+    audio_track_ = AudioTrack::Create(
+        kAudioTrackId, RemoteAudioSource::Create(kAudioSsrc, NULL));
+    EXPECT_TRUE(stream_->AddTrack(audio_track_));
+    EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, true));
+    audio_rtp_receiver_ = new AudioRtpReceiver(stream_->GetAudioTracks()[0],
+                                               kAudioSsrc, &audio_provider_);
+  }
+
+  void CreateVideoRtpReceiver() {
+    AddVideoTrack(true);
+    EXPECT_CALL(video_provider_,
+                SetVideoPlayout(kVideoSsrc, true,
+                                video_track_->GetSink()));
+    video_rtp_receiver_ = new VideoRtpReceiver(stream_->GetVideoTracks()[0],
+                                               kVideoSsrc, &video_provider_);
+  }
+
+  void DestroyAudioRtpReceiver() {
+    EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, false));
+    audio_rtp_receiver_ = nullptr;
+  }
+
+  void DestroyVideoRtpReceiver() {
+    EXPECT_CALL(video_provider_, SetVideoPlayout(kVideoSsrc, false, NULL));
+    video_rtp_receiver_ = nullptr;
+  }
+
+ protected:
+  MockAudioProvider audio_provider_;
+  MockVideoProvider video_provider_;
+  rtc::scoped_refptr<AudioRtpSender> audio_rtp_sender_;
+  rtc::scoped_refptr<VideoRtpSender> video_rtp_sender_;
+  rtc::scoped_refptr<AudioRtpReceiver> audio_rtp_receiver_;
+  rtc::scoped_refptr<VideoRtpReceiver> video_rtp_receiver_;
+  rtc::scoped_refptr<MediaStreamInterface> stream_;
+  rtc::scoped_refptr<VideoTrackInterface> video_track_;
+  rtc::scoped_refptr<AudioTrackInterface> audio_track_;
+};
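+
+// Note on the pattern used in the helpers above (a sketch, not new
+// behavior): each Create*() helper sets its EXPECT_CALL before the action
+// that should trigger it, and each Destroy*() helper sets the expectation
+// for the destructor's calls before dropping the last reference, e.g.:
+//
+//   EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _));
+//   audio_rtp_sender_ = nullptr;  // Destructor runs; expectation verified.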
+
+// Test that |audio_provider_| is notified when an audio track is associated
+// and disassociated with an AudioRtpSender.
+TEST_F(RtpSenderReceiverTest, AddAndDestroyAudioRtpSender) {
+  CreateAudioRtpSender();
+  DestroyAudioRtpSender();
+}
+
+// Test that |video_provider_| is notified when a video track is associated and
+// disassociated with a VideoRtpSender.
+TEST_F(RtpSenderReceiverTest, AddAndDestroyVideoRtpSender) {
+  CreateVideoRtpSender();
+  DestroyVideoRtpSender();
+}
+
+// Test that |audio_provider_| is notified when a remote audio track is
+// associated and disassociated with an AudioRtpReceiver.
+TEST_F(RtpSenderReceiverTest, AddAndDestroyAudioRtpReceiver) {
+  CreateAudioRtpReceiver();
+  DestroyAudioRtpReceiver();
+}
+
+// Test that |video_provider_| is notified when a remote
+// video track is associated and disassociated with a VideoRtpReceiver.
+TEST_F(RtpSenderReceiverTest, AddAndDestroyVideoRtpReceiver) {
+  CreateVideoRtpReceiver();
+  DestroyVideoRtpReceiver();
+}
+
+TEST_F(RtpSenderReceiverTest, LocalAudioTrackDisable) {
+  CreateAudioRtpSender();
+
+  EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _));
+  audio_track_->set_enabled(false);
+
+  EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
+  audio_track_->set_enabled(true);
+
+  DestroyAudioRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest, RemoteAudioTrackDisable) {
+  CreateAudioRtpReceiver();
+
+  EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, false));
+  audio_track_->set_enabled(false);
+
+  EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, true));
+  audio_track_->set_enabled(true);
+
+  DestroyAudioRtpReceiver();
+}
+
+TEST_F(RtpSenderReceiverTest, LocalVideoTrackDisable) {
+  CreateVideoRtpSender();
+
+  EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _));
+  video_track_->set_enabled(false);
+
+  EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
+  video_track_->set_enabled(true);
+
+  DestroyVideoRtpSender();
+}
+
+TEST_F(RtpSenderReceiverTest, RemoteVideoTrackDisable) {
+  CreateVideoRtpReceiver();
+
+  video_track_->set_enabled(false);
+
+  video_track_->set_enabled(true);
+
+  DestroyVideoRtpReceiver();
+}
+
+TEST_F(RtpSenderReceiverTest, RemoteAudioTrackSetVolume) {
+  CreateAudioRtpReceiver();
+
+  double volume = 0.5;
+  EXPECT_CALL(audio_provider_, SetAudioPlayoutVolume(kAudioSsrc, volume));
+  audio_track_->GetSource()->SetVolume(volume);
+
+  // Disable the audio track; this should prevent setting the volume.
+  EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, false));
+  audio_track_->set_enabled(false);
+  audio_track_->GetSource()->SetVolume(1.0);
+
+  EXPECT_CALL(audio_provider_, SetAudioPlayout(kAudioSsrc, true));
+  audio_track_->set_enabled(true);
+
+  double new_volume = 0.8;
+  EXPECT_CALL(audio_provider_, SetAudioPlayoutVolume(kAudioSsrc, new_volume));
+  audio_track_->GetSource()->SetVolume(new_volume);
+
+  DestroyAudioRtpReceiver();
+}
+
+// Test that provider methods aren't called without both a track and an SSRC.
+TEST_F(RtpSenderReceiverTest, AudioSenderWithoutTrackAndSsrc) {
+  rtc::scoped_refptr<AudioRtpSender> sender =
+      new AudioRtpSender(&audio_provider_, nullptr);
+  rtc::scoped_refptr<AudioTrackInterface> track =
+      AudioTrack::Create(kAudioTrackId, nullptr);
+  EXPECT_TRUE(sender->SetTrack(track));
+  EXPECT_TRUE(sender->SetTrack(nullptr));
+  sender->SetSsrc(kAudioSsrc);
+  sender->SetSsrc(0);
+  // Just let it get destroyed and make sure it doesn't call any methods on the
+  // provider interface.
+}
+
+// Test that provider methods aren't called without both a track and an SSRC.
+TEST_F(RtpSenderReceiverTest, VideoSenderWithoutTrackAndSsrc) {
+  rtc::scoped_refptr<VideoRtpSender> sender =
+      new VideoRtpSender(&video_provider_);
+  EXPECT_TRUE(sender->SetTrack(video_track_));
+  EXPECT_TRUE(sender->SetTrack(nullptr));
+  sender->SetSsrc(kVideoSsrc);
+  sender->SetSsrc(0);
+  // Just let it get destroyed and make sure it doesn't call any methods on the
+  // provider interface.
+}
+
+// Test that an audio sender calls the expected methods on the provider once
+// it has a track and SSRC, when the SSRC is set first.
+TEST_F(RtpSenderReceiverTest, AudioSenderEarlyWarmupSsrcThenTrack) {
+  rtc::scoped_refptr<AudioRtpSender> sender =
+      new AudioRtpSender(&audio_provider_, nullptr);
+  rtc::scoped_refptr<AudioTrackInterface> track =
+      AudioTrack::Create(kAudioTrackId, nullptr);
+  sender->SetSsrc(kAudioSsrc);
+  EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
+  sender->SetTrack(track);
+
+  // Calls expected from destructor.
+  EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _)).Times(1);
+}
+
+// Test that an audio sender calls the expected methods on the provider once
+// it has a track and SSRC, when the SSRC is set last.
+TEST_F(RtpSenderReceiverTest, AudioSenderEarlyWarmupTrackThenSsrc) {
+  rtc::scoped_refptr<AudioRtpSender> sender =
+      new AudioRtpSender(&audio_provider_, nullptr);
+  rtc::scoped_refptr<AudioTrackInterface> track =
+      AudioTrack::Create(kAudioTrackId, nullptr);
+  sender->SetTrack(track);
+  EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
+  sender->SetSsrc(kAudioSsrc);
+
+  // Calls expected from destructor.
+  EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _)).Times(1);
+}
+
+// Test that a video sender calls the expected methods on the provider once
+// it has a track and SSRC, when the SSRC is set first.
+TEST_F(RtpSenderReceiverTest, VideoSenderEarlyWarmupSsrcThenTrack) {
+  AddVideoTrack(false);
+  rtc::scoped_refptr<VideoRtpSender> sender =
+      new VideoRtpSender(&video_provider_);
+  sender->SetSsrc(kVideoSsrc);
+  EXPECT_CALL(video_provider_,
+              SetCaptureDevice(kVideoSsrc,
+                               video_track_->GetSource()->GetVideoCapturer()));
+  EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
+  sender->SetTrack(video_track_);
+
+  // Calls expected from destructor.
+  EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, nullptr)).Times(1);
+  EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _)).Times(1);
+}
+
+// Test that a video sender calls the expected methods on the provider once
+// it has a track and SSRC, when the SSRC is set last.
+TEST_F(RtpSenderReceiverTest, VideoSenderEarlyWarmupTrackThenSsrc) {
+  AddVideoTrack(false);
+  rtc::scoped_refptr<VideoRtpSender> sender =
+      new VideoRtpSender(&video_provider_);
+  sender->SetTrack(video_track_);
+  EXPECT_CALL(video_provider_,
+              SetCaptureDevice(kVideoSsrc,
+                               video_track_->GetSource()->GetVideoCapturer()));
+  EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
+  sender->SetSsrc(kVideoSsrc);
+
+  // Calls expected from destructor.
+  EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, nullptr)).Times(1);
+  EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _)).Times(1);
+}
+
+// Test that the sender is disconnected from the provider when its SSRC is
+// set to 0.
+TEST_F(RtpSenderReceiverTest, AudioSenderSsrcSetToZero) {
+  rtc::scoped_refptr<AudioTrackInterface> track =
+      AudioTrack::Create(kAudioTrackId, nullptr);
+  EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
+  rtc::scoped_refptr<AudioRtpSender> sender =
+      new AudioRtpSender(track, kStreamLabel1, &audio_provider_, nullptr);
+  sender->SetSsrc(kAudioSsrc);
+
+  EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _)).Times(1);
+  sender->SetSsrc(0);
+
+  // Make sure it's SetSsrc that called methods on the provider, and not the
+  // destructor.
+  EXPECT_CALL(audio_provider_, SetAudioSend(_, _, _, _)).Times(0);
+}
+
+// Test that the sender is disconnected from the provider when its SSRC is
+// set to 0.
+TEST_F(RtpSenderReceiverTest, VideoSenderSsrcSetToZero) {
+  AddVideoTrack(false);
+  EXPECT_CALL(video_provider_,
+              SetCaptureDevice(kVideoSsrc,
+                               video_track_->GetSource()->GetVideoCapturer()));
+  EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
+  rtc::scoped_refptr<VideoRtpSender> sender =
+      new VideoRtpSender(video_track_, kStreamLabel1, &video_provider_);
+  sender->SetSsrc(kVideoSsrc);
+
+  EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, nullptr)).Times(1);
+  EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _)).Times(1);
+  sender->SetSsrc(0);
+
+  // Make sure it's SetSsrc that called methods on the provider, and not the
+  // destructor.
+  EXPECT_CALL(video_provider_, SetCaptureDevice(_, _)).Times(0);
+  EXPECT_CALL(video_provider_, SetVideoSend(_, _, _)).Times(0);
+}
+
+TEST_F(RtpSenderReceiverTest, AudioSenderTrackSetToNull) {
+  rtc::scoped_refptr<AudioTrackInterface> track =
+      AudioTrack::Create(kAudioTrackId, nullptr);
+  EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
+  rtc::scoped_refptr<AudioRtpSender> sender =
+      new AudioRtpSender(track, kStreamLabel1, &audio_provider_, nullptr);
+  sender->SetSsrc(kAudioSsrc);
+
+  EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _)).Times(1);
+  EXPECT_TRUE(sender->SetTrack(nullptr));
+
+  // Make sure it's SetTrack that called methods on the provider, and not the
+  // destructor.
+  EXPECT_CALL(audio_provider_, SetAudioSend(_, _, _, _)).Times(0);
+}
+
+TEST_F(RtpSenderReceiverTest, VideoSenderTrackSetToNull) {
+  AddVideoTrack(false);
+  EXPECT_CALL(video_provider_,
+              SetCaptureDevice(kVideoSsrc,
+                               video_track_->GetSource()->GetVideoCapturer()));
+  EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
+  rtc::scoped_refptr<VideoRtpSender> sender =
+      new VideoRtpSender(video_track_, kStreamLabel1, &video_provider_);
+  sender->SetSsrc(kVideoSsrc);
+
+  EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, nullptr)).Times(1);
+  EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _)).Times(1);
+  EXPECT_TRUE(sender->SetTrack(nullptr));
+
+  // Make sure it's SetTrack that called methods on the provider, and not the
+  // destructor.
+  EXPECT_CALL(video_provider_, SetCaptureDevice(_, _)).Times(0);
+  EXPECT_CALL(video_provider_, SetVideoSend(_, _, _)).Times(0);
+}
+
+TEST_F(RtpSenderReceiverTest, AudioSenderSsrcChanged) {
+  AddVideoTrack(false);
+  rtc::scoped_refptr<AudioTrackInterface> track =
+      AudioTrack::Create(kAudioTrackId, nullptr);
+  EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
+  rtc::scoped_refptr<AudioRtpSender> sender =
+      new AudioRtpSender(track, kStreamLabel1, &audio_provider_, nullptr);
+  sender->SetSsrc(kAudioSsrc);
+
+  EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _)).Times(1);
+  EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc2, true, _, _)).Times(1);
+  sender->SetSsrc(kAudioSsrc2);
+
+  // Calls expected from destructor.
+  EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc2, false, _, _)).Times(1);
+}
+
+TEST_F(RtpSenderReceiverTest, VideoSenderSsrcChanged) {
+  AddVideoTrack(false);
+  EXPECT_CALL(video_provider_,
+              SetCaptureDevice(kVideoSsrc,
+                               video_track_->GetSource()->GetVideoCapturer()));
+  EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
+  rtc::scoped_refptr<VideoRtpSender> sender =
+      new VideoRtpSender(video_track_, kStreamLabel1, &video_provider_);
+  sender->SetSsrc(kVideoSsrc);
+
+  EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, nullptr)).Times(1);
+  EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _)).Times(1);
+  EXPECT_CALL(video_provider_,
+              SetCaptureDevice(kVideoSsrc2,
+                               video_track_->GetSource()->GetVideoCapturer()));
+  EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc2, true, _));
+  sender->SetSsrc(kVideoSsrc2);
+
+  // Calls expected from destructor.
+  EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc2, nullptr)).Times(1);
+  EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc2, false, _)).Times(1);
+}
+
+}  // namespace webrtc
diff --git a/webrtc/api/sctputils.cc b/webrtc/api/sctputils.cc
new file mode 100644
index 0000000..84cb293
--- /dev/null
+++ b/webrtc/api/sctputils.cc
@@ -0,0 +1,205 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/sctputils.h"
+
+#include "webrtc/base/buffer.h"
+#include "webrtc/base/bytebuffer.h"
+#include "webrtc/base/logging.h"
+
+namespace webrtc {
+
+// Format defined at
+// http://tools.ietf.org/html/draft-ietf-rtcweb-data-protocol-01#section
+
+static const uint8_t DATA_CHANNEL_OPEN_MESSAGE_TYPE = 0x03;
+static const uint8_t DATA_CHANNEL_OPEN_ACK_MESSAGE_TYPE = 0x02;
+
+enum DataChannelOpenMessageChannelType {
+  DCOMCT_ORDERED_RELIABLE = 0x00,
+  DCOMCT_ORDERED_PARTIAL_RTXS = 0x01,
+  DCOMCT_ORDERED_PARTIAL_TIME = 0x02,
+  DCOMCT_UNORDERED_RELIABLE = 0x80,
+  DCOMCT_UNORDERED_PARTIAL_RTXS = 0x81,
+  DCOMCT_UNORDERED_PARTIAL_TIME = 0x82,
+};
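+
+// Wire layout handled by the functions below (network byte order), per the
+// draft referenced above:
+//
+//   1 byte   message type (DATA_CHANNEL_OPEN = 0x03)
+//   1 byte   channel type (a DCOMCT_* value)
+//   2 bytes  priority
+//   4 bytes  reliability parameter
+//   2 bytes  label length
+//   2 bytes  protocol length
+//   n bytes  label
+//   m bytes  protocol
+//
+// For example, label "abc" with protocol "y" yields a 12-byte header plus
+// 4 bytes of strings, 16 bytes in total.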
+
+bool IsOpenMessage(const rtc::Buffer& payload) {
+  // Format defined at
+  // http://tools.ietf.org/html/draft-jesup-rtcweb-data-protocol-04
+
+  rtc::ByteBuffer buffer(payload);
+  uint8_t message_type;
+  if (!buffer.ReadUInt8(&message_type)) {
+    LOG(LS_WARNING) << "Could not read OPEN message type.";
+    return false;
+  }
+  return message_type == DATA_CHANNEL_OPEN_MESSAGE_TYPE;
+}
+
+bool ParseDataChannelOpenMessage(const rtc::Buffer& payload,
+                                 std::string* label,
+                                 DataChannelInit* config) {
+  // Format defined at
+  // http://tools.ietf.org/html/draft-jesup-rtcweb-data-protocol-04
+
+  rtc::ByteBuffer buffer(payload);
+  uint8_t message_type;
+  if (!buffer.ReadUInt8(&message_type)) {
+    LOG(LS_WARNING) << "Could not read OPEN message type.";
+    return false;
+  }
+  if (message_type != DATA_CHANNEL_OPEN_MESSAGE_TYPE) {
+    LOG(LS_WARNING) << "Data Channel OPEN message of unexpected type: "
+                    << message_type;
+    return false;
+  }
+
+  uint8_t channel_type;
+  if (!buffer.ReadUInt8(&channel_type)) {
+    LOG(LS_WARNING) << "Could not read OPEN message channel type.";
+    return false;
+  }
+
+  uint16_t priority;
+  if (!buffer.ReadUInt16(&priority)) {
+    LOG(LS_WARNING) << "Could not read OPEN message reliabilility prioirty.";
+    return false;
+  }
+  uint32_t reliability_param;
+  if (!buffer.ReadUInt32(&reliability_param)) {
+    LOG(LS_WARNING) << "Could not read OPEN message reliabilility param.";
+    return false;
+  }
+  uint16_t label_length;
+  if (!buffer.ReadUInt16(&label_length)) {
+    LOG(LS_WARNING) << "Could not read OPEN message label length.";
+    return false;
+  }
+  uint16_t protocol_length;
+  if (!buffer.ReadUInt16(&protocol_length)) {
+    LOG(LS_WARNING) << "Could not read OPEN message protocol length.";
+    return false;
+  }
+  if (!buffer.ReadString(label, (size_t) label_length)) {
+    LOG(LS_WARNING) << "Could not read OPEN message label";
+    return false;
+  }
+  if (!buffer.ReadString(&config->protocol, protocol_length)) {
+    LOG(LS_WARNING) << "Could not read OPEN message protocol.";
+    return false;
+  }
+
+  config->ordered = true;
+  switch (channel_type) {
+    case DCOMCT_UNORDERED_RELIABLE:
+    case DCOMCT_UNORDERED_PARTIAL_RTXS:
+    case DCOMCT_UNORDERED_PARTIAL_TIME:
+      config->ordered = false;
+  }
+
+  config->maxRetransmits = -1;
+  config->maxRetransmitTime = -1;
+  switch (channel_type) {
+    case DCOMCT_ORDERED_PARTIAL_RTXS:
+    case DCOMCT_UNORDERED_PARTIAL_RTXS:
+      config->maxRetransmits = reliability_param;
+      break;
+    case DCOMCT_ORDERED_PARTIAL_TIME:
+    case DCOMCT_UNORDERED_PARTIAL_TIME:
+      config->maxRetransmitTime = reliability_param;
+      break;
+  }
+  return true;
+}
+
+bool ParseDataChannelOpenAckMessage(const rtc::Buffer& payload) {
+  rtc::ByteBuffer buffer(payload);
+  uint8_t message_type;
+  if (!buffer.ReadUInt8(&message_type)) {
+    LOG(LS_WARNING) << "Could not read OPEN_ACK message type.";
+    return false;
+  }
+  if (message_type != DATA_CHANNEL_OPEN_ACK_MESSAGE_TYPE) {
+    LOG(LS_WARNING) << "Data Channel OPEN_ACK message of unexpected type: "
+                    << message_type;
+    return false;
+  }
+  return true;
+}
+
+bool WriteDataChannelOpenMessage(const std::string& label,
+                                 const DataChannelInit& config,
+                                 rtc::Buffer* payload) {
+  // Format defined at
+  // http://tools.ietf.org/html/draft-ietf-rtcweb-data-protocol-00#section-6.1
+  uint8_t channel_type = 0;
+  uint32_t reliability_param = 0;
+  uint16_t priority = 0;
+  if (config.ordered) {
+    if (config.maxRetransmits > -1) {
+      channel_type = DCOMCT_ORDERED_PARTIAL_RTXS;
+      reliability_param = config.maxRetransmits;
+    } else if (config.maxRetransmitTime > -1) {
+      channel_type = DCOMCT_ORDERED_PARTIAL_TIME;
+      reliability_param = config.maxRetransmitTime;
+    } else {
+      channel_type = DCOMCT_ORDERED_RELIABLE;
+    }
+  } else {
+    if (config.maxRetransmits > -1) {
+      channel_type = DCOMCT_UNORDERED_PARTIAL_RTXS;
+      reliability_param = config.maxRetransmits;
+    } else if (config.maxRetransmitTime > -1) {
+      channel_type = DCOMCT_UNORDERED_PARTIAL_TIME;
+      reliability_param = config.maxRetransmitTime;
+    } else {
+      channel_type = DCOMCT_UNORDERED_RELIABLE;
+    }
+  }
+
+  rtc::ByteBuffer buffer(
+      NULL, 20 + label.length() + config.protocol.length(),
+      rtc::ByteBuffer::ORDER_NETWORK);
+  buffer.WriteUInt8(DATA_CHANNEL_OPEN_MESSAGE_TYPE);
+  buffer.WriteUInt8(channel_type);
+  buffer.WriteUInt16(priority);
+  buffer.WriteUInt32(reliability_param);
+  buffer.WriteUInt16(static_cast<uint16_t>(label.length()));
+  buffer.WriteUInt16(static_cast<uint16_t>(config.protocol.length()));
+  buffer.WriteString(label);
+  buffer.WriteString(config.protocol);
+  payload->SetData(buffer.Data(), buffer.Length());
+  return true;
+}
+
+void WriteDataChannelOpenAckMessage(rtc::Buffer* payload) {
+  rtc::ByteBuffer buffer(rtc::ByteBuffer::ORDER_NETWORK);
+  buffer.WriteUInt8(DATA_CHANNEL_OPEN_ACK_MESSAGE_TYPE);
+  payload->SetData(buffer.Data(), buffer.Length());
+}
+}  // namespace webrtc
diff --git a/webrtc/api/sctputils.h b/webrtc/api/sctputils.h
new file mode 100644
index 0000000..a3bdb5c
--- /dev/null
+++ b/webrtc/api/sctputils.h
@@ -0,0 +1,58 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_SCTPUTILS_H_
+#define WEBRTC_API_SCTPUTILS_H_
+
+#include <string>
+
+#include "webrtc/api/datachannelinterface.h"
+
+namespace rtc {
+class Buffer;
+}  // namespace rtc
+
+namespace webrtc {
+struct DataChannelInit;
+
+// Read the message type and return true if it's an OPEN message.
+bool IsOpenMessage(const rtc::Buffer& payload);
+
+bool ParseDataChannelOpenMessage(const rtc::Buffer& payload,
+                                 std::string* label,
+                                 DataChannelInit* config);
+
+bool ParseDataChannelOpenAckMessage(const rtc::Buffer& payload);
+
+bool WriteDataChannelOpenMessage(const std::string& label,
+                                 const DataChannelInit& config,
+                                 rtc::Buffer* payload);
+
+void WriteDataChannelOpenAckMessage(rtc::Buffer* payload);
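+
+// A minimal round-trip sketch (illustrative only; the label and protocol
+// strings are placeholders):
+//
+//   webrtc::DataChannelInit config;
+//   config.protocol = "my-protocol";
+//   rtc::Buffer payload;
+//   if (webrtc::WriteDataChannelOpenMessage("my-label", config, &payload)) {
+//     std::string label;
+//     webrtc::DataChannelInit parsed;
+//     webrtc::ParseDataChannelOpenMessage(payload, &label, &parsed);
+//   }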
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_SCTPUTILS_H_
diff --git a/webrtc/api/sctputils_unittest.cc b/webrtc/api/sctputils_unittest.cc
new file mode 100644
index 0000000..8e29d4c
--- /dev/null
+++ b/webrtc/api/sctputils_unittest.cc
@@ -0,0 +1,178 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/sctputils.h"
+#include "webrtc/base/bytebuffer.h"
+#include "webrtc/base/gunit.h"
+
+class SctpUtilsTest : public testing::Test {
+ public:
+  void VerifyOpenMessageFormat(const rtc::Buffer& packet,
+                               const std::string& label,
+                               const webrtc::DataChannelInit& config) {
+    uint8_t message_type;
+    uint8_t channel_type;
+    uint32_t reliability;
+    uint16_t priority;
+    uint16_t label_length;
+    uint16_t protocol_length;
+
+    rtc::ByteBuffer buffer(packet.data(), packet.length());
+    ASSERT_TRUE(buffer.ReadUInt8(&message_type));
+    EXPECT_EQ(0x03, message_type);
+
+    ASSERT_TRUE(buffer.ReadUInt8(&channel_type));
+    if (config.ordered) {
+      EXPECT_EQ(config.maxRetransmits > -1 ?
+                    0x01 : (config.maxRetransmitTime > -1 ? 0x02 : 0),
+                channel_type);
+    } else {
+      EXPECT_EQ(config.maxRetransmits > -1 ?
+                    0x81 : (config.maxRetransmitTime > -1 ? 0x82 : 0x80),
+                channel_type);
+    }
+
+    ASSERT_TRUE(buffer.ReadUInt16(&priority));
+
+    ASSERT_TRUE(buffer.ReadUInt32(&reliability));
+    if (config.maxRetransmits > -1 || config.maxRetransmitTime > -1) {
+      EXPECT_EQ(config.maxRetransmits > -1 ?
+                    config.maxRetransmits : config.maxRetransmitTime,
+                static_cast<int>(reliability));
+    }
+
+    ASSERT_TRUE(buffer.ReadUInt16(&label_length));
+    ASSERT_TRUE(buffer.ReadUInt16(&protocol_length));
+    EXPECT_EQ(label.size(), label_length);
+    EXPECT_EQ(config.protocol.size(), protocol_length);
+
+    std::string label_output;
+    ASSERT_TRUE(buffer.ReadString(&label_output, label_length));
+    EXPECT_EQ(label, label_output);
+    std::string protocol_output;
+    ASSERT_TRUE(buffer.ReadString(&protocol_output, protocol_length));
+    EXPECT_EQ(config.protocol, protocol_output);
+  }
+};
+
+TEST_F(SctpUtilsTest, WriteParseOpenMessageWithOrderedReliable) {
+  webrtc::DataChannelInit config;
+  std::string label = "abc";
+  config.protocol = "y";
+
+  rtc::Buffer packet;
+  ASSERT_TRUE(webrtc::WriteDataChannelOpenMessage(label, config, &packet));
+
+  VerifyOpenMessageFormat(packet, label, config);
+
+  std::string output_label;
+  webrtc::DataChannelInit output_config;
+  ASSERT_TRUE(webrtc::ParseDataChannelOpenMessage(
+      packet, &output_label, &output_config));
+
+  EXPECT_EQ(label, output_label);
+  EXPECT_EQ(config.protocol, output_config.protocol);
+  EXPECT_EQ(config.ordered, output_config.ordered);
+  EXPECT_EQ(config.maxRetransmitTime, output_config.maxRetransmitTime);
+  EXPECT_EQ(config.maxRetransmits, output_config.maxRetransmits);
+}
+
+TEST_F(SctpUtilsTest, WriteParseOpenMessageWithMaxRetransmitTime) {
+  webrtc::DataChannelInit config;
+  std::string label = "abc";
+  config.ordered = false;
+  config.maxRetransmitTime = 10;
+  config.protocol = "y";
+
+  rtc::Buffer packet;
+  ASSERT_TRUE(webrtc::WriteDataChannelOpenMessage(label, config, &packet));
+
+  VerifyOpenMessageFormat(packet, label, config);
+
+  std::string output_label;
+  webrtc::DataChannelInit output_config;
+  ASSERT_TRUE(webrtc::ParseDataChannelOpenMessage(
+      packet, &output_label, &output_config));
+
+  EXPECT_EQ(label, output_label);
+  EXPECT_EQ(config.protocol, output_config.protocol);
+  EXPECT_EQ(config.ordered, output_config.ordered);
+  EXPECT_EQ(config.maxRetransmitTime, output_config.maxRetransmitTime);
+  EXPECT_EQ(-1, output_config.maxRetransmits);
+}
+
+TEST_F(SctpUtilsTest, WriteParseOpenMessageWithMaxRetransmits) {
+  webrtc::DataChannelInit config;
+  std::string label = "abc";
+  config.maxRetransmits = 10;
+  config.protocol = "y";
+
+  rtc::Buffer packet;
+  ASSERT_TRUE(webrtc::WriteDataChannelOpenMessage(label, config, &packet));
+
+  VerifyOpenMessageFormat(packet, label, config);
+
+  std::string output_label;
+  webrtc::DataChannelInit output_config;
+  ASSERT_TRUE(webrtc::ParseDataChannelOpenMessage(
+      packet, &output_label, &output_config));
+
+  EXPECT_EQ(label, output_label);
+  EXPECT_EQ(config.protocol, output_config.protocol);
+  EXPECT_EQ(config.ordered, output_config.ordered);
+  EXPECT_EQ(config.maxRetransmits, output_config.maxRetransmits);
+  EXPECT_EQ(-1, output_config.maxRetransmitTime);
+}
+
+TEST_F(SctpUtilsTest, WriteParseAckMessage) {
+  rtc::Buffer packet;
+  webrtc::WriteDataChannelOpenAckMessage(&packet);
+
+  uint8_t message_type;
+  rtc::ByteBuffer buffer(packet.data(), packet.length());
+  ASSERT_TRUE(buffer.ReadUInt8(&message_type));
+  EXPECT_EQ(0x02, message_type);
+
+  EXPECT_TRUE(webrtc::ParseDataChannelOpenAckMessage(packet));
+}
+
+TEST_F(SctpUtilsTest, TestIsOpenMessage) {
+  rtc::ByteBuffer open;
+  open.WriteUInt8(0x03);
+  EXPECT_TRUE(webrtc::IsOpenMessage(open));
+
+  rtc::ByteBuffer openAck;
+  openAck.WriteUInt8(0x02);
+  EXPECT_FALSE(webrtc::IsOpenMessage(openAck));
+
+  rtc::ByteBuffer invalid;
+  invalid.WriteUInt8(0x01);
+  EXPECT_FALSE(webrtc::IsOpenMessage(invalid));
+
+  rtc::ByteBuffer empty;
+  EXPECT_FALSE(webrtc::IsOpenMessage(empty));
+}
diff --git a/webrtc/api/statscollector.cc b/webrtc/api/statscollector.cc
new file mode 100644
index 0000000..c326ea1
--- /dev/null
+++ b/webrtc/api/statscollector.cc
@@ -0,0 +1,962 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/statscollector.h"
+
+#include <utility>
+#include <vector>
+
+#include "webrtc/api/peerconnection.h"
+#include "talk/session/media/channel.h"
+#include "webrtc/base/base64.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/timing.h"
+
+using rtc::scoped_ptr;
+
+namespace webrtc {
+namespace {
+
+// The following strings come from the RTCStatsIceCandidateType enum at
+// http://w3c.github.io/webrtc-stats/#rtcstatsicecandidatetype-enum, so that
+// the candidate types in our stats reports conform to the spec.
+const char STATSREPORT_LOCAL_PORT_TYPE[] = "host";
+const char STATSREPORT_STUN_PORT_TYPE[] = "serverreflexive";
+const char STATSREPORT_PRFLX_PORT_TYPE[] = "peerreflexive";
+const char STATSREPORT_RELAY_PORT_TYPE[] = "relayed";
+
+// Strings used by the stats collector to report adapter types. These fit the
+// general style of http://w3c.github.io/webrtc-stats better than the values
+// returned by AdapterTypeToString.
+const char* STATSREPORT_ADAPTER_TYPE_ETHERNET = "lan";
+const char* STATSREPORT_ADAPTER_TYPE_WIFI = "wlan";
+const char* STATSREPORT_ADAPTER_TYPE_WWAN = "wwan";
+const char* STATSREPORT_ADAPTER_TYPE_VPN = "vpn";
+const char* STATSREPORT_ADAPTER_TYPE_LOOPBACK = "loopback";
+
+template<typename ValueType>
+struct TypeForAdd {
+  const StatsReport::StatsValueName name;
+  const ValueType& value;
+};
+
+typedef TypeForAdd<bool> BoolForAdd;
+typedef TypeForAdd<float> FloatForAdd;
+typedef TypeForAdd<int64_t> Int64ForAdd;
+typedef TypeForAdd<int> IntForAdd;
+
+StatsReport::Id GetTransportIdFromProxy(const ProxyTransportMap& map,
+                                        const std::string& proxy) {
+  RTC_DCHECK(!proxy.empty());
+  auto found = map.find(proxy);
+  if (found == map.end()) {
+    return StatsReport::Id();
+  }
+
+  return StatsReport::NewComponentId(
+      found->second, cricket::ICE_CANDIDATE_COMPONENT_RTP);
+}
+
+StatsReport* AddTrackReport(StatsCollection* reports,
+                            const std::string& track_id) {
+  // Adds an empty track report.
+  StatsReport::Id id(
+      StatsReport::NewTypedId(StatsReport::kStatsReportTypeTrack, track_id));
+  StatsReport* report = reports->ReplaceOrAddNew(id);
+  report->AddString(StatsReport::kStatsValueNameTrackId, track_id);
+  return report;
+}
+
+template <class TrackVector>
+void CreateTrackReports(const TrackVector& tracks, StatsCollection* reports,
+                        TrackIdMap& track_ids) {
+  for (const auto& track : tracks) {
+    const std::string& track_id = track->id();
+    StatsReport* report = AddTrackReport(reports, track_id);
+    RTC_DCHECK(report != nullptr);
+    track_ids[track_id] = report;
+  }
+}
+
+void ExtractCommonSendProperties(const cricket::MediaSenderInfo& info,
+                                 StatsReport* report) {
+  report->AddString(StatsReport::kStatsValueNameCodecName, info.codec_name);
+  report->AddInt64(StatsReport::kStatsValueNameBytesSent, info.bytes_sent);
+  report->AddInt64(StatsReport::kStatsValueNameRtt, info.rtt_ms);
+}
+
+void ExtractCommonReceiveProperties(const cricket::MediaReceiverInfo& info,
+                                    StatsReport* report) {
+  report->AddString(StatsReport::kStatsValueNameCodecName, info.codec_name);
+}
+
+void SetAudioProcessingStats(StatsReport* report,
+                             bool typing_noise_detected,
+                             int echo_return_loss,
+                             int echo_return_loss_enhancement,
+                             int echo_delay_median_ms,
+                             float aec_quality_min,
+                             int echo_delay_std_ms) {
+  report->AddBoolean(StatsReport::kStatsValueNameTypingNoiseState,
+                     typing_noise_detected);
+  report->AddFloat(StatsReport::kStatsValueNameEchoCancellationQualityMin,
+                   aec_quality_min);
+  const IntForAdd ints[] = {
+    { StatsReport::kStatsValueNameEchoReturnLoss, echo_return_loss },
+    { StatsReport::kStatsValueNameEchoReturnLossEnhancement,
+      echo_return_loss_enhancement },
+    { StatsReport::kStatsValueNameEchoDelayMedian, echo_delay_median_ms },
+    { StatsReport::kStatsValueNameEchoDelayStdDev, echo_delay_std_ms },
+  };
+  for (const auto& i : ints)
+    report->AddInt(i.name, i.value);
+}
+
+void ExtractStats(const cricket::VoiceReceiverInfo& info, StatsReport* report) {
+  ExtractCommonReceiveProperties(info, report);
+  const FloatForAdd floats[] = {
+    { StatsReport::kStatsValueNameExpandRate, info.expand_rate },
+    { StatsReport::kStatsValueNameSecondaryDecodedRate,
+      info.secondary_decoded_rate },
+    { StatsReport::kStatsValueNameSpeechExpandRate, info.speech_expand_rate },
+    { StatsReport::kStatsValueNameAccelerateRate, info.accelerate_rate },
+    { StatsReport::kStatsValueNamePreemptiveExpandRate,
+      info.preemptive_expand_rate },
+  };
+
+  const IntForAdd ints[] = {
+    { StatsReport::kStatsValueNameAudioOutputLevel, info.audio_level },
+    { StatsReport::kStatsValueNameCurrentDelayMs, info.delay_estimate_ms },
+    { StatsReport::kStatsValueNameDecodingCNG, info.decoding_cng },
+    { StatsReport::kStatsValueNameDecodingCTN, info.decoding_calls_to_neteq },
+    { StatsReport::kStatsValueNameDecodingCTSG,
+      info.decoding_calls_to_silence_generator },
+    { StatsReport::kStatsValueNameDecodingNormal, info.decoding_normal },
+    { StatsReport::kStatsValueNameDecodingPLC, info.decoding_plc },
+    { StatsReport::kStatsValueNameDecodingPLCCNG, info.decoding_plc_cng },
+    { StatsReport::kStatsValueNameJitterBufferMs, info.jitter_buffer_ms },
+    { StatsReport::kStatsValueNameJitterReceived, info.jitter_ms },
+    { StatsReport::kStatsValueNamePacketsLost, info.packets_lost },
+    { StatsReport::kStatsValueNamePacketsReceived, info.packets_rcvd },
+    { StatsReport::kStatsValueNamePreferredJitterBufferMs,
+      info.jitter_buffer_preferred_ms },
+  };
+
+  for (const auto& f : floats)
+    report->AddFloat(f.name, f.value);
+
+  for (const auto& i : ints)
+    report->AddInt(i.name, i.value);
+
+  report->AddInt64(StatsReport::kStatsValueNameBytesReceived,
+                   info.bytes_rcvd);
+  report->AddInt64(StatsReport::kStatsValueNameCaptureStartNtpTimeMs,
+                   info.capture_start_ntp_time_ms);
+  report->AddString(StatsReport::kStatsValueNameMediaType, "audio");
+}
+
+void ExtractStats(const cricket::VoiceSenderInfo& info, StatsReport* report) {
+  ExtractCommonSendProperties(info, report);
+
+  SetAudioProcessingStats(
+      report, info.typing_noise_detected, info.echo_return_loss,
+      info.echo_return_loss_enhancement, info.echo_delay_median_ms,
+      info.aec_quality_min, info.echo_delay_std_ms);
+
+  RTC_DCHECK_GE(info.audio_level, 0);
+  const IntForAdd ints[] = {
+    { StatsReport::kStatsValueNameAudioInputLevel, info.audio_level},
+    { StatsReport::kStatsValueNameJitterReceived, info.jitter_ms },
+    { StatsReport::kStatsValueNamePacketsLost, info.packets_lost },
+    { StatsReport::kStatsValueNamePacketsSent, info.packets_sent },
+  };
+
+  for (const auto& i : ints)
+    report->AddInt(i.name, i.value);
+  report->AddString(StatsReport::kStatsValueNameMediaType, "audio");
+}
+
+void ExtractStats(const cricket::VideoReceiverInfo& info, StatsReport* report) {
+  ExtractCommonReceiveProperties(info, report);
+  report->AddString(StatsReport::kStatsValueNameCodecImplementationName,
+                    info.decoder_implementation_name);
+  report->AddInt64(StatsReport::kStatsValueNameBytesReceived,
+                   info.bytes_rcvd);
+  report->AddInt64(StatsReport::kStatsValueNameCaptureStartNtpTimeMs,
+                   info.capture_start_ntp_time_ms);
+  const IntForAdd ints[] = {
+    { StatsReport::kStatsValueNameCurrentDelayMs, info.current_delay_ms },
+    { StatsReport::kStatsValueNameDecodeMs, info.decode_ms },
+    { StatsReport::kStatsValueNameFirsSent, info.firs_sent },
+    { StatsReport::kStatsValueNameFrameHeightReceived, info.frame_height },
+    { StatsReport::kStatsValueNameFrameRateDecoded, info.framerate_decoded },
+    { StatsReport::kStatsValueNameFrameRateOutput, info.framerate_output },
+    { StatsReport::kStatsValueNameFrameRateReceived, info.framerate_rcvd },
+    { StatsReport::kStatsValueNameFrameWidthReceived, info.frame_width },
+    { StatsReport::kStatsValueNameJitterBufferMs, info.jitter_buffer_ms },
+    { StatsReport::kStatsValueNameMaxDecodeMs, info.max_decode_ms },
+    { StatsReport::kStatsValueNameMinPlayoutDelayMs,
+      info.min_playout_delay_ms },
+    { StatsReport::kStatsValueNameNacksSent, info.nacks_sent },
+    { StatsReport::kStatsValueNamePacketsLost, info.packets_lost },
+    { StatsReport::kStatsValueNamePacketsReceived, info.packets_rcvd },
+    { StatsReport::kStatsValueNamePlisSent, info.plis_sent },
+    { StatsReport::kStatsValueNameRenderDelayMs, info.render_delay_ms },
+    { StatsReport::kStatsValueNameTargetDelayMs, info.target_delay_ms },
+  };
+
+  for (const auto& i : ints)
+    report->AddInt(i.name, i.value);
+  report->AddString(StatsReport::kStatsValueNameMediaType, "video");
+}
+
+void ExtractStats(const cricket::VideoSenderInfo& info, StatsReport* report) {
+  ExtractCommonSendProperties(info, report);
+
+  report->AddString(StatsReport::kStatsValueNameCodecImplementationName,
+                    info.encoder_implementation_name);
+  report->AddBoolean(StatsReport::kStatsValueNameBandwidthLimitedResolution,
+                     (info.adapt_reason & 0x2) > 0);
+  report->AddBoolean(StatsReport::kStatsValueNameCpuLimitedResolution,
+                     (info.adapt_reason & 0x1) > 0);
+  report->AddBoolean(StatsReport::kStatsValueNameViewLimitedResolution,
+                     (info.adapt_reason & 0x4) > 0);
+
+  const IntForAdd ints[] = {
+    { StatsReport::kStatsValueNameAdaptationChanges, info.adapt_changes },
+    { StatsReport::kStatsValueNameAvgEncodeMs, info.avg_encode_ms },
+    { StatsReport::kStatsValueNameEncodeUsagePercent,
+      info.encode_usage_percent },
+    { StatsReport::kStatsValueNameFirsReceived, info.firs_rcvd },
+    { StatsReport::kStatsValueNameFrameHeightInput, info.input_frame_height },
+    { StatsReport::kStatsValueNameFrameHeightSent, info.send_frame_height },
+    { StatsReport::kStatsValueNameFrameRateInput, info.framerate_input },
+    { StatsReport::kStatsValueNameFrameRateSent, info.framerate_sent },
+    { StatsReport::kStatsValueNameFrameWidthInput, info.input_frame_width },
+    { StatsReport::kStatsValueNameFrameWidthSent, info.send_frame_width },
+    { StatsReport::kStatsValueNameNacksReceived, info.nacks_rcvd },
+    { StatsReport::kStatsValueNamePacketsLost, info.packets_lost },
+    { StatsReport::kStatsValueNamePacketsSent, info.packets_sent },
+    { StatsReport::kStatsValueNamePlisReceived, info.plis_rcvd },
+  };
+
+  for (const auto& i : ints)
+    report->AddInt(i.name, i.value);
+  report->AddString(StatsReport::kStatsValueNameMediaType, "video");
+}
+
+void ExtractStats(const cricket::BandwidthEstimationInfo& info,
+                  double stats_gathering_started,
+                  PeerConnectionInterface::StatsOutputLevel level,
+                  StatsReport* report) {
+  RTC_DCHECK(report->type() == StatsReport::kStatsReportTypeBwe);
+
+  report->set_timestamp(stats_gathering_started);
+  const IntForAdd ints[] = {
+    { StatsReport::kStatsValueNameAvailableSendBandwidth,
+      info.available_send_bandwidth },
+    { StatsReport::kStatsValueNameAvailableReceiveBandwidth,
+      info.available_recv_bandwidth },
+    { StatsReport::kStatsValueNameTargetEncBitrate, info.target_enc_bitrate },
+    { StatsReport::kStatsValueNameActualEncBitrate, info.actual_enc_bitrate },
+    { StatsReport::kStatsValueNameRetransmitBitrate, info.retransmit_bitrate },
+    { StatsReport::kStatsValueNameTransmitBitrate, info.transmit_bitrate },
+  };
+  for (const auto& i : ints)
+    report->AddInt(i.name, i.value);
+  report->AddInt64(StatsReport::kStatsValueNameBucketDelay, info.bucket_delay);
+}
+
+void ExtractRemoteStats(const cricket::MediaSenderInfo& info,
+                        StatsReport* report) {
+  report->set_timestamp(info.remote_stats[0].timestamp);
+  // TODO(hta): Extract some stats here.
+}
+
+void ExtractRemoteStats(const cricket::MediaReceiverInfo& info,
+                        StatsReport* report) {
+  report->set_timestamp(info.remote_stats[0].timestamp);
+  // TODO(hta): Extract some stats here.
+}
+
+// Template to extract stats from a data vector.
+// In order to use the template, the functions that are called from it,
+// ExtractStats and ExtractRemoteStats, must be defined and overloaded
+// for each type.
+template<typename T>
+void ExtractStatsFromList(const std::vector<T>& data,
+                          const StatsReport::Id& transport_id,
+                          StatsCollector* collector,
+                          StatsReport::Direction direction) {
+  for (const auto& d : data) {
+    uint32_t ssrc = d.ssrc();
+    // Each track can have stats for both local and remote objects.
+    // TODO(hta): Handle the case of multiple SSRCs per object.
+    StatsReport* report = collector->PrepareReport(true, ssrc, transport_id,
+                                                   direction);
+    if (report)
+      ExtractStats(d, report);
+
+    if (!d.remote_stats.empty()) {
+      report = collector->PrepareReport(false, ssrc, transport_id, direction);
+      if (report)
+        ExtractRemoteStats(d, report);
+    }
+  }
+}
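+
+// For instance, voice receiver stats would be extracted with something like
+// (a sketch; the actual call sites appear further down in this file):
+//
+//   ExtractStatsFromList(voice_info.receivers, transport_id, this,
+//                        StatsReport::kReceive);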
+
+}  // namespace
+
+const char* IceCandidateTypeToStatsType(const std::string& candidate_type) {
+  if (candidate_type == cricket::LOCAL_PORT_TYPE) {
+    return STATSREPORT_LOCAL_PORT_TYPE;
+  }
+  if (candidate_type == cricket::STUN_PORT_TYPE) {
+    return STATSREPORT_STUN_PORT_TYPE;
+  }
+  if (candidate_type == cricket::PRFLX_PORT_TYPE) {
+    return STATSREPORT_PRFLX_PORT_TYPE;
+  }
+  if (candidate_type == cricket::RELAY_PORT_TYPE) {
+    return STATSREPORT_RELAY_PORT_TYPE;
+  }
+  RTC_DCHECK(false);
+  return "unknown";
+}
+
+const char* AdapterTypeToStatsType(rtc::AdapterType type) {
+  switch (type) {
+    case rtc::ADAPTER_TYPE_UNKNOWN:
+      return "unknown";
+    case rtc::ADAPTER_TYPE_ETHERNET:
+      return STATSREPORT_ADAPTER_TYPE_ETHERNET;
+    case rtc::ADAPTER_TYPE_WIFI:
+      return STATSREPORT_ADAPTER_TYPE_WIFI;
+    case rtc::ADAPTER_TYPE_CELLULAR:
+      return STATSREPORT_ADAPTER_TYPE_WWAN;
+    case rtc::ADAPTER_TYPE_VPN:
+      return STATSREPORT_ADAPTER_TYPE_VPN;
+    case rtc::ADAPTER_TYPE_LOOPBACK:
+      return STATSREPORT_ADAPTER_TYPE_LOOPBACK;
+    default:
+      RTC_DCHECK(false);
+      return "";
+  }
+}
+
+StatsCollector::StatsCollector(PeerConnection* pc)
+    : pc_(pc), stats_gathering_started_(0) {
+  RTC_DCHECK(pc_);
+}
+
+StatsCollector::~StatsCollector() {
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+}
+
+double StatsCollector::GetTimeNow() {
+  return rtc::Timing::WallTimeNow() * rtc::kNumMillisecsPerSec;
+}
+
+// Adds a MediaStream with tracks that can be used as a |selector| in a call
+// to GetStats.
+void StatsCollector::AddStream(MediaStreamInterface* stream) {
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+  RTC_DCHECK(stream != NULL);
+
+  CreateTrackReports<AudioTrackVector>(stream->GetAudioTracks(),
+                                       &reports_, track_ids_);
+  CreateTrackReports<VideoTrackVector>(stream->GetVideoTracks(),
+                                       &reports_, track_ids_);
+}
+
+void StatsCollector::AddLocalAudioTrack(AudioTrackInterface* audio_track,
+                                        uint32_t ssrc) {
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+  RTC_DCHECK(audio_track != NULL);
+#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON))
+  for (const auto& track : local_audio_tracks_)
+    RTC_DCHECK(track.first != audio_track || track.second != ssrc);
+#endif
+
+  local_audio_tracks_.push_back(std::make_pair(audio_track, ssrc));
+
+  // Create the kStatsReportTypeTrack report for the new track if there is no
+  // report yet.
+  StatsReport::Id id(StatsReport::NewTypedId(StatsReport::kStatsReportTypeTrack,
+                                             audio_track->id()));
+  StatsReport* report = reports_.Find(id);
+  if (!report) {
+    report = reports_.InsertNew(id);
+    report->AddString(StatsReport::kStatsValueNameTrackId, audio_track->id());
+  }
+}
+
+void StatsCollector::RemoveLocalAudioTrack(AudioTrackInterface* audio_track,
+                                           uint32_t ssrc) {
+  RTC_DCHECK(audio_track != NULL);
+  local_audio_tracks_.erase(std::remove_if(local_audio_tracks_.begin(),
+      local_audio_tracks_.end(),
+      [audio_track, ssrc](const LocalAudioTrackVector::value_type& track) {
+        return track.first == audio_track && track.second == ssrc;
+      }));
+}
+
+void StatsCollector::GetStats(MediaStreamTrackInterface* track,
+                              StatsReports* reports) {
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+  RTC_DCHECK(reports != NULL);
+  RTC_DCHECK(reports->empty());
+
+  rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+  if (!track) {
+    reports->reserve(reports_.size());
+    for (auto* r : reports_)
+      reports->push_back(r);
+    return;
+  }
+
+  StatsReport* report = reports_.Find(StatsReport::NewTypedId(
+      StatsReport::kStatsReportTypeSession, pc_->session()->id()));
+  if (report)
+    reports->push_back(report);
+
+  report = reports_.Find(StatsReport::NewTypedId(
+      StatsReport::kStatsReportTypeTrack, track->id()));
+
+  if (!report)
+    return;
+
+  reports->push_back(report);
+
+  std::string track_id;
+  for (const auto* r : reports_) {
+    if (r->type() != StatsReport::kStatsReportTypeSsrc)
+      continue;
+
+    const StatsReport::Value* v =
+        r->FindValue(StatsReport::kStatsValueNameTrackId);
+    if (v && v->string_val() == track->id())
+      reports->push_back(r);
+  }
+}
+
+void
+StatsCollector::UpdateStats(PeerConnectionInterface::StatsOutputLevel level) {
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+  double time_now = GetTimeNow();
+  // Calls to UpdateStats() that occur less than kMinGatherStatsPeriod
+  // milliseconds apart will be ignored.
+  const double kMinGatherStatsPeriod = 50;
+  if (stats_gathering_started_ != 0 &&
+      stats_gathering_started_ + kMinGatherStatsPeriod > time_now) {
+    return;
+  }
+  stats_gathering_started_ = time_now;
+
+  if (pc_->session()) {
+    // TODO(tommi): All of these hop over to the worker thread to fetch
+    // information.  We could use an AsyncInvoker to run all of these and post
+    // the information back to the signaling thread where we can create and
+    // update stats reports.  That would also clean up the threading story a bit
+    // since we'd be creating/updating the stats report objects consistently on
+    // the same thread (this class has no locks right now).
+    ExtractSessionInfo();
+    ExtractVoiceInfo();
+    ExtractVideoInfo(level);
+    ExtractDataInfo();
+    UpdateTrackReports();
+  }
+}
+
+StatsReport* StatsCollector::PrepareReport(
+    bool local,
+    uint32_t ssrc,
+    const StatsReport::Id& transport_id,
+    StatsReport::Direction direction) {
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+  StatsReport::Id id(StatsReport::NewIdWithDirection(
+      local ? StatsReport::kStatsReportTypeSsrc
+            : StatsReport::kStatsReportTypeRemoteSsrc,
+      rtc::ToString<uint32_t>(ssrc), direction));
+  StatsReport* report = reports_.Find(id);
+
+  // Use the ID of the track that is currently mapped to the SSRC, if any.
+  std::string track_id;
+  if (!GetTrackIdBySsrc(ssrc, &track_id, direction)) {
+    if (!report) {
+      // The ssrc is not used by any track or existing report; return NULL
+      // to indicate that no report is prepared for the ssrc.
+      return NULL;
+    }
+
+    // The ssrc is not used by any existing track. Keep the old track id
+    // since we want to report the stats for the inactive ssrc.
+    const StatsReport::Value* v =
+        report->FindValue(StatsReport::kStatsValueNameTrackId);
+    if (v)
+      track_id = v->string_val();
+  }
+
+  if (!report)
+    report = reports_.InsertNew(id);
+
+  // FYI - for remote reports, the timestamp will be overwritten later.
+  report->set_timestamp(stats_gathering_started_);
+
+  report->AddInt64(StatsReport::kStatsValueNameSsrc, ssrc);
+  report->AddString(StatsReport::kStatsValueNameTrackId, track_id);
+  // Add the mapping of SSRC to transport.
+  report->AddId(StatsReport::kStatsValueNameTransportId, transport_id);
+  return report;
+}
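+
+// For reference, a direction-qualified ssrc report id serializes to a string
+// of roughly the form "ssrc_<ssrc>_send" or "ssrc_<ssrc>_recv"; see
+// StatsReport::NewIdWithDirection in statstypes.h for the authoritative
+// format.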
+
+StatsReport* StatsCollector::AddOneCertificateReport(
+    const rtc::SSLCertificate* cert, const StatsReport* issuer) {
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+
+  // TODO(bemasc): Move this computation to a helper class that caches these
+  // values to reduce CPU use in GetStats.  This will require adding a fast
+  // SSLCertificate::Equals() method to detect certificate changes.
+
+  std::string digest_algorithm;
+  if (!cert->GetSignatureDigestAlgorithm(&digest_algorithm))
+    return nullptr;
+
+  rtc::scoped_ptr<rtc::SSLFingerprint> ssl_fingerprint(
+      rtc::SSLFingerprint::Create(digest_algorithm, cert));
+
+  // SSLFingerprint::Create can fail if the algorithm returned by
+  // SSLCertificate::GetSignatureDigestAlgorithm is not supported by the
+  // implementation of SSLCertificate::ComputeDigest.  This currently happens
+  // with MD5- and SHA-224-signed certificates when linked to libNSS.
+  if (!ssl_fingerprint)
+    return nullptr;
+
+  std::string fingerprint = ssl_fingerprint->GetRfc4572Fingerprint();
+
+  rtc::Buffer der_buffer;
+  cert->ToDER(&der_buffer);
+  std::string der_base64;
+  rtc::Base64::EncodeFromArray(der_buffer.data(), der_buffer.size(),
+                               &der_base64);
+
+  StatsReport::Id id(StatsReport::NewTypedId(
+      StatsReport::kStatsReportTypeCertificate, fingerprint));
+  StatsReport* report = reports_.ReplaceOrAddNew(id);
+  report->set_timestamp(stats_gathering_started_);
+  report->AddString(StatsReport::kStatsValueNameFingerprint, fingerprint);
+  report->AddString(StatsReport::kStatsValueNameFingerprintAlgorithm,
+                    digest_algorithm);
+  report->AddString(StatsReport::kStatsValueNameDer, der_base64);
+  if (issuer)
+    report->AddId(StatsReport::kStatsValueNameIssuerId, issuer->id());
+  return report;
+}
+
+StatsReport* StatsCollector::AddCertificateReports(
+    const rtc::SSLCertificate* cert) {
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+  // Produces a chain of StatsReports representing this certificate and the
+  // rest of its chain, and adds those reports to |reports_|.  The return value
+  // is the leaf report.  The provided cert must be non-null, so at least one
+  // report will always be created (though AddOneCertificateReport() may still
+  // return null if the certificate's digest algorithm is unsupported).
+  RTC_DCHECK(cert != NULL);
+
+  StatsReport* issuer = nullptr;
+  rtc::scoped_ptr<rtc::SSLCertChain> chain;
+  if (cert->GetChain(chain.accept())) {
+    // This loop runs in reverse, i.e. from root to leaf, so that each
+    // certificate's issuer's report ID is known before the child certificate's
+    // report is generated.  The root certificate does not have an issuer ID
+    // value.
+    for (ptrdiff_t i = chain->GetSize() - 1; i >= 0; --i) {
+      const rtc::SSLCertificate& cert_i = chain->Get(i);
+      issuer = AddOneCertificateReport(&cert_i, issuer);
+    }
+  }
+  // Add the leaf certificate.
+  return AddOneCertificateReport(cert, issuer);
+}
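+
+// For example, if |cert| has a chain [intermediate, root], the loop above
+// reports the root first (with no issuer id), then the intermediate (with the
+// root as issuer), and the final call reports |cert| itself with the
+// intermediate as its issuer.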
+
+StatsReport* StatsCollector::AddConnectionInfoReport(
+    const std::string& content_name, int component, int connection_id,
+    const StatsReport::Id& channel_report_id,
+    const cricket::ConnectionInfo& info) {
+  StatsReport::Id id(StatsReport::NewCandidatePairId(content_name, component,
+                                                     connection_id));
+  StatsReport* report = reports_.ReplaceOrAddNew(id);
+  report->set_timestamp(stats_gathering_started_);
+
+  const BoolForAdd bools[] = {
+    {StatsReport::kStatsValueNameActiveConnection, info.best_connection},
+    {StatsReport::kStatsValueNameReceiving, info.receiving},
+    {StatsReport::kStatsValueNameWritable, info.writable},
+  };
+  for (const auto& b : bools)
+    report->AddBoolean(b.name, b.value);
+
+  report->AddId(StatsReport::kStatsValueNameChannelId, channel_report_id);
+  report->AddId(StatsReport::kStatsValueNameLocalCandidateId,
+                AddCandidateReport(info.local_candidate, true)->id());
+  report->AddId(StatsReport::kStatsValueNameRemoteCandidateId,
+                AddCandidateReport(info.remote_candidate, false)->id());
+
+  const Int64ForAdd int64s[] = {
+    { StatsReport::kStatsValueNameBytesReceived, info.recv_total_bytes },
+    { StatsReport::kStatsValueNameBytesSent, info.sent_total_bytes },
+    { StatsReport::kStatsValueNamePacketsSent, info.sent_total_packets },
+    { StatsReport::kStatsValueNameRtt, info.rtt },
+    { StatsReport::kStatsValueNameSendPacketsDiscarded,
+      info.sent_discarded_packets },
+  };
+  for (const auto& i : int64s)
+    report->AddInt64(i.name, i.value);
+
+  report->AddString(StatsReport::kStatsValueNameLocalAddress,
+                    info.local_candidate.address().ToString());
+  report->AddString(StatsReport::kStatsValueNameLocalCandidateType,
+                    info.local_candidate.type());
+  report->AddString(StatsReport::kStatsValueNameRemoteAddress,
+                    info.remote_candidate.address().ToString());
+  report->AddString(StatsReport::kStatsValueNameRemoteCandidateType,
+                    info.remote_candidate.type());
+  report->AddString(StatsReport::kStatsValueNameTransportType,
+                    info.local_candidate.protocol());
+
+  return report;
+}
+
+StatsReport* StatsCollector::AddCandidateReport(
+    const cricket::Candidate& candidate,
+    bool local) {
+  StatsReport::Id id(StatsReport::NewCandidateId(local, candidate.id()));
+  StatsReport* report = reports_.Find(id);
+  if (!report) {
+    report = reports_.InsertNew(id);
+    report->set_timestamp(stats_gathering_started_);
+    if (local) {
+      report->AddString(StatsReport::kStatsValueNameCandidateNetworkType,
+                        AdapterTypeToStatsType(candidate.network_type()));
+    }
+    report->AddString(StatsReport::kStatsValueNameCandidateIPAddress,
+                      candidate.address().ipaddr().ToString());
+    report->AddString(StatsReport::kStatsValueNameCandidatePortNumber,
+                      candidate.address().PortAsString());
+    report->AddInt(StatsReport::kStatsValueNameCandidatePriority,
+                   candidate.priority());
+    report->AddString(StatsReport::kStatsValueNameCandidateType,
+                      IceCandidateTypeToStatsType(candidate.type()));
+    report->AddString(StatsReport::kStatsValueNameCandidateTransportType,
+                      candidate.protocol());
+  }
+
+  return report;
+}
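+
+// Unlike the connection reports above, which are rebuilt on every update via
+// ReplaceOrAddNew(), candidate reports are created once and then reused
+// (Find() followed by InsertNew()), presumably because a candidate's
+// attributes are fixed for the lifetime of its id.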
+
+void StatsCollector::ExtractSessionInfo() {
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+
+  // Extract information from the base session.
+  StatsReport::Id id(StatsReport::NewTypedId(
+      StatsReport::kStatsReportTypeSession, pc_->session()->id()));
+  StatsReport* report = reports_.ReplaceOrAddNew(id);
+  report->set_timestamp(stats_gathering_started_);
+  report->AddBoolean(StatsReport::kStatsValueNameInitiator,
+                     pc_->session()->initial_offerer());
+
+  SessionStats stats;
+  if (!pc_->session()->GetTransportStats(&stats)) {
+    return;
+  }
+
+  // Store the proxy map away for use in SSRC reporting.
+  // TODO(tommi): This shouldn't be necessary if we post the stats back to the
+  // signaling thread after fetching them on the worker thread, then just use
+  // the proxy map directly from the session stats.
+  // As is, if GetStats() failed, we could be using old (incorrect?) proxy
+  // data.
+  proxy_to_transport_ = stats.proxy_to_transport;
+
+  for (const auto& transport_iter : stats.transport_stats) {
+    // Attempt to get a copy of the certificates from the transport and
+    // expose them in stats reports.  All channels in a transport share the
+    // same local and remote certificates.
+    StatsReport::Id local_cert_report_id, remote_cert_report_id;
+    rtc::scoped_refptr<rtc::RTCCertificate> certificate;
+    if (pc_->session()->GetLocalCertificate(
+            transport_iter.second.transport_name, &certificate)) {
+      StatsReport* r = AddCertificateReports(&(certificate->ssl_certificate()));
+      if (r)
+        local_cert_report_id = r->id();
+    }
+
+    rtc::scoped_ptr<rtc::SSLCertificate> cert;
+    if (pc_->session()->GetRemoteSSLCertificate(
+            transport_iter.second.transport_name, cert.accept())) {
+      StatsReport* r = AddCertificateReports(cert.get());
+      if (r)
+        remote_cert_report_id = r->id();
+    }
+
+    for (const auto& channel_iter : transport_iter.second.channel_stats) {
+      StatsReport::Id id(StatsReport::NewComponentId(
+          transport_iter.second.transport_name, channel_iter.component));
+      StatsReport* channel_report = reports_.ReplaceOrAddNew(id);
+      channel_report->set_timestamp(stats_gathering_started_);
+      channel_report->AddInt(StatsReport::kStatsValueNameComponent,
+                             channel_iter.component);
+      if (local_cert_report_id.get()) {
+        channel_report->AddId(StatsReport::kStatsValueNameLocalCertificateId,
+                              local_cert_report_id);
+      }
+      if (remote_cert_report_id.get()) {
+        channel_report->AddId(StatsReport::kStatsValueNameRemoteCertificateId,
+                              remote_cert_report_id);
+      }
+      int srtp_crypto_suite = channel_iter.srtp_crypto_suite;
+      if (srtp_crypto_suite != rtc::SRTP_INVALID_CRYPTO_SUITE &&
+          rtc::SrtpCryptoSuiteToName(srtp_crypto_suite).length()) {
+        channel_report->AddString(
+            StatsReport::kStatsValueNameSrtpCipher,
+            rtc::SrtpCryptoSuiteToName(srtp_crypto_suite));
+      }
+      int ssl_cipher_suite = channel_iter.ssl_cipher_suite;
+      if (ssl_cipher_suite != rtc::TLS_NULL_WITH_NULL_NULL &&
+          rtc::SSLStreamAdapter::SslCipherSuiteToName(ssl_cipher_suite)
+              .length()) {
+        channel_report->AddString(
+            StatsReport::kStatsValueNameDtlsCipher,
+            rtc::SSLStreamAdapter::SslCipherSuiteToName(ssl_cipher_suite));
+      }
+
+      int connection_id = 0;
+      for (const cricket::ConnectionInfo& info :
+               channel_iter.connection_infos) {
+        StatsReport* connection_report = AddConnectionInfoReport(
+            transport_iter.first, channel_iter.component, connection_id++,
+            channel_report->id(), info);
+        if (info.best_connection) {
+          channel_report->AddId(
+              StatsReport::kStatsValueNameSelectedCandidatePairId,
+              connection_report->id());
+        }
+      }
+    }
+  }
+}
+
+void StatsCollector::ExtractVoiceInfo() {
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+
+  if (!pc_->session()->voice_channel()) {
+    return;
+  }
+  cricket::VoiceMediaInfo voice_info;
+  if (!pc_->session()->voice_channel()->GetStats(&voice_info)) {
+    LOG(LS_ERROR) << "Failed to get voice channel stats.";
+    return;
+  }
+
+  // TODO(tommi): The above code should run on the worker thread and post the
+  // results back to the signaling thread, where we can add data to the reports.
+  rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+  StatsReport::Id transport_id(GetTransportIdFromProxy(
+      proxy_to_transport_, pc_->session()->voice_channel()->content_name()));
+  if (!transport_id.get()) {
+    LOG(LS_ERROR) << "Failed to get transport name for proxy "
+                  << pc_->session()->voice_channel()->content_name();
+    return;
+  }
+
+  ExtractStatsFromList(voice_info.receivers, transport_id, this,
+      StatsReport::kReceive);
+  ExtractStatsFromList(voice_info.senders, transport_id, this,
+      StatsReport::kSend);
+
+  UpdateStatsFromExistingLocalAudioTracks();
+}
+
+void StatsCollector::ExtractVideoInfo(
+    PeerConnectionInterface::StatsOutputLevel level) {
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+
+  if (!pc_->session()->video_channel())
+    return;
+
+  cricket::VideoMediaInfo video_info;
+  if (!pc_->session()->video_channel()->GetStats(&video_info)) {
+    LOG(LS_ERROR) << "Failed to get video channel stats.";
+    return;
+  }
+
+  // TODO(tommi): The above code should run on the worker thread and post the
+  // results back to the signaling thread, where we can add data to the reports.
+  rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+  StatsReport::Id transport_id(GetTransportIdFromProxy(
+      proxy_to_transport_, pc_->session()->video_channel()->content_name()));
+  if (!transport_id.get()) {
+    LOG(LS_ERROR) << "Failed to get transport name for proxy "
+                  << pc_->session()->video_channel()->content_name();
+    return;
+  }
+  ExtractStatsFromList(video_info.receivers, transport_id, this,
+      StatsReport::kReceive);
+  ExtractStatsFromList(video_info.senders, transport_id, this,
+      StatsReport::kSend);
+  if (video_info.bw_estimations.size() != 1) {
+    LOG(LS_ERROR) << "BWEs count: " << video_info.bw_estimations.size();
+  } else {
+    StatsReport::Id report_id(StatsReport::NewBandwidthEstimationId());
+    StatsReport* report = reports_.FindOrAddNew(report_id);
+    ExtractStats(
+        video_info.bw_estimations[0], stats_gathering_started_, level, report);
+  }
+}
+
+void StatsCollector::ExtractDataInfo() {
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+
+  rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+  for (const auto& dc : pc_->sctp_data_channels()) {
+    StatsReport::Id id(StatsReport::NewTypedIntId(
+        StatsReport::kStatsReportTypeDataChannel, dc->id()));
+    StatsReport* report = reports_.ReplaceOrAddNew(id);
+    report->set_timestamp(stats_gathering_started_);
+    report->AddString(StatsReport::kStatsValueNameLabel, dc->label());
+    report->AddInt(StatsReport::kStatsValueNameDataChannelId, dc->id());
+    report->AddString(StatsReport::kStatsValueNameProtocol, dc->protocol());
+    report->AddString(StatsReport::kStatsValueNameState,
+                      DataChannelInterface::DataStateString(dc->state()));
+  }
+}
+
+StatsReport* StatsCollector::GetReport(const StatsReport::StatsType& type,
+                                       const std::string& id,
+                                       StatsReport::Direction direction) {
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+  RTC_DCHECK(type == StatsReport::kStatsReportTypeSsrc ||
+             type == StatsReport::kStatsReportTypeRemoteSsrc);
+  return reports_.Find(StatsReport::NewIdWithDirection(type, id, direction));
+}
+
+void StatsCollector::UpdateStatsFromExistingLocalAudioTracks() {
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+  // Loop through the existing local audio tracks.
+  for (const auto& it : local_audio_tracks_) {
+    AudioTrackInterface* track = it.first;
+    uint32_t ssrc = it.second;
+    StatsReport* report =
+        GetReport(StatsReport::kStatsReportTypeSsrc,
+                  rtc::ToString<uint32_t>(ssrc), StatsReport::kSend);
+    if (report == NULL) {
+      // This can happen if a local audio track is added to a stream on the
+      // fly and the report has not been set up yet. Do nothing in this case.
+      LOG(LS_ERROR) << "Stats report does not exist for ssrc " << ssrc;
+      continue;
+    }
+
+    // The same ssrc can be used by both local and remote audio tracks.
+    const StatsReport::Value* v =
+        report->FindValue(StatsReport::kStatsValueNameTrackId);
+    if (!v || v->string_val() != track->id())
+      continue;
+
+    report->set_timestamp(stats_gathering_started_);
+    UpdateReportFromAudioTrack(track, report);
+  }
+}
+
+void StatsCollector::UpdateReportFromAudioTrack(AudioTrackInterface* track,
+                                                StatsReport* report) {
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+  RTC_DCHECK(track != NULL);
+
+  // Don't overwrite report values if they're not available.
+  int signal_level;
+  if (track->GetSignalLevel(&signal_level)) {
+    RTC_DCHECK_GE(signal_level, 0);
+    report->AddInt(StatsReport::kStatsValueNameAudioInputLevel, signal_level);
+  }
+
+  auto audio_processor(track->GetAudioProcessor());
+
+  if (audio_processor.get()) {
+    AudioProcessorInterface::AudioProcessorStats stats;
+    audio_processor->GetStats(&stats);
+
+    SetAudioProcessingStats(
+        report, stats.typing_noise_detected, stats.echo_return_loss,
+        stats.echo_return_loss_enhancement, stats.echo_delay_median_ms,
+        stats.aec_quality_min, stats.echo_delay_std_ms);
+  }
+}
+
+bool StatsCollector::GetTrackIdBySsrc(uint32_t ssrc,
+                                      std::string* track_id,
+                                      StatsReport::Direction direction) {
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+  if (direction == StatsReport::kSend) {
+    if (!pc_->session()->GetLocalTrackIdBySsrc(ssrc, track_id)) {
+      LOG(LS_WARNING) << "The SSRC " << ssrc
+                      << " is not associated with a sending track";
+      return false;
+    }
+  } else {
+    RTC_DCHECK(direction == StatsReport::kReceive);
+    if (!pc_->session()->GetRemoteTrackIdBySsrc(ssrc, track_id)) {
+      LOG(LS_WARNING) << "The SSRC " << ssrc
+                      << " is not associated with a receiving track";
+      return false;
+    }
+  }
+
+  return true;
+}
+
+void StatsCollector::UpdateTrackReports() {
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
+
+  rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+  for (const auto& entry : track_ids_) {
+    StatsReport* report = entry.second;
+    report->set_timestamp(stats_gathering_started_);
+  }
+}
+
+void StatsCollector::ClearUpdateStatsCacheForTest() {
+  stats_gathering_started_ = 0;
+}
+
+}  // namespace webrtc
diff --git a/webrtc/api/statscollector.h b/webrtc/api/statscollector.h
new file mode 100644
index 0000000..caeac82
--- /dev/null
+++ b/webrtc/api/statscollector.h
@@ -0,0 +1,169 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains a class used for gathering statistics from an ongoing
+// libjingle PeerConnection.
+
+#ifndef WEBRTC_API_STATSCOLLECTOR_H_
+#define WEBRTC_API_STATSCOLLECTOR_H_
+
+#include <map>
+#include <string>
+#include <vector>
+
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/api/peerconnectioninterface.h"
+#include "webrtc/api/statstypes.h"
+#include "webrtc/api/webrtcsession.h"
+
+namespace webrtc {
+
+class PeerConnection;
+
+// Conversion function to convert a candidate type string to the corresponding
+// one from enum RTCStatsIceCandidateType.
+const char* IceCandidateTypeToStatsType(const std::string& candidate_type);
+
+// Conversion function to convert an adapter type to a report string that
+// better fits the general style of http://w3c.github.io/webrtc-stats. This is
+// only used by the stats collector.
+const char* AdapterTypeToStatsType(rtc::AdapterType type);
+
+// A mapping between track ids and their StatsReport.
+typedef std::map<std::string, StatsReport*> TrackIdMap;
+
+class StatsCollector {
+ public:
+  // The caller is responsible for ensuring that the pc outlives the
+  // StatsCollector instance.
+  explicit StatsCollector(PeerConnection* pc);
+  virtual ~StatsCollector();
+
+  // Adds a MediaStream with tracks that can be used as a |selector| in a call
+  // to GetStats.
+  void AddStream(MediaStreamInterface* stream);
+
+  // Adds a local audio track that is used for getting some voice statistics.
+  void AddLocalAudioTrack(AudioTrackInterface* audio_track, uint32_t ssrc);
+
+  // Removes a local audio track that is used for getting some voice
+  // statistics.
+  void RemoveLocalAudioTrack(AudioTrackInterface* audio_track, uint32_t ssrc);
+
+  // Gather statistics from the session and store them for future use.
+  void UpdateStats(PeerConnectionInterface::StatsOutputLevel level);
+
+  // Gets the StatsReports of the last collected stats. Note that UpdateStats
+  // must be called before this function to get the most recent stats. |track|
+  // may be null, in which case all stored reports are returned; otherwise the
+  // reports relevant to |track| are returned. The results are stored in
+  // |reports|.
+  // TODO(tommi): Change this contract to accept a callback object instead
+  // of filling in |reports|.  As is, there's a requirement that the caller
+  // uses |reports| immediately without allowing any async activity on
+  // the thread (message handling etc) and then discard the results.
+  void GetStats(MediaStreamTrackInterface* track,
+                StatsReports* reports);
+
+  // Prepare a local or remote SSRC report for the given ssrc. Used internally
+  // in the ExtractStatsFromList template.
+  StatsReport* PrepareReport(bool local,
+                             uint32_t ssrc,
+                             const StatsReport::Id& transport_id,
+                             StatsReport::Direction direction);
+
+  // Method used by the unittest to force an update of stats, since calls to
+  // UpdateStats() that occur less than kMinGatherStatsPeriod milliseconds
+  // apart are otherwise ignored.
+  void ClearUpdateStatsCacheForTest();
+
+ private:
+  friend class StatsCollectorTest;
+
+  // Overridden in unit tests to fake timing.
+  virtual double GetTimeNow();
+
+  bool CopySelectedReports(const std::string& selector, StatsReports* reports);
+
+  // Helper method for AddCertificateReports.
+  StatsReport* AddOneCertificateReport(
+      const rtc::SSLCertificate* cert, const StatsReport* issuer);
+
+  // Helper method for creating an IceCandidate report. |local| indicates
+  // whether this candidate is local or remote.
+  StatsReport* AddCandidateReport(const cricket::Candidate& candidate,
+                                  bool local);
+
+  // Adds a report for this certificate and every certificate in its chain, and
+  // returns the leaf certificate's report.
+  StatsReport* AddCertificateReports(const rtc::SSLCertificate* cert);
+
+  StatsReport* AddConnectionInfoReport(const std::string& content_name,
+      int component, int connection_id,
+      const StatsReport::Id& channel_report_id,
+      const cricket::ConnectionInfo& info);
+
+  void ExtractDataInfo();
+  void ExtractSessionInfo();
+  void ExtractVoiceInfo();
+  void ExtractVideoInfo(PeerConnectionInterface::StatsOutputLevel level);
+  void BuildSsrcToTransportId();
+  webrtc::StatsReport* GetReport(const StatsReport::StatsType& type,
+                                 const std::string& id,
+                                 StatsReport::Direction direction);
+
+  // Helper method to get stats from the local audio tracks.
+  void UpdateStatsFromExistingLocalAudioTracks();
+  void UpdateReportFromAudioTrack(AudioTrackInterface* track,
+                                  StatsReport* report);
+
+  // Helper method to get the id for the track identified by ssrc.
+  // |direction| tells if the track is for sending or receiving.
+  bool GetTrackIdBySsrc(uint32_t ssrc,
+                        std::string* track_id,
+                        StatsReport::Direction direction);
+
+  // Helper method to update the timestamp of track records.
+  void UpdateTrackReports();
+
+  // A collection for all of our stats reports.
+  StatsCollection reports_;
+  TrackIdMap track_ids_;
+  // Raw pointer to the peer connection the statistics are gathered from.
+  PeerConnection* const pc_;
+  double stats_gathering_started_;
+  ProxyTransportMap proxy_to_transport_;
+
+  // TODO(tommi): We appear to be holding on to raw pointers to reference
+  // counted objects?  We should be using scoped_refptr here.
+  typedef std::vector<std::pair<AudioTrackInterface*, uint32_t> >
+      LocalAudioTrackVector;
+  LocalAudioTrackVector local_audio_tracks_;
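+  // A ref-counted alternative, per the TODO above, might look like (sketch):
+  //   std::vector<std::pair<rtc::scoped_refptr<AudioTrackInterface>,
+  //                         uint32_t>> local_audio_tracks_;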
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_STATSCOLLECTOR_H_
diff --git a/webrtc/api/statscollector_unittest.cc b/webrtc/api/statscollector_unittest.cc
new file mode 100644
index 0000000..b99aa12
--- /dev/null
+++ b/webrtc/api/statscollector_unittest.cc
@@ -0,0 +1,1756 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <stdio.h>
+
+#include <algorithm>
+
+#include "webrtc/api/statscollector.h"
+
+#include "talk/session/media/channelmanager.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/api/mediastream.h"
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/api/mediastreamtrack.h"
+#include "webrtc/api/peerconnection.h"
+#include "webrtc/api/peerconnectionfactory.h"
+#include "webrtc/api/test/fakedatachannelprovider.h"
+#include "webrtc/api/videotrack.h"
+#include "webrtc/base/base64.h"
+#include "webrtc/base/fakesslidentity.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/network.h"
+#include "webrtc/media/base/fakemediaengine.h"
+#include "webrtc/p2p/base/faketransportcontroller.h"
+
+using rtc::scoped_ptr;
+using testing::_;
+using testing::DoAll;
+using testing::Field;
+using testing::Return;
+using testing::ReturnNull;
+using testing::ReturnRef;
+using testing::SetArgPointee;
+using webrtc::PeerConnectionInterface;
+using webrtc::StatsReport;
+using webrtc::StatsReports;
+
+namespace {
+// This value comes from openssl/tls1.h
+const int TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014;
+}  // namespace
+
+namespace cricket {
+
+class ChannelManager;
+
+}  // namespace cricket
+
+namespace webrtc {
+
+// Error return values
+const char kNotFound[] = "NOT FOUND";
+
+// Constant names for track identification.
+const char kLocalTrackId[] = "local_track_id";
+const char kRemoteTrackId[] = "remote_track_id";
+const uint32_t kSsrcOfTrack = 1234;
+
+class MockWebRtcSession : public webrtc::WebRtcSession {
+ public:
+  explicit MockWebRtcSession(webrtc::MediaControllerInterface* media_controller)
+      : WebRtcSession(media_controller,
+                      rtc::Thread::Current(),
+                      rtc::Thread::Current(),
+                      nullptr) {}
+  MOCK_METHOD0(voice_channel, cricket::VoiceChannel*());
+  MOCK_METHOD0(video_channel, cricket::VideoChannel*());
+  // Libjingle uses "local" for a outgoing track, and "remote" for a incoming
+  // track.
+  MOCK_METHOD2(GetLocalTrackIdBySsrc, bool(uint32_t, std::string*));
+  MOCK_METHOD2(GetRemoteTrackIdBySsrc, bool(uint32_t, std::string*));
+  MOCK_METHOD1(GetTransportStats, bool(SessionStats*));
+  MOCK_METHOD2(GetLocalCertificate,
+               bool(const std::string& transport_name,
+                    rtc::scoped_refptr<rtc::RTCCertificate>* certificate));
+  MOCK_METHOD2(GetRemoteSSLCertificate,
+               bool(const std::string& transport_name,
+                    rtc::SSLCertificate** cert));
+};
+
+// The factory isn't really used; it just satisfies the base PeerConnection.
+class FakePeerConnectionFactory
+    : public rtc::RefCountedObject<PeerConnectionFactory> {};
+
+class MockPeerConnection
+    : public rtc::RefCountedObject<webrtc::PeerConnection> {
+ public:
+  MockPeerConnection()
+      : rtc::RefCountedObject<webrtc::PeerConnection>(
+            new FakePeerConnectionFactory()) {}
+  MOCK_METHOD0(session, WebRtcSession*());
+  MOCK_CONST_METHOD0(sctp_data_channels,
+                     const std::vector<rtc::scoped_refptr<DataChannel>>&());
+};
+
+class MockVideoMediaChannel : public cricket::FakeVideoMediaChannel {
+ public:
+  MockVideoMediaChannel() :
+      cricket::FakeVideoMediaChannel(NULL, cricket::VideoOptions()) {}
+  MOCK_METHOD1(GetStats, bool(cricket::VideoMediaInfo*));
+};
+
+class MockVoiceMediaChannel : public cricket::FakeVoiceMediaChannel {
+ public:
+  MockVoiceMediaChannel() :
+      cricket::FakeVoiceMediaChannel(NULL, cricket::AudioOptions()) {}
+  MOCK_METHOD1(GetStats, bool(cricket::VoiceMediaInfo*));
+};
+
+class FakeAudioProcessor : public webrtc::AudioProcessorInterface {
+ public:
+  FakeAudioProcessor() {}
+  ~FakeAudioProcessor() {}
+
+ private:
+  void GetStats(AudioProcessorInterface::AudioProcessorStats* stats) override {
+    stats->typing_noise_detected = true;
+    stats->echo_return_loss = 2;
+    stats->echo_return_loss_enhancement = 3;
+    stats->echo_delay_median_ms = 4;
+    stats->aec_quality_min = 5.1f;
+    stats->echo_delay_std_ms = 6;
+  }
+};
+
+class FakeAudioTrack
+    : public webrtc::MediaStreamTrack<webrtc::AudioTrackInterface> {
+ public:
+  explicit FakeAudioTrack(const std::string& id)
+      : webrtc::MediaStreamTrack<webrtc::AudioTrackInterface>(id),
+        processor_(new rtc::RefCountedObject<FakeAudioProcessor>()) {}
+  std::string kind() const override { return "audio"; }
+  webrtc::AudioSourceInterface* GetSource() const override { return NULL; }
+  void AddSink(webrtc::AudioTrackSinkInterface* sink) override {}
+  void RemoveSink(webrtc::AudioTrackSinkInterface* sink) override {}
+  bool GetSignalLevel(int* level) override {
+    *level = 1;
+    return true;
+  }
+  rtc::scoped_refptr<webrtc::AudioProcessorInterface> GetAudioProcessor()
+      override {
+    return processor_;
+  }
+
+ private:
+  rtc::scoped_refptr<FakeAudioProcessor> processor_;
+};
+
+bool GetValue(const StatsReport* report,
+              StatsReport::StatsValueName name,
+              std::string* value) {
+  const StatsReport::Value* v = report->FindValue(name);
+  if (!v)
+    return false;
+  *value = v->ToString();
+  return true;
+}
+
+std::string ExtractStatsValue(const StatsReport::StatsType& type,
+                              const StatsReports& reports,
+                              StatsReport::StatsValueName name) {
+  for (const auto* r : reports) {
+    std::string ret;
+    if (r->type() == type && GetValue(r, name, &ret))
+      return ret;
+  }
+
+  return kNotFound;
+}
+
+StatsReport::Id TypedIdFromIdString(StatsReport::StatsType type,
+                                    const std::string& value) {
+  EXPECT_FALSE(value.empty());
+  StatsReport::Id id;
+  if (value.empty())
+    return id;
+
+  // This has assumptions about how the ID is constructed.  As is, this is
+  // OK since this is for testing purposes only, but if we ever need this
+  // in production, we should add a generic method that does this.
+  size_t index = value.find('_');
+  EXPECT_NE(index, std::string::npos);
+  if (index == std::string::npos || index == (value.length() - 1))
+    return id;
+
+  id = StatsReport::NewTypedId(type, value.substr(index + 1));
+  EXPECT_EQ(id->ToString(), value);
+  return id;
+}
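+
+// For example, a certificate report id string looks something like
+// "googCertificate_<fingerprint>"; the reconstruction above takes everything
+// after the first '_' and checks that the typed id round-trips through
+// ToString().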
+
+StatsReport::Id IdFromCertIdString(const std::string& cert_id) {
+  return TypedIdFromIdString(StatsReport::kStatsReportTypeCertificate, cert_id);
+}
+
+// Finds the |n|-th report of type |type| in |reports|.
+// |n| starts from 1 for finding the first report.
+const StatsReport* FindNthReportByType(
+    const StatsReports& reports, const StatsReport::StatsType& type, int n) {
+  for (size_t i = 0; i < reports.size(); ++i) {
+    if (reports[i]->type() == type) {
+      n--;
+      if (n == 0)
+        return reports[i];
+    }
+  }
+  return nullptr;
+}
+
+const StatsReport* FindReportById(const StatsReports& reports,
+                                  const StatsReport::Id& id) {
+  for (const auto* r : reports) {
+    if (r->id()->Equals(id))
+      return r;
+  }
+  return nullptr;
+}
+
+std::string ExtractSsrcStatsValue(StatsReports reports,
+                                  StatsReport::StatsValueName name) {
+  return ExtractStatsValue(StatsReport::kStatsReportTypeSsrc, reports, name);
+}
+
+std::string ExtractBweStatsValue(StatsReports reports,
+                                 StatsReport::StatsValueName name) {
+  return ExtractStatsValue(
+      StatsReport::kStatsReportTypeBwe, reports, name);
+}
+
+std::string DerToPem(const std::string& der) {
+  return rtc::SSLIdentity::DerToPem(
+        rtc::kPemTypeCertificate,
+        reinterpret_cast<const unsigned char*>(der.c_str()),
+        der.length());
+}
+
+std::vector<std::string> DersToPems(
+    const std::vector<std::string>& ders) {
+  std::vector<std::string> pems(ders.size());
+  std::transform(ders.begin(), ders.end(), pems.begin(), DerToPem);
+  return pems;
+}
+
+void CheckCertChainReports(const StatsReports& reports,
+                           const std::vector<std::string>& ders,
+                           const StatsReport::Id& start_id) {
+  StatsReport::Id cert_id;
+  const StatsReport::Id* certificate_id = &start_id;
+  size_t i = 0;
+  while (true) {
+    const StatsReport* report = FindReportById(reports, *certificate_id);
+    ASSERT_TRUE(report != NULL);
+
+    std::string der_base64;
+    EXPECT_TRUE(GetValue(
+        report, StatsReport::kStatsValueNameDer, &der_base64));
+    std::string der = rtc::Base64::Decode(der_base64, rtc::Base64::DO_STRICT);
+    EXPECT_EQ(ders[i], der);
+
+    std::string fingerprint_algorithm;
+    EXPECT_TRUE(GetValue(
+        report,
+        StatsReport::kStatsValueNameFingerprintAlgorithm,
+        &fingerprint_algorithm));
+    // The digest algorithm for a FakeSSLCertificate is always SHA-1.
+    std::string sha_1_str = rtc::DIGEST_SHA_1;
+    EXPECT_EQ(sha_1_str, fingerprint_algorithm);
+
+    std::string fingerprint;
+    EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameFingerprint,
+                         &fingerprint));
+    EXPECT_FALSE(fingerprint.empty());
+
+    ++i;
+    std::string issuer_id;
+    if (!GetValue(report, StatsReport::kStatsValueNameIssuerId,
+                  &issuer_id)) {
+      break;
+    }
+
+    cert_id = IdFromCertIdString(issuer_id);
+    certificate_id = &cert_id;
+  }
+  EXPECT_EQ(ders.size(), i);
+}
+
+void VerifyVoiceReceiverInfoReport(
+    const StatsReport* report,
+    const cricket::VoiceReceiverInfo& info) {
+  std::string value_in_report;
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameAudioOutputLevel, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(info.audio_level), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameBytesReceived, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int64_t>(info.bytes_rcvd), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameJitterReceived, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(info.jitter_ms), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameJitterBufferMs, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(info.jitter_buffer_ms), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNamePreferredJitterBufferMs,
+      &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(info.jitter_buffer_preferred_ms),
+      value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameCurrentDelayMs, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(info.delay_estimate_ms), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameExpandRate, &value_in_report));
+  EXPECT_EQ(rtc::ToString<float>(info.expand_rate), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameSpeechExpandRate, &value_in_report));
+  EXPECT_EQ(rtc::ToString<float>(info.speech_expand_rate), value_in_report);
+  EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameAccelerateRate,
+                       &value_in_report));
+  EXPECT_EQ(rtc::ToString<float>(info.accelerate_rate), value_in_report);
+  EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNamePreemptiveExpandRate,
+                       &value_in_report));
+  EXPECT_EQ(rtc::ToString<float>(info.preemptive_expand_rate), value_in_report);
+  EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameSecondaryDecodedRate,
+                       &value_in_report));
+  EXPECT_EQ(rtc::ToString<float>(info.secondary_decoded_rate), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNamePacketsReceived, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(info.packets_rcvd), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameDecodingCTSG, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(info.decoding_calls_to_silence_generator),
+      value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameDecodingCTN, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(info.decoding_calls_to_neteq),
+      value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameDecodingNormal, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(info.decoding_normal), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameDecodingPLC, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(info.decoding_plc), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameDecodingCNG, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(info.decoding_cng), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameDecodingPLCCNG, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(info.decoding_plc_cng), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameCodecName, &value_in_report));
+}
+
+void VerifyVoiceSenderInfoReport(const StatsReport* report,
+                                 const cricket::VoiceSenderInfo& sinfo) {
+  std::string value_in_report;
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameCodecName, &value_in_report));
+  EXPECT_EQ(sinfo.codec_name, value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameBytesSent, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int64_t>(sinfo.bytes_sent), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNamePacketsSent, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(sinfo.packets_sent), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNamePacketsLost, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(sinfo.packets_lost), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameRtt, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(sinfo.rtt_ms), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameJitterReceived, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(sinfo.jitter_ms), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameEchoCancellationQualityMin,
+      &value_in_report));
+  EXPECT_EQ(rtc::ToString<float>(sinfo.aec_quality_min), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameEchoDelayMedian, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(sinfo.echo_delay_median_ms),
+            value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameEchoDelayStdDev, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(sinfo.echo_delay_std_ms),
+            value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameEchoReturnLoss, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(sinfo.echo_return_loss),
+            value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameEchoReturnLossEnhancement,
+      &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(sinfo.echo_return_loss_enhancement),
+            value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameAudioInputLevel, &value_in_report));
+  EXPECT_EQ(rtc::ToString<int>(sinfo.audio_level), value_in_report);
+  EXPECT_TRUE(GetValue(
+      report, StatsReport::kStatsValueNameTypingNoiseState, &value_in_report));
+  std::string typing_detected = sinfo.typing_noise_detected ? "true" : "false";
+  EXPECT_EQ(typing_detected, value_in_report);
+}
+
+// Helper methods to avoid duplication of code.
+void InitVoiceSenderInfo(cricket::VoiceSenderInfo* voice_sender_info) {
+  voice_sender_info->add_ssrc(kSsrcOfTrack);
+  voice_sender_info->codec_name = "fake_codec";
+  voice_sender_info->bytes_sent = 100;
+  voice_sender_info->packets_sent = 101;
+  voice_sender_info->rtt_ms = 102;
+  voice_sender_info->fraction_lost = 103;
+  voice_sender_info->jitter_ms = 104;
+  voice_sender_info->packets_lost = 105;
+  voice_sender_info->ext_seqnum = 106;
+  voice_sender_info->audio_level = 107;
+  voice_sender_info->echo_return_loss = 108;
+  voice_sender_info->echo_return_loss_enhancement = 109;
+  voice_sender_info->echo_delay_median_ms = 110;
+  voice_sender_info->echo_delay_std_ms = 111;
+  voice_sender_info->aec_quality_min = 112.0f;
+  voice_sender_info->typing_noise_detected = false;
+}
+
+void UpdateVoiceSenderInfoFromAudioTrack(
+    FakeAudioTrack* audio_track, cricket::VoiceSenderInfo* voice_sender_info) {
+  audio_track->GetSignalLevel(&voice_sender_info->audio_level);
+  webrtc::AudioProcessorInterface::AudioProcessorStats audio_processor_stats;
+  audio_track->GetAudioProcessor()->GetStats(&audio_processor_stats);
+  voice_sender_info->typing_noise_detected =
+      audio_processor_stats.typing_noise_detected;
+  voice_sender_info->echo_return_loss = audio_processor_stats.echo_return_loss;
+  voice_sender_info->echo_return_loss_enhancement =
+      audio_processor_stats.echo_return_loss_enhancement;
+  voice_sender_info->echo_delay_median_ms =
+      audio_processor_stats.echo_delay_median_ms;
+  voice_sender_info->aec_quality_min = audio_processor_stats.aec_quality_min;
+  voice_sender_info->echo_delay_std_ms =
+      audio_processor_stats.echo_delay_std_ms;
+}
+
+void InitVoiceReceiverInfo(cricket::VoiceReceiverInfo* voice_receiver_info) {
+  voice_receiver_info->add_ssrc(kSsrcOfTrack);
+  voice_receiver_info->bytes_rcvd = 110;
+  voice_receiver_info->packets_rcvd = 111;
+  voice_receiver_info->packets_lost = 112;
+  voice_receiver_info->fraction_lost = 113;
+  voice_receiver_info->ext_seqnum = 115;
+  voice_receiver_info->jitter_ms = 116;
+  voice_receiver_info->jitter_buffer_ms = 117;
+  voice_receiver_info->jitter_buffer_preferred_ms = 118;
+  voice_receiver_info->delay_estimate_ms = 119;
+  voice_receiver_info->audio_level = 120;
+  voice_receiver_info->expand_rate = 121;
+  voice_receiver_info->speech_expand_rate = 122;
+  voice_receiver_info->secondary_decoded_rate = 123;
+  voice_receiver_info->accelerate_rate = 124;
+  voice_receiver_info->preemptive_expand_rate = 125;
+}
+
+class StatsCollectorForTest : public webrtc::StatsCollector {
+ public:
+  explicit StatsCollectorForTest(PeerConnection* pc)
+      : StatsCollector(pc), time_now_(19477) {}
+
+  double GetTimeNow() override {
+    return time_now_;
+  }
+
+ private:
+  double time_now_;
+};
+
+class StatsCollectorTest : public testing::Test {
+ protected:
+  StatsCollectorTest()
+      : media_engine_(new cricket::FakeMediaEngine()),
+        channel_manager_(
+            new cricket::ChannelManager(media_engine_, rtc::Thread::Current())),
+        media_controller_(
+            webrtc::MediaControllerInterface::Create(rtc::Thread::Current(),
+                                                     channel_manager_.get())),
+        session_(media_controller_.get()) {
+    // By default, we ignore session GetStats calls.
+    EXPECT_CALL(session_, GetTransportStats(_)).WillRepeatedly(Return(false));
+    // Add default returns for mock classes.
+    EXPECT_CALL(session_, video_channel()).WillRepeatedly(ReturnNull());
+    EXPECT_CALL(session_, voice_channel()).WillRepeatedly(ReturnNull());
+    EXPECT_CALL(pc_, session()).WillRepeatedly(Return(&session_));
+    EXPECT_CALL(pc_, sctp_data_channels())
+        .WillRepeatedly(ReturnRef(data_channels_));
+  }
+
+  ~StatsCollectorTest() {}
+
+  // This creates a standard setup with a transport called "trspname" having
+  // one transport channel and the specified virtual connection name.
+  void InitSessionStats(const std::string& vc_name) {
+    const std::string kTransportName("trspname");
+    cricket::TransportStats transport_stats;
+    cricket::TransportChannelStats channel_stats;
+    channel_stats.component = 1;
+    transport_stats.transport_name = kTransportName;
+    transport_stats.channel_stats.push_back(channel_stats);
+
+    session_stats_.transport_stats[kTransportName] = transport_stats;
+    session_stats_.proxy_to_transport[vc_name] = kTransportName;
+  }
+
+  // Adds an outgoing video track with a given SSRC into the stats.
+  void AddOutgoingVideoTrackStats() {
+    stream_ = webrtc::MediaStream::Create("streamlabel");
+    track_ = webrtc::VideoTrack::Create(kLocalTrackId, NULL);
+    stream_->AddTrack(track_);
+    EXPECT_CALL(session_, GetLocalTrackIdBySsrc(kSsrcOfTrack, _))
+        .WillRepeatedly(DoAll(SetArgPointee<1>(kLocalTrackId), Return(true)));
+  }
+
+  // Adds an incoming video track with a given SSRC into the stats.
+  void AddIncomingVideoTrackStats() {
+    stream_ = webrtc::MediaStream::Create("streamlabel");
+    track_ = webrtc::VideoTrack::Create(kRemoteTrackId, NULL);
+    stream_->AddTrack(track_);
+    EXPECT_CALL(session_, GetRemoteTrackIdBySsrc(kSsrcOfTrack, _))
+        .WillRepeatedly(DoAll(SetArgPointee<1>(kRemoteTrackId), Return(true)));
+  }
+
+  // Adds an outgoing audio track with a given SSRC into the stats.
+  void AddOutgoingAudioTrackStats() {
+    if (stream_ == NULL)
+      stream_ = webrtc::MediaStream::Create("streamlabel");
+
+    audio_track_ = new rtc::RefCountedObject<FakeAudioTrack>(
+        kLocalTrackId);
+    stream_->AddTrack(audio_track_);
+    EXPECT_CALL(session_, GetLocalTrackIdBySsrc(kSsrcOfTrack, _))
+        .WillOnce(DoAll(SetArgPointee<1>(kLocalTrackId), Return(true)));
+  }
+
+  // Adds an incoming audio track with a given SSRC into the stats.
+  void AddIncomingAudioTrackStats() {
+    if (stream_ == NULL)
+      stream_ = webrtc::MediaStream::Create("streamlabel");
+
+    audio_track_ = new rtc::RefCountedObject<FakeAudioTrack>(
+        kRemoteTrackId);
+    stream_->AddTrack(audio_track_);
+    EXPECT_CALL(session_, GetRemoteTrackIdBySsrc(kSsrcOfTrack, _))
+        .WillOnce(DoAll(SetArgPointee<1>(kRemoteTrackId), Return(true)));
+  }
+
+  void AddDataChannel(cricket::DataChannelType type,
+                      const std::string& label,
+                      int id) {
+    InternalDataChannelInit config;
+    config.id = id;
+
+    // Pass the requested channel type through instead of ignoring |type|.
+    data_channels_.push_back(DataChannel::Create(
+        &data_channel_provider_, type, label, config));
+  }
+
+  StatsReport* AddCandidateReport(StatsCollector* collector,
+                                  const cricket::Candidate& candidate,
+                                  bool local) {
+    return collector->AddCandidateReport(candidate, local);
+  }
+
+  void SetupAndVerifyAudioTrackStats(
+      FakeAudioTrack* audio_track,
+      webrtc::MediaStream* stream,
+      webrtc::StatsCollector* stats,
+      cricket::VoiceChannel* voice_channel,
+      const std::string& vc_name,
+      MockVoiceMediaChannel* media_channel,
+      cricket::VoiceSenderInfo* voice_sender_info,
+      cricket::VoiceReceiverInfo* voice_receiver_info,
+      cricket::VoiceMediaInfo* stats_read,
+      StatsReports* reports) {
+    // A track can't have both a sender report and a receiver report at the
+    // same time for now; this might change in the future though.
+    ASSERT((voice_sender_info == NULL) ^ (voice_receiver_info == NULL));
+
+    // Instruct the session to return stats containing the transport channel.
+    InitSessionStats(vc_name);
+    EXPECT_CALL(session_, GetTransportStats(_))
+        .WillRepeatedly(DoAll(SetArgPointee<0>(session_stats_),
+                              Return(true)));
+
+    // Constructs an ssrc stats update.
+    if (voice_sender_info)
+      stats_read->senders.push_back(*voice_sender_info);
+    if (voice_receiver_info)
+      stats_read->receivers.push_back(*voice_receiver_info);
+
+    EXPECT_CALL(session_, voice_channel()).WillRepeatedly(
+        Return(voice_channel));
+    EXPECT_CALL(session_, video_channel()).WillRepeatedly(ReturnNull());
+    EXPECT_CALL(*media_channel, GetStats(_))
+        .WillOnce(DoAll(SetArgPointee<0>(*stats_read), Return(true)));
+
+    stats->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+    stats->ClearUpdateStatsCacheForTest();
+    stats->GetStats(NULL, reports);
+
+    // Verify the existence of the track report.
+    const StatsReport* report = FindNthReportByType(
+        *reports, StatsReport::kStatsReportTypeSsrc, 1);
+    EXPECT_FALSE(report == NULL);
+    EXPECT_EQ(stats->GetTimeNow(), report->timestamp());
+    std::string track_id = ExtractSsrcStatsValue(
+        *reports, StatsReport::kStatsValueNameTrackId);
+    EXPECT_EQ(audio_track->id(), track_id);
+    std::string ssrc_id = ExtractSsrcStatsValue(
+        *reports, StatsReport::kStatsValueNameSsrc);
+    EXPECT_EQ(rtc::ToString<uint32_t>(kSsrcOfTrack), ssrc_id);
+
+    std::string media_type = ExtractSsrcStatsValue(*reports,
+        StatsReport::kStatsValueNameMediaType);
+    EXPECT_EQ("audio", media_type);
+
+    // Verifies the values in the track report.
+    if (voice_sender_info) {
+      UpdateVoiceSenderInfoFromAudioTrack(audio_track, voice_sender_info);
+      VerifyVoiceSenderInfoReport(report, *voice_sender_info);
+    }
+    if (voice_receiver_info) {
+      VerifyVoiceReceiverInfoReport(report, *voice_receiver_info);
+    }
+
+    // Verify we get the same result by passing a track to GetStats().
+    StatsReports track_reports;  // returned values.
+    stats->GetStats(audio_track, &track_reports);
+    const StatsReport* track_report = FindNthReportByType(
+        track_reports, StatsReport::kStatsReportTypeSsrc, 1);
+    EXPECT_TRUE(track_report);
+    EXPECT_EQ(stats->GetTimeNow(), track_report->timestamp());
+    track_id = ExtractSsrcStatsValue(track_reports,
+                                     StatsReport::kStatsValueNameTrackId);
+    EXPECT_EQ(audio_track->id(), track_id);
+    ssrc_id = ExtractSsrcStatsValue(track_reports,
+                                    StatsReport::kStatsValueNameSsrc);
+    EXPECT_EQ(rtc::ToString<uint32_t>(kSsrcOfTrack), ssrc_id);
+    if (voice_sender_info)
+      VerifyVoiceSenderInfoReport(track_report, *voice_sender_info);
+    if (voice_receiver_info)
+      VerifyVoiceReceiverInfoReport(track_report, *voice_receiver_info);
+  }
+
+  void TestCertificateReports(const rtc::FakeSSLCertificate& local_cert,
+                              const std::vector<std::string>& local_ders,
+                              const rtc::FakeSSLCertificate& remote_cert,
+                              const std::vector<std::string>& remote_ders) {
+    StatsCollectorForTest stats(&pc_);
+
+    StatsReports reports;  // returned values.
+
+    // Fake stats to process.
+    cricket::TransportChannelStats channel_stats;
+    channel_stats.component = 1;
+    channel_stats.srtp_crypto_suite = rtc::SRTP_AES128_CM_SHA1_80;
+    channel_stats.ssl_cipher_suite = TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA;
+
+    cricket::TransportStats transport_stats;
+    transport_stats.transport_name = "audio";
+    transport_stats.channel_stats.push_back(channel_stats);
+
+    SessionStats session_stats;
+    session_stats.transport_stats[transport_stats.transport_name] =
+        transport_stats;
+
+    // Fake certificate to report.
+    rtc::scoped_refptr<rtc::RTCCertificate> local_certificate(
+        rtc::RTCCertificate::Create(rtc::scoped_ptr<rtc::FakeSSLIdentity>(
+            new rtc::FakeSSLIdentity(local_cert))));
+
+    // Configure MockWebRtcSession
+    EXPECT_CALL(session_,
+                GetLocalCertificate(transport_stats.transport_name, _))
+        .WillOnce(DoAll(SetArgPointee<1>(local_certificate), Return(true)));
+    EXPECT_CALL(session_,
+                GetRemoteSSLCertificate(transport_stats.transport_name, _))
+        .WillOnce(
+            DoAll(SetArgPointee<1>(remote_cert.GetReference()), Return(true)));
+    EXPECT_CALL(session_, GetTransportStats(_))
+        .WillOnce(DoAll(SetArgPointee<0>(session_stats), Return(true)));
+
+    stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+
+    stats.GetStats(NULL, &reports);
+
+    const StatsReport* channel_report = FindNthReportByType(
+        reports, StatsReport::kStatsReportTypeComponent, 1);
+    EXPECT_TRUE(channel_report != NULL);
+
+    // Check local certificate chain.
+    std::string local_certificate_id = ExtractStatsValue(
+        StatsReport::kStatsReportTypeComponent,
+        reports,
+        StatsReport::kStatsValueNameLocalCertificateId);
+    if (local_ders.size() > 0) {
+      EXPECT_NE(kNotFound, local_certificate_id);
+      StatsReport::Id id(IdFromCertIdString(local_certificate_id));
+      CheckCertChainReports(reports, local_ders, id);
+    } else {
+      EXPECT_EQ(kNotFound, local_certificate_id);
+    }
+
+    // Check remote certificate chain.
+    std::string remote_certificate_id = ExtractStatsValue(
+        StatsReport::kStatsReportTypeComponent,
+        reports,
+        StatsReport::kStatsValueNameRemoteCertificateId);
+    if (remote_ders.size() > 0) {
+      EXPECT_NE(kNotFound, remote_certificate_id);
+      StatsReport::Id id(IdFromCertIdString(remote_certificate_id));
+      CheckCertChainReports(reports, remote_ders, id);
+    } else {
+      EXPECT_EQ(kNotFound, remote_certificate_id);
+    }
+
+    // Check negotiated ciphers.
+    std::string dtls_cipher_suite =
+        ExtractStatsValue(StatsReport::kStatsReportTypeComponent, reports,
+                          StatsReport::kStatsValueNameDtlsCipher);
+    EXPECT_EQ(rtc::SSLStreamAdapter::SslCipherSuiteToName(
+                  TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA),
+              dtls_cipher_suite);
+    std::string srtp_crypto_suite =
+        ExtractStatsValue(StatsReport::kStatsReportTypeComponent, reports,
+                          StatsReport::kStatsValueNameSrtpCipher);
+    EXPECT_EQ(rtc::SrtpCryptoSuiteToName(rtc::SRTP_AES128_CM_SHA1_80),
+              srtp_crypto_suite);
+  }
+
+  cricket::FakeMediaEngine* media_engine_;
+  rtc::scoped_ptr<cricket::ChannelManager> channel_manager_;
+  rtc::scoped_ptr<webrtc::MediaControllerInterface> media_controller_;
+  MockWebRtcSession session_;
+  MockPeerConnection pc_;
+  FakeDataChannelProvider data_channel_provider_;
+  SessionStats session_stats_;
+  rtc::scoped_refptr<webrtc::MediaStream> stream_;
+  rtc::scoped_refptr<webrtc::VideoTrack> track_;
+  rtc::scoped_refptr<FakeAudioTrack> audio_track_;
+  std::vector<rtc::scoped_refptr<DataChannel>> data_channels_;
+};
+
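+// Most of the tests below follow a common pattern: configure
+// MockWebRtcSession and the mock media channels with fake stats, call
+// StatsCollector::UpdateStats(), and then verify the reports returned by
+// GetStats().
+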
+// Verify that ExtractDataInfo populates reports.
+TEST_F(StatsCollectorTest, ExtractDataInfo) {
+  const std::string label = "hacks";
+  const int id = 31337;
+  const std::string state = DataChannelInterface::DataStateString(
+      DataChannelInterface::DataState::kConnecting);
+
+  AddDataChannel(cricket::DCT_SCTP, label, id);
+  StatsCollectorForTest stats(&pc_);
+
+  stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+
+  StatsReports reports;
+  stats.GetStats(NULL, &reports);
+
+  const StatsReport* report =
+      FindNthReportByType(reports, StatsReport::kStatsReportTypeDataChannel, 1);
+
+  StatsReport::Id report_id = StatsReport::NewTypedIntId(
+      StatsReport::kStatsReportTypeDataChannel, id);
+
+  EXPECT_TRUE(report_id->Equals(report->id()));
+
+  EXPECT_EQ(stats.GetTimeNow(), report->timestamp());
+  EXPECT_EQ(label, ExtractStatsValue(StatsReport::kStatsReportTypeDataChannel,
+                                     reports,
+                                     StatsReport::kStatsValueNameLabel));
+  EXPECT_EQ(rtc::ToString<int64_t>(id),
+            ExtractStatsValue(StatsReport::kStatsReportTypeDataChannel, reports,
+                              StatsReport::kStatsValueNameDataChannelId));
+  EXPECT_EQ(state, ExtractStatsValue(StatsReport::kStatsReportTypeDataChannel,
+                                     reports,
+                                     StatsReport::kStatsValueNameState));
+  EXPECT_EQ("", ExtractStatsValue(StatsReport::kStatsReportTypeDataChannel,
+                                  reports,
+                                  StatsReport::kStatsValueNameProtocol));
+}
+
+// This test verifies that 64-bit counters are passed successfully.
+TEST_F(StatsCollectorTest, BytesCounterHandles64Bits) {
+  StatsCollectorForTest stats(&pc_);
+
+  EXPECT_CALL(session_, GetLocalCertificate(_, _))
+      .WillRepeatedly(Return(false));
+  EXPECT_CALL(session_, GetRemoteSSLCertificate(_, _))
+      .WillRepeatedly(Return(false));
+
+  const char kVideoChannelName[] = "video";
+
+  InitSessionStats(kVideoChannelName);
+  EXPECT_CALL(session_, GetTransportStats(_))
+      .WillRepeatedly(DoAll(SetArgPointee<0>(session_stats_),
+                            Return(true)));
+
+  MockVideoMediaChannel* media_channel = new MockVideoMediaChannel();
+  cricket::VideoChannel video_channel(rtc::Thread::Current(), media_channel,
+                                      nullptr, kVideoChannelName, false);
+  StatsReports reports;  // returned values.
+  cricket::VideoSenderInfo video_sender_info;
+  cricket::VideoMediaInfo stats_read;
+  // The number of bytes must be larger than 0xFFFFFFFF for this test.
+  const int64_t kBytesSent = 12345678901234LL;
+  const std::string kBytesSentString("12345678901234");
+
+  AddOutgoingVideoTrackStats();
+  stats.AddStream(stream_);
+
+  // Construct a stats value to read.
+  video_sender_info.add_ssrc(1234);
+  video_sender_info.bytes_sent = kBytesSent;
+  stats_read.senders.push_back(video_sender_info);
+
+  EXPECT_CALL(session_, video_channel()).WillRepeatedly(Return(&video_channel));
+  EXPECT_CALL(session_, voice_channel()).WillRepeatedly(ReturnNull());
+  EXPECT_CALL(*media_channel, GetStats(_))
+      .WillOnce(DoAll(SetArgPointee<0>(stats_read),
+                      Return(true)));
+  stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  stats.GetStats(NULL, &reports);
+  std::string result = ExtractSsrcStatsValue(reports,
+      StatsReport::kStatsValueNameBytesSent);
+  EXPECT_EQ(kBytesSentString, result);
+}
+
+// Test that BWE information is reported via stats.
+TEST_F(StatsCollectorTest, BandwidthEstimationInfoIsReported) {
+  StatsCollectorForTest stats(&pc_);
+
+  EXPECT_CALL(session_, GetLocalCertificate(_, _))
+      .WillRepeatedly(Return(false));
+  EXPECT_CALL(session_, GetRemoteSSLCertificate(_, _))
+      .WillRepeatedly(Return(false));
+
+  const char kVideoChannelName[] = "video";
+
+  InitSessionStats(kVideoChannelName);
+  EXPECT_CALL(session_, GetTransportStats(_))
+      .WillRepeatedly(DoAll(SetArgPointee<0>(session_stats_),
+                            Return(true)));
+
+  MockVideoMediaChannel* media_channel = new MockVideoMediaChannel();
+  cricket::VideoChannel video_channel(rtc::Thread::Current(), media_channel,
+                                      nullptr, kVideoChannelName, false);
+
+  StatsReports reports;  // returned values.
+  cricket::VideoSenderInfo video_sender_info;
+  cricket::VideoMediaInfo stats_read;
+  // Set up an SSRC just to test that we get both kinds of stats back: SSRC and
+  // BWE.
+  const int64_t kBytesSent = 12345678901234LL;
+  const std::string kBytesSentString("12345678901234");
+
+  AddOutgoingVideoTrackStats();
+  stats.AddStream(stream_);
+
+  // Construct a stats value to read.
+  video_sender_info.add_ssrc(1234);
+  video_sender_info.bytes_sent = kBytesSent;
+  stats_read.senders.push_back(video_sender_info);
+  cricket::BandwidthEstimationInfo bwe;
+  const int kTargetEncBitrate = 123456;
+  const std::string kTargetEncBitrateString("123456");
+  bwe.target_enc_bitrate = kTargetEncBitrate;
+  stats_read.bw_estimations.push_back(bwe);
+
+  EXPECT_CALL(session_, video_channel()).WillRepeatedly(Return(&video_channel));
+  EXPECT_CALL(session_, voice_channel()).WillRepeatedly(ReturnNull());
+  EXPECT_CALL(*media_channel, GetStats(_))
+      .WillOnce(DoAll(SetArgPointee<0>(stats_read), Return(true)));
+
+  stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  stats.GetStats(NULL, &reports);
+  std::string result = ExtractSsrcStatsValue(reports,
+      StatsReport::kStatsValueNameBytesSent);
+  EXPECT_EQ(kBytesSentString, result);
+  result = ExtractBweStatsValue(reports,
+      StatsReport::kStatsValueNameTargetEncBitrate);
+  EXPECT_EQ(kTargetEncBitrateString, result);
+}
+
+// This test verifies that an object of type "googSession" always
+// exists in the returned stats.
+TEST_F(StatsCollectorTest, SessionObjectExists) {
+  StatsCollectorForTest stats(&pc_);
+
+  StatsReports reports;  // returned values.
+  stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  stats.GetStats(NULL, &reports);
+  const StatsReport* session_report = FindNthReportByType(
+      reports, StatsReport::kStatsReportTypeSession, 1);
+  EXPECT_FALSE(session_report == NULL);
+}
+
+// This test verifies that only one object of type "googSession" exists
+// in the returned stats.
+TEST_F(StatsCollectorTest, OnlyOneSessionObjectExists) {
+  StatsCollectorForTest stats(&pc_);
+
+  StatsReports reports;  // returned values.
+  stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  stats.GetStats(NULL, &reports);
+  const StatsReport* session_report = FindNthReportByType(
+      reports, StatsReport::kStatsReportTypeSession, 1);
+  EXPECT_FALSE(session_report == NULL);
+  session_report = FindNthReportByType(
+      reports, StatsReport::kStatsReportTypeSession, 2);
+  EXPECT_EQ(NULL, session_report);
+}
+
+// This test verifies that the empty track report exists in the returned stats
+// without calling StatsCollector::UpdateStats.
+TEST_F(StatsCollectorTest, TrackObjectExistsWithoutUpdateStats) {
+  StatsCollectorForTest stats(&pc_);
+
+  MockVideoMediaChannel* media_channel = new MockVideoMediaChannel();
+  cricket::VideoChannel video_channel(rtc::Thread::Current(), media_channel,
+                                      nullptr, "video", false);
+  AddOutgoingVideoTrackStats();
+  stats.AddStream(stream_);
+
+  // Verifies the existence of the track report.
+  StatsReports reports;
+  stats.GetStats(NULL, &reports);
+  EXPECT_EQ(static_cast<size_t>(1), reports.size());
+  EXPECT_EQ(StatsReport::kStatsReportTypeTrack, reports[0]->type());
+  EXPECT_EQ(0, reports[0]->timestamp());
+
+  std::string trackValue =
+      ExtractStatsValue(StatsReport::kStatsReportTypeTrack,
+                        reports,
+                        StatsReport::kStatsValueNameTrackId);
+  EXPECT_EQ(kLocalTrackId, trackValue);
+}
+
+// This test verifies that the empty track report exists in the returned stats
+// when StatsCollector::UpdateStats is called with ssrc stats.
+TEST_F(StatsCollectorTest, TrackAndSsrcObjectExistAfterUpdateSsrcStats) {
+  StatsCollectorForTest stats(&pc_);
+
+  EXPECT_CALL(session_, GetLocalCertificate(_, _))
+      .WillRepeatedly(Return(false));
+  EXPECT_CALL(session_, GetRemoteSSLCertificate(_, _))
+      .WillRepeatedly(Return(false));
+
+  const char kVideoChannelName[] = "video";
+  InitSessionStats(kVideoChannelName);
+  EXPECT_CALL(session_, GetTransportStats(_))
+      .WillRepeatedly(DoAll(SetArgPointee<0>(session_stats_),
+                            Return(true)));
+
+  MockVideoMediaChannel* media_channel = new MockVideoMediaChannel();
+  cricket::VideoChannel video_channel(rtc::Thread::Current(), media_channel,
+                                      nullptr, kVideoChannelName, false);
+  AddOutgoingVideoTrackStats();
+  stats.AddStream(stream_);
+
+  // Constructs an ssrc stats update.
+  cricket::VideoSenderInfo video_sender_info;
+  cricket::VideoMediaInfo stats_read;
+  const int64_t kBytesSent = 12345678901234LL;
+
+  // Construct a stats value to read.
+  video_sender_info.add_ssrc(1234);
+  video_sender_info.bytes_sent = kBytesSent;
+  stats_read.senders.push_back(video_sender_info);
+
+  EXPECT_CALL(session_, video_channel()).WillRepeatedly(Return(&video_channel));
+  EXPECT_CALL(session_, voice_channel()).WillRepeatedly(ReturnNull());
+  EXPECT_CALL(*media_channel, GetStats(_))
+    .WillOnce(DoAll(SetArgPointee<0>(stats_read),
+                    Return(true)));
+
+  stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  StatsReports reports;
+  stats.GetStats(NULL, &reports);
+  // |reports| should contain at least one session report, one track report,
+  // and one ssrc report.
+  EXPECT_LE(static_cast<size_t>(3), reports.size());
+  const StatsReport* track_report = FindNthReportByType(
+      reports, StatsReport::kStatsReportTypeTrack, 1);
+  EXPECT_TRUE(track_report);
+
+  // Get report for the specific |track|.
+  reports.clear();
+  stats.GetStats(track_, &reports);
+  // |reports| should contain at least one session report, one track report,
+  // and one ssrc report.
+  EXPECT_LE(static_cast<size_t>(3), reports.size());
+  track_report = FindNthReportByType(
+      reports, StatsReport::kStatsReportTypeTrack, 1);
+  EXPECT_TRUE(track_report);
+  EXPECT_EQ(stats.GetTimeNow(), track_report->timestamp());
+
+  std::string ssrc_id = ExtractSsrcStatsValue(
+      reports, StatsReport::kStatsValueNameSsrc);
+  EXPECT_EQ(rtc::ToString<uint32_t>(kSsrcOfTrack), ssrc_id);
+
+  std::string track_id = ExtractSsrcStatsValue(
+      reports, StatsReport::kStatsValueNameTrackId);
+  EXPECT_EQ(kLocalTrackId, track_id);
+
+  std::string media_type = ExtractSsrcStatsValue(reports,
+      StatsReport::kStatsValueNameMediaType);
+  EXPECT_EQ("video", media_type);
+}
+
+// This test verifies that an SSRC object has the identifier of a Transport
+// stats object, and that this transport stats object exists in stats.
+TEST_F(StatsCollectorTest, TransportObjectLinkedFromSsrcObject) {
+  StatsCollectorForTest stats(&pc_);
+
+  EXPECT_CALL(session_, GetLocalCertificate(_, _))
+      .WillRepeatedly(Return(false));
+  EXPECT_CALL(session_, GetRemoteSSLCertificate(_, _))
+      .WillRepeatedly(Return(false));
+
+  MockVideoMediaChannel* media_channel = new MockVideoMediaChannel();
+  // The transport_name known by the video channel.
+  const std::string kVcName("vcname");
+  cricket::VideoChannel video_channel(rtc::Thread::Current(), media_channel,
+                                      nullptr, kVcName, false);
+  AddOutgoingVideoTrackStats();
+  stats.AddStream(stream_);
+
+  // Constructs an ssrc stats update.
+  cricket::VideoSenderInfo video_sender_info;
+  cricket::VideoMediaInfo stats_read;
+  const int64_t kBytesSent = 12345678901234LL;
+
+  // Construct a stats value to read.
+  video_sender_info.add_ssrc(1234);
+  video_sender_info.bytes_sent = kBytesSent;
+  stats_read.senders.push_back(video_sender_info);
+
+  EXPECT_CALL(session_, video_channel()).WillRepeatedly(Return(&video_channel));
+  EXPECT_CALL(session_, voice_channel()).WillRepeatedly(ReturnNull());
+  EXPECT_CALL(*media_channel, GetStats(_))
+    .WillRepeatedly(DoAll(SetArgPointee<0>(stats_read),
+                          Return(true)));
+
+  InitSessionStats(kVcName);
+  EXPECT_CALL(session_, GetTransportStats(_))
+      .WillRepeatedly(DoAll(SetArgPointee<0>(session_stats_),
+                            Return(true)));
+
+  stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  StatsReports reports;
+  stats.GetStats(NULL, &reports);
+  std::string transport_id = ExtractStatsValue(
+      StatsReport::kStatsReportTypeSsrc,
+      reports,
+      StatsReport::kStatsValueNameTransportId);
+  ASSERT_NE(kNotFound, transport_id);
+  // Transport id component ID will always be 1.
+  // This has assumptions about how the ID is constructed.  As is, this is
+  // OK since this is for testing purposes only, but if we ever need this
+  // in production, we should add a generic method that does this.
+  size_t index = transport_id.find('-');
+  ASSERT_NE(std::string::npos, index);
+  std::string content = transport_id.substr(index + 1);
+  index = content.rfind('-');
+  ASSERT_NE(std::string::npos, index);
+  content = content.substr(0, index);
+  StatsReport::Id id(StatsReport::NewComponentId(content, 1));
+  ASSERT_EQ(transport_id, id->ToString());
+  const StatsReport* transport_report = FindReportById(reports, id);
+  ASSERT_FALSE(transport_report == NULL);
+}
+
+// This test verifies that a remote stats object will not be created for
+// an outgoing SSRC where remote stats are not returned.
+TEST_F(StatsCollectorTest, RemoteSsrcInfoIsAbsent) {
+  StatsCollectorForTest stats(&pc_);
+
+  MockVideoMediaChannel* media_channel = new MockVideoMediaChannel();
+  // The transport_name known by the video channel.
+  const std::string kVcName("vcname");
+  cricket::VideoChannel video_channel(rtc::Thread::Current(), media_channel,
+                                      nullptr, kVcName, false);
+  AddOutgoingVideoTrackStats();
+  stats.AddStream(stream_);
+
+  stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  StatsReports reports;
+  stats.GetStats(NULL, &reports);
+  const StatsReport* remote_report = FindNthReportByType(reports,
+      StatsReport::kStatsReportTypeRemoteSsrc, 1);
+  EXPECT_TRUE(remote_report == NULL);
+}
+
+// This test verifies that a remote stats object will be created for
+// an outgoing SSRC where stats are returned.
+TEST_F(StatsCollectorTest, RemoteSsrcInfoIsPresent) {
+  StatsCollectorForTest stats(&pc_);
+
+  EXPECT_CALL(session_, GetLocalCertificate(_, _))
+      .WillRepeatedly(Return(false));
+  EXPECT_CALL(session_, GetRemoteSSLCertificate(_, _))
+      .WillRepeatedly(Return(false));
+
+  MockVideoMediaChannel* media_channel = new MockVideoMediaChannel();
+  // The transport_name known by the video channel.
+  const std::string kVcName("vcname");
+  cricket::VideoChannel video_channel(rtc::Thread::Current(), media_channel,
+                                      nullptr, kVcName, false);
+  AddOutgoingVideoTrackStats();
+  stats.AddStream(stream_);
+
+  // Instruct the session to return stats containing the transport channel.
+  InitSessionStats(kVcName);
+  EXPECT_CALL(session_, GetTransportStats(_))
+      .WillRepeatedly(DoAll(SetArgPointee<0>(session_stats_),
+                            Return(true)));
+
+  // Constructs an ssrc stats update.
+  cricket::VideoMediaInfo stats_read;
+
+  cricket::SsrcReceiverInfo remote_ssrc_stats;
+  remote_ssrc_stats.timestamp = 12345.678;
+  remote_ssrc_stats.ssrc = kSsrcOfTrack;
+  cricket::VideoSenderInfo video_sender_info;
+  video_sender_info.add_ssrc(kSsrcOfTrack);
+  video_sender_info.remote_stats.push_back(remote_ssrc_stats);
+  stats_read.senders.push_back(video_sender_info);
+
+  EXPECT_CALL(session_, video_channel()).WillRepeatedly(Return(&video_channel));
+  EXPECT_CALL(session_, voice_channel()).WillRepeatedly(ReturnNull());
+  EXPECT_CALL(*media_channel, GetStats(_))
+    .WillRepeatedly(DoAll(SetArgPointee<0>(stats_read),
+                          Return(true)));
+
+  stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  StatsReports reports;
+  stats.GetStats(NULL, &reports);
+
+  const StatsReport* remote_report = FindNthReportByType(reports,
+      StatsReport::kStatsReportTypeRemoteSsrc, 1);
+  EXPECT_FALSE(remote_report == NULL);
+  EXPECT_EQ(12345.678, remote_report->timestamp());
+}
+
+// This test verifies that track and ssrc reports are created when
+// StatsCollector::UpdateStats is called with stats for a remote (incoming)
+// track.
+TEST_F(StatsCollectorTest, ReportsFromRemoteTrack) {
+  StatsCollectorForTest stats(&pc_);
+
+  EXPECT_CALL(session_, GetLocalCertificate(_, _))
+      .WillRepeatedly(Return(false));
+  EXPECT_CALL(session_, GetRemoteSSLCertificate(_, _))
+      .WillRepeatedly(Return(false));
+
+  const char kVideoChannelName[] = "video";
+  InitSessionStats(kVideoChannelName);
+  EXPECT_CALL(session_, GetTransportStats(_))
+      .WillRepeatedly(DoAll(SetArgPointee<0>(session_stats_),
+                            Return(true)));
+
+  MockVideoMediaChannel* media_channel = new MockVideoMediaChannel();
+  cricket::VideoChannel video_channel(rtc::Thread::Current(), media_channel,
+                                      nullptr, kVideoChannelName, false);
+  AddIncomingVideoTrackStats();
+  stats.AddStream(stream_);
+
+  // Constructs an ssrc stats update.
+  cricket::VideoReceiverInfo video_receiver_info;
+  cricket::VideoMediaInfo stats_read;
+  const int64_t kNumOfPacketsConcealed = 54321;
+
+  // Construct a stats value to read.
+  video_receiver_info.add_ssrc(1234);
+  video_receiver_info.packets_concealed = kNumOfPacketsConcealed;
+  stats_read.receivers.push_back(video_receiver_info);
+
+  EXPECT_CALL(session_, video_channel()).WillRepeatedly(Return(&video_channel));
+  EXPECT_CALL(session_, voice_channel()).WillRepeatedly(ReturnNull());
+  EXPECT_CALL(*media_channel, GetStats(_))
+      .WillOnce(DoAll(SetArgPointee<0>(stats_read),
+                      Return(true)));
+
+  stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  StatsReports reports;
+  stats.GetStats(NULL, &reports);
+  // |reports| should contain at least one session report, one track report,
+  // and one ssrc report.
+  EXPECT_LE(static_cast<size_t>(3), reports.size());
+  const StatsReport* track_report = FindNthReportByType(
+      reports, StatsReport::kStatsReportTypeTrack, 1);
+  EXPECT_TRUE(track_report);
+  EXPECT_EQ(stats.GetTimeNow(), track_report->timestamp());
+
+  std::string ssrc_id = ExtractSsrcStatsValue(
+      reports, StatsReport::kStatsValueNameSsrc);
+  EXPECT_EQ(rtc::ToString<uint32_t>(kSsrcOfTrack), ssrc_id);
+
+  std::string track_id = ExtractSsrcStatsValue(
+      reports, StatsReport::kStatsValueNameTrackId);
+  EXPECT_EQ(kRemoteTrackId, track_id);
+}
+
+// This test verifies that the ICE candidate reports contain the correct
+// information from local/remote candidates.
+TEST_F(StatsCollectorTest, IceCandidateReport) {
+  StatsCollectorForTest stats(&pc_);
+
+  StatsReports reports;  // returned values.
+
+  const int local_port = 2000;
+  const char local_ip[] = "192.168.0.1";
+  const int remote_port = 2001;
+  const char remote_ip[] = "192.168.0.2";
+
+  rtc::SocketAddress local_address(local_ip, local_port);
+  rtc::SocketAddress remote_address(remote_ip, remote_port);
+  rtc::AdapterType network_type = rtc::ADAPTER_TYPE_ETHERNET;
+  uint32_t priority = 1000;
+
+  cricket::Candidate c;
+  ASSERT(c.id().length() > 0);
+  c.set_type(cricket::LOCAL_PORT_TYPE);
+  c.set_protocol(cricket::UDP_PROTOCOL_NAME);
+  c.set_address(local_address);
+  c.set_priority(priority);
+  c.set_network_type(network_type);
+  std::string report_id = AddCandidateReport(&stats, c, true)->id()->ToString();
+  EXPECT_EQ("Cand-" + c.id(), report_id);
+
+  c = cricket::Candidate();
+  ASSERT(c.id().length() > 0);
+  c.set_type(cricket::PRFLX_PORT_TYPE);
+  c.set_protocol(cricket::UDP_PROTOCOL_NAME);
+  c.set_address(remote_address);
+  c.set_priority(priority);
+  c.set_network_type(network_type);
+  report_id = AddCandidateReport(&stats, c, false)->id()->ToString();
+  EXPECT_EQ("Cand-" + c.id(), report_id);
+
+  stats.GetStats(NULL, &reports);
+
+  // Verify the local candidate report is populated correctly.
+  EXPECT_EQ(
+      local_ip,
+      ExtractStatsValue(StatsReport::kStatsReportTypeIceLocalCandidate, reports,
+                        StatsReport::kStatsValueNameCandidateIPAddress));
+  EXPECT_EQ(
+      rtc::ToString<int>(local_port),
+      ExtractStatsValue(StatsReport::kStatsReportTypeIceLocalCandidate, reports,
+                        StatsReport::kStatsValueNameCandidatePortNumber));
+  EXPECT_EQ(
+      cricket::UDP_PROTOCOL_NAME,
+      ExtractStatsValue(StatsReport::kStatsReportTypeIceLocalCandidate, reports,
+                        StatsReport::kStatsValueNameCandidateTransportType));
+  EXPECT_EQ(
+      rtc::ToString<int>(priority),
+      ExtractStatsValue(StatsReport::kStatsReportTypeIceLocalCandidate, reports,
+                        StatsReport::kStatsValueNameCandidatePriority));
+  EXPECT_EQ(
+      IceCandidateTypeToStatsType(cricket::LOCAL_PORT_TYPE),
+      ExtractStatsValue(StatsReport::kStatsReportTypeIceLocalCandidate, reports,
+                        StatsReport::kStatsValueNameCandidateType));
+  EXPECT_EQ(
+      AdapterTypeToStatsType(network_type),
+      ExtractStatsValue(StatsReport::kStatsReportTypeIceLocalCandidate, reports,
+                        StatsReport::kStatsValueNameCandidateNetworkType));
+
+  // Verify the remote candidate report is populated correctly.
+  EXPECT_EQ(remote_ip,
+            ExtractStatsValue(StatsReport::kStatsReportTypeIceRemoteCandidate,
+                              reports,
+                              StatsReport::kStatsValueNameCandidateIPAddress));
+  EXPECT_EQ(rtc::ToString<int>(remote_port),
+            ExtractStatsValue(StatsReport::kStatsReportTypeIceRemoteCandidate,
+                              reports,
+                              StatsReport::kStatsValueNameCandidatePortNumber));
+  EXPECT_EQ(cricket::UDP_PROTOCOL_NAME,
+            ExtractStatsValue(
+                StatsReport::kStatsReportTypeIceRemoteCandidate, reports,
+                StatsReport::kStatsValueNameCandidateTransportType));
+  EXPECT_EQ(rtc::ToString<int>(priority),
+            ExtractStatsValue(StatsReport::kStatsReportTypeIceRemoteCandidate,
+                              reports,
+                              StatsReport::kStatsValueNameCandidatePriority));
+  EXPECT_EQ(
+      IceCandidateTypeToStatsType(cricket::PRFLX_PORT_TYPE),
+      ExtractStatsValue(StatsReport::kStatsReportTypeIceRemoteCandidate,
+                        reports, StatsReport::kStatsValueNameCandidateType));
+  EXPECT_EQ(kNotFound,
+            ExtractStatsValue(
+                StatsReport::kStatsReportTypeIceRemoteCandidate, reports,
+                StatsReport::kStatsValueNameCandidateNetworkType));
+}
+
+// This test verifies that all chained certificates are correctly
+// reported.
+TEST_F(StatsCollectorTest, ChainedCertificateReportsCreated) {
+  // Build local certificate chain.
+  std::vector<std::string> local_ders(5);
+  local_ders[0] = "These";
+  local_ders[1] = "are";
+  local_ders[2] = "some";
+  local_ders[3] = "der";
+  local_ders[4] = "values";
+  rtc::FakeSSLCertificate local_cert(DersToPems(local_ders));
+
+  // Build remote certificate chain.
+  std::vector<std::string> remote_ders(4);
+  remote_ders[0] = "A";
+  remote_ders[1] = "non-";
+  remote_ders[2] = "intersecting";
+  remote_ders[3] = "set";
+  rtc::FakeSSLCertificate remote_cert(DersToPems(remote_ders));
+
+  TestCertificateReports(local_cert, local_ders, remote_cert, remote_ders);
+}
+
+// This test verifies that all certificates without chains are correctly
+// reported.
+TEST_F(StatsCollectorTest, ChainlessCertificateReportsCreated) {
+  // Build local certificate.
+  std::string local_der = "This is the local der.";
+  rtc::FakeSSLCertificate local_cert(DerToPem(local_der));
+
+  // Build remote certificate.
+  std::string remote_der = "This is somebody else's der.";
+  rtc::FakeSSLCertificate remote_cert(DerToPem(remote_der));
+
+  TestCertificateReports(local_cert, std::vector<std::string>(1, local_der),
+                         remote_cert, std::vector<std::string>(1, remote_der));
+}
+
+// This test verifies that the stats are generated correctly when no
+// transport is present.
+TEST_F(StatsCollectorTest, NoTransport) {
+  StatsCollectorForTest stats(&pc_);
+
+  EXPECT_CALL(session_, GetLocalCertificate(_, _))
+      .WillRepeatedly(Return(false));
+  EXPECT_CALL(session_, GetRemoteSSLCertificate(_, _))
+      .WillRepeatedly(Return(false));
+
+  StatsReports reports;  // returned values.
+
+  // Fake stats to process.
+  cricket::TransportChannelStats channel_stats;
+  channel_stats.component = 1;
+
+  cricket::TransportStats transport_stats;
+  transport_stats.transport_name = "audio";
+  transport_stats.channel_stats.push_back(channel_stats);
+
+  SessionStats session_stats;
+  session_stats.transport_stats[transport_stats.transport_name] =
+      transport_stats;
+
+  // Configure MockWebRtcSession
+  EXPECT_CALL(session_, GetTransportStats(_))
+    .WillOnce(DoAll(SetArgPointee<0>(session_stats),
+                    Return(true)));
+
+  stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  stats.GetStats(NULL, &reports);
+
+  // Check that the local certificate is absent.
+  std::string local_certificate_id = ExtractStatsValue(
+      StatsReport::kStatsReportTypeComponent,
+      reports,
+      StatsReport::kStatsValueNameLocalCertificateId);
+  ASSERT_EQ(kNotFound, local_certificate_id);
+
+  // Check that the remote certificate is absent.
+  std::string remote_certificate_id = ExtractStatsValue(
+      StatsReport::kStatsReportTypeComponent,
+      reports,
+      StatsReport::kStatsValueNameRemoteCertificateId);
+  ASSERT_EQ(kNotFound, remote_certificate_id);
+
+  // Check that the negotiated ciphers are absent.
+  std::string dtls_cipher_suite =
+      ExtractStatsValue(StatsReport::kStatsReportTypeComponent, reports,
+                        StatsReport::kStatsValueNameDtlsCipher);
+  ASSERT_EQ(kNotFound, dtls_cipher_suite);
+  std::string srtp_crypto_suite =
+      ExtractStatsValue(StatsReport::kStatsReportTypeComponent, reports,
+                        StatsReport::kStatsValueNameSrtpCipher);
+  ASSERT_EQ(kNotFound, srtp_crypto_suite);
+}
+
+// This test verifies that the stats are generated correctly when the transport
+// does not have any certificates.
+TEST_F(StatsCollectorTest, NoCertificates) {
+  StatsCollectorForTest stats(&pc_);
+
+  EXPECT_CALL(session_, GetLocalCertificate(_, _))
+      .WillRepeatedly(Return(false));
+  EXPECT_CALL(session_, GetRemoteSSLCertificate(_, _))
+      .WillRepeatedly(Return(false));
+
+  StatsReports reports;  // returned values.
+
+  // Fake stats to process.
+  cricket::TransportChannelStats channel_stats;
+  channel_stats.component = 1;
+
+  cricket::TransportStats transport_stats;
+  transport_stats.transport_name = "audio";
+  transport_stats.channel_stats.push_back(channel_stats);
+
+  SessionStats session_stats;
+  session_stats.transport_stats[transport_stats.transport_name] =
+      transport_stats;
+
+  // Fake transport object.
+  rtc::scoped_ptr<cricket::FakeTransport> transport(
+      new cricket::FakeTransport(transport_stats.transport_name));
+
+  // Configure MockWebRtcSession
+  EXPECT_CALL(session_, GetTransportStats(_))
+    .WillOnce(DoAll(SetArgPointee<0>(session_stats),
+                    Return(true)));
+  stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  stats.GetStats(NULL, &reports);
+
+  // Check that the local certificate is absent.
+  std::string local_certificate_id = ExtractStatsValue(
+      StatsReport::kStatsReportTypeComponent,
+      reports,
+      StatsReport::kStatsValueNameLocalCertificateId);
+  ASSERT_EQ(kNotFound, local_certificate_id);
+
+  // Check that the remote certificate is absent.
+  std::string remote_certificate_id = ExtractStatsValue(
+      StatsReport::kStatsReportTypeComponent,
+      reports,
+      StatsReport::kStatsValueNameRemoteCertificateId);
+  ASSERT_EQ(kNotFound, remote_certificate_id);
+}
+
+// This test verifies that a remote certificate with an unsupported digest
+// algorithm is correctly ignored.
+TEST_F(StatsCollectorTest, UnsupportedDigestIgnored) {
+  // Build a local certificate.
+  std::string local_der = "This is the local der.";
+  rtc::FakeSSLCertificate local_cert(DerToPem(local_der));
+
+  // Build a remote certificate with an unsupported digest algorithm.
+  std::string remote_der = "This is somebody else's der.";
+  rtc::FakeSSLCertificate remote_cert(DerToPem(remote_der));
+  remote_cert.set_digest_algorithm("foobar");
+
+  TestCertificateReports(local_cert, std::vector<std::string>(1, local_der),
+                         remote_cert, std::vector<std::string>());
+}
+
+// This test verifies that a local stats object can get statistics via the
+// AudioTrackInterface::GetStats() method.
+TEST_F(StatsCollectorTest, GetStatsFromLocalAudioTrack) {
+  StatsCollectorForTest stats(&pc_);
+
+  EXPECT_CALL(session_, GetLocalCertificate(_, _))
+      .WillRepeatedly(Return(false));
+  EXPECT_CALL(session_, GetRemoteSSLCertificate(_, _))
+      .WillRepeatedly(Return(false));
+
+  MockVoiceMediaChannel* media_channel = new MockVoiceMediaChannel();
+  // The transport_name known by the voice channel.
+  const std::string kVcName("vcname");
+  cricket::VoiceChannel voice_channel(rtc::Thread::Current(), media_engine_,
+                                      media_channel, nullptr, kVcName, false);
+  AddOutgoingAudioTrackStats();
+  stats.AddStream(stream_);
+  stats.AddLocalAudioTrack(audio_track_, kSsrcOfTrack);
+
+  cricket::VoiceSenderInfo voice_sender_info;
+  InitVoiceSenderInfo(&voice_sender_info);
+
+  cricket::VoiceMediaInfo stats_read;
+  StatsReports reports;  // returned values.
+  SetupAndVerifyAudioTrackStats(
+      audio_track_.get(), stream_.get(), &stats, &voice_channel, kVcName,
+      media_channel, &voice_sender_info, NULL, &stats_read, &reports);
+
+  // Verify that there is no remote report for the local audio track because
+  // we did not set it up.
+  const StatsReport* remote_report = FindNthReportByType(reports,
+      StatsReport::kStatsReportTypeRemoteSsrc, 1);
+  EXPECT_TRUE(remote_report == NULL);
+}
+
+// This test verifies that audio receive streams populate stats reports
+// correctly.
+TEST_F(StatsCollectorTest, GetStatsFromRemoteStream) {
+  StatsCollectorForTest stats(&pc_);
+
+  EXPECT_CALL(session_, GetLocalCertificate(_, _))
+      .WillRepeatedly(Return(false));
+  EXPECT_CALL(session_, GetRemoteSSLCertificate(_, _))
+      .WillRepeatedly(Return(false));
+
+  MockVoiceMediaChannel* media_channel = new MockVoiceMediaChannel();
+  // The transport_name known by the voice channel.
+  const std::string kVcName("vcname");
+  cricket::VoiceChannel voice_channel(rtc::Thread::Current(), media_engine_,
+                                      media_channel, nullptr, kVcName, false);
+  AddIncomingAudioTrackStats();
+  stats.AddStream(stream_);
+
+  cricket::VoiceReceiverInfo voice_receiver_info;
+  InitVoiceReceiverInfo(&voice_receiver_info);
+  voice_receiver_info.codec_name = "fake_codec";
+
+  cricket::VoiceMediaInfo stats_read;
+  StatsReports reports;  // returned values.
+  SetupAndVerifyAudioTrackStats(
+      audio_track_.get(), stream_.get(), &stats, &voice_channel, kVcName,
+      media_channel, NULL, &voice_receiver_info, &stats_read, &reports);
+}
+
+// This test verifies that a local stats object won't update its statistics
+// after a RemoveLocalAudioTrack() call.
+TEST_F(StatsCollectorTest, GetStatsAfterRemoveAudioStream) {
+  StatsCollectorForTest stats(&pc_);
+
+  EXPECT_CALL(session_, GetLocalCertificate(_, _))
+      .WillRepeatedly(Return(false));
+  EXPECT_CALL(session_, GetRemoteSSLCertificate(_, _))
+      .WillRepeatedly(Return(false));
+
+  MockVoiceMediaChannel* media_channel = new MockVoiceMediaChannel();
+  // The transport_name known by the voice channel.
+  const std::string kVcName("vcname");
+  cricket::VoiceChannel voice_channel(rtc::Thread::Current(), media_engine_,
+                                      media_channel, nullptr, kVcName, false);
+  AddOutgoingAudioTrackStats();
+  stats.AddStream(stream_);
+  stats.AddLocalAudioTrack(audio_track_.get(), kSsrcOfTrack);
+
+  // Instruct the session to return stats containing the transport channel.
+  InitSessionStats(kVcName);
+  EXPECT_CALL(session_, GetTransportStats(_))
+      .WillRepeatedly(DoAll(SetArgPointee<0>(session_stats_),
+                            Return(true)));
+
+  stats.RemoveLocalAudioTrack(audio_track_.get(), kSsrcOfTrack);
+  cricket::VoiceSenderInfo voice_sender_info;
+  InitVoiceSenderInfo(&voice_sender_info);
+
+  // Constructs an ssrc stats update.
+  cricket::VoiceMediaInfo stats_read;
+  stats_read.senders.push_back(voice_sender_info);
+
+  EXPECT_CALL(session_, voice_channel()).WillRepeatedly(Return(&voice_channel));
+  EXPECT_CALL(session_, video_channel()).WillRepeatedly(ReturnNull());
+  EXPECT_CALL(*media_channel, GetStats(_))
+      .WillRepeatedly(DoAll(SetArgPointee<0>(stats_read),
+                            Return(true)));
+
+  StatsReports reports;  // returned values.
+  stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+  stats.GetStats(NULL, &reports);
+
+  // The report will still exist, since reports are not removed in
+  // RemoveStream().
+  const StatsReport* report = FindNthReportByType(
+      reports, StatsReport::kStatsReportTypeSsrc, 1);
+  EXPECT_FALSE(report == NULL);
+  EXPECT_EQ(stats.GetTimeNow(), report->timestamp());
+  std::string track_id = ExtractSsrcStatsValue(
+      reports, StatsReport::kStatsValueNameTrackId);
+  EXPECT_EQ(kLocalTrackId, track_id);
+  std::string ssrc_id = ExtractSsrcStatsValue(
+      reports, StatsReport::kStatsValueNameSsrc);
+  EXPECT_EQ(rtc::ToString<uint32_t>(kSsrcOfTrack), ssrc_id);
+
+  // Verify the values in the track report; no value should have been changed
+  // by AudioTrackInterface::GetSignalValue() or
+  // AudioProcessorInterface::AudioProcessorStats::GetStats().
+  VerifyVoiceSenderInfoReport(report, voice_sender_info);
+}
+
+// This test verifies that when outgoing and incoming audio tracks are using
+// the same ssrc, they populate stats reports correctly.
+TEST_F(StatsCollectorTest, LocalAndRemoteTracksWithSameSsrc) {
+  StatsCollectorForTest stats(&pc_);
+
+  EXPECT_CALL(session_, GetLocalCertificate(_, _))
+      .WillRepeatedly(Return(false));
+  EXPECT_CALL(session_, GetRemoteSSLCertificate(_, _))
+      .WillRepeatedly(Return(false));
+
+  MockVoiceMediaChannel* media_channel = new MockVoiceMediaChannel();
+  // The transport_name known by the voice channel.
+  const std::string kVcName("vcname");
+  cricket::VoiceChannel voice_channel(rtc::Thread::Current(), media_engine_,
+                                      media_channel, nullptr, kVcName, false);
+
+  // Create a local stream with a local audio track and add it to the stats.
+  AddOutgoingAudioTrackStats();
+  stats.AddStream(stream_);
+  stats.AddLocalAudioTrack(audio_track_.get(), kSsrcOfTrack);
+
+  // Create a remote stream with a remote audio track and add it to the stats.
+  rtc::scoped_refptr<webrtc::MediaStream> remote_stream(
+      webrtc::MediaStream::Create("remotestreamlabel"));
+  rtc::scoped_refptr<FakeAudioTrack> remote_track(
+      new rtc::RefCountedObject<FakeAudioTrack>(kRemoteTrackId));
+  EXPECT_CALL(session_, GetRemoteTrackIdBySsrc(kSsrcOfTrack, _))
+      .WillOnce(DoAll(SetArgPointee<1>(kRemoteTrackId), Return(true)));
+  remote_stream->AddTrack(remote_track);
+  stats.AddStream(remote_stream);
+
+  // Instruct the session to return stats containing the transport channel.
+  InitSessionStats(kVcName);
+  EXPECT_CALL(session_, GetTransportStats(_))
+      .WillRepeatedly(DoAll(SetArgPointee<0>(session_stats_),
+                            Return(true)));
+
+  cricket::VoiceSenderInfo voice_sender_info;
+  InitVoiceSenderInfo(&voice_sender_info);
+
+  // Some of the contents in |voice_sender_info| need to be updated from the
+  // |audio_track_|.
+  UpdateVoiceSenderInfoFromAudioTrack(audio_track_.get(), &voice_sender_info);
+
+  cricket::VoiceReceiverInfo voice_receiver_info;
+  InitVoiceReceiverInfo(&voice_receiver_info);
+
+  // Constructs an ssrc stats update.
+  cricket::VoiceMediaInfo stats_read;
+  stats_read.senders.push_back(voice_sender_info);
+  stats_read.receivers.push_back(voice_receiver_info);
+
+  EXPECT_CALL(session_, voice_channel()).WillRepeatedly(Return(&voice_channel));
+  EXPECT_CALL(session_, video_channel()).WillRepeatedly(ReturnNull());
+  EXPECT_CALL(*media_channel, GetStats(_))
+      .WillRepeatedly(DoAll(SetArgPointee<0>(stats_read),
+                            Return(true)));
+
+  StatsReports reports;  // returned values.
+  stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+
+  // Get stats for the local track.
+  stats.GetStats(audio_track_.get(), &reports);
+  const StatsReport* track_report = FindNthReportByType(
+      reports, StatsReport::kStatsReportTypeSsrc, 1);
+  EXPECT_TRUE(track_report);
+  EXPECT_EQ(stats.GetTimeNow(), track_report->timestamp());
+  std::string track_id = ExtractSsrcStatsValue(
+      reports, StatsReport::kStatsValueNameTrackId);
+  EXPECT_EQ(kLocalTrackId, track_id);
+  VerifyVoiceSenderInfoReport(track_report, voice_sender_info);
+
+  // Get stats for the remote track.
+  reports.clear();
+  stats.GetStats(remote_track.get(), &reports);
+  track_report = FindNthReportByType(reports,
+                                     StatsReport::kStatsReportTypeSsrc, 1);
+  EXPECT_TRUE(track_report);
+  EXPECT_EQ(stats.GetTimeNow(), track_report->timestamp());
+  track_id = ExtractSsrcStatsValue(reports,
+                                   StatsReport::kStatsValueNameTrackId);
+  EXPECT_EQ(kRemoteTrackId, track_id);
+  VerifyVoiceReceiverInfoReport(track_report, voice_receiver_info);
+}
+
+// This test verifies that when two outgoing audio tracks are using the same
+// ssrc at different times, they populate stats reports correctly.
+// TODO(xians): Figure out if it is possible to encapsulate the setup and
+// avoid duplication of code in test cases.
+TEST_F(StatsCollectorTest, TwoLocalTracksWithSameSsrc) {
+  StatsCollectorForTest stats(&pc_);
+
+  EXPECT_CALL(session_, GetLocalCertificate(_, _))
+      .WillRepeatedly(Return(false));
+  EXPECT_CALL(session_, GetRemoteSSLCertificate(_, _))
+      .WillRepeatedly(Return(false));
+
+  MockVoiceMediaChannel* media_channel = new MockVoiceMediaChannel();
+  // The transport_name known by the voice channel.
+  const std::string kVcName("vcname");
+  cricket::VoiceChannel voice_channel(rtc::Thread::Current(), media_engine_,
+                                      media_channel, nullptr, kVcName, false);
+
+  // Create a local stream with a local audio track and adds it to the stats.
+  AddOutgoingAudioTrackStats();
+  stats.AddStream(stream_);
+  stats.AddLocalAudioTrack(audio_track_, kSsrcOfTrack);
+
+  cricket::VoiceSenderInfo voice_sender_info;
+  voice_sender_info.add_ssrc(kSsrcOfTrack);
+
+  cricket::VoiceMediaInfo stats_read;
+  StatsReports reports;  // returned values.
+  SetupAndVerifyAudioTrackStats(
+      audio_track_.get(), stream_.get(), &stats, &voice_channel, kVcName,
+      media_channel, &voice_sender_info, NULL, &stats_read, &reports);
+
+  // Remove the previous audio track from the stream.
+  stream_->RemoveTrack(audio_track_.get());
+  stats.RemoveLocalAudioTrack(audio_track_.get(), kSsrcOfTrack);
+
+  // Create a new audio track and add it to the stream and stats.
+  static const std::string kNewTrackId = "new_track_id";
+  rtc::scoped_refptr<FakeAudioTrack> new_audio_track(
+      new rtc::RefCountedObject<FakeAudioTrack>(kNewTrackId));
+  EXPECT_CALL(session_, GetLocalTrackIdBySsrc(kSsrcOfTrack, _))
+      .WillOnce(DoAll(SetArgPointee<1>(kNewTrackId), Return(true)));
+  stream_->AddTrack(new_audio_track);
+
+  stats.AddLocalAudioTrack(new_audio_track, kSsrcOfTrack);
+  stats.ClearUpdateStatsCacheForTest();
+  cricket::VoiceSenderInfo new_voice_sender_info;
+  InitVoiceSenderInfo(&new_voice_sender_info);
+  cricket::VoiceMediaInfo new_stats_read;
+  reports.clear();
+  SetupAndVerifyAudioTrackStats(
+      new_audio_track.get(), stream_.get(), &stats, &voice_channel, kVcName,
+      media_channel, &new_voice_sender_info, NULL, &new_stats_read, &reports);
+}
+
+}  // namespace webrtc
diff --git a/webrtc/api/statstypes.cc b/webrtc/api/statstypes.cc
new file mode 100644
index 0000000..ab58cb1
--- /dev/null
+++ b/webrtc/api/statstypes.cc
@@ -0,0 +1,782 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/statstypes.h"
+
+#include <string.h>
+
+#include "webrtc/base/checks.h"
+
+// TODO(tommi): Could we have a static map of value name -> expected type
+// and use this to RTC_DCHECK on correct usage (somewhat strongly typed values)?
+// Alternatively, we could define the names+type in a separate document and
+// generate strongly typed inline C++ code that forces the correct type to be
+// used for a given name at compile time.
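+// As a rough sketch of that idea (hypothetical, not part of this CL), the
+// lookup could be as simple as:
+//   static const std::map<StatsReport::StatsValueName,
+//                         StatsReport::Value::Type> kExpectedTypes = {
+//       {StatsReport::kStatsValueNameBytesSent, StatsReport::Value::kInt64},
+//   };
+//   RTC_DCHECK(kExpectedTypes.at(name) == type);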
+
+using rtc::RefCountedObject;
+
+namespace webrtc {
+namespace {
+
+// The id of StatsReport of type kStatsReportTypeBwe.
+const char kStatsReportVideoBweId[] = "bweforvideo";
+
+// NOTE: These names need to be consistent with an external
+// specification (W3C Stats Identifiers).
+const char* InternalTypeToString(StatsReport::StatsType type) {
+  switch (type) {
+    case StatsReport::kStatsReportTypeSession:
+      return "googLibjingleSession";
+    case StatsReport::kStatsReportTypeBwe:
+      return "VideoBwe";
+    case StatsReport::kStatsReportTypeRemoteSsrc:
+      return "remoteSsrc";
+    case StatsReport::kStatsReportTypeSsrc:
+      return "ssrc";
+    case StatsReport::kStatsReportTypeTrack:
+      return "googTrack";
+    case StatsReport::kStatsReportTypeIceLocalCandidate:
+      return "localcandidate";
+    case StatsReport::kStatsReportTypeIceRemoteCandidate:
+      return "remotecandidate";
+    case StatsReport::kStatsReportTypeTransport:
+      return "transport";
+    case StatsReport::kStatsReportTypeComponent:
+      return "googComponent";
+    case StatsReport::kStatsReportTypeCandidatePair:
+      return "googCandidatePair";
+    case StatsReport::kStatsReportTypeCertificate:
+      return "googCertificate";
+    case StatsReport::kStatsReportTypeDataChannel:
+      return "datachannel";
+  }
+  RTC_NOTREACHED();
+  return nullptr;
+}
+
+class BandwidthEstimationId : public StatsReport::IdBase {
+ public:
+  BandwidthEstimationId()
+      : StatsReport::IdBase(StatsReport::kStatsReportTypeBwe) {}
+  std::string ToString() const override { return kStatsReportVideoBweId; }
+};
+
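+// Each of the IdBase subclasses below encodes one report-id naming scheme:
+// ToString() builds the externally visible id string, and Equals() compares
+// ids of the same scheme field by field.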
+class TypedId : public StatsReport::IdBase {
+ public:
+  TypedId(StatsReport::StatsType type, const std::string& id)
+      : StatsReport::IdBase(type), id_(id) {}
+
+  bool Equals(const IdBase& other) const override {
+    return IdBase::Equals(other) &&
+           static_cast<const TypedId&>(other).id_ == id_;
+  }
+
+  std::string ToString() const override {
+    return std::string(InternalTypeToString(type_)) + kSeparator + id_;
+  }
+
+ protected:
+  const std::string id_;
+};
+
+class TypedIntId : public StatsReport::IdBase {
+ public:
+  TypedIntId(StatsReport::StatsType type, int id)
+      : StatsReport::IdBase(type), id_(id) {}
+
+  bool Equals(const IdBase& other) const override {
+    return IdBase::Equals(other) &&
+           static_cast<const TypedIntId&>(other).id_ == id_;
+  }
+
+  std::string ToString() const override {
+    return std::string(InternalTypeToString(type_)) +
+           kSeparator +
+           rtc::ToString<int>(id_);
+  }
+
+ protected:
+  const int id_;
+};
+
+class IdWithDirection : public TypedId {
+ public:
+  IdWithDirection(StatsReport::StatsType type, const std::string& id,
+                  StatsReport::Direction direction)
+      : TypedId(type, id), direction_(direction) {}
+
+  bool Equals(const IdBase& other) const override {
+    return TypedId::Equals(other) &&
+           static_cast<const IdWithDirection&>(other).direction_ == direction_;
+  }
+
+  std::string ToString() const override {
+    std::string ret(TypedId::ToString());
+    ret += kSeparator;
+    ret += direction_ == StatsReport::kSend ? "send" : "recv";
+    return ret;
+  }
+
+ private:
+  const StatsReport::Direction direction_;
+};
+
+class CandidateId : public TypedId {
+ public:
+  CandidateId(bool local, const std::string& id)
+      : TypedId(local ?
+                    StatsReport::kStatsReportTypeIceLocalCandidate :
+                    StatsReport::kStatsReportTypeIceRemoteCandidate,
+                id) {
+  }
+
+  std::string ToString() const override {
+    return "Cand-" + id_;
+  }
+};
+
+class ComponentId : public StatsReport::IdBase {
+ public:
+  ComponentId(const std::string& content_name, int component)
+      : ComponentId(StatsReport::kStatsReportTypeComponent, content_name,
+            component) {}
+
+  bool Equals(const IdBase& other) const override {
+    return IdBase::Equals(other) &&
+        static_cast<const ComponentId&>(other).component_ == component_ &&
+        static_cast<const ComponentId&>(other).content_name_ == content_name_;
+  }
+
+  std::string ToString() const override {
+    return ToString("Channel-");
+  }
+
+ protected:
+  ComponentId(StatsReport::StatsType type, const std::string& content_name,
+              int component)
+      : IdBase(type),
+        content_name_(content_name),
+        component_(component) {}
+
+  std::string ToString(const char* prefix) const {
+    std::string ret(prefix);
+    ret += content_name_;
+    ret += '-';
+    ret += rtc::ToString<>(component_);
+    return ret;
+  }
+
+ private:
+  const std::string content_name_;
+  const int component_;
+};
+
+class CandidatePairId : public ComponentId {
+ public:
+  CandidatePairId(const std::string& content_name, int component, int index)
+      : ComponentId(StatsReport::kStatsReportTypeCandidatePair, content_name,
+            component),
+        index_(index) {}
+
+  bool Equals(const IdBase& other) const override {
+    return ComponentId::Equals(other) &&
+        static_cast<const CandidatePairId&>(other).index_ == index_;
+  }
+
+  std::string ToString() const override {
+    std::string ret(ComponentId::ToString("Conn-"));
+    ret += '-';
+    ret += rtc::ToString<>(index_);
+    return ret;
+  }
+
+ private:
+  const int index_;
+};
+
+}  // namespace
+
+StatsReport::IdBase::IdBase(StatsType type) : type_(type) {}
+StatsReport::IdBase::~IdBase() {}
+
+StatsReport::StatsType StatsReport::IdBase::type() const { return type_; }
+
+bool StatsReport::IdBase::Equals(const IdBase& other) const {
+  return other.type_ == type_;
+}
+
+StatsReport::Value::Value(StatsValueName name, int64_t value, Type int_type)
+    : name(name), type_(int_type) {
+  RTC_DCHECK(type_ == kInt || type_ == kInt64);
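+  // Store the value in whichever union member matches the requested width.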
+  if (type_ == kInt)
+    value_.int_ = static_cast<int>(value);
+  else
+    value_.int64_ = value;
+}
+
+StatsReport::Value::Value(StatsValueName name, float f)
+    : name(name), type_(kFloat) {
+  value_.float_ = f;
+}
+
+StatsReport::Value::Value(StatsValueName name, const std::string& value)
+    : name(name), type_(kString) {
+  value_.string_ = new std::string(value);
+}
+
+StatsReport::Value::Value(StatsValueName name, const char* value)
+    : name(name), type_(kStaticString) {
+  value_.static_string_ = value;
+}
+
+StatsReport::Value::Value(StatsValueName name, bool b)
+    : name(name), type_(kBool) {
+  value_.bool_ = b;
+}
+
+StatsReport::Value::Value(StatsValueName name, const Id& value)
+    : name(name), type_(kId) {
+  value_.id_ = new Id(value);
+}
+
+StatsReport::Value::~Value() {
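+  // Only the heap-allocated union members need explicit cleanup; the POD
+  // members are left as-is.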
+  switch (type_) {
+    case kInt:
+    case kInt64:
+    case kFloat:
+    case kBool:
+    case kStaticString:
+      break;
+    case kString:
+      delete value_.string_;
+      break;
+    case kId:
+      delete value_.id_;
+      break;
+  }
+}
+
+bool StatsReport::Value::Equals(const Value& other) const {
+  if (name != other.name)
+    return false;
+
+  // There's a 1:1 relation between a name and a type, so we don't have to
+  // check that.
+  RTC_DCHECK_EQ(type_, other.type_);
+
+  switch (type_) {
+    case kInt:
+      return value_.int_ == other.value_.int_;
+    case kInt64:
+      return value_.int64_ == other.value_.int64_;
+    case kFloat:
+      return value_.float_ == other.value_.float_;
+    case kStaticString: {
+#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON))
+      if (value_.static_string_ != other.value_.static_string_) {
+        RTC_DCHECK(strcmp(value_.static_string_, other.value_.static_string_) !=
+                   0)
+            << "Duplicate global?";
+      }
+#endif
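+      // Pointer comparison is sufficient for static strings; the debug check
+      // above catches two distinct globals that happen to hold equal text.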
+      return value_.static_string_ == other.value_.static_string_;
+    }
+    case kString:
+      return *value_.string_ == *other.value_.string_;
+    case kBool:
+      return value_.bool_ == other.value_.bool_;
+    case kId:
+      return (*value_.id_)->Equals(*other.value_.id_);
+  }
+  RTC_NOTREACHED();
+  return false;
+}
+
+bool StatsReport::Value::operator==(const std::string& value) const {
+  return (type_ == kString && value_.string_->compare(value) == 0) ||
+         (type_ == kStaticString && value.compare(value_.static_string_) == 0);
+}
+
+bool StatsReport::Value::operator==(const char* value) const {
+  if (type_ == kString)
+    return value_.string_->compare(value) == 0;
+  if (type_ != kStaticString)
+    return false;
+#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON))
+  if (value_.static_string_ != value)
+    RTC_DCHECK(strcmp(value_.static_string_, value) != 0)
+        << "Duplicate global?";
+#endif
+  return value == value_.static_string_;
+}
+
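+// Compares against either integer width; a kInt value is compared after
+// truncating |value| to int, mirroring how the value was stored.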
+bool StatsReport::Value::operator==(int64_t value) const {
+  if (type_ == kInt)
+    return value_.int_ == static_cast<int>(value);
+  return type_ == kInt64 && value_.int64_ == value;
+}
+
+bool StatsReport::Value::operator==(bool value) const {
+  return type_ == kBool && value_.bool_ == value;
+}
+
+bool StatsReport::Value::operator==(float value) const {
+  return type_ == kFloat && value_.float_ == value;
+}
+
+bool StatsReport::Value::operator==(const Id& value) const {
+  return type_ == kId && (*value_.id_)->Equals(value);
+}
+
+int StatsReport::Value::int_val() const {
+  RTC_DCHECK(type_ == kInt);
+  return value_.int_;
+}
+
+int64_t StatsReport::Value::int64_val() const {
+  RTC_DCHECK(type_ == kInt64);
+  return value_.int64_;
+}
+
+float StatsReport::Value::float_val() const {
+  RTC_DCHECK(type_ == kFloat);
+  return value_.float_;
+}
+
+const char* StatsReport::Value::static_string_val() const {
+  RTC_DCHECK(type_ == kStaticString);
+  return value_.static_string_;
+}
+
+const std::string& StatsReport::Value::string_val() const {
+  RTC_DCHECK(type_ == kString);
+  return *value_.string_;
+}
+
+bool StatsReport::Value::bool_val() const {
+  RTC_DCHECK(type_ == kBool);
+  return value_.bool_;
+}
+
+const char* StatsReport::Value::display_name() const {
+  switch (name) {
+    case kStatsValueNameAudioOutputLevel:
+      return "audioOutputLevel";
+    case kStatsValueNameAudioInputLevel:
+      return "audioInputLevel";
+    case kStatsValueNameBytesSent:
+      return "bytesSent";
+    case kStatsValueNamePacketsSent:
+      return "packetsSent";
+    case kStatsValueNameBytesReceived:
+      return "bytesReceived";
+    case kStatsValueNameLabel:
+      return "label";
+    case kStatsValueNamePacketsReceived:
+      return "packetsReceived";
+    case kStatsValueNamePacketsLost:
+      return "packetsLost";
+    case kStatsValueNameProtocol:
+      return "protocol";
+    case kStatsValueNameTransportId:
+      return "transportId";
+    case kStatsValueNameSelectedCandidatePairId:
+      return "selectedCandidatePairId";
+    case kStatsValueNameSsrc:
+      return "ssrc";
+    case kStatsValueNameState:
+      return "state";
+    case kStatsValueNameDataChannelId:
+      return "datachannelid";
+    case kStatsValueNameCodecImplementationName:
+      return "codecImplementationName";
+    case kStatsValueNameMediaType:
+      return "mediaType";
+    // 'goog' prefixed constants.
+    case kStatsValueNameAccelerateRate:
+      return "googAccelerateRate";
+    case kStatsValueNameActiveConnection:
+      return "googActiveConnection";
+    case kStatsValueNameActualEncBitrate:
+      return "googActualEncBitrate";
+    case kStatsValueNameAvailableReceiveBandwidth:
+      return "googAvailableReceiveBandwidth";
+    case kStatsValueNameAvailableSendBandwidth:
+      return "googAvailableSendBandwidth";
+    case kStatsValueNameAvgEncodeMs:
+      return "googAvgEncodeMs";
+    case kStatsValueNameBucketDelay:
+      return "googBucketDelay";
+    case kStatsValueNameBandwidthLimitedResolution:
+      return "googBandwidthLimitedResolution";
+
+    // Candidate related attributes. Values are taken from
+    // http://w3c.github.io/webrtc-stats/#rtcstatstype-enum*.
+    case kStatsValueNameCandidateIPAddress:
+      return "ipAddress";
+    case kStatsValueNameCandidateNetworkType:
+      return "networkType";
+    case kStatsValueNameCandidatePortNumber:
+      return "portNumber";
+    case kStatsValueNameCandidatePriority:
+      return "priority";
+    case kStatsValueNameCandidateTransportType:
+      return "transport";
+    case kStatsValueNameCandidateType:
+      return "candidateType";
+
+    case kStatsValueNameChannelId:
+      return "googChannelId";
+    case kStatsValueNameCodecName:
+      return "googCodecName";
+    case kStatsValueNameComponent:
+      return "googComponent";
+    case kStatsValueNameContentName:
+      return "googContentName";
+    case kStatsValueNameCpuLimitedResolution:
+      return "googCpuLimitedResolution";
+    case kStatsValueNameDecodingCTSG:
+      return "googDecodingCTSG";
+    case kStatsValueNameDecodingCTN:
+      return "googDecodingCTN";
+    case kStatsValueNameDecodingNormal:
+      return "googDecodingNormal";
+    case kStatsValueNameDecodingPLC:
+      return "googDecodingPLC";
+    case kStatsValueNameDecodingCNG:
+      return "googDecodingCNG";
+    case kStatsValueNameDecodingPLCCNG:
+      return "googDecodingPLCCNG";
+    case kStatsValueNameDer:
+      return "googDerBase64";
+    case kStatsValueNameDtlsCipher:
+      return "dtlsCipher";
+    case kStatsValueNameEchoCancellationQualityMin:
+      return "googEchoCancellationQualityMin";
+    case kStatsValueNameEchoDelayMedian:
+      return "googEchoCancellationEchoDelayMedian";
+    case kStatsValueNameEchoDelayStdDev:
+      return "googEchoCancellationEchoDelayStdDev";
+    case kStatsValueNameEchoReturnLoss:
+      return "googEchoCancellationReturnLoss";
+    case kStatsValueNameEchoReturnLossEnhancement:
+      return "googEchoCancellationReturnLossEnhancement";
+    case kStatsValueNameEncodeUsagePercent:
+      return "googEncodeUsagePercent";
+    case kStatsValueNameExpandRate:
+      return "googExpandRate";
+    case kStatsValueNameFingerprint:
+      return "googFingerprint";
+    case kStatsValueNameFingerprintAlgorithm:
+      return "googFingerprintAlgorithm";
+    case kStatsValueNameFirsReceived:
+      return "googFirsReceived";
+    case kStatsValueNameFirsSent:
+      return "googFirsSent";
+    case kStatsValueNameFrameHeightInput:
+      return "googFrameHeightInput";
+    case kStatsValueNameFrameHeightReceived:
+      return "googFrameHeightReceived";
+    case kStatsValueNameFrameHeightSent:
+      return "googFrameHeightSent";
+    case kStatsValueNameFrameRateReceived:
+      return "googFrameRateReceived";
+    case kStatsValueNameFrameRateDecoded:
+      return "googFrameRateDecoded";
+    case kStatsValueNameFrameRateOutput:
+      return "googFrameRateOutput";
+    case kStatsValueNameDecodeMs:
+      return "googDecodeMs";
+    case kStatsValueNameMaxDecodeMs:
+      return "googMaxDecodeMs";
+    case kStatsValueNameCurrentDelayMs:
+      return "googCurrentDelayMs";
+    case kStatsValueNameTargetDelayMs:
+      return "googTargetDelayMs";
+    case kStatsValueNameJitterBufferMs:
+      return "googJitterBufferMs";
+    case kStatsValueNameMinPlayoutDelayMs:
+      return "googMinPlayoutDelayMs";
+    case kStatsValueNameRenderDelayMs:
+      return "googRenderDelayMs";
+    case kStatsValueNameCaptureStartNtpTimeMs:
+      return "googCaptureStartNtpTimeMs";
+    case kStatsValueNameFrameRateInput:
+      return "googFrameRateInput";
+    case kStatsValueNameFrameRateSent:
+      return "googFrameRateSent";
+    case kStatsValueNameFrameWidthInput:
+      return "googFrameWidthInput";
+    case kStatsValueNameFrameWidthReceived:
+      return "googFrameWidthReceived";
+    case kStatsValueNameFrameWidthSent:
+      return "googFrameWidthSent";
+    case kStatsValueNameInitiator:
+      return "googInitiator";
+    case kStatsValueNameIssuerId:
+      return "googIssuerId";
+    case kStatsValueNameJitterReceived:
+      return "googJitterReceived";
+    case kStatsValueNameLocalAddress:
+      return "googLocalAddress";
+    case kStatsValueNameLocalCandidateId:
+      return "localCandidateId";
+    case kStatsValueNameLocalCandidateType:
+      return "googLocalCandidateType";
+    case kStatsValueNameLocalCertificateId:
+      return "localCertificateId";
+    case kStatsValueNameAdaptationChanges:
+      return "googAdaptationChanges";
+    case kStatsValueNameNacksReceived:
+      return "googNacksReceived";
+    case kStatsValueNameNacksSent:
+      return "googNacksSent";
+    case kStatsValueNamePreemptiveExpandRate:
+      return "googPreemptiveExpandRate";
+    case kStatsValueNamePlisReceived:
+      return "googPlisReceived";
+    case kStatsValueNamePlisSent:
+      return "googPlisSent";
+    case kStatsValueNamePreferredJitterBufferMs:
+      return "googPreferredJitterBufferMs";
+    case kStatsValueNameReceiving:
+      return "googReadable";
+    case kStatsValueNameRemoteAddress:
+      return "googRemoteAddress";
+    case kStatsValueNameRemoteCandidateId:
+      return "remoteCandidateId";
+    case kStatsValueNameRemoteCandidateType:
+      return "googRemoteCandidateType";
+    case kStatsValueNameRemoteCertificateId:
+      return "remoteCertificateId";
+    case kStatsValueNameRetransmitBitrate:
+      return "googRetransmitBitrate";
+    case kStatsValueNameRtt:
+      return "googRtt";
+    case kStatsValueNameSecondaryDecodedRate:
+      return "googSecondaryDecodedRate";
+    case kStatsValueNameSendPacketsDiscarded:
+      return "packetsDiscardedOnSend";
+    case kStatsValueNameSpeechExpandRate:
+      return "googSpeechExpandRate";
+    case kStatsValueNameSrtpCipher:
+      return "srtpCipher";
+    case kStatsValueNameTargetEncBitrate:
+      return "googTargetEncBitrate";
+    case kStatsValueNameTransmitBitrate:
+      return "googTransmitBitrate";
+    case kStatsValueNameTransportType:
+      return "googTransportType";
+    case kStatsValueNameTrackId:
+      return "googTrackId";
+    case kStatsValueNameTypingNoiseState:
+      return "googTypingNoiseState";
+    case kStatsValueNameViewLimitedResolution:
+      return "googViewLimitedResolution";
+    case kStatsValueNameWritable:
+      return "googWritable";
+  }
+
+  return nullptr;
+}
+
+std::string StatsReport::Value::ToString() const {
+  switch (type_) {
+    case kInt:
+      return rtc::ToString(value_.int_);
+    case kInt64:
+      return rtc::ToString(value_.int64_);
+    case kFloat:
+      return rtc::ToString(value_.float_);
+    case kStaticString:
+      return std::string(value_.static_string_);
+    case kString:
+      return *value_.string_;
+    case kBool:
+      return value_.bool_ ? "true" : "false";
+    case kId:
+      return (*value_.id_)->ToString();
+  }
+  RTC_NOTREACHED();
+  return std::string();
+}
+
+StatsReport::StatsReport(const Id& id) : id_(id), timestamp_(0.0) {
+  RTC_DCHECK(id_.get());
+}
+
+// static
+StatsReport::Id StatsReport::NewBandwidthEstimationId() {
+  return Id(new RefCountedObject<BandwidthEstimationId>());
+}
+
+// static
+StatsReport::Id StatsReport::NewTypedId(StatsType type, const std::string& id) {
+  return Id(new RefCountedObject<TypedId>(type, id));
+}
+
+// static
+StatsReport::Id StatsReport::NewTypedIntId(StatsType type, int id) {
+  return Id(new RefCountedObject<TypedIntId>(type, id));
+}
+
+// static
+StatsReport::Id StatsReport::NewIdWithDirection(
+    StatsType type, const std::string& id, StatsReport::Direction direction) {
+  return Id(new RefCountedObject<IdWithDirection>(type, id, direction));
+}
+
+// static
+StatsReport::Id StatsReport::NewCandidateId(bool local, const std::string& id) {
+  return Id(new RefCountedObject<CandidateId>(local, id));
+}
+
+// static
+StatsReport::Id StatsReport::NewComponentId(
+    const std::string& content_name, int component) {
+  return Id(new RefCountedObject<ComponentId>(content_name, component));
+}
+
+// static
+StatsReport::Id StatsReport::NewCandidatePairId(
+    const std::string& content_name, int component, int index) {
+  return Id(new RefCountedObject<CandidatePairId>(
+      content_name, component, index));
+}
+
+const char* StatsReport::TypeToString() const {
+  return InternalTypeToString(id_->type());
+}
+
+void StatsReport::AddString(StatsReport::StatsValueName name,
+                            const std::string& value) {
+  const Value* found = FindValue(name);
+  if (!found || !(*found == value))
+    values_[name] = ValuePtr(new Value(name, value));
+}
+
+void StatsReport::AddString(StatsReport::StatsValueName name,
+                            const char* value) {
+  const Value* found = FindValue(name);
+  if (!found || !(*found == value))
+    values_[name] = ValuePtr(new Value(name, value));
+}
+
+void StatsReport::AddInt64(StatsReport::StatsValueName name, int64_t value) {
+  const Value* found = FindValue(name);
+  if (!found || !(*found == value))
+    values_[name] = ValuePtr(new Value(name, value, Value::kInt64));
+}
+
+void StatsReport::AddInt(StatsReport::StatsValueName name, int value) {
+  const Value* found = FindValue(name);
+  if (!found || !(*found == static_cast<int64_t>(value)))
+    values_[name] = ValuePtr(new Value(name, value, Value::kInt));
+}
+
+void StatsReport::AddFloat(StatsReport::StatsValueName name, float value) {
+  const Value* found = FindValue(name);
+  if (!found || !(*found == value))
+    values_[name] = ValuePtr(new Value(name, value));
+}
+
+void StatsReport::AddBoolean(StatsReport::StatsValueName name, bool value) {
+  const Value* found = FindValue(name);
+  if (!found || !(*found == value))
+    values_[name] = ValuePtr(new Value(name, value));
+}
+
+void StatsReport::AddId(StatsReport::StatsValueName name,
+                        const Id& value) {
+  const Value* found = FindValue(name);
+  if (!found || !(*found == value))
+    values_[name] = ValuePtr(new Value(name, value));
+}
+
+const StatsReport::Value* StatsReport::FindValue(StatsValueName name) const {
+  Values::const_iterator it = values_.find(name);
+  return it == values_.end() ? nullptr : it->second.get();
+}
+
+StatsCollection::StatsCollection() {
+}
+
+StatsCollection::~StatsCollection() {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  for (auto* r : list_)
+    delete r;
+}
+
+StatsCollection::const_iterator StatsCollection::begin() const {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  return list_.begin();
+}
+
+StatsCollection::const_iterator StatsCollection::end() const {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  return list_.end();
+}
+
+size_t StatsCollection::size() const {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  return list_.size();
+}
+
+StatsReport* StatsCollection::InsertNew(const StatsReport::Id& id) {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  RTC_DCHECK(Find(id) == nullptr);
+  StatsReport* report = new StatsReport(id);
+  list_.push_back(report);
+  return report;
+}
+
+StatsReport* StatsCollection::FindOrAddNew(const StatsReport::Id& id) {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  StatsReport* ret = Find(id);
+  return ret ? ret : InsertNew(id);
+}
+
+StatsReport* StatsCollection::ReplaceOrAddNew(const StatsReport::Id& id) {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  RTC_DCHECK(id.get());
+  Container::iterator it = std::find_if(list_.begin(), list_.end(),
+      [&id](const StatsReport* r)->bool { return r->id()->Equals(id); });
+  if (it != end()) {
+    StatsReport* report = new StatsReport((*it)->id());
+    delete *it;
+    *it = report;
+    return report;
+  }
+  return InsertNew(id);
+}
+
+// Looks for a report with the given |id|.  If one is not found, nullptr
+// is returned.
+StatsReport* StatsCollection::Find(const StatsReport::Id& id) {
+  RTC_DCHECK(thread_checker_.CalledOnValidThread());
+  Container::iterator it = std::find_if(list_.begin(), list_.end(),
+      [&id](const StatsReport* r)->bool { return r->id()->Equals(id); });
+  return it == list_.end() ? nullptr : *it;
+}
+
+}  // namespace webrtc
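Reviewer note: a minimal usage sketch of the API above (illustrative only,
not part of this CL; all names come from the moved statstypes files):

  webrtc::StatsCollection reports;
  webrtc::StatsReport* report = reports.ReplaceOrAddNew(
      webrtc::StatsReport::NewTypedId(
          webrtc::StatsReport::kStatsReportTypeSsrc, "1234"));
  report->AddInt64(webrtc::StatsReport::kStatsValueNameBytesSent, 42);
  // The Add* setters are no-ops when the stored value already equals the new
  // one, so repeated polling does not churn the underlying map.
  report->AddInt64(webrtc::StatsReport::kStatsValueNameBytesSent, 42);
  const webrtc::StatsReport::Value* v =
      report->FindValue(webrtc::StatsReport::kStatsValueNameBytesSent);
  // v->display_name() == "bytesSent"; v->int64_val() == 42.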
diff --git a/webrtc/api/statstypes.h b/webrtc/api/statstypes.h
new file mode 100644
index 0000000..753cba6
--- /dev/null
+++ b/webrtc/api/statstypes.h
@@ -0,0 +1,419 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains structures used for retrieving statistics from an ongoing
+// libjingle session.
+
+#ifndef WEBRTC_API_STATSTYPES_H_
+#define WEBRTC_API_STATSTYPES_H_
+
+#include <algorithm>
+#include <list>
+#include <map>
+#include <string>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/linked_ptr.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/stringencode.h"
+#include "webrtc/base/thread_checker.h"
+
+namespace webrtc {
+
+class StatsReport {
+ public:
+  // Indicates whether a track is for sending or receiving.
+  // Used in reports for audio/video tracks.
+  enum Direction {
+    kSend = 0,
+    kReceive,
+  };
+
+  enum StatsType {
+    // StatsReport types.
+    // A StatsReport of |type| = "googSession" contains overall information
+    // about the thing libjingle calls a session (which may contain one
+    // or more RTP sessions).
+    kStatsReportTypeSession,
+
+    // A StatsReport of |type| = "googTransport" contains information
+    // about a libjingle "transport".
+    kStatsReportTypeTransport,
+
+    // A StatsReport of |type| = "googComponent" contains information
+    // about a libjingle "channel" (typically, RTP or RTCP for a transport).
+    // This is intended to be the same thing as an ICE "Component".
+    kStatsReportTypeComponent,
+
+    // A StatsReport of |type| = "googCandidatePair" contains information
+    // about a libjingle "connection" - a single source/destination port pair.
+    // This is intended to be the same thing as an ICE "candidate pair".
+    kStatsReportTypeCandidatePair,
+
+    // A StatsReport of |type| = "VideoBWE" is statistics for video Bandwidth
+    // Estimation, which is global per-session.  The |id| field is "bweforvideo"
+    // (will probably change in the future).
+    kStatsReportTypeBwe,
+
+    // A StatsReport of |type| = "ssrc" is statistics for a specific rtp stream.
+    // The |id| field is the SSRC in decimal form of the rtp stream.
+    kStatsReportTypeSsrc,
+
+    // A StatsReport of |type| = "remoteSsrc" is statistics for a specific
+    // rtp stream, generated by the remote end of the connection.
+    kStatsReportTypeRemoteSsrc,
+
+    // A StatsReport of |type| = "googTrack" is statistics for a specific media
+    // track. The |id| field is the track id.
+    kStatsReportTypeTrack,
+
+    // A StatsReport of |type| = "localcandidate" or "remotecandidate" is
+    // attributes on a specific ICE Candidate. It links to its connection pair
+    // by candidate id. The string value is taken from
+    // http://w3c.github.io/webrtc-stats/#rtcstatstype-enum*.
+    kStatsReportTypeIceLocalCandidate,
+    kStatsReportTypeIceRemoteCandidate,
+
+    // A StatsReport of |type| = "googCertificate" contains an SSL certificate
+    // transmitted by one of the endpoints of this connection.  The |id| is
+    // controlled by the fingerprint, and is used to identify the certificate in
+    // the Channel stats (as "googLocalCertificateId" or
+    // "googRemoteCertificateId") and in any child certificates (as
+    // "googIssuerId").
+    kStatsReportTypeCertificate,
+
+    // A StatsReport of |type| = "datachannel" with statistics for a
+    // particular DataChannel.
+    kStatsReportTypeDataChannel,
+  };
+
+  enum StatsValueName {
+    kStatsValueNameActiveConnection,
+    kStatsValueNameAudioInputLevel,
+    kStatsValueNameAudioOutputLevel,
+    kStatsValueNameBytesReceived,
+    kStatsValueNameBytesSent,
+    kStatsValueNameCodecImplementationName,
+    kStatsValueNameDataChannelId,
+    kStatsValueNameMediaType,
+    kStatsValueNamePacketsLost,
+    kStatsValueNamePacketsReceived,
+    kStatsValueNamePacketsSent,
+    kStatsValueNameProtocol,
+    kStatsValueNameReceiving,
+    kStatsValueNameSelectedCandidatePairId,
+    kStatsValueNameSsrc,
+    kStatsValueNameState,
+    kStatsValueNameTransportId,
+
+    // Internal StatsValue names.
+    kStatsValueNameAccelerateRate,
+    kStatsValueNameActualEncBitrate,
+    kStatsValueNameAdaptationChanges,
+    kStatsValueNameAvailableReceiveBandwidth,
+    kStatsValueNameAvailableSendBandwidth,
+    kStatsValueNameAvgEncodeMs,
+    kStatsValueNameBandwidthLimitedResolution,
+    kStatsValueNameBucketDelay,
+    kStatsValueNameCaptureStartNtpTimeMs,
+    kStatsValueNameCandidateIPAddress,
+    kStatsValueNameCandidateNetworkType,
+    kStatsValueNameCandidatePortNumber,
+    kStatsValueNameCandidatePriority,
+    kStatsValueNameCandidateTransportType,
+    kStatsValueNameCandidateType,
+    kStatsValueNameChannelId,
+    kStatsValueNameCodecName,
+    kStatsValueNameComponent,
+    kStatsValueNameContentName,
+    kStatsValueNameCpuLimitedResolution,
+    kStatsValueNameCurrentDelayMs,
+    kStatsValueNameDecodeMs,
+    kStatsValueNameDecodingCNG,
+    kStatsValueNameDecodingCTN,
+    kStatsValueNameDecodingCTSG,
+    kStatsValueNameDecodingNormal,
+    kStatsValueNameDecodingPLC,
+    kStatsValueNameDecodingPLCCNG,
+    kStatsValueNameDer,
+    kStatsValueNameDtlsCipher,
+    kStatsValueNameEchoCancellationQualityMin,
+    kStatsValueNameEchoDelayMedian,
+    kStatsValueNameEchoDelayStdDev,
+    kStatsValueNameEchoReturnLoss,
+    kStatsValueNameEchoReturnLossEnhancement,
+    kStatsValueNameEncodeUsagePercent,
+    kStatsValueNameExpandRate,
+    kStatsValueNameFingerprint,
+    kStatsValueNameFingerprintAlgorithm,
+    kStatsValueNameFirsReceived,
+    kStatsValueNameFirsSent,
+    kStatsValueNameFrameHeightInput,
+    kStatsValueNameFrameHeightReceived,
+    kStatsValueNameFrameHeightSent,
+    kStatsValueNameFrameRateDecoded,
+    kStatsValueNameFrameRateInput,
+    kStatsValueNameFrameRateOutput,
+    kStatsValueNameFrameRateReceived,
+    kStatsValueNameFrameRateSent,
+    kStatsValueNameFrameWidthInput,
+    kStatsValueNameFrameWidthReceived,
+    kStatsValueNameFrameWidthSent,
+    kStatsValueNameInitiator,
+    kStatsValueNameIssuerId,
+    kStatsValueNameJitterBufferMs,
+    kStatsValueNameJitterReceived,
+    kStatsValueNameLabel,
+    kStatsValueNameLocalAddress,
+    kStatsValueNameLocalCandidateId,
+    kStatsValueNameLocalCandidateType,
+    kStatsValueNameLocalCertificateId,
+    kStatsValueNameMaxDecodeMs,
+    kStatsValueNameMinPlayoutDelayMs,
+    kStatsValueNameNacksReceived,
+    kStatsValueNameNacksSent,
+    kStatsValueNamePlisReceived,
+    kStatsValueNamePlisSent,
+    kStatsValueNamePreemptiveExpandRate,
+    kStatsValueNamePreferredJitterBufferMs,
+    kStatsValueNameRemoteAddress,
+    kStatsValueNameRemoteCandidateId,
+    kStatsValueNameRemoteCandidateType,
+    kStatsValueNameRemoteCertificateId,
+    kStatsValueNameRenderDelayMs,
+    kStatsValueNameRetransmitBitrate,
+    kStatsValueNameRtt,
+    kStatsValueNameSecondaryDecodedRate,
+    kStatsValueNameSendPacketsDiscarded,
+    kStatsValueNameSpeechExpandRate,
+    kStatsValueNameSrtpCipher,
+    kStatsValueNameTargetDelayMs,
+    kStatsValueNameTargetEncBitrate,
+    kStatsValueNameTrackId,
+    kStatsValueNameTransmitBitrate,
+    kStatsValueNameTransportType,
+    kStatsValueNameTypingNoiseState,
+    kStatsValueNameViewLimitedResolution,
+    kStatsValueNameWritable,
+  };
+
+  class IdBase : public rtc::RefCountInterface {
+   public:
+    ~IdBase() override;
+    StatsType type() const;
+
+    // Users of IdBase will be using the Id typedef, which is compatible with
+    // this Equals() function.  It simply calls the protected (and overridden)
+    // Equals() method.
+    bool Equals(const rtc::scoped_refptr<IdBase>& other) const {
+      return Equals(*other.get());
+    }
+
+    virtual std::string ToString() const = 0;
+
+   protected:
+    // Protected since users of the IdBase type will be using the Id typedef.
+    virtual bool Equals(const IdBase& other) const;
+
+    IdBase(StatsType type);  // Only meant for derived classes.
+    const StatsType type_;
+
+    static const char kSeparator = '_';
+  };
+
+  typedef rtc::scoped_refptr<IdBase> Id;
+
+  struct Value {
+    enum Type {
+      kInt,           // int.
+      kInt64,         // int64_t.
+      kFloat,         // float.
+      kString,        // std::string
+      kStaticString,  // const char*.
+      kBool,          // bool.
+      kId,            // Id.
+    };
+
+    Value(StatsValueName name, int64_t value, Type int_type);
+    Value(StatsValueName name, float f);
+    Value(StatsValueName name, const std::string& value);
+    Value(StatsValueName name, const char* value);
+    Value(StatsValueName name, bool b);
+    Value(StatsValueName name, const Id& value);
+
+    ~Value();
+
+    // TODO(tommi): This compares name as well as value...
+    // I think we should only need to compare the value part and
+    // move the name part into a hash map.
+    bool Equals(const Value& other) const;
+
+    // Comparison operators. Return true iff the current instance is of the
+    // correct type and holds the same value.  No conversion is performed so
+    // a string value of "123" is not equal to an int value of 123 and an int
+    // value of 123 is not equal to a float value of 123.0f.
+    // The exceptions are that kInt and kInt64 values can be compared with
+    // each other, as can kString and kStaticString values.
+    bool operator==(const std::string& value) const;
+    bool operator==(const char* value) const;
+    bool operator==(int64_t value) const;
+    bool operator==(bool value) const;
+    bool operator==(float value) const;
+    bool operator==(const Id& value) const;
+
+    // Getters that allow getting the native value directly.
+    // The caller must know the type beforehand or else hit a check.
+    int int_val() const;
+    int64_t int64_val() const;
+    float float_val() const;
+    const char* static_string_val() const;
+    const std::string& string_val() const;
+    bool bool_val() const;
+    const Id& id_val() const;
+
+    // Returns the string representation of |name|.
+    const char* display_name() const;
+
+    // Converts the native value to a string representation of the value.
+    std::string ToString() const;
+
+    Type type() const { return type_; }
+
+    // TODO(tommi): Move |name| and |display_name| out of the Value struct.
+    const StatsValueName name;
+
+   private:
+    const Type type_;
+    // TODO(tommi): Use a C++11 union and make value_ const.
+    union InternalType {
+      int int_;
+      int64_t int64_;
+      float float_;
+      bool bool_;
+      std::string* string_;
+      const char* static_string_;
+      Id* id_;
+    } value_;
+
+
+    RTC_DISALLOW_COPY_AND_ASSIGN(Value);
+  };
+
+  // TODO(tommi): Consider using a similar approach to how we store Ids using
+  // scoped_refptr for values.
+  typedef rtc::linked_ptr<Value> ValuePtr;
+  typedef std::map<StatsValueName, ValuePtr> Values;
+
+  // The report takes a reference to |id| (Id is reference counted).
+  explicit StatsReport(const Id& id);
+
+  // Factory functions for various types of stats IDs.
+  static Id NewBandwidthEstimationId();
+  static Id NewTypedId(StatsType type, const std::string& id);
+  static Id NewTypedIntId(StatsType type, int id);
+  static Id NewIdWithDirection(
+      StatsType type, const std::string& id, Direction direction);
+  static Id NewCandidateId(bool local, const std::string& id);
+  static Id NewComponentId(
+      const std::string& content_name, int component);
+  static Id NewCandidatePairId(
+      const std::string& content_name, int component, int index);
+
+  const Id& id() const { return id_; }
+  StatsType type() const { return id_->type(); }
+  double timestamp() const { return timestamp_; }
+  void set_timestamp(double t) { timestamp_ = t; }
+  bool empty() const { return values_.empty(); }
+  const Values& values() const { return values_; }
+
+  const char* TypeToString() const;
+
+  void AddString(StatsValueName name, const std::string& value);
+  void AddString(StatsValueName name, const char* value);
+  void AddInt64(StatsValueName name, int64_t value);
+  void AddInt(StatsValueName name, int value);
+  void AddFloat(StatsValueName name, float value);
+  void AddBoolean(StatsValueName name, bool value);
+  void AddId(StatsValueName name, const Id& value);
+
+  const Value* FindValue(StatsValueName name) const;
+
+ private:
+  // The unique identifier for this object.
+  // This is used as a key for this report in ordered containers,
+  // so it must never be changed.
+  const Id id_;
+  double timestamp_;  // Time since 1970-01-01T00:00:00Z in milliseconds.
+  Values values_;
+
+  RTC_DISALLOW_COPY_AND_ASSIGN(StatsReport);
+};
+
+// Typedef for a vector of const StatsReport pointers.
+// Ownership of the pointers is assumed to lie elsewhere; the code that uses
+// this type must guarantee their lifetime.  In the StatsCollector, object
+// ownership lies with the StatsCollection class.
+typedef std::vector<const StatsReport*> StatsReports;
+
+// A map from the report id to the report.
+// This class wraps an STL container and provides a limited set of
+// functionality in order to keep things simple.
+class StatsCollection {
+ public:
+  StatsCollection();
+  ~StatsCollection();
+
+  typedef std::list<StatsReport*> Container;
+  typedef Container::iterator iterator;
+  typedef Container::const_iterator const_iterator;
+
+  const_iterator begin() const;
+  const_iterator end() const;
+  size_t size() const;
+
+  // Creates a new report object with |id| that does not already
+  // exist in the list of reports.
+  StatsReport* InsertNew(const StatsReport::Id& id);
+  StatsReport* FindOrAddNew(const StatsReport::Id& id);
+  StatsReport* ReplaceOrAddNew(const StatsReport::Id& id);
+
+  // Looks for a report with the given |id|.  If one is not found, nullptr
+  // is returned.
+  StatsReport* Find(const StatsReport::Id& id);
+
+ private:
+  Container list_;
+  rtc::ThreadChecker thread_checker_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_STATSTYPES_H_
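Reviewer note: the comparison rules documented on Value::operator== above can
be illustrated as follows (sketch only, not part of this CL):

  using webrtc::StatsReport;
  // Built via the int64_t constructor but tagged kInt.
  StatsReport::Value v(StatsReport::kStatsValueNamePacketsSent, 123,
                       StatsReport::Value::kInt);
  bool a = (v == static_cast<int64_t>(123));  // true: kInt and kInt64 compare.
  bool b = (v == 123.0f);                     // false: no numeric conversion.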
diff --git a/webrtc/api/streamcollection.h b/webrtc/api/streamcollection.h
new file mode 100644
index 0000000..fc9a891
--- /dev/null
+++ b/webrtc/api/streamcollection.h
@@ -0,0 +1,125 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_STREAMCOLLECTION_H_
+#define WEBRTC_API_STREAMCOLLECTION_H_
+
+#include <string>
+#include <vector>
+
+#include "webrtc/api/peerconnectioninterface.h"
+
+namespace webrtc {
+
+// Implementation of StreamCollection.
+class StreamCollection : public StreamCollectionInterface {
+ public:
+  static rtc::scoped_refptr<StreamCollection> Create() {
+    rtc::RefCountedObject<StreamCollection>* implementation =
+         new rtc::RefCountedObject<StreamCollection>();
+    return implementation;
+  }
+
+  static rtc::scoped_refptr<StreamCollection> Create(
+      StreamCollection* streams) {
+    rtc::RefCountedObject<StreamCollection>* implementation =
+         new rtc::RefCountedObject<StreamCollection>(streams);
+    return implementation;
+  }
+
+  virtual size_t count() {
+    return media_streams_.size();
+  }
+
+  virtual MediaStreamInterface* at(size_t index) {
+    return media_streams_.at(index);
+  }
+
+  virtual MediaStreamInterface* find(const std::string& label) {
+    for (StreamVector::iterator it = media_streams_.begin();
+         it != media_streams_.end(); ++it) {
+      if ((*it)->label().compare(label) == 0) {
+        return (*it);
+      }
+    }
+    return NULL;
+  }
+
+  virtual MediaStreamTrackInterface* FindAudioTrack(
+      const std::string& id) {
+    for (size_t i = 0; i < media_streams_.size(); ++i) {
+      MediaStreamTrackInterface* track = media_streams_[i]->FindAudioTrack(id);
+      if (track) {
+        return track;
+      }
+    }
+    return NULL;
+  }
+
+  virtual MediaStreamTrackInterface* FindVideoTrack(
+      const std::string& id) {
+    for (size_t i = 0; i < media_streams_.size(); ++i) {
+      MediaStreamTrackInterface* track = media_streams_[i]->FindVideoTrack(id);
+      if (track) {
+        return track;
+      }
+    }
+    return NULL;
+  }
+
+  void AddStream(MediaStreamInterface* stream) {
+    for (StreamVector::iterator it = media_streams_.begin();
+         it != media_streams_.end(); ++it) {
+      if ((*it)->label().compare(stream->label()) == 0)
+        return;
+    }
+    media_streams_.push_back(stream);
+  }
+
+  void RemoveStream(MediaStreamInterface* remove_stream) {
+    for (StreamVector::iterator it = media_streams_.begin();
+         it != media_streams_.end(); ++it) {
+      if ((*it)->label().compare(remove_stream->label()) == 0) {
+        media_streams_.erase(it);
+        break;
+      }
+    }
+  }
+
+ protected:
+  StreamCollection() {}
+  explicit StreamCollection(StreamCollection* original)
+      : media_streams_(original->media_streams_) {
+  }
+  typedef std::vector<rtc::scoped_refptr<MediaStreamInterface> >
+      StreamVector;
+  StreamVector media_streams_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_STREAMCOLLECTION_H_
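Reviewer note: a minimal sketch of how StreamCollection is driven
(illustrative only; |stream| is assumed to be a MediaStreamInterface*
obtained elsewhere, e.g. from a PeerConnection factory):

  rtc::scoped_refptr<webrtc::StreamCollection> streams(
      webrtc::StreamCollection::Create());
  streams->AddStream(stream);          // De-duplicates on the stream label.
  webrtc::MediaStreamInterface* found =
      streams->find(stream->label());  // Returns NULL when there is no match.
  streams->RemoveStream(stream);       // Also matches on the stream label.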
diff --git a/webrtc/api/test/DEPS b/webrtc/api/test/DEPS
new file mode 100644
index 0000000..a814b15
--- /dev/null
+++ b/webrtc/api/test/DEPS
@@ -0,0 +1,5 @@
+include_rules = [
+  # Allow inclusion of Chromium's base/android headers, which are needed
+  # for accessing the JVM and the application context in gtest.
+  "+base/android",
+]
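Reviewer note: the rule above is what permits includes such as the following
in the test code under this directory (anything else under Chromium's base/
remains disallowed):

  #include "base/android/jni_android.h"  // OK: matches "+base/android".
  // #include "base/logging.h"           // Would violate the DEPS rule.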
diff --git a/webrtc/api/test/androidtestinitializer.cc b/webrtc/api/test/androidtestinitializer.cc
new file mode 100644
index 0000000..17118c0
--- /dev/null
+++ b/webrtc/api/test/androidtestinitializer.cc
@@ -0,0 +1,74 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/test/androidtestinitializer.h"
+
+#include <pthread.h>
+
+// Note: this dependency is dangerous since it reaches into Chromium's base.
+// There is a risk of e.g. macro clashes. This file may only be used in tests.
+// Since we use Chromium's build system for creating the gtest binary, this
+// should be fine.
+#include "base/android/context_utils.h"
+#include "base/android/jni_android.h"
+
+#include "webrtc/api/java/jni/classreferenceholder.h"
+#include "webrtc/api/java/jni/jni_helpers.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/ssladapter.h"
+#include "webrtc/voice_engine/include/voe_base.h"
+
+namespace webrtc {
+
+namespace {
+
+static pthread_once_t g_initialize_once = PTHREAD_ONCE_INIT;
+
+// There can only be one JNI_OnLoad in each binary. So since this is a gtest
+// C++ runner binary, we want to initialize the same global objects we
+// normally would if this had been a Java binary.
+void EnsureInitializedOnce() {
+  RTC_CHECK(::base::android::IsVMInitialized());
+  JNIEnv* jni = ::base::android::AttachCurrentThread();
+  JavaVM* jvm = NULL;
+  RTC_CHECK_EQ(0, jni->GetJavaVM(&jvm));
+  jobject context = ::base::android::GetApplicationContext();
+
+  RTC_CHECK_GE(webrtc_jni::InitGlobalJniVariables(jvm), 0);
+  RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()";
+  webrtc_jni::LoadGlobalClassReferenceHolder();
+
+  webrtc::VoiceEngine::SetAndroidObjects(jvm, context);
+}
+
+}  // anonymous namespace
+
+void InitializeAndroidObjects() {
+  RTC_CHECK_EQ(0, pthread_once(&g_initialize_once, &EnsureInitializedOnce));
+}
+
+}  // namespace webrtc
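Reviewer note: a sketch of the intended call site (assumed wiring; include
paths follow the WebRTC gtest convention). pthread_once makes repeated calls
harmless, so every Android test can simply call the initializer:

  #include "testing/gtest/include/gtest/gtest.h"
  #include "webrtc/api/test/androidtestinitializer.h"

  TEST(AndroidInitializerTest, IsIdempotent) {
    webrtc::InitializeAndroidObjects();
    webrtc::InitializeAndroidObjects();  // No-op; already initialized.
  }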
diff --git a/webrtc/api/test/androidtestinitializer.h b/webrtc/api/test/androidtestinitializer.h
new file mode 100644
index 0000000..2d178ac
--- /dev/null
+++ b/webrtc/api/test/androidtestinitializer.h
@@ -0,0 +1,37 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_TEST_ANDROIDTESTINITIALIZER_H_
+#define WEBRTC_API_TEST_ANDROIDTESTINITIALIZER_H_
+
+namespace webrtc {
+
+void InitializeAndroidObjects();
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_TEST_ANDROIDTESTINITIALIZER_H_
diff --git a/webrtc/api/test/fakeaudiocapturemodule.cc b/webrtc/api/test/fakeaudiocapturemodule.cc
new file mode 100644
index 0000000..2dfa267
--- /dev/null
+++ b/webrtc/api/test/fakeaudiocapturemodule.cc
@@ -0,0 +1,744 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/test/fakeaudiocapturemodule.h"
+
+#include "webrtc/base/common.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/timeutils.h"
+
+// Audio sample value that is high enough that it doesn't occur naturally when
+// frames are being faked. E.g. NetEq will not generate this large sample value
+// unless it has received an audio frame containing a sample of this value.
+// Simpler fake buffers would likely just contain audio sample values of 0.
+static const int kHighSampleValue = 10000;
+
+// Same value as src/modules/audio_device/main/source/audio_device_config.h in
+// https://code.google.com/p/webrtc/
+static const uint32_t kAdmMaxIdleTimeProcess = 1000;
+
+// Constants here are derived by running VoE using a real ADM.
+// The constants correspond to 10ms of mono audio at 44kHz.
+static const int kTimePerFrameMs = 10;
+static const uint8_t kNumberOfChannels = 1;
+static const int kSamplesPerSecond = 44000;
+static const int kTotalDelayMs = 0;
+static const int kClockDriftMs = 0;
+static const uint32_t kMaxVolume = 14392;
+
+enum {
+  MSG_START_PROCESS,
+  MSG_RUN_PROCESS,
+};
+
+FakeAudioCaptureModule::FakeAudioCaptureModule()
+    : last_process_time_ms_(0),
+      audio_callback_(nullptr),
+      recording_(false),
+      playing_(false),
+      play_is_initialized_(false),
+      rec_is_initialized_(false),
+      current_mic_level_(kMaxVolume),
+      started_(false),
+      next_frame_time_(0),
+      frames_received_(0) {
+}
+
+FakeAudioCaptureModule::~FakeAudioCaptureModule() {
+  if (process_thread_) {
+    process_thread_->Stop();
+  }
+}
+
+rtc::scoped_refptr<FakeAudioCaptureModule> FakeAudioCaptureModule::Create() {
+  rtc::scoped_refptr<FakeAudioCaptureModule> capture_module(
+      new rtc::RefCountedObject<FakeAudioCaptureModule>());
+  if (!capture_module->Initialize()) {
+    return nullptr;
+  }
+  return capture_module;
+}
+
+int FakeAudioCaptureModule::frames_received() const {
+  rtc::CritScope cs(&crit_);
+  return frames_received_;
+}
+
+int64_t FakeAudioCaptureModule::TimeUntilNextProcess() {
+  const uint32_t current_time = rtc::Time();
+  if (current_time < last_process_time_ms_) {
+    // TODO: wraparound could be handled more gracefully.
+    return 0;
+  }
+  const uint32_t elapsed_time = current_time - last_process_time_ms_;
+  if (kAdmMaxIdleTimeProcess < elapsed_time) {
+    return 0;
+  }
+  return kAdmMaxIdleTimeProcess - elapsed_time;
+}
+
+int32_t FakeAudioCaptureModule::Process() {
+  last_process_time_ms_ = rtc::Time();
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::ActiveAudioLayer(
+    AudioLayer* /*audio_layer*/) const {
+  ASSERT(false);
+  return 0;
+}
+
+webrtc::AudioDeviceModule::ErrorCode FakeAudioCaptureModule::LastError() const {
+  ASSERT(false);
+  return webrtc::AudioDeviceModule::kAdmErrNone;
+}
+
+int32_t FakeAudioCaptureModule::RegisterEventObserver(
+    webrtc::AudioDeviceObserver* /*event_callback*/) {
+  // Only used to report warnings and errors. This fake implementation won't
+  // generate any, so discard this callback.
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::RegisterAudioCallback(
+    webrtc::AudioTransport* audio_callback) {
+  rtc::CritScope cs(&crit_callback_);
+  audio_callback_ = audio_callback;
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::Init() {
+  // Initialize is called by the factory method. Safe to ignore this Init call.
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::Terminate() {
+  // Clean up in the destructor. No action here, just success.
+  return 0;
+}
+
+bool FakeAudioCaptureModule::Initialized() const {
+  ASSERT(false);
+  return 0;
+}
+
+int16_t FakeAudioCaptureModule::PlayoutDevices() {
+  ASSERT(false);
+  return 0;
+}
+
+int16_t FakeAudioCaptureModule::RecordingDevices() {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::PlayoutDeviceName(
+    uint16_t /*index*/,
+    char /*name*/[webrtc::kAdmMaxDeviceNameSize],
+    char /*guid*/[webrtc::kAdmMaxGuidSize]) {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::RecordingDeviceName(
+    uint16_t /*index*/,
+    char /*name*/[webrtc::kAdmMaxDeviceNameSize],
+    char /*guid*/[webrtc::kAdmMaxGuidSize]) {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetPlayoutDevice(uint16_t /*index*/) {
+  // No playout device, just playing from file. Return success.
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetPlayoutDevice(WindowsDeviceType /*device*/) {
+  if (play_is_initialized_) {
+    return -1;
+  }
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetRecordingDevice(uint16_t /*index*/) {
+  // No recording device, just dropping audio. Return success.
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetRecordingDevice(
+    WindowsDeviceType /*device*/) {
+  if (rec_is_initialized_) {
+    return -1;
+  }
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::PlayoutIsAvailable(bool* /*available*/) {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::InitPlayout() {
+  play_is_initialized_ = true;
+  return 0;
+}
+
+bool FakeAudioCaptureModule::PlayoutIsInitialized() const {
+  return play_is_initialized_;
+}
+
+int32_t FakeAudioCaptureModule::RecordingIsAvailable(bool* /*available*/) {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::InitRecording() {
+  rec_is_initialized_ = true;
+  return 0;
+}
+
+bool FakeAudioCaptureModule::RecordingIsInitialized() const {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::StartPlayout() {
+  if (!play_is_initialized_) {
+    return -1;
+  }
+  {
+    rtc::CritScope cs(&crit_);
+    playing_ = true;
+  }
+  bool start = true;
+  UpdateProcessing(start);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::StopPlayout() {
+  bool start = false;
+  {
+    rtc::CritScope cs(&crit_);
+    playing_ = false;
+    start = ShouldStartProcessing();
+  }
+  UpdateProcessing(start);
+  return 0;
+}
+
+bool FakeAudioCaptureModule::Playing() const {
+  rtc::CritScope cs(&crit_);
+  return playing_;
+}
+
+int32_t FakeAudioCaptureModule::StartRecording() {
+  if (!rec_is_initialized_) {
+    return -1;
+  }
+  {
+    rtc::CritScope cs(&crit_);
+    recording_ = true;
+  }
+  bool start = true;
+  UpdateProcessing(start);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::StopRecording() {
+  bool start = false;
+  {
+    rtc::CritScope cs(&crit_);
+    recording_ = false;
+    start = ShouldStartProcessing();
+  }
+  UpdateProcessing(start);
+  return 0;
+}
+
+bool FakeAudioCaptureModule::Recording() const {
+  rtc::CritScope cs(&crit_);
+  return recording_;
+}
+
+int32_t FakeAudioCaptureModule::SetAGC(bool /*enable*/) {
+  // No AGC, but it's not needed since audio is pregenerated. Return success.
+  return 0;
+}
+
+bool FakeAudioCaptureModule::AGC() const {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetWaveOutVolume(uint16_t /*volume_left*/,
+                                                 uint16_t /*volume_right*/) {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::WaveOutVolume(
+    uint16_t* /*volume_left*/,
+    uint16_t* /*volume_right*/) const {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::InitSpeaker() {
+  // No speaker, just playing from file. Return success.
+  return 0;
+}
+
+bool FakeAudioCaptureModule::SpeakerIsInitialized() const {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::InitMicrophone() {
+  // No microphone, just playing from file. Return success.
+  return 0;
+}
+
+bool FakeAudioCaptureModule::MicrophoneIsInitialized() const {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::SpeakerVolumeIsAvailable(bool* /*available*/) {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetSpeakerVolume(uint32_t /*volume*/) {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::SpeakerVolume(uint32_t* /*volume*/) const {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::MaxSpeakerVolume(
+    uint32_t* /*max_volume*/) const {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::MinSpeakerVolume(
+    uint32_t* /*min_volume*/) const {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::SpeakerVolumeStepSize(
+    uint16_t* /*step_size*/) const {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::MicrophoneVolumeIsAvailable(
+    bool* /*available*/) {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetMicrophoneVolume(uint32_t volume) {
+  rtc::CritScope cs(&crit_);
+  current_mic_level_ = volume;
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::MicrophoneVolume(uint32_t* volume) const {
+  rtc::CritScope cs(&crit_);
+  *volume = current_mic_level_;
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::MaxMicrophoneVolume(
+    uint32_t* max_volume) const {
+  *max_volume = kMaxVolume;
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::MinMicrophoneVolume(
+    uint32_t* /*min_volume*/) const {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::MicrophoneVolumeStepSize(
+    uint16_t* /*step_size*/) const {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::SpeakerMuteIsAvailable(bool* /*available*/) {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetSpeakerMute(bool /*enable*/) {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::SpeakerMute(bool* /*enabled*/) const {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::MicrophoneMuteIsAvailable(bool* /*available*/) {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetMicrophoneMute(bool /*enable*/) {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::MicrophoneMute(bool* /*enabled*/) const {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::MicrophoneBoostIsAvailable(
+    bool* /*available*/) {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetMicrophoneBoost(bool /*enable*/) {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::MicrophoneBoost(bool* /*enabled*/) const {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::StereoPlayoutIsAvailable(
+    bool* available) const {
+  // No playout device, just dropping audio. Stereo can be dropped just
+  // as easily as mono.
+  *available = true;
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetStereoPlayout(bool /*enable*/) {
+  // No playout device, just dropping audio. Stereo can be dropped just
+  // as easily as mono.
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::StereoPlayout(bool* /*enabled*/) const {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::StereoRecordingIsAvailable(
+    bool* available) const {
+  // Keep things simple. No stereo recording.
+  *available = false;
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetStereoRecording(bool enable) {
+  if (!enable) {
+    return 0;
+  }
+  return -1;
+}
+
+int32_t FakeAudioCaptureModule::StereoRecording(bool* /*enabled*/) const {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetRecordingChannel(
+    const ChannelType channel) {
+  if (channel != AudioDeviceModule::kChannelBoth) {
+    // There is no right or left channel in mono, so kChannelBoth should be
+    // used.
+    ASSERT(false);
+    return -1;
+  }
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::RecordingChannel(ChannelType* channel) const {
+  // Stereo recording not supported. However, WebRTC ADM returns kChannelBoth
+  // in that case. Do the same here.
+  *channel = AudioDeviceModule::kChannelBoth;
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetPlayoutBuffer(const BufferType /*type*/,
+                                                 uint16_t /*size_ms*/) {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::PlayoutBuffer(BufferType* /*type*/,
+                                              uint16_t* /*size_ms*/) const {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::PlayoutDelay(uint16_t* delay_ms) const {
+  // No delay since audio frames are dropped.
+  *delay_ms = 0;
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::RecordingDelay(uint16_t* /*delay_ms*/) const {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::CPULoad(uint16_t* /*load*/) const {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::StartRawOutputFileRecording(
+    const char /*pcm_file_name_utf8*/[webrtc::kAdmMaxFileNameSize]) {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::StopRawOutputFileRecording() {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::StartRawInputFileRecording(
+    const char /*pcm_file_name_utf8*/[webrtc::kAdmMaxFileNameSize]) {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::StopRawInputFileRecording() {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetRecordingSampleRate(
+    const uint32_t /*samples_per_sec*/) {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::RecordingSampleRate(
+    uint32_t* /*samples_per_sec*/) const {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetPlayoutSampleRate(
+    const uint32_t /*samples_per_sec*/) {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::PlayoutSampleRate(
+    uint32_t* /*samples_per_sec*/) const {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::ResetAudioDevice() {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::SetLoudspeakerStatus(bool /*enable*/) {
+  ASSERT(false);
+  return 0;
+}
+
+int32_t FakeAudioCaptureModule::GetLoudspeakerStatus(bool* /*enabled*/) const {
+  ASSERT(false);
+  return 0;
+}
+
+void FakeAudioCaptureModule::OnMessage(rtc::Message* msg) {
+  switch (msg->message_id) {
+    case MSG_START_PROCESS:
+      StartProcessP();
+      break;
+    case MSG_RUN_PROCESS:
+      ProcessFrameP();
+      break;
+    default:
+      // All existing messages should be caught. Getting here should never
+      // happen.
+      ASSERT(false);
+  }
+}
+
+bool FakeAudioCaptureModule::Initialize() {
+  // Set the send buffer samples to a value high enough that it would not
+  // occur on the remote side unless a packet containing a sample of that
+  // magnitude has been sent to it. Note that the audio processing pipeline
+  // will likely distort the original signal.
+  SetSendBuffer(kHighSampleValue);
+  last_process_time_ms_ = rtc::Time();
+  return true;
+}
+
+void FakeAudioCaptureModule::SetSendBuffer(int value) {
+  Sample* buffer_ptr = reinterpret_cast<Sample*>(send_buffer_);
+  const size_t buffer_size_in_samples =
+      sizeof(send_buffer_) / kNumberBytesPerSample;
+  for (size_t i = 0; i < buffer_size_in_samples; ++i) {
+    buffer_ptr[i] = value;
+  }
+}
+
+void FakeAudioCaptureModule::ResetRecBuffer() {
+  memset(rec_buffer_, 0, sizeof(rec_buffer_));
+}
+
+bool FakeAudioCaptureModule::CheckRecBuffer(int value) {
+  const Sample* buffer_ptr = reinterpret_cast<const Sample*>(rec_buffer_);
+  const size_t buffer_size_in_samples =
+      sizeof(rec_buffer_) / kNumberBytesPerSample;
+  for (size_t i = 0; i < buffer_size_in_samples; ++i) {
+    if (buffer_ptr[i] >= value) return true;
+  }
+  return false;
+}
+
+bool FakeAudioCaptureModule::ShouldStartProcessing() {
+  return recording_ || playing_;
+}
+
+void FakeAudioCaptureModule::UpdateProcessing(bool start) {
+  if (start) {
+    if (!process_thread_) {
+      process_thread_.reset(new rtc::Thread());
+      process_thread_->Start();
+    }
+    process_thread_->Post(this, MSG_START_PROCESS);
+  } else {
+    if (process_thread_) {
+      process_thread_->Stop();
+      process_thread_.reset(nullptr);
+    }
+    started_ = false;
+  }
+}
+
+void FakeAudioCaptureModule::StartProcessP() {
+  ASSERT(process_thread_->IsCurrent());
+  if (started_) {
+    // Already started.
+    return;
+  }
+  ProcessFrameP();
+}
+
+void FakeAudioCaptureModule::ProcessFrameP() {
+  ASSERT(process_thread_->IsCurrent());
+  if (!started_) {
+    next_frame_time_ = rtc::Time();
+    started_ = true;
+  }
+
+  {
+    rtc::CritScope cs(&crit_);
+    // Receive and send frames every kTimePerFrameMs.
+    if (playing_) {
+      ReceiveFrameP();
+    }
+    if (recording_) {
+      SendFrameP();
+    }
+  }
+
+  next_frame_time_ += kTimePerFrameMs;
+  const uint32_t current_time = rtc::Time();
+  const uint32_t wait_time =
+      (next_frame_time_ > current_time) ? next_frame_time_ - current_time : 0;
+  process_thread_->PostDelayed(wait_time, this, MSG_RUN_PROCESS);
+}
+
+void FakeAudioCaptureModule::ReceiveFrameP() {
+  ASSERT(process_thread_->IsCurrent());
+  {
+    rtc::CritScope cs(&crit_callback_);
+    if (!audio_callback_) {
+      return;
+    }
+    ResetRecBuffer();
+    size_t nSamplesOut = 0;
+    int64_t elapsed_time_ms = 0;
+    int64_t ntp_time_ms = 0;
+    if (audio_callback_->NeedMorePlayData(kNumberSamples, kNumberBytesPerSample,
+                                         kNumberOfChannels, kSamplesPerSecond,
+                                         rec_buffer_, nSamplesOut,
+                                         &elapsed_time_ms, &ntp_time_ms) != 0) {
+      ASSERT(false);
+    }
+    ASSERT(nSamplesOut == kNumberSamples);
+  }
+  // SetSendBuffer() on the sending side ensures that, after decoding, the
+  // audio buffer should contain samples of similar magnitude (there is likely
+  // to be some distortion due to the audio pipeline). If one sample with the
+  // same or greater magnitude is detected somewhere in the frame, an actual
+  // frame has been received from the remote side (i.e. faked frames are not
+  // being pulled).
+  if (CheckRecBuffer(kHighSampleValue)) {
+    rtc::CritScope cs(&crit_);
+    ++frames_received_;
+  }
+}
+
+void FakeAudioCaptureModule::SendFrameP() {
+  ASSERT(process_thread_->IsCurrent());
+  rtc::CritScope cs(&crit_callback_);
+  if (!audio_callback_) {
+    return;
+  }
+  bool key_pressed = false;
+  uint32_t current_mic_level = 0;
+  MicrophoneVolume(&current_mic_level);
+  if (audio_callback_->RecordedDataIsAvailable(send_buffer_, kNumberSamples,
+                                              kNumberBytesPerSample,
+                                              kNumberOfChannels,
+                                              kSamplesPerSecond, kTotalDelayMs,
+                                              kClockDriftMs, current_mic_level,
+                                              key_pressed,
+                                              current_mic_level) != 0) {
+    ASSERT(false);
+  }
+  SetMicrophoneVolume(current_mic_level);
+}
+
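A note on the pacing in ProcessFrameP() above: next_frame_time_ advances by a
fixed step rather than by "now + step", so a late wakeup simply shortens the
next wait and no timing drift accumulates. A minimal, self-contained sketch of
that idea follows; NowMs() and FramePacer are illustrative stand-ins for
rtc::Time() and the module's scheduling state, not part of its API.

  #include <chrono>
  #include <cstdint>

  // Illustrative stand-in for rtc::Time(): milliseconds on a monotonic clock.
  static uint32_t NowMs() {
    using namespace std::chrono;
    return static_cast<uint32_t>(
        duration_cast<milliseconds>(steady_clock::now().time_since_epoch())
            .count());
  }

  struct FramePacer {
    static const uint32_t kTimePerFrameMs = 10;  // mirrors kTimePerFrameMs
    uint32_t next_frame_time_ms = NowMs();

    // Returns how long to wait before the next frame. Because the deadline
    // advances by a fixed step, a late wakeup yields a shorter (possibly
    // zero) wait on the next iteration instead of delaying all later frames.
    uint32_t AdvanceAndGetWaitMs() {
      next_frame_time_ms += kTimePerFrameMs;
      const uint32_t now = NowMs();
      return next_frame_time_ms > now ? next_frame_time_ms - now : 0;
    }
  };

  int main() {
    FramePacer pacer;
    return pacer.AdvanceAndGetWaitMs() <= 10 ? 0 : 1;
  }
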
diff --git a/webrtc/api/test/fakeaudiocapturemodule.h b/webrtc/api/test/fakeaudiocapturemodule.h
new file mode 100644
index 0000000..315c251
--- /dev/null
+++ b/webrtc/api/test/fakeaudiocapturemodule.h
@@ -0,0 +1,287 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This class implements an AudioCaptureModule that can be used to detect if
+// audio is being received properly, when it is fed by another
+// AudioCaptureModule in some arbitrary audio pipeline that connects the two.
+// It neither plays out nor records any real audio, so it does not need access
+// to any hardware and can therefore be used in the gtest testing framework.
+
+// Note: a "P" postfix on a function name indicates that the function should
+// only be called by the processing thread.
+
+#ifndef WEBRTC_API_TEST_FAKEAUDIOCAPTUREMODULE_H_
+#define WEBRTC_API_TEST_FAKEAUDIOCAPTUREMODULE_H_
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/messagehandler.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/audio_device/include/audio_device.h"
+
+namespace rtc {
+class Thread;
+}  // namespace rtc
+
+class FakeAudioCaptureModule
+    : public webrtc::AudioDeviceModule,
+      public rtc::MessageHandler {
+ public:
+  typedef int16_t Sample;
+
+  // The values for the following constants were derived by running VoE
+  // using a real ADM. The constants correspond to 10 ms of mono audio at
+  // 44 kHz.
+  static const size_t kNumberSamples = 440;
+  static const size_t kNumberBytesPerSample = sizeof(Sample);
+
+  // Creates a FakeAudioCaptureModule or returns NULL on failure.
+  static rtc::scoped_refptr<FakeAudioCaptureModule> Create();
+
+  // Returns the number of frames that have been successfully pulled by the
+  // instance. Note that correctly detecting success can only be done if the
+  // pulled frame was generated/pushed from a FakeAudioCaptureModule.
+  int frames_received() const;
+
+  // The following functions are inherited from webrtc::AudioDeviceModule.
+  // Only the functions called by PeerConnection are implemented; the rest do
+  // nothing and return success. If a function that is not expected to be
+  // called by PeerConnection is called anyway, an assertion is triggered.
+  int64_t TimeUntilNextProcess() override;
+  int32_t Process() override;
+
+  int32_t ActiveAudioLayer(AudioLayer* audio_layer) const override;
+
+  ErrorCode LastError() const override;
+  int32_t RegisterEventObserver(
+      webrtc::AudioDeviceObserver* event_callback) override;
+
+  // Note: Calling this method from a callback may result in deadlock.
+  int32_t RegisterAudioCallback(
+      webrtc::AudioTransport* audio_callback) override;
+
+  int32_t Init() override;
+  int32_t Terminate() override;
+  bool Initialized() const override;
+
+  int16_t PlayoutDevices() override;
+  int16_t RecordingDevices() override;
+  int32_t PlayoutDeviceName(uint16_t index,
+                            char name[webrtc::kAdmMaxDeviceNameSize],
+                            char guid[webrtc::kAdmMaxGuidSize]) override;
+  int32_t RecordingDeviceName(uint16_t index,
+                              char name[webrtc::kAdmMaxDeviceNameSize],
+                              char guid[webrtc::kAdmMaxGuidSize]) override;
+
+  int32_t SetPlayoutDevice(uint16_t index) override;
+  int32_t SetPlayoutDevice(WindowsDeviceType device) override;
+  int32_t SetRecordingDevice(uint16_t index) override;
+  int32_t SetRecordingDevice(WindowsDeviceType device) override;
+
+  int32_t PlayoutIsAvailable(bool* available) override;
+  int32_t InitPlayout() override;
+  bool PlayoutIsInitialized() const override;
+  int32_t RecordingIsAvailable(bool* available) override;
+  int32_t InitRecording() override;
+  bool RecordingIsInitialized() const override;
+
+  int32_t StartPlayout() override;
+  int32_t StopPlayout() override;
+  bool Playing() const override;
+  int32_t StartRecording() override;
+  int32_t StopRecording() override;
+  bool Recording() const override;
+
+  int32_t SetAGC(bool enable) override;
+  bool AGC() const override;
+
+  int32_t SetWaveOutVolume(uint16_t volume_left,
+                           uint16_t volume_right) override;
+  int32_t WaveOutVolume(uint16_t* volume_left,
+                        uint16_t* volume_right) const override;
+
+  int32_t InitSpeaker() override;
+  bool SpeakerIsInitialized() const override;
+  int32_t InitMicrophone() override;
+  bool MicrophoneIsInitialized() const override;
+
+  int32_t SpeakerVolumeIsAvailable(bool* available) override;
+  int32_t SetSpeakerVolume(uint32_t volume) override;
+  int32_t SpeakerVolume(uint32_t* volume) const override;
+  int32_t MaxSpeakerVolume(uint32_t* max_volume) const override;
+  int32_t MinSpeakerVolume(uint32_t* min_volume) const override;
+  int32_t SpeakerVolumeStepSize(uint16_t* step_size) const override;
+
+  int32_t MicrophoneVolumeIsAvailable(bool* available) override;
+  int32_t SetMicrophoneVolume(uint32_t volume) override;
+  int32_t MicrophoneVolume(uint32_t* volume) const override;
+  int32_t MaxMicrophoneVolume(uint32_t* max_volume) const override;
+
+  int32_t MinMicrophoneVolume(uint32_t* min_volume) const override;
+  int32_t MicrophoneVolumeStepSize(uint16_t* step_size) const override;
+
+  int32_t SpeakerMuteIsAvailable(bool* available) override;
+  int32_t SetSpeakerMute(bool enable) override;
+  int32_t SpeakerMute(bool* enabled) const override;
+
+  int32_t MicrophoneMuteIsAvailable(bool* available) override;
+  int32_t SetMicrophoneMute(bool enable) override;
+  int32_t MicrophoneMute(bool* enabled) const override;
+
+  int32_t MicrophoneBoostIsAvailable(bool* available) override;
+  int32_t SetMicrophoneBoost(bool enable) override;
+  int32_t MicrophoneBoost(bool* enabled) const override;
+
+  int32_t StereoPlayoutIsAvailable(bool* available) const override;
+  int32_t SetStereoPlayout(bool enable) override;
+  int32_t StereoPlayout(bool* enabled) const override;
+  int32_t StereoRecordingIsAvailable(bool* available) const override;
+  int32_t SetStereoRecording(bool enable) override;
+  int32_t StereoRecording(bool* enabled) const override;
+  int32_t SetRecordingChannel(const ChannelType channel) override;
+  int32_t RecordingChannel(ChannelType* channel) const override;
+
+  int32_t SetPlayoutBuffer(const BufferType type,
+                           uint16_t size_ms = 0) override;
+  int32_t PlayoutBuffer(BufferType* type, uint16_t* size_ms) const override;
+  int32_t PlayoutDelay(uint16_t* delay_ms) const override;
+  int32_t RecordingDelay(uint16_t* delay_ms) const override;
+
+  int32_t CPULoad(uint16_t* load) const override;
+
+  int32_t StartRawOutputFileRecording(
+      const char pcm_file_name_utf8[webrtc::kAdmMaxFileNameSize]) override;
+  int32_t StopRawOutputFileRecording() override;
+  int32_t StartRawInputFileRecording(
+      const char pcm_file_name_utf8[webrtc::kAdmMaxFileNameSize]) override;
+  int32_t StopRawInputFileRecording() override;
+
+  int32_t SetRecordingSampleRate(const uint32_t samples_per_sec) override;
+  int32_t RecordingSampleRate(uint32_t* samples_per_sec) const override;
+  int32_t SetPlayoutSampleRate(const uint32_t samples_per_sec) override;
+  int32_t PlayoutSampleRate(uint32_t* samples_per_sec) const override;
+
+  int32_t ResetAudioDevice() override;
+  int32_t SetLoudspeakerStatus(bool enable) override;
+  int32_t GetLoudspeakerStatus(bool* enabled) const override;
+  virtual bool BuiltInAECIsAvailable() const { return false; }
+  virtual int32_t EnableBuiltInAEC(bool enable) { return -1; }
+  virtual bool BuiltInAGCIsAvailable() const { return false; }
+  virtual int32_t EnableBuiltInAGC(bool enable) { return -1; }
+  virtual bool BuiltInNSIsAvailable() const { return false; }
+  virtual int32_t EnableBuiltInNS(bool enable) { return -1; }
+  // End of functions inherited from webrtc::AudioDeviceModule.
+
+  // The following function is inherited from rtc::MessageHandler.
+  void OnMessage(rtc::Message* msg) override;
+
+ protected:
+  // The constructor is protected because the class needs to be created as a
+  // reference-counted object (for memory management reasons). It could be
+  // exposed, in which case the burden of proper instantiation would fall on
+  // the creator of a FakeAudioCaptureModule instance. To create an instance
+  // of this class, use the Create() API.
+  explicit FakeAudioCaptureModule();
+  // The destructor is protected because it is reference counted and should not
+  // be deleted directly.
+  virtual ~FakeAudioCaptureModule();
+
+ private:
+  // Initializes the state of the FakeAudioCaptureModule. This API is called on
+  // creation by the Create() API.
+  bool Initialize();
+  // SetSendBuffer() sets all samples in send_buffer_ to |value|.
+  void SetSendBuffer(int value);
+  // Resets rec_buffer_. I.e., sets all rec_buffer_ samples to 0.
+  void ResetRecBuffer();
+  // Returns true if rec_buffer_ contains one or more samples greater than
+  // or equal to |value|.
+  bool CheckRecBuffer(int value);
+
+  // Returns true if recording or playback has been enabled/started.
+  bool ShouldStartProcessing();
+
+  // Starts or stops the pushing and pulling of audio frames.
+  void UpdateProcessing(bool start);
+
+  // Starts the periodic calling of ProcessFrame() in a thread safe way.
+  void StartProcessP();
+  // Periodically called function that ensures frames are pulled and pushed
+  // if enabled/started.
+  void ProcessFrameP();
+  // Pulls frames from the registered webrtc::AudioTransport.
+  void ReceiveFrameP();
+  // Pushes frames to the registered webrtc::AudioTransport.
+  void SendFrameP();
+
+  // The time in milliseconds when Process() was last called or 0 if no call
+  // has been made.
+  uint32_t last_process_time_ms_;
+
+  // Callback for playout and recording.
+  webrtc::AudioTransport* audio_callback_;
+
+  bool recording_;  // True when audio is being pushed from the instance.
+  bool playing_;  // True when audio is being pulled by the instance.
+
+  bool play_is_initialized_;  // True when the instance is ready to pull audio.
+  bool rec_is_initialized_;  // True when the instance is ready to push audio.
+
+  // Input to and output from RecordedDataIsAvailable(..) makes it possible to
+  // modify the current mic level. The implementation does not care about the
+  // mic level so it just feeds back what it receives.
+  uint32_t current_mic_level_;
+
+  // next_frame_time_ is updated in a non-drifting manner to indicate the
+  // wall-clock time at which the next frame should be generated and received.
+  // started_ ensures that next_frame_time_ can be initialized properly on the
+  // first call.
+  bool started_;
+  uint32_t next_frame_time_;
+
+  rtc::scoped_ptr<rtc::Thread> process_thread_;
+
+  // Buffer for storing samples received from the webrtc::AudioTransport.
+  char rec_buffer_[kNumberSamples * kNumberBytesPerSample];
+  // Buffer for samples to send to the webrtc::AudioTransport.
+  char send_buffer_[kNumberSamples * kNumberBytesPerSample];
+
+  // Counter of frames received that have samples of high enough amplitude to
+  // indicate that the frames are not faked somewhere in the audio pipeline
+  // (e.g. by a jitter buffer).
+  int frames_received_;
+
+  // Protects variables that are accessed from process_thread_ and
+  // the main thread.
+  rtc::CriticalSection crit_;
+  // Protects |audio_callback_| that is accessed from process_thread_ and
+  // the main thread.
+  rtc::CriticalSection crit_callback_;
+};
+
+#endif  // WEBRTC_API_TEST_FAKEAUDIOCAPTUREMODULE_H_
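The detection scheme described in the comments above, in one self-contained
sketch: the sender fills every outgoing sample with a known high magnitude,
and a received frame counts as genuine if any decoded sample reaches that
magnitude. The marker value and names below are illustrative only; the real
kHighSampleValue lives in the .cc file.

  #include <array>
  #include <cassert>
  #include <cstdint>

  using Sample = int16_t;
  constexpr Sample kMarker = 10000;       // illustrative, like kHighSampleValue
  using Frame = std::array<Sample, 440>;  // 10 ms of mono audio at 44 kHz

  Frame MakeSendFrame() {
    Frame f;
    f.fill(kMarker);  // every sample carries the marker magnitude
    return f;
  }

  // True if the frame plausibly came from MakeSendFrame(): the pipeline may
  // distort samples, but at least one should stay at or above the marker.
  bool LooksLikeRealFrame(const Frame& f) {
    for (Sample s : f) {
      if (s >= kMarker) return true;
    }
    return false;
  }

  int main() {
    assert(LooksLikeRealFrame(MakeSendFrame()));
    Frame silence{};  // all zeros, as after ResetRecBuffer()
    assert(!LooksLikeRealFrame(silence));
    return 0;
  }
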
diff --git a/webrtc/api/test/fakeaudiocapturemodule_unittest.cc b/webrtc/api/test/fakeaudiocapturemodule_unittest.cc
new file mode 100644
index 0000000..b95d2d7
--- /dev/null
+++ b/webrtc/api/test/fakeaudiocapturemodule_unittest.cc
@@ -0,0 +1,216 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/test/fakeaudiocapturemodule.h"
+
+#include <algorithm>
+
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/thread.h"
+
+using std::min;
+
+class FakeAdmTest : public testing::Test,
+                    public webrtc::AudioTransport {
+ protected:
+  static const int kMsInSecond = 1000;
+
+  FakeAdmTest()
+      : push_iterations_(0),
+        pull_iterations_(0),
+        rec_buffer_bytes_(0) {
+    memset(rec_buffer_, 0, sizeof(rec_buffer_));
+  }
+
+  virtual void SetUp() {
+    fake_audio_capture_module_ = FakeAudioCaptureModule::Create();
+    EXPECT_TRUE(fake_audio_capture_module_.get() != NULL);
+  }
+
+  // Callbacks inherited from webrtc::AudioTransport.
+  // ADM is pushing data.
+  int32_t RecordedDataIsAvailable(const void* audioSamples,
+                                  const size_t nSamples,
+                                  const size_t nBytesPerSample,
+                                  const size_t nChannels,
+                                  const uint32_t samplesPerSec,
+                                  const uint32_t totalDelayMS,
+                                  const int32_t clockDrift,
+                                  const uint32_t currentMicLevel,
+                                  const bool keyPressed,
+                                  uint32_t& newMicLevel) override {
+    rtc::CritScope cs(&crit_);
+    rec_buffer_bytes_ = nSamples * nBytesPerSample;
+    if ((rec_buffer_bytes_ == 0) ||
+        (rec_buffer_bytes_ > FakeAudioCaptureModule::kNumberSamples *
+         FakeAudioCaptureModule::kNumberBytesPerSample)) {
+      ADD_FAILURE();
+      return -1;
+    }
+    memcpy(rec_buffer_, audioSamples, rec_buffer_bytes_);
+    ++push_iterations_;
+    newMicLevel = currentMicLevel;
+    return 0;
+  }
+
+  // ADM is pulling data.
+  int32_t NeedMorePlayData(const size_t nSamples,
+                           const size_t nBytesPerSample,
+                           const size_t nChannels,
+                           const uint32_t samplesPerSec,
+                           void* audioSamples,
+                           size_t& nSamplesOut,
+                           int64_t* elapsed_time_ms,
+                           int64_t* ntp_time_ms) override {
+    rtc::CritScope cs(&crit_);
+    ++pull_iterations_;
+    const size_t audio_buffer_size = nSamples * nBytesPerSample;
+    const size_t bytes_out = RecordedDataReceived() ?
+        CopyFromRecBuffer(audioSamples, audio_buffer_size) :
+        GenerateZeroBuffer(audioSamples, audio_buffer_size);
+    nSamplesOut = bytes_out / nBytesPerSample;
+    *elapsed_time_ms = 0;
+    *ntp_time_ms = 0;
+    return 0;
+  }
+
+  int push_iterations() const {
+    rtc::CritScope cs(&crit_);
+    return push_iterations_;
+  }
+  int pull_iterations() const {
+    rtc::CritScope cs(&crit_);
+    return pull_iterations_;
+  }
+
+  rtc::scoped_refptr<FakeAudioCaptureModule> fake_audio_capture_module_;
+
+ private:
+  bool RecordedDataReceived() const {
+    return rec_buffer_bytes_ != 0;
+  }
+  size_t GenerateZeroBuffer(void* audio_buffer, size_t audio_buffer_size) {
+    memset(audio_buffer, 0, audio_buffer_size);
+    return audio_buffer_size;
+  }
+  size_t CopyFromRecBuffer(void* audio_buffer, size_t audio_buffer_size) {
+    EXPECT_EQ(audio_buffer_size, rec_buffer_bytes_);
+    const size_t min_buffer_size = min(audio_buffer_size, rec_buffer_bytes_);
+    memcpy(audio_buffer, rec_buffer_, min_buffer_size);
+    return min_buffer_size;
+  }
+
+  rtc::CriticalSection crit_;
+
+  int push_iterations_;
+  int pull_iterations_;
+
+  char rec_buffer_[FakeAudioCaptureModule::kNumberSamples *
+                   FakeAudioCaptureModule::kNumberBytesPerSample];
+  size_t rec_buffer_bytes_;
+};
+
+TEST_F(FakeAdmTest, TestProcess) {
+  // The next Process() call must be scheduled some time in the future
+  // (or now).
+  EXPECT_LE(0, fake_audio_capture_module_->TimeUntilNextProcess());
+  // A Process() call updates TimeUntilNextProcess(), but there are no
+  // guarantees on timing, so just check that Process() can be called
+  // successfully.
+  EXPECT_LE(0, fake_audio_capture_module_->Process());
+}
+
+TEST_F(FakeAdmTest, PlayoutTest) {
+  EXPECT_EQ(0, fake_audio_capture_module_->RegisterAudioCallback(this));
+
+  bool stereo_available = false;
+  EXPECT_EQ(0,
+            fake_audio_capture_module_->StereoPlayoutIsAvailable(
+                &stereo_available));
+  EXPECT_TRUE(stereo_available);
+
+  EXPECT_NE(0, fake_audio_capture_module_->StartPlayout());
+  EXPECT_FALSE(fake_audio_capture_module_->PlayoutIsInitialized());
+  EXPECT_FALSE(fake_audio_capture_module_->Playing());
+  EXPECT_EQ(0, fake_audio_capture_module_->StopPlayout());
+
+  EXPECT_EQ(0, fake_audio_capture_module_->InitPlayout());
+  EXPECT_TRUE(fake_audio_capture_module_->PlayoutIsInitialized());
+  EXPECT_FALSE(fake_audio_capture_module_->Playing());
+
+  EXPECT_EQ(0, fake_audio_capture_module_->StartPlayout());
+  EXPECT_TRUE(fake_audio_capture_module_->Playing());
+
+  uint16_t delay_ms = 10;
+  EXPECT_EQ(0, fake_audio_capture_module_->PlayoutDelay(&delay_ms));
+  EXPECT_EQ(0, delay_ms);
+
+  EXPECT_TRUE_WAIT(pull_iterations() > 0, kMsInSecond);
+  EXPECT_GE(0, push_iterations());
+
+  EXPECT_EQ(0, fake_audio_capture_module_->StopPlayout());
+  EXPECT_FALSE(fake_audio_capture_module_->Playing());
+}
+
+TEST_F(FakeAdmTest, RecordTest) {
+  EXPECT_EQ(0, fake_audio_capture_module_->RegisterAudioCallback(this));
+
+  bool stereo_available = false;
+  EXPECT_EQ(0, fake_audio_capture_module_->StereoRecordingIsAvailable(
+      &stereo_available));
+  EXPECT_FALSE(stereo_available);
+
+  EXPECT_NE(0, fake_audio_capture_module_->StartRecording());
+  EXPECT_FALSE(fake_audio_capture_module_->Recording());
+  EXPECT_EQ(0, fake_audio_capture_module_->StopRecording());
+
+  EXPECT_EQ(0, fake_audio_capture_module_->InitRecording());
+  EXPECT_EQ(0, fake_audio_capture_module_->StartRecording());
+  EXPECT_TRUE(fake_audio_capture_module_->Recording());
+
+  EXPECT_TRUE_WAIT(push_iterations() > 0, kMsInSecond);
+  EXPECT_GE(0, pull_iterations());
+
+  EXPECT_EQ(0, fake_audio_capture_module_->StopRecording());
+  EXPECT_FALSE(fake_audio_capture_module_->Recording());
+}
+
+TEST_F(FakeAdmTest, DuplexTest) {
+  EXPECT_EQ(0, fake_audio_capture_module_->RegisterAudioCallback(this));
+
+  EXPECT_EQ(0, fake_audio_capture_module_->InitPlayout());
+  EXPECT_EQ(0, fake_audio_capture_module_->StartPlayout());
+
+  EXPECT_EQ(0, fake_audio_capture_module_->InitRecording());
+  EXPECT_EQ(0, fake_audio_capture_module_->StartRecording());
+
+  EXPECT_TRUE_WAIT(push_iterations() > 0, kMsInSecond);
+  EXPECT_TRUE_WAIT(pull_iterations() > 0, kMsInSecond);
+
+  EXPECT_EQ(0, fake_audio_capture_module_->StopPlayout());
+  EXPECT_EQ(0, fake_audio_capture_module_->StopRecording());
+}
diff --git a/webrtc/api/test/fakeconstraints.h b/webrtc/api/test/fakeconstraints.h
new file mode 100644
index 0000000..155e5ea
--- /dev/null
+++ b/webrtc/api/test/fakeconstraints.h
@@ -0,0 +1,133 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_TEST_FAKECONSTRAINTS_H_
+#define WEBRTC_API_TEST_FAKECONSTRAINTS_H_
+
+#include <string>
+#include <vector>
+
+#include "webrtc/api/mediaconstraintsinterface.h"
+#include "webrtc/base/stringencode.h"
+
+namespace webrtc {
+
+class FakeConstraints : public webrtc::MediaConstraintsInterface {
+ public:
+  FakeConstraints() { }
+  virtual ~FakeConstraints() { }
+
+  virtual const Constraints& GetMandatory() const {
+    return mandatory_;
+  }
+
+  virtual const Constraints& GetOptional() const {
+    return optional_;
+  }
+
+  template <class T>
+  void AddMandatory(const std::string& key, const T& value) {
+    mandatory_.push_back(Constraint(key, rtc::ToString<T>(value)));
+  }
+
+  template <class T>
+  void SetMandatory(const std::string& key, const T& value) {
+    std::string value_str;
+    if (mandatory_.FindFirst(key, &value_str)) {
+      for (Constraints::iterator iter = mandatory_.begin();
+           iter != mandatory_.end(); ++iter) {
+        if (iter->key == key) {
+          mandatory_.erase(iter);
+          break;
+        }
+      }
+    }
+    mandatory_.push_back(Constraint(key, rtc::ToString<T>(value)));
+  }
+
+  template <class T>
+  void AddOptional(const std::string& key, const T& value) {
+    optional_.push_back(Constraint(key, rtc::ToString<T>(value)));
+  }
+
+  void SetMandatoryMinAspectRatio(double ratio) {
+    SetMandatory(MediaConstraintsInterface::kMinAspectRatio, ratio);
+  }
+
+  void SetMandatoryMinWidth(int width) {
+    SetMandatory(MediaConstraintsInterface::kMinWidth, width);
+  }
+
+  void SetMandatoryMinHeight(int height) {
+    SetMandatory(MediaConstraintsInterface::kMinHeight, height);
+  }
+
+  void SetOptionalMaxWidth(int width) {
+    AddOptional(MediaConstraintsInterface::kMaxWidth, width);
+  }
+
+  void SetMandatoryMaxFrameRate(int frame_rate) {
+    SetMandatory(MediaConstraintsInterface::kMaxFrameRate, frame_rate);
+  }
+
+  void SetMandatoryReceiveAudio(bool enable) {
+    SetMandatory(MediaConstraintsInterface::kOfferToReceiveAudio, enable);
+  }
+
+  void SetMandatoryReceiveVideo(bool enable) {
+    SetMandatory(MediaConstraintsInterface::kOfferToReceiveVideo, enable);
+  }
+
+  void SetMandatoryUseRtpMux(bool enable) {
+    SetMandatory(MediaConstraintsInterface::kUseRtpMux, enable);
+  }
+
+  void SetMandatoryIceRestart(bool enable) {
+    SetMandatory(MediaConstraintsInterface::kIceRestart, enable);
+  }
+
+  void SetAllowRtpDataChannels() {
+    SetMandatory(MediaConstraintsInterface::kEnableRtpDataChannels, true);
+    SetMandatory(MediaConstraintsInterface::kEnableDtlsSrtp, false);
+  }
+
+  void SetOptionalVAD(bool enable) {
+    AddOptional(MediaConstraintsInterface::kVoiceActivityDetection, enable);
+  }
+
+  void SetAllowDtlsSctpDataChannels() {
+    SetMandatory(MediaConstraintsInterface::kEnableDtlsSrtp, true);
+  }
+
+ private:
+  Constraints mandatory_;
+  Constraints optional_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_TEST_FAKECONSTRAINTS_H_
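One behavioral detail worth noting: AddMandatory() appends unconditionally,
while SetMandatory() first erases any existing constraint with the same key.
A minimal model of that replace logic, using a plain std::vector in place of
the Constraints type:

  #include <cassert>
  #include <string>
  #include <vector>

  struct Constraint {
    std::string key;
    std::string value;
  };

  // Mirrors FakeConstraints::SetMandatory(): drop the old entry for |key|,
  // if any, then append the new one.
  void SetMandatory(std::vector<Constraint>* list,
                    const std::string& key,
                    const std::string& value) {
    for (auto it = list->begin(); it != list->end(); ++it) {
      if (it->key == key) {
        list->erase(it);
        break;
      }
    }
    list->push_back({key, value});
  }

  int main() {
    std::vector<Constraint> mandatory;
    SetMandatory(&mandatory, "OfferToReceiveAudio", "true");
    SetMandatory(&mandatory, "OfferToReceiveAudio", "false");  // replaces
    assert(mandatory.size() == 1);
    assert(mandatory[0].value == "false");
    return 0;
  }
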
diff --git a/webrtc/api/test/fakedatachannelprovider.h b/webrtc/api/test/fakedatachannelprovider.h
new file mode 100644
index 0000000..32c2b52
--- /dev/null
+++ b/webrtc/api/test/fakedatachannelprovider.h
@@ -0,0 +1,161 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_TEST_FAKEDATACHANNELPROVIDER_H_
+#define WEBRTC_API_TEST_FAKEDATACHANNELPROVIDER_H_
+
+#include "webrtc/api/datachannel.h"
+
+class FakeDataChannelProvider : public webrtc::DataChannelProviderInterface {
+ public:
+  FakeDataChannelProvider()
+      : send_blocked_(false),
+        transport_available_(false),
+        ready_to_send_(false),
+        transport_error_(false) {}
+  virtual ~FakeDataChannelProvider() {}
+
+  bool SendData(const cricket::SendDataParams& params,
+                const rtc::Buffer& payload,
+                cricket::SendDataResult* result) override {
+    ASSERT(ready_to_send_ && transport_available_);
+    if (send_blocked_) {
+      *result = cricket::SDR_BLOCK;
+      return false;
+    }
+
+    if (transport_error_ || payload.size() == 0) {
+      *result = cricket::SDR_ERROR;
+      return false;
+    }
+
+    last_send_data_params_ = params;
+    return true;
+  }
+
+  bool ConnectDataChannel(webrtc::DataChannel* data_channel) override {
+    ASSERT(connected_channels_.find(data_channel) == connected_channels_.end());
+    if (!transport_available_) {
+      return false;
+    }
+    LOG(LS_INFO) << "DataChannel connected " << data_channel;
+    connected_channels_.insert(data_channel);
+    return true;
+  }
+
+  void DisconnectDataChannel(webrtc::DataChannel* data_channel) override {
+    ASSERT(connected_channels_.find(data_channel) != connected_channels_.end());
+    LOG(LS_INFO) << "DataChannel disconnected " << data_channel;
+    connected_channels_.erase(data_channel);
+  }
+
+  void AddSctpDataStream(int sid) override {
+    ASSERT(sid >= 0);
+    if (!transport_available_) {
+      return;
+    }
+    send_ssrcs_.insert(sid);
+    recv_ssrcs_.insert(sid);
+  }
+
+  void RemoveSctpDataStream(int sid) override {
+    ASSERT(sid >= 0);
+    send_ssrcs_.erase(sid);
+    recv_ssrcs_.erase(sid);
+  }
+
+  bool ReadyToSendData() const override { return ready_to_send_; }
+
+  // Set true to emulate the SCTP stream being blocked by congestion control.
+  void set_send_blocked(bool blocked) {
+    send_blocked_ = blocked;
+    if (!blocked) {
+      // Take a snapshot of the connected channels and check to see whether
+      // each value is still in connected_channels_ before calling
+      // OnChannelReady().  This avoids problems where the set gets modified
+      // in response to OnChannelReady().
+      for (webrtc::DataChannel *ch : std::set<webrtc::DataChannel*>(
+               connected_channels_.begin(), connected_channels_.end())) {
+        if (connected_channels_.count(ch)) {
+          ch->OnChannelReady(true);
+        }
+      }
+    }
+  }
+
+  // Set true to emulate creation of the transport channel, e.g. after
+  // setLocalDescription/setRemoteDescription has been called with data
+  // content.
+  void set_transport_available(bool available) {
+    transport_available_ = available;
+  }
+
+  // Set true to emulate the transport ReadyToSendData signal when the transport
+  // becomes writable for the first time.
+  void set_ready_to_send(bool ready) {
+    ASSERT(transport_available_);
+    ready_to_send_ = ready;
+    if (ready) {
+      std::set<webrtc::DataChannel*>::iterator it;
+      for (it = connected_channels_.begin();
+           it != connected_channels_.end();
+           ++it) {
+        (*it)->OnChannelReady(true);
+      }
+    }
+  }
+
+  void set_transport_error() {
+    transport_error_ = true;
+  }
+
+  cricket::SendDataParams last_send_data_params() const {
+    return last_send_data_params_;
+  }
+
+  bool IsConnected(webrtc::DataChannel* data_channel) const {
+    return connected_channels_.find(data_channel) != connected_channels_.end();
+  }
+
+  bool IsSendStreamAdded(uint32_t stream) const {
+    return send_ssrcs_.find(stream) != send_ssrcs_.end();
+  }
+
+  bool IsRecvStreamAdded(uint32_t stream) const {
+    return recv_ssrcs_.find(stream) != recv_ssrcs_.end();
+  }
+
+ private:
+  cricket::SendDataParams last_send_data_params_;
+  bool send_blocked_;
+  bool transport_available_;
+  bool ready_to_send_;
+  bool transport_error_;
+  std::set<webrtc::DataChannel*> connected_channels_;
+  std::set<uint32_t> send_ssrcs_;
+  std::set<uint32_t> recv_ssrcs_;
+};
+#endif  // WEBRTC_API_TEST_FAKEDATACHANNELPROVIDER_H_
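The snapshot-then-check loop in set_send_blocked() deserves a closer look:
OnChannelReady() may re-enter the provider and mutate connected_channels_, so
the code iterates over a copy and re-checks membership against the live set
before each callback. A self-contained model of that idiom, with ints standing
in for DataChannel pointers:

  #include <cassert>
  #include <set>

  int main() {
    std::set<int> live = {1, 2, 3};
    std::set<int> fired;

    // Iterate a snapshot so the "callback" may mutate |live| safely.
    for (int ch : std::set<int>(live)) {
      if (!live.count(ch))
        continue;  // removed by an earlier callback; skip it
      // The callback: channel 1 reentrantly disconnects channel 2.
      fired.insert(ch);
      if (ch == 1) live.erase(2);
    }

    assert(fired.count(1) == 1);
    assert(fired.count(2) == 0);  // 2 was removed before its turn
    assert(fired.count(3) == 1);
    return 0;
  }
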
diff --git a/webrtc/api/test/fakedtlsidentitystore.h b/webrtc/api/test/fakedtlsidentitystore.h
new file mode 100644
index 0000000..404e2ae
--- /dev/null
+++ b/webrtc/api/test/fakedtlsidentitystore.h
@@ -0,0 +1,184 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_TEST_FAKEDTLSIDENTITYSTORE_H_
+#define WEBRTC_API_TEST_FAKEDTLSIDENTITYSTORE_H_
+
+#include <string>
+#include <utility>
+
+#include "webrtc/api/dtlsidentitystore.h"
+#include "webrtc/api/peerconnectioninterface.h"
+#include "webrtc/base/rtccertificate.h"
+
+static const struct {
+  const char* rsa_private_key_pem;
+  const char* cert_pem;
+} kKeysAndCerts[] = {
+    {"-----BEGIN RSA PRIVATE KEY-----\n"
+     "MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAMYRkbhmI7kVA/rM\n"
+     "czsZ+6JDhDvnkF+vn6yCAGuRPV03zuRqZtDy4N4to7PZu9PjqrRl7nDMXrG3YG9y\n"
+     "rlIAZ72KjcKKFAJxQyAKLCIdawKRyp8RdK3LEySWEZb0AV58IadqPZDTNHHRX8dz\n"
+     "5aTSMsbbkZ+C/OzTnbiMqLL/vg6jAgMBAAECgYAvgOs4FJcgvp+TuREx7YtiYVsH\n"
+     "mwQPTum2z/8VzWGwR8BBHBvIpVe1MbD/Y4seyI2aco/7UaisatSgJhsU46/9Y4fq\n"
+     "2TwXH9QANf4at4d9n/R6rzwpAJOpgwZgKvdQjkfrKTtgLV+/dawvpxUYkRH4JZM1\n"
+     "CVGukMfKNrSVH4Ap4QJBAOJmGV1ASPnB4r4nc99at7JuIJmd7fmuVUwUgYi4XgaR\n"
+     "WhScBsgYwZ/JoywdyZJgnbcrTDuVcWG56B3vXbhdpMsCQQDf9zeJrjnPZ3Cqm79y\n"
+     "kdqANep0uwZciiNiWxsQrCHztywOvbFhdp8iYVFG9EK8DMY41Y5TxUwsHD+67zao\n"
+     "ZNqJAkEA1suLUP/GvL8IwuRneQd2tWDqqRQ/Td3qq03hP7e77XtF/buya3Ghclo5\n"
+     "54czUR89QyVfJEC6278nzA7n2h1uVQJAcG6mztNL6ja/dKZjYZye2CY44QjSlLo0\n"
+     "MTgTSjdfg/28fFn2Jjtqf9Pi/X+50LWI/RcYMC2no606wRk9kyOuIQJBAK6VSAim\n"
+     "1pOEjsYQn0X5KEIrz1G3bfCbB848Ime3U2/FWlCHMr6ch8kCZ5d1WUeJD3LbwMNG\n"
+     "UCXiYxSsu20QNVw=\n"
+     "-----END RSA PRIVATE KEY-----\n",
+     "-----BEGIN CERTIFICATE-----\n"
+     "MIIBmTCCAQKgAwIBAgIEbzBSAjANBgkqhkiG9w0BAQsFADARMQ8wDQYDVQQDEwZX\n"
+     "ZWJSVEMwHhcNMTQwMTAyMTgyNDQ3WhcNMTQwMjAxMTgyNDQ3WjARMQ8wDQYDVQQD\n"
+     "EwZXZWJSVEMwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAMYRkbhmI7kVA/rM\n"
+     "czsZ+6JDhDvnkF+vn6yCAGuRPV03zuRqZtDy4N4to7PZu9PjqrRl7nDMXrG3YG9y\n"
+     "rlIAZ72KjcKKFAJxQyAKLCIdawKRyp8RdK3LEySWEZb0AV58IadqPZDTNHHRX8dz\n"
+     "5aTSMsbbkZ+C/OzTnbiMqLL/vg6jAgMBAAEwDQYJKoZIhvcNAQELBQADgYEAUflI\n"
+     "VUe5Krqf5RVa5C3u/UTAOAUJBiDS3VANTCLBxjuMsvqOG0WvaYWP3HYPgrz0jXK2\n"
+     "LJE/mGw3MyFHEqi81jh95J+ypl6xKW6Rm8jKLR87gUvCaVYn/Z4/P3AqcQTB7wOv\n"
+     "UD0A8qfhfDM+LK6rPAnCsVN0NRDY3jvd6rzix9M=\n"
+     "-----END CERTIFICATE-----\n"},
+    {"-----BEGIN RSA PRIVATE KEY-----\n"
+     "MIICXQIBAAKBgQDeYqlyJ1wuiMsi905e3X81/WA/G3ym50PIDZBVtSwZi7JVQPgj\n"
+     "Bl8CPZMvDh9EwB4Ji9ytA8dZZbQ4WbJWPr73zPpJSCvQqz6sOXSlenBRi72acNaQ\n"
+     "sOR/qPvviJx5I6Hqo4qemfnjZhAW85a5BpgrAwKgMLIQTHCTLWwVSyrDrwIDAQAB\n"
+     "AoGARni9eY8/hv+SX+I+05EdXt6MQXNUbQ+cSykBNCfVccLzIFEWUQMT2IHqwl6X\n"
+     "ShIXcq7/n1QzOAEiuzixauM3YHg4xZ1Um2Ha9a7ig5Xg4v6b43bmMkNE6LkoAtYs\n"
+     "qnQdfMh442b1liDud6IMb1Qk0amt3fSrgRMc547TZQVx4QECQQDxUeDm94r3p4ng\n"
+     "5rCLLC1K5/6HSTZsh7jatKPlz7GfP/IZlYV7iE5784/n0wRiCjZOS7hQRy/8m2Gp\n"
+     "pf4aZq+DAkEA6+np4d36FYikydvUrupLT3FkdRHGn/v83qOll/VmeNh+L1xMZlIP\n"
+     "tM26hAXCcQb7O5+J9y3cx2CAQsBS11ZXZQJAfGgTo76WG9p5UEJdXUInD2jOZPwv\n"
+     "XIATolxh6kXKcijLLLlSmT7KB0inNYIpzkkpee+7U1d/u6B3FriGaSHq9QJBAM/J\n"
+     "ICnDdLCgwNvWVraVQC3BpwSB2pswvCFwq7py94V60XFvbw80Ogc6qIv98qvQxVlX\n"
+     "hJIEgA/PjEi+0ng94Q0CQQDm8XSDby35gmjO+6eRmJtAjtB7nguLvrPXM6CPXRmD\n"
+     "sRoBocpHw6j9UdzZ6qYG0FkdXZghezXFY58ro2BYYRR3\n"
+     "-----END RSA PRIVATE KEY-----\n",
+     "-----BEGIN CERTIFICATE-----\n"
+     "MIICWDCCAcGgAwIBAgIJALgDjxMbBOhbMA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV\n"
+     "BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX\n"
+     "aWRnaXRzIFB0eSBMdGQwHhcNMTUxMTEzMjIzMjEzWhcNMTYxMTEyMjIzMjEzWjBF\n"
+     "MQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50\n"
+     "ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKB\n"
+     "gQDeYqlyJ1wuiMsi905e3X81/WA/G3ym50PIDZBVtSwZi7JVQPgjBl8CPZMvDh9E\n"
+     "wB4Ji9ytA8dZZbQ4WbJWPr73zPpJSCvQqz6sOXSlenBRi72acNaQsOR/qPvviJx5\n"
+     "I6Hqo4qemfnjZhAW85a5BpgrAwKgMLIQTHCTLWwVSyrDrwIDAQABo1AwTjAdBgNV\n"
+     "HQ4EFgQUx2tbJdlcSTCepn09UdYORXKuSTAwHwYDVR0jBBgwFoAUx2tbJdlcSTCe\n"
+     "pn09UdYORXKuSTAwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOBgQAmp9Id\n"
+     "E716gHMqeBG4S2FCgVFCr0a0ugkaneQAN/c2L9CbMemEN9W6jvucUIVOtYd90dDW\n"
+     "lXuowWmT/JctPe3D2qt4yvYW3puECHk2tVQmrJOZiZiTRtWm6HxkmoUYHYp/DtaS\n"
+     "1Xe29gSTnZtI5sQCrGMzk3SGRSSs7ejLKiVDBQ==\n"
+     "-----END CERTIFICATE-----\n"}};
+
+class FakeDtlsIdentityStore : public webrtc::DtlsIdentityStoreInterface,
+                              public rtc::MessageHandler {
+ public:
+  typedef rtc::TypedMessageData<rtc::scoped_refptr<
+      webrtc::DtlsIdentityRequestObserver> > MessageData;
+
+  FakeDtlsIdentityStore() : should_fail_(false) {}
+
+  void set_should_fail(bool should_fail) {
+    should_fail_ = should_fail;
+  }
+
+  void use_original_key() { key_index_ = 0; }
+  void use_alternate_key() { key_index_ = 1; }
+
+  void RequestIdentity(
+      rtc::KeyType key_type,
+      const rtc::scoped_refptr<webrtc::DtlsIdentityRequestObserver>&
+          observer) override {
+    // TODO(hbos): Should be able to generate KT_ECDSA too.
+    RTC_DCHECK(key_type == rtc::KT_RSA || should_fail_);
+    MessageData* msg = new MessageData(
+        rtc::scoped_refptr<webrtc::DtlsIdentityRequestObserver>(observer));
+    rtc::Thread::Current()->Post(
+        this, should_fail_ ? MSG_FAILURE : MSG_SUCCESS, msg);
+  }
+
+  static rtc::scoped_refptr<rtc::RTCCertificate> GenerateCertificate() {
+    std::string cert;
+    std::string key;
+    rtc::SSLIdentity::PemToDer("CERTIFICATE", kKeysAndCerts[0].cert_pem, &cert);
+    rtc::SSLIdentity::PemToDer("RSA PRIVATE KEY",
+                               kKeysAndCerts[0].rsa_private_key_pem, &key);
+
+    std::string pem_cert = rtc::SSLIdentity::DerToPem(
+        rtc::kPemTypeCertificate,
+        reinterpret_cast<const unsigned char*>(cert.data()),
+        cert.length());
+    std::string pem_key = rtc::SSLIdentity::DerToPem(
+        rtc::kPemTypeRsaPrivateKey,
+        reinterpret_cast<const unsigned char*>(key.data()),
+        key.length());
+    rtc::scoped_ptr<rtc::SSLIdentity> identity(
+        rtc::SSLIdentity::FromPEMStrings(pem_key, pem_cert));
+
+    return rtc::RTCCertificate::Create(std::move(identity));
+  }
+
+ private:
+  enum {
+    MSG_SUCCESS,
+    MSG_FAILURE,
+  };
+
+  const char* get_key() {
+    return kKeysAndCerts[key_index_].rsa_private_key_pem;
+  }
+  const char* get_cert() { return kKeysAndCerts[key_index_].cert_pem; }
+
+  // rtc::MessageHandler implementation.
+  void OnMessage(rtc::Message* msg) {
+    MessageData* message_data = static_cast<MessageData*>(msg->pdata);
+    rtc::scoped_refptr<webrtc::DtlsIdentityRequestObserver> observer =
+        message_data->data();
+    switch (msg->message_id) {
+      case MSG_SUCCESS: {
+        std::string cert;
+        std::string key;
+        rtc::SSLIdentity::PemToDer("CERTIFICATE", get_cert(), &cert);
+        rtc::SSLIdentity::PemToDer("RSA PRIVATE KEY", get_key(), &key);
+        observer->OnSuccess(cert, key);
+        break;
+      }
+      case MSG_FAILURE:
+        observer->OnFailure(0);
+        break;
+    }
+    delete message_data;
+  }
+
+  bool should_fail_;
+  int key_index_ = 0;
+};
+
+#endif  // WEBRTC_API_TEST_FAKEDTLSIDENTITYSTORE_H_
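Note how RequestIdentity() never invokes the observer synchronously: it posts
MSG_SUCCESS or MSG_FAILURE back to the current thread and completes the
request in OnMessage(), so callers see the same asynchronous behavior as with
a real identity store. A sketch of that defer-to-message-loop pattern, with a
plain std::function queue standing in for rtc::Thread; all names below are
illustrative:

  #include <functional>
  #include <iostream>
  #include <queue>

  // Stand-in for the thread's message queue.
  static std::queue<std::function<void()>> g_message_queue;

  // Like FakeDtlsIdentityStore::RequestIdentity(): defer the observer
  // callback instead of running it inline.
  void RequestIdentity(bool should_fail, std::function<void(bool)> observer) {
    g_message_queue.push(
        [observer, should_fail] { observer(/*success=*/!should_fail); });
  }

  int main() {
    bool completed = false;
    RequestIdentity(false, [&completed](bool ok) {
      completed = true;
      std::cout << (ok ? "identity ready\n" : "identity failed\n");
    });
    // Nothing has happened yet; the callback runs only when the "message
    // loop" below drains the queue.
    while (!g_message_queue.empty()) {
      g_message_queue.front()();
      g_message_queue.pop();
    }
    return completed ? 0 : 1;
  }
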
diff --git a/webrtc/api/test/fakeperiodicvideocapturer.h b/webrtc/api/test/fakeperiodicvideocapturer.h
new file mode 100644
index 0000000..2ce648e
--- /dev/null
+++ b/webrtc/api/test/fakeperiodicvideocapturer.h
@@ -0,0 +1,89 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// FakePeriodicVideoCapturer implements a fake cricket::VideoCapturer that
+// creates video frames periodically after it has been started.
+
+#ifndef WEBRTC_API_TEST_FAKEPERIODICVIDEOCAPTURER_H_
+#define WEBRTC_API_TEST_FAKEPERIODICVIDEOCAPTURER_H_
+
+#include "webrtc/base/thread.h"
+#include "webrtc/media/base/fakevideocapturer.h"
+
+namespace webrtc {
+
+class FakePeriodicVideoCapturer : public cricket::FakeVideoCapturer {
+ public:
+  FakePeriodicVideoCapturer() {
+    std::vector<cricket::VideoFormat> formats;
+    formats.push_back(cricket::VideoFormat(1280, 720,
+        cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+    formats.push_back(cricket::VideoFormat(640, 480,
+        cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+    formats.push_back(cricket::VideoFormat(640, 360,
+        cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+    formats.push_back(cricket::VideoFormat(320, 240,
+        cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+    formats.push_back(cricket::VideoFormat(160, 120,
+        cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+    ResetSupportedFormats(formats);
+  }
+
+  virtual cricket::CaptureState Start(const cricket::VideoFormat& format) {
+    cricket::CaptureState state = FakeVideoCapturer::Start(format);
+    if (state != cricket::CS_FAILED) {
+      rtc::Thread::Current()->Post(this, MSG_CREATEFRAME);
+    }
+    return state;
+  }
+  virtual void Stop() {
+    rtc::Thread::Current()->Clear(this);
+  }
+  // Inherited from MessageHandler.
+  virtual void OnMessage(rtc::Message* msg) {
+    if (msg->message_id == MSG_CREATEFRAME) {
+      if (IsRunning()) {
+        CaptureFrame();
+        rtc::Thread::Current()->PostDelayed(static_cast<int>(
+            GetCaptureFormat()->interval / rtc::kNumNanosecsPerMillisec),
+            this, MSG_CREATEFRAME);
+      }
+    } else {
+      FakeVideoCapturer::OnMessage(msg);
+    }
+  }
+
+ private:
+  enum {
+    // Offset 0xFF to make sure this doesn't collide with base class messages.
+    MSG_CREATEFRAME = 0xFF
+  };
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_TEST_FAKEPERIODICVIDEOCAPTURER_H_
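The PostDelayed() call above derives its delay from the capture format's frame
interval, which cricket::VideoFormat stores in nanoseconds. A small
self-contained check of that conversion; the constants below mirror
rtc::kNumNanosecsPerMillisec and FpsToInterval() as assumptions, not the real
declarations:

  #include <cassert>
  #include <cstdint>

  constexpr int64_t kNumNanosecsPerSec = 1000000000;
  constexpr int64_t kNumNanosecsPerMillisec = 1000000;

  // Assumed shape of cricket::VideoFormat::FpsToInterval(): nanoseconds per
  // frame at the given rate.
  constexpr int64_t FpsToInterval(int fps) {
    return kNumNanosecsPerSec / fps;
  }

  int main() {
    const int64_t interval = FpsToInterval(30);
    const int delay_ms = static_cast<int>(interval / kNumNanosecsPerMillisec);
    assert(delay_ms == 33);  // the capturer re-posts itself every ~33 ms
    return 0;
  }
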
diff --git a/webrtc/api/test/fakevideotrackrenderer.h b/webrtc/api/test/fakevideotrackrenderer.h
new file mode 100644
index 0000000..4a7477b
--- /dev/null
+++ b/webrtc/api/test/fakevideotrackrenderer.h
@@ -0,0 +1,71 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_TEST_FAKEVIDEOTRACKRENDERER_H_
+#define WEBRTC_API_TEST_FAKEVIDEOTRACKRENDERER_H_
+
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/media/base/fakevideorenderer.h"
+
+namespace webrtc {
+
+class FakeVideoTrackRenderer : public VideoRendererInterface {
+ public:
+  explicit FakeVideoTrackRenderer(VideoTrackInterface* video_track)
+      : video_track_(video_track), last_frame_(NULL) {
+    video_track_->AddRenderer(this);
+  }
+  ~FakeVideoTrackRenderer() {
+    video_track_->RemoveRenderer(this);
+  }
+
+  virtual void RenderFrame(const cricket::VideoFrame* video_frame) override {
+    last_frame_ = const_cast<cricket::VideoFrame*>(video_frame);
+    fake_renderer_.RenderFrame(video_frame);
+  }
+
+  int errors() const { return fake_renderer_.errors(); }
+  int width() const { return fake_renderer_.width(); }
+  int height() const { return fake_renderer_.height(); }
+  bool black_frame() const { return fake_renderer_.black_frame(); }
+
+  int num_rendered_frames() const {
+    return fake_renderer_.num_rendered_frames();
+  }
+  const cricket::VideoFrame* last_frame() const { return last_frame_; }
+
+ private:
+  cricket::FakeVideoRenderer fake_renderer_;
+  rtc::scoped_refptr<VideoTrackInterface> video_track_;
+
+  // Weak reference for frame pointer comparison only.
+  cricket::VideoFrame* last_frame_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_TEST_FAKEVIDEOTRACKRENDERER_H_
diff --git a/webrtc/api/test/mockpeerconnectionobservers.h b/webrtc/api/test/mockpeerconnectionobservers.h
new file mode 100644
index 0000000..bae8538
--- /dev/null
+++ b/webrtc/api/test/mockpeerconnectionobservers.h
@@ -0,0 +1,243 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains mock implementations of observers used in PeerConnection.
+
+#ifndef WEBRTC_API_TEST_MOCKPEERCONNECTIONOBSERVERS_H_
+#define WEBRTC_API_TEST_MOCKPEERCONNECTIONOBSERVERS_H_
+
+#include <string>
+
+#include "webrtc/api/datachannelinterface.h"
+
+namespace webrtc {
+
+class MockCreateSessionDescriptionObserver
+    : public webrtc::CreateSessionDescriptionObserver {
+ public:
+  MockCreateSessionDescriptionObserver()
+      : called_(false),
+        result_(false) {}
+  virtual ~MockCreateSessionDescriptionObserver() {}
+  virtual void OnSuccess(SessionDescriptionInterface* desc) {
+    called_ = true;
+    result_ = true;
+    desc_.reset(desc);
+  }
+  virtual void OnFailure(const std::string& error) {
+    called_ = true;
+    result_ = false;
+  }
+  bool called() const { return called_; }
+  bool result() const { return result_; }
+  SessionDescriptionInterface* release_desc() {
+    return desc_.release();
+  }
+
+ private:
+  bool called_;
+  bool result_;
+  rtc::scoped_ptr<SessionDescriptionInterface> desc_;
+};
+
+class MockSetSessionDescriptionObserver
+    : public webrtc::SetSessionDescriptionObserver {
+ public:
+  MockSetSessionDescriptionObserver()
+      : called_(false),
+        result_(false) {}
+  virtual ~MockSetSessionDescriptionObserver() {}
+  virtual void OnSuccess() {
+    called_ = true;
+    result_ = true;
+  }
+  virtual void OnFailure(const std::string& error) {
+    called_ = true;
+    result_ = false;
+  }
+  bool called() const { return called_; }
+  bool result() const { return result_; }
+
+ private:
+  bool called_;
+  bool result_;
+};
+
+class MockDataChannelObserver : public webrtc::DataChannelObserver {
+ public:
+  explicit MockDataChannelObserver(webrtc::DataChannelInterface* channel)
+     : channel_(channel), received_message_count_(0) {
+    channel_->RegisterObserver(this);
+    state_ = channel_->state();
+  }
+  virtual ~MockDataChannelObserver() {
+    channel_->UnregisterObserver();
+  }
+
+  void OnBufferedAmountChange(uint64_t previous_amount) override {}
+
+  void OnStateChange() override { state_ = channel_->state(); }
+  void OnMessage(const DataBuffer& buffer) override {
+    last_message_.assign(buffer.data.data<char>(), buffer.data.size());
+    ++received_message_count_;
+  }
+
+  bool IsOpen() const { return state_ == DataChannelInterface::kOpen; }
+  const std::string& last_message() const { return last_message_; }
+  size_t received_message_count() const { return received_message_count_; }
+
+ private:
+  rtc::scoped_refptr<webrtc::DataChannelInterface> channel_;
+  DataChannelInterface::DataState state_;
+  std::string last_message_;
+  size_t received_message_count_;
+};
+
+class MockStatsObserver : public webrtc::StatsObserver {
+ public:
+  MockStatsObserver() : called_(false), stats_() {}
+  virtual ~MockStatsObserver() {}
+
+  virtual void OnComplete(const StatsReports& reports) {
+    ASSERT(!called_);
+    called_ = true;
+    stats_.Clear();
+    stats_.number_of_reports = reports.size();
+    for (const auto* r : reports) {
+      if (r->type() == StatsReport::kStatsReportTypeSsrc) {
+        stats_.timestamp = r->timestamp();
+        GetIntValue(r, StatsReport::kStatsValueNameAudioOutputLevel,
+            &stats_.audio_output_level);
+        GetIntValue(r, StatsReport::kStatsValueNameAudioInputLevel,
+            &stats_.audio_input_level);
+        GetIntValue(r, StatsReport::kStatsValueNameBytesReceived,
+            &stats_.bytes_received);
+        GetIntValue(r, StatsReport::kStatsValueNameBytesSent,
+            &stats_.bytes_sent);
+      } else if (r->type() == StatsReport::kStatsReportTypeBwe) {
+        stats_.timestamp = r->timestamp();
+        GetIntValue(r, StatsReport::kStatsValueNameAvailableReceiveBandwidth,
+            &stats_.available_receive_bandwidth);
+      } else if (r->type() == StatsReport::kStatsReportTypeComponent) {
+        stats_.timestamp = r->timestamp();
+        GetStringValue(r, StatsReport::kStatsValueNameDtlsCipher,
+            &stats_.dtls_cipher);
+        GetStringValue(r, StatsReport::kStatsValueNameSrtpCipher,
+            &stats_.srtp_cipher);
+      }
+    }
+  }
+
+  bool called() const { return called_; }
+  size_t number_of_reports() const { return stats_.number_of_reports; }
+  double timestamp() const { return stats_.timestamp; }
+
+  int AudioOutputLevel() const {
+    ASSERT(called_);
+    return stats_.audio_output_level;
+  }
+
+  int AudioInputLevel() const {
+    ASSERT(called_);
+    return stats_.audio_input_level;
+  }
+
+  int BytesReceived() const {
+    ASSERT(called_);
+    return stats_.bytes_received;
+  }
+
+  int BytesSent() const {
+    ASSERT(called_);
+    return stats_.bytes_sent;
+  }
+
+  int AvailableReceiveBandwidth() const {
+    ASSERT(called_);
+    return stats_.available_receive_bandwidth;
+  }
+
+  std::string DtlsCipher() const {
+    ASSERT(called_);
+    return stats_.dtls_cipher;
+  }
+
+  std::string SrtpCipher() const {
+    ASSERT(called_);
+    return stats_.srtp_cipher;
+  }
+
+ private:
+  bool GetIntValue(const StatsReport* report,
+                   StatsReport::StatsValueName name,
+                   int* value) {
+    const StatsReport::Value* v = report->FindValue(name);
+    if (v) {
+      // TODO(tommi): We should really just be using an int here :-/
+      *value = rtc::FromString<int>(v->ToString());
+    }
+    return v != nullptr;
+  }
+
+  bool GetStringValue(const StatsReport* report,
+                      StatsReport::StatsValueName name,
+                      std::string* value) {
+    const StatsReport::Value* v = report->FindValue(name);
+    if (v)
+      *value = v->ToString();
+    return v != nullptr;
+  }
+
+  bool called_;
+  struct {
+    void Clear() {
+      number_of_reports = 0;
+      timestamp = 0;
+      audio_output_level = 0;
+      audio_input_level = 0;
+      bytes_received = 0;
+      bytes_sent = 0;
+      available_receive_bandwidth = 0;
+      dtls_cipher.clear();
+      srtp_cipher.clear();
+    }
+
+    size_t number_of_reports;
+    double timestamp;
+    int audio_output_level;
+    int audio_input_level;
+    int bytes_received;
+    int bytes_sent;
+    int available_receive_bandwidth;
+    std::string dtls_cipher;
+    std::string srtp_cipher;
+  } stats_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_TEST_MOCKPEERCONNECTIONOBSERVERS_H_
diff --git a/webrtc/api/test/peerconnectiontestwrapper.cc b/webrtc/api/test/peerconnectiontestwrapper.cc
new file mode 100644
index 0000000..7f9ab59
--- /dev/null
+++ b/webrtc/api/test/peerconnectiontestwrapper.cc
@@ -0,0 +1,297 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <utility>
+
+#include "webrtc/api/test/fakedtlsidentitystore.h"
+#include "webrtc/api/test/fakeperiodicvideocapturer.h"
+#include "webrtc/api/test/mockpeerconnectionobservers.h"
+#include "webrtc/api/test/peerconnectiontestwrapper.h"
+#include "webrtc/api/videosourceinterface.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/p2p/client/fakeportallocator.h"
+
+static const char kStreamLabelBase[] = "stream_label";
+static const char kVideoTrackLabelBase[] = "video_track";
+static const char kAudioTrackLabelBase[] = "audio_track";
+static const int kMaxWait = 10000;
+static const int kTestAudioFrameCount = 3;
+static const int kTestVideoFrameCount = 3;
+
+using webrtc::FakeConstraints;
+using webrtc::FakeVideoTrackRenderer;
+using webrtc::IceCandidateInterface;
+using webrtc::MediaConstraintsInterface;
+using webrtc::MediaStreamInterface;
+using webrtc::MockSetSessionDescriptionObserver;
+using webrtc::PeerConnectionInterface;
+using webrtc::SessionDescriptionInterface;
+using webrtc::VideoTrackInterface;
+
+void PeerConnectionTestWrapper::Connect(PeerConnectionTestWrapper* caller,
+                                        PeerConnectionTestWrapper* callee) {
+  caller->SignalOnIceCandidateReady.connect(
+      callee, &PeerConnectionTestWrapper::AddIceCandidate);
+  callee->SignalOnIceCandidateReady.connect(
+      caller, &PeerConnectionTestWrapper::AddIceCandidate);
+
+  caller->SignalOnSdpReady.connect(
+      callee, &PeerConnectionTestWrapper::ReceiveOfferSdp);
+  callee->SignalOnSdpReady.connect(
+      caller, &PeerConnectionTestWrapper::ReceiveAnswerSdp);
+}
+
+PeerConnectionTestWrapper::PeerConnectionTestWrapper(const std::string& name)
+    : name_(name) {}
+
+PeerConnectionTestWrapper::~PeerConnectionTestWrapper() {}
+
+bool PeerConnectionTestWrapper::CreatePc(
+    const MediaConstraintsInterface* constraints) {
+  rtc::scoped_ptr<cricket::PortAllocator> port_allocator(
+      new cricket::FakePortAllocator(rtc::Thread::Current(), nullptr));
+
+  fake_audio_capture_module_ = FakeAudioCaptureModule::Create();
+  if (fake_audio_capture_module_ == NULL) {
+    return false;
+  }
+
+  peer_connection_factory_ = webrtc::CreatePeerConnectionFactory(
+      rtc::Thread::Current(), rtc::Thread::Current(),
+      fake_audio_capture_module_, NULL, NULL);
+  if (!peer_connection_factory_) {
+    return false;
+  }
+
+  // CreatePeerConnection with RTCConfiguration.
+  webrtc::PeerConnectionInterface::RTCConfiguration config;
+  webrtc::PeerConnectionInterface::IceServer ice_server;
+  ice_server.uri = "stun:stun.l.google.com:19302";
+  config.servers.push_back(ice_server);
+  rtc::scoped_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store(
+      rtc::SSLStreamAdapter::HaveDtlsSrtp() ?
+      new FakeDtlsIdentityStore() : nullptr);
+  peer_connection_ = peer_connection_factory_->CreatePeerConnection(
+      config, constraints, std::move(port_allocator),
+      std::move(dtls_identity_store), this);
+
+  return peer_connection_.get() != NULL;
+}
+
+rtc::scoped_refptr<webrtc::DataChannelInterface>
+PeerConnectionTestWrapper::CreateDataChannel(
+    const std::string& label,
+    const webrtc::DataChannelInit& init) {
+  return peer_connection_->CreateDataChannel(label, &init);
+}
+
+void PeerConnectionTestWrapper::OnAddStream(MediaStreamInterface* stream) {
+  LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+               << ": OnAddStream";
+  // TODO(ronghuawu): support multiple streams.
+  if (stream->GetVideoTracks().size() > 0) {
+    renderer_.reset(new FakeVideoTrackRenderer(stream->GetVideoTracks()[0]));
+  }
+}
+
+void PeerConnectionTestWrapper::OnIceCandidate(
+    const IceCandidateInterface* candidate) {
+  std::string sdp;
+  EXPECT_TRUE(candidate->ToString(&sdp));
+  // Give the user a chance to modify sdp for testing.
+  SignalOnIceCandidateCreated(&sdp);
+  SignalOnIceCandidateReady(candidate->sdp_mid(), candidate->sdp_mline_index(),
+                            sdp);
+}
+
+void PeerConnectionTestWrapper::OnDataChannel(
+    webrtc::DataChannelInterface* data_channel) {
+  SignalOnDataChannel(data_channel);
+}
+
+void PeerConnectionTestWrapper::OnSuccess(SessionDescriptionInterface* desc) {
+  // This callback takes ownership of |desc|.
+  rtc::scoped_ptr<SessionDescriptionInterface> owned_desc(desc);
+  std::string sdp;
+  EXPECT_TRUE(desc->ToString(&sdp));
+
+  LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+               << ": " << desc->type() << " sdp created: " << sdp;
+
+  // Give the user a chance to modify sdp for testing.
+  SignalOnSdpCreated(&sdp);
+
+  SetLocalDescription(desc->type(), sdp);
+
+  SignalOnSdpReady(sdp);
+}
+
+void PeerConnectionTestWrapper::CreateOffer(
+    const MediaConstraintsInterface* constraints) {
+  LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+               << ": CreateOffer.";
+  peer_connection_->CreateOffer(this, constraints);
+}
+
+void PeerConnectionTestWrapper::CreateAnswer(
+    const MediaConstraintsInterface* constraints) {
+  LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+               << ": CreateAnswer.";
+  peer_connection_->CreateAnswer(this, constraints);
+}
+
+void PeerConnectionTestWrapper::ReceiveOfferSdp(const std::string& sdp) {
+  SetRemoteDescription(SessionDescriptionInterface::kOffer, sdp);
+  CreateAnswer(NULL);
+}
+
+void PeerConnectionTestWrapper::ReceiveAnswerSdp(const std::string& sdp) {
+  SetRemoteDescription(SessionDescriptionInterface::kAnswer, sdp);
+}
+
+void PeerConnectionTestWrapper::SetLocalDescription(const std::string& type,
+                                                    const std::string& sdp) {
+  LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+               << ": SetLocalDescription " << type << " " << sdp;
+
+  rtc::scoped_refptr<MockSetSessionDescriptionObserver>
+      observer(new rtc::RefCountedObject<
+                   MockSetSessionDescriptionObserver>());
+  peer_connection_->SetLocalDescription(
+      observer, webrtc::CreateSessionDescription(type, sdp, NULL));
+}
+
+void PeerConnectionTestWrapper::SetRemoteDescription(const std::string& type,
+                                                     const std::string& sdp) {
+  LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+               << ": SetRemoteDescription " << type << " " << sdp;
+
+  rtc::scoped_refptr<MockSetSessionDescriptionObserver>
+      observer(new rtc::RefCountedObject<
+                   MockSetSessionDescriptionObserver>());
+  peer_connection_->SetRemoteDescription(
+      observer, webrtc::CreateSessionDescription(type, sdp, NULL));
+}
+
+void PeerConnectionTestWrapper::AddIceCandidate(const std::string& sdp_mid,
+                                                int sdp_mline_index,
+                                                const std::string& candidate) {
+  rtc::scoped_ptr<webrtc::IceCandidateInterface> owned_candidate(
+      webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, candidate, NULL));
+  EXPECT_TRUE(peer_connection_->AddIceCandidate(owned_candidate.get()));
+}
+
+void PeerConnectionTestWrapper::WaitForCallEstablished() {
+  WaitForConnection();
+  WaitForAudio();
+  WaitForVideo();
+}
+
+void PeerConnectionTestWrapper::WaitForConnection() {
+  EXPECT_TRUE_WAIT(CheckForConnection(), kMaxWait);
+  LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+               << ": Connected.";
+}
+
+bool PeerConnectionTestWrapper::CheckForConnection() {
+  return (peer_connection_->ice_connection_state() ==
+          PeerConnectionInterface::kIceConnectionConnected) ||
+         (peer_connection_->ice_connection_state() ==
+          PeerConnectionInterface::kIceConnectionCompleted);
+}
+
+void PeerConnectionTestWrapper::WaitForAudio() {
+  EXPECT_TRUE_WAIT(CheckForAudio(), kMaxWait);
+  LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+               << ": Got enough audio frames.";
+}
+
+bool PeerConnectionTestWrapper::CheckForAudio() {
+  return (fake_audio_capture_module_->frames_received() >=
+          kTestAudioFrameCount);
+}
+
+void PeerConnectionTestWrapper::WaitForVideo() {
+  EXPECT_TRUE_WAIT(CheckForVideo(), kMaxWait);
+  LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_
+               << ": Got enough video frames.";
+}
+
+bool PeerConnectionTestWrapper::CheckForVideo() {
+  if (!renderer_) {
+    return false;
+  }
+  return (renderer_->num_rendered_frames() >= kTestVideoFrameCount);
+}
+
+void PeerConnectionTestWrapper::GetAndAddUserMedia(
+    bool audio, const webrtc::FakeConstraints& audio_constraints,
+    bool video, const webrtc::FakeConstraints& video_constraints) {
+  rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
+      GetUserMedia(audio, audio_constraints, video, video_constraints);
+  EXPECT_TRUE(peer_connection_->AddStream(stream));
+}
+
+rtc::scoped_refptr<webrtc::MediaStreamInterface>
+    PeerConnectionTestWrapper::GetUserMedia(
+        bool audio, const webrtc::FakeConstraints& audio_constraints,
+        bool video, const webrtc::FakeConstraints& video_constraints) {
+  std::string label = kStreamLabelBase +
+      rtc::ToString<int>(
+          static_cast<int>(peer_connection_->local_streams()->count()));
+  rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
+      peer_connection_factory_->CreateLocalMediaStream(label);
+
+  if (audio) {
+    FakeConstraints constraints = audio_constraints;
+    // Disable highpass filter so that we can get all the test audio frames.
+    constraints.AddMandatory(
+        MediaConstraintsInterface::kHighpassFilter, false);
+    rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
+        peer_connection_factory_->CreateAudioSource(&constraints);
+    rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
+        peer_connection_factory_->CreateAudioTrack(kAudioTrackLabelBase,
+                                                   source));
+    stream->AddTrack(audio_track);
+  }
+
+  if (video) {
+    // Cap the frame rate at 10 fps to reduce the risk of flaky tests.
+    FakeConstraints constraints = video_constraints;
+    constraints.SetMandatoryMaxFrameRate(10);
+
+    rtc::scoped_refptr<webrtc::VideoSourceInterface> source =
+        peer_connection_factory_->CreateVideoSource(
+            new webrtc::FakePeriodicVideoCapturer(), &constraints);
+    std::string videotrack_label = label + kVideoTrackLabelBase;
+    rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
+        peer_connection_factory_->CreateVideoTrack(videotrack_label, source));
+
+    stream->AddTrack(video_track);
+  }
+  return stream;
+}
diff --git a/webrtc/api/test/peerconnectiontestwrapper.h b/webrtc/api/test/peerconnectiontestwrapper.h
new file mode 100644
index 0000000..f4600ea
--- /dev/null
+++ b/webrtc/api/test/peerconnectiontestwrapper.h
@@ -0,0 +1,115 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_TEST_PEERCONNECTIONTESTWRAPPER_H_
+#define WEBRTC_API_TEST_PEERCONNECTIONTESTWRAPPER_H_
+
+#include "webrtc/api/peerconnectioninterface.h"
+#include "webrtc/api/test/fakeaudiocapturemodule.h"
+#include "webrtc/api/test/fakeconstraints.h"
+#include "webrtc/api/test/fakevideotrackrenderer.h"
+#include "webrtc/base/sigslot.h"
+
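+// Wraps a PeerConnection for end-to-end tests. A typical call setup, as a
+// sketch (the null constraints are illustrative):
+//   rtc::scoped_refptr<PeerConnectionTestWrapper> caller(
+//       new rtc::RefCountedObject<PeerConnectionTestWrapper>("caller"));
+//   rtc::scoped_refptr<PeerConnectionTestWrapper> callee(
+//       new rtc::RefCountedObject<PeerConnectionTestWrapper>("callee"));
+//   caller->CreatePc(nullptr);
+//   callee->CreatePc(nullptr);
+//   PeerConnectionTestWrapper::Connect(caller.get(), callee.get());
+//   caller->GetAndAddUserMedia(true, webrtc::FakeConstraints(),
+//                              true, webrtc::FakeConstraints());
+//   caller->CreateOffer(nullptr);
+//   caller->WaitForCallEstablished();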
+class PeerConnectionTestWrapper
+    : public webrtc::PeerConnectionObserver,
+      public webrtc::CreateSessionDescriptionObserver,
+      public sigslot::has_slots<> {
+ public:
+  static void Connect(PeerConnectionTestWrapper* caller,
+                      PeerConnectionTestWrapper* callee);
+
+  explicit PeerConnectionTestWrapper(const std::string& name);
+  virtual ~PeerConnectionTestWrapper();
+
+  bool CreatePc(const webrtc::MediaConstraintsInterface* constraints);
+
+  rtc::scoped_refptr<webrtc::DataChannelInterface> CreateDataChannel(
+      const std::string& label,
+      const webrtc::DataChannelInit& init);
+
+  // Implements PeerConnectionObserver.
+  virtual void OnSignalingChange(
+      webrtc::PeerConnectionInterface::SignalingState new_state) {}
+  virtual void OnStateChange(
+      webrtc::PeerConnectionObserver::StateType state_changed) {}
+  virtual void OnAddStream(webrtc::MediaStreamInterface* stream);
+  virtual void OnRemoveStream(webrtc::MediaStreamInterface* stream) {}
+  virtual void OnDataChannel(webrtc::DataChannelInterface* data_channel);
+  virtual void OnRenegotiationNeeded() {}
+  virtual void OnIceConnectionChange(
+      webrtc::PeerConnectionInterface::IceConnectionState new_state) {}
+  virtual void OnIceGatheringChange(
+      webrtc::PeerConnectionInterface::IceGatheringState new_state) {}
+  virtual void OnIceCandidate(const webrtc::IceCandidateInterface* candidate);
+  virtual void OnIceComplete() {}
+
+  // Implements CreateSessionDescriptionObserver.
+  virtual void OnSuccess(webrtc::SessionDescriptionInterface* desc);
+  virtual void OnFailure(const std::string& error) {}
+
+  void CreateOffer(const webrtc::MediaConstraintsInterface* constraints);
+  void CreateAnswer(const webrtc::MediaConstraintsInterface* constraints);
+  void ReceiveOfferSdp(const std::string& sdp);
+  void ReceiveAnswerSdp(const std::string& sdp);
+  void AddIceCandidate(const std::string& sdp_mid, int sdp_mline_index,
+                       const std::string& candidate);
+  void WaitForCallEstablished();
+  void WaitForConnection();
+  void WaitForAudio();
+  void WaitForVideo();
+  void GetAndAddUserMedia(
+      bool audio, const webrtc::FakeConstraints& audio_constraints,
+      bool video, const webrtc::FakeConstraints& video_constraints);
+
+  // sigslots
+  sigslot::signal1<std::string*> SignalOnIceCandidateCreated;
+  sigslot::signal3<const std::string&,
+                   int,
+                   const std::string&> SignalOnIceCandidateReady;
+  sigslot::signal1<std::string*> SignalOnSdpCreated;
+  sigslot::signal1<const std::string&> SignalOnSdpReady;
+  sigslot::signal1<webrtc::DataChannelInterface*> SignalOnDataChannel;
+
+ private:
+  void SetLocalDescription(const std::string& type, const std::string& sdp);
+  void SetRemoteDescription(const std::string& type, const std::string& sdp);
+  bool CheckForConnection();
+  bool CheckForAudio();
+  bool CheckForVideo();
+  rtc::scoped_refptr<webrtc::MediaStreamInterface> GetUserMedia(
+      bool audio, const webrtc::FakeConstraints& audio_constraints,
+      bool video, const webrtc::FakeConstraints& video_constraints);
+
+  std::string name_;
+  rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection_;
+  rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
+      peer_connection_factory_;
+  rtc::scoped_refptr<FakeAudioCaptureModule> fake_audio_capture_module_;
+  rtc::scoped_ptr<webrtc::FakeVideoTrackRenderer> renderer_;
+};
+
+#endif  // WEBRTC_API_TEST_PEERCONNECTIONTESTWRAPPER_H_
diff --git a/webrtc/api/test/testsdpstrings.h b/webrtc/api/test/testsdpstrings.h
new file mode 100644
index 0000000..d806e71
--- /dev/null
+++ b/webrtc/api/test/testsdpstrings.h
@@ -0,0 +1,147 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains SDP strings used for testing.
+
+#ifndef WEBRTC_API_TEST_TESTSDPSTRINGS_H_
+#define WEBRTC_API_TEST_TESTSDPSTRINGS_H_
+
+namespace webrtc {
+
+// SDP offer string from a Firefox Nightly build.
+static const char kFireFoxSdpOffer[] =
+    "v=0\r\n"
+    "o=Mozilla-SIPUA 23551 0 IN IP4 0.0.0.0\r\n"
+    "s=SIP Call\r\n"
+    "t=0 0\r\n"
+    "a=ice-ufrag:e5785931\r\n"
+    "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+    "a=fingerprint:sha-256 A7:24:72:CA:6E:02:55:39:BA:66:DF:6E:CC:4C:D8:B0:1A:"
+    "BF:1A:56:65:7D:F4:03:AD:7E:77:43:2A:29:EC:93\r\n"
+    "m=audio 36993 RTP/SAVPF 109 0 8 101\r\n"
+    "c=IN IP4 74.95.2.170\r\n"
+    "a=rtpmap:109 opus/48000/2\r\n"
+    "a=ptime:20\r\n"
+    "a=rtpmap:0 PCMU/8000\r\n"
+    "a=rtpmap:8 PCMA/8000\r\n"
+    "a=rtpmap:101 telephone-event/8000\r\n"
+    "a=fmtp:101 0-15\r\n"
+    "a=sendrecv\r\n"
+    "a=candidate:0 1 UDP 2112946431 172.16.191.1 61725 typ host\r\n"
+    "a=candidate:2 1 UDP 2112487679 172.16.131.1 58798 typ host\r\n"
+    "a=candidate:4 1 UDP 2113667327 10.0.254.2 58122 typ host\r\n"
+    "a=candidate:5 1 UDP 1694302207 74.95.2.170 36993 typ srflx raddr "
+    "10.0.254.2 rport 58122\r\n"
+    "a=candidate:0 2 UDP 2112946430 172.16.191.1 55025 typ host\r\n"
+    "a=candidate:2 2 UDP 2112487678 172.16.131.1 63576 typ host\r\n"
+    "a=candidate:4 2 UDP 2113667326 10.0.254.2 50962 typ host\r\n"
+    "a=candidate:5 2 UDP 1694302206 74.95.2.170 41028 typ srflx raddr"
+    " 10.0.254.2 rport 50962\r\n"
+    "m=video 38826 RTP/SAVPF 120\r\n"
+    "c=IN IP4 74.95.2.170\r\n"
+    "a=rtpmap:120 VP8/90000\r\n"
+    "a=sendrecv\r\n"
+    "a=candidate:0 1 UDP 2112946431 172.16.191.1 62017 typ host\r\n"
+    "a=candidate:2 1 UDP 2112487679 172.16.131.1 59741 typ host\r\n"
+    "a=candidate:4 1 UDP 2113667327 10.0.254.2 62652 typ host\r\n"
+    "a=candidate:5 1 UDP 1694302207 74.95.2.170 38826 typ srflx raddr"
+    " 10.0.254.2 rport 62652\r\n"
+    "a=candidate:0 2 UDP 2112946430 172.16.191.1 63440 typ host\r\n"
+    "a=candidate:2 2 UDP 2112487678 172.16.131.1 51847 typ host\r\n"
+    "a=candidate:4 2 UDP 2113667326 10.0.254.2 58890 typ host\r\n"
+    "a=candidate:5 2 UDP 1694302206 74.95.2.170 33611 typ srflx raddr"
+    " 10.0.254.2 rport 58890\r\n"
+#ifdef HAVE_SCTP
+    "m=application 45536 SCTP/DTLS 5000\r\n"
+    "c=IN IP4 74.95.2.170\r\n"
+    "a=fmtp:5000 protocol=webrtc-datachannel;streams=16\r\n"
+    "a=sendrecv\r\n"
+    "a=candidate:0 1 UDP 2112946431 172.16.191.1 60248 typ host\r\n"
+    "a=candidate:2 1 UDP 2112487679 172.16.131.1 55925 typ host\r\n"
+    "a=candidate:4 1 UDP 2113667327 10.0.254.2 65268 typ host\r\n"
+    "a=candidate:5 1 UDP 1694302207 74.95.2.170 45536 typ srflx raddr"
+    " 10.0.254.2 rport 65268\r\n"
+    "a=candidate:0 2 UDP 2112946430 172.16.191.1 49162 typ host\r\n"
+    "a=candidate:2 2 UDP 2112487678 172.16.131.1 59635 typ host\r\n"
+    "a=candidate:4 2 UDP 2113667326 10.0.254.2 61232 typ host\r\n"
+    "a=candidate:5 2 UDP 1694302206 74.95.2.170 45468 typ srflx raddr"
+    " 10.0.254.2 rport 61232\r\n"
+#endif
+    ;
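+
+// In tests, such a string is typically turned into a session description,
+// e.g. (a sketch):
+//   rtc::scoped_ptr<webrtc::SessionDescriptionInterface> desc(
+//       webrtc::CreateSessionDescription(
+//           webrtc::SessionDescriptionInterface::kOffer, kFireFoxSdpOffer,
+//           nullptr));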
+
+// Audio SDP with a limited set of audio codecs.
+static const char kAudioSdp[] =
+    "v=0\r\n"
+    "o=- 7859371131 2 IN IP4 192.168.30.208\r\n"
+    "s=-\r\n"
+    "c=IN IP4 192.168.30.208\r\n"
+    "t=0 0\r\n"
+    "m=audio 16000 RTP/SAVPF 0 8 126\r\n"
+    "a=rtpmap:0 PCMU/8000\r\n"
+    "a=rtpmap:8 PCMA/8000\r\n"
+    "a=rtpmap:126 telephone-event/8000\r\n"
+    "a=sendrecv\r\n"
+    "a=rtcp:16000 IN IP4 192.168.30.208\r\n"
+    "a=rtcp-mux\r\n"
+    "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+    "inline:tvKIFjbMQ7W0/C2RzhwN0oQglj/7GJg+frdsNRxt\r\n"
+    "a=ice-ufrag:AI2sRT3r\r\n"
+    "a=ice-pwd:lByS9z2RSQlSE9XurlvjYmEm\r\n"
+    "a=ssrc:4227871655 cname:GeAAgb6XCPNLVMX5\r\n"
+    "a=ssrc:4227871655 msid:1NFAV3iD08ioO2339rQS9pfOI9mDf6GeG9F4 a0\r\n"
+    "a=ssrc:4227871655 mslabel:1NFAV3iD08ioO2339rQS9pfOI9mDf6GeG9F4\r\n"
+    "a=ssrc:4227871655 label:1NFAV3iD08ioO2339rQS9pfOI9mDf6GeG9F4a0\r\n"
+    "a=mid:audio\r\n";
+
+static const char kAudioSdpWithUnsupportedCodecs[] =
+    "v=0\r\n"
+    "o=- 6858750541 2 IN IP4 192.168.30.208\r\n"
+    "s=-\r\n"
+    "c=IN IP4 192.168.30.208\r\n"
+    "t=0 0\r\n"
+    "m=audio 16000 RTP/SAVPF 0 8 18 110 126\r\n"
+    "a=rtpmap:0 PCMU/8000\r\n"
+    "a=rtpmap:8 PCMA/8000\r\n"
+    "a=rtpmap:18 WeirdCodec1/8000\r\n"
+    "a=rtpmap:110 WeirdCodec2/8000\r\n"
+    "a=rtpmap:126 telephone-event/8000\r\n"
+    "a=sendonly\r\n"
+    "a=rtcp:16000 IN IP4 192.168.30.208\r\n"
+    "a=rtcp-mux\r\n"
+    "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+    "inline:tvKIFjbMQ7W0/C2RzhwN0oQglj/7GJg+frdsNRxt\r\n"
+    "a=ice-ufrag:AI2sRT3r\r\n"
+    "a=ice-pwd:lByS9z2RSQlSE9XurlvjYmEm\r\n"
+    "a=ssrc:4227871655 cname:TsmD02HRfhkJBm4m\r\n"
+    "a=ssrc:4227871655 msid:7nU0TApbB-n4dfPlCplWT9QTEsbBDS1IlpW3 a0\r\n"
+    "a=ssrc:4227871655 mslabel:7nU0TApbB-n4dfPlCplWT9QTEsbBDS1IlpW3\r\n"
+    "a=ssrc:4227871655 label:7nU0TApbB-n4dfPlCplWT9QTEsbBDS1IlpW3a0\r\n"
+    "a=mid:audio\r\n";
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_TEST_TESTSDPSTRINGS_H_
diff --git a/webrtc/api/umametrics.h b/webrtc/api/umametrics.h
new file mode 100644
index 0000000..f72ad3b
--- /dev/null
+++ b/webrtc/api/umametrics.h
@@ -0,0 +1,128 @@
+/*
+ * libjingle
+ * Copyright 2014 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains enums related to PeerConnection UMA metrics, e.g.
+// IPv4/IPv6 usage, ICE candidate pair types, and SRTP/SSL ciphers.
+
+#ifndef WEBRTC_API_UMAMETRICS_H_
+#define WEBRTC_API_UMAMETRICS_H_
+
+namespace webrtc {
+
+// Used to specify which enum counter type we're incrementing in
+// MetricsObserverInterface::IncrementEnumCounter.
+enum PeerConnectionEnumCounterType {
+  kEnumCounterAddressFamily,
+  // For the next 2 counters, we track them separately based on the "first hop"
+  // protocol used by the local candidate. "First hop" means the local candidate
+  // type in the case of non-TURN candidates, and the protocol used to connect
+  // to the TURN server in the case of TURN candidates.
+  kEnumCounterIceCandidatePairTypeUdp,
+  kEnumCounterIceCandidatePairTypeTcp,
+
+  kEnumCounterAudioSrtpCipher,
+  kEnumCounterAudioSslCipher,
+  kEnumCounterVideoSrtpCipher,
+  kEnumCounterVideoSslCipher,
+  kEnumCounterDataSrtpCipher,
+  kEnumCounterDataSslCipher,
+  kPeerConnectionEnumCounterMax
+};
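+
+// As a sketch of intended use (MetricsObserverInterface is declared in
+// peerconnectioninterface.h; |observer| and the recorded value are
+// illustrative):
+//   observer->IncrementEnumCounter(kEnumCounterAddressFamily,
+//                                  kBestConnections_IPv6,
+//                                  kPeerConnectionAddressFamilyCounter_Max);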
+
+// Currently this covers WebRTC network/transport information.
+
+// The difference between PeerConnectionEnumCounter and
+// PeerConnectionMetricsName is that an "EnumCounter" only counts the
+// occurrences of events, while a "Name" has an associated value that is used
+// to form a histogram.
+
+// This enum is backed by Chromium's histograms.xml,
+// chromium/src/tools/metrics/histograms/histograms.xml
+// Existing values cannot be re-ordered and new enums must be added
+// before kBoundary.
+enum PeerConnectionAddressFamilyCounter {
+  kPeerConnection_IPv4,
+  kPeerConnection_IPv6,
+  kBestConnections_IPv4,
+  kBestConnections_IPv6,
+  kPeerConnectionAddressFamilyCounter_Max,
+};
+
+// TODO(guoweis): Keep previous name here until all references are renamed.
+#define kBoundary kPeerConnectionAddressFamilyCounter_Max
+
+// TODO(guoweis): Keep previous name here until all references are renamed.
+typedef PeerConnectionAddressFamilyCounter PeerConnectionUMAMetricsCounter;
+
+// This enum defines types for UMA samples, which will have a range.
+enum PeerConnectionMetricsName {
+  kNetworkInterfaces_IPv4,  // Number of IPv4 interfaces.
+  kNetworkInterfaces_IPv6,  // Number of IPv6 interfaces.
+  kTimeToConnect,           // In milliseconds.
+  kLocalCandidates_IPv4,    // Number of IPv4 local candidates.
+  kLocalCandidates_IPv6,    // Number of IPv6 local candidates.
+  kPeerConnectionMetricsName_Max
+};
+
+// TODO(guoweis): Keep previous name here until all references are renamed.
+typedef PeerConnectionMetricsName PeerConnectionUMAMetricsName;
+
+// The IceCandidatePairType has the format of
+// <local_candidate_type>_<remote_candidate_type>. It is recorded based on the
+// type of candidate pair used when the PeerConnection first goes to a completed
+// state. When BUNDLE is enabled, only the first transport gets recorded.
+enum IceCandidatePairType {
+  // HostHost is deprecated. It was replaced with the set of types at the
+  // bottom that report private or public host IP addresses.
+  kIceCandidatePairHostHost,
+  kIceCandidatePairHostSrflx,
+  kIceCandidatePairHostRelay,
+  kIceCandidatePairHostPrflx,
+  kIceCandidatePairSrflxHost,
+  kIceCandidatePairSrflxSrflx,
+  kIceCandidatePairSrflxRelay,
+  kIceCandidatePairSrflxPrflx,
+  kIceCandidatePairRelayHost,
+  kIceCandidatePairRelaySrflx,
+  kIceCandidatePairRelayRelay,
+  kIceCandidatePairRelayPrflx,
+  kIceCandidatePairPrflxHost,
+  kIceCandidatePairPrflxSrflx,
+  kIceCandidatePairPrflxRelay,
+
+  // The following 4 types tell whether local and remote hosts have private or
+  // public IP addresses.
+  kIceCandidatePairHostPrivateHostPrivate,
+  kIceCandidatePairHostPrivateHostPublic,
+  kIceCandidatePairHostPublicHostPrivate,
+  kIceCandidatePairHostPublicHostPublic,
+  kIceCandidatePairMax
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_UMAMETRICS_H_
diff --git a/webrtc/api/videosource.cc b/webrtc/api/videosource.cc
new file mode 100644
index 0000000..a94c937
--- /dev/null
+++ b/webrtc/api/videosource.cc
@@ -0,0 +1,438 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/videosource.h"
+
+#include <cstdlib>
+#include <vector>
+
+#include "webrtc/api/mediaconstraintsinterface.h"
+#include "talk/session/media/channelmanager.h"
+#include "webrtc/base/arraysize.h"
+
+using cricket::CaptureState;
+using webrtc::MediaConstraintsInterface;
+using webrtc::MediaSourceInterface;
+
+namespace {
+
+const double kRoundingTruncation = 0.0005;
+
+enum {
+  MSG_VIDEOCAPTURESTATECONNECT,
+  MSG_VIDEOCAPTURESTATEDISCONNECT,
+  MSG_VIDEOCAPTURESTATECHANGE,
+};
+
+// Default resolution. If no constraint is specified, this is the resolution we
+// will use.
+static const cricket::VideoFormatPod kDefaultFormat =
+    {640, 480, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY};
+
+// List of formats used if the camera doesn't support capability enumeration.
+static const cricket::VideoFormatPod kVideoFormats[] = {
+  {1920, 1080, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY},
+  {1280, 720, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY},
+  {960, 720, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY},
+  {640, 360, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY},
+  {640, 480, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY},
+  {320, 240, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY},
+  {320, 180, FPS_TO_INTERVAL(30), cricket::FOURCC_ANY}
+};
+
+MediaSourceInterface::SourceState
+GetReadyState(cricket::CaptureState state) {
+  switch (state) {
+    case cricket::CS_STARTING:
+      return MediaSourceInterface::kInitializing;
+    case cricket::CS_RUNNING:
+      return MediaSourceInterface::kLive;
+    case cricket::CS_FAILED:
+    case cricket::CS_NO_DEVICE:
+    case cricket::CS_STOPPED:
+      return MediaSourceInterface::kEnded;
+    case cricket::CS_PAUSED:
+      return MediaSourceInterface::kMuted;
+    default:
+      ASSERT(false && "GetReadyState unknown state");
+  }
+  return MediaSourceInterface::kEnded;
+}
+
+void SetUpperLimit(int new_limit, int* original_limit) {
+  if (*original_limit < 0 || new_limit < *original_limit)
+    *original_limit = new_limit;
+}
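+
+// E.g. (a sketch): with *original_limit == -1 (unset), SetUpperLimit(640,
+// &limit) sets it to 640; a later SetUpperLimit(1280, &limit) then leaves it
+// at 640.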
+
+// Updates |format_upper_limit| from |constraint|.
+// If constraint.maxFoo is smaller than format_upper_limit.foo,
+// set format_upper_limit.foo to constraint.maxFoo.
+void SetUpperLimitFromConstraint(
+    const MediaConstraintsInterface::Constraint& constraint,
+    cricket::VideoFormat* format_upper_limit) {
+  if (constraint.key == MediaConstraintsInterface::kMaxWidth) {
+    int value = rtc::FromString<int>(constraint.value);
+    SetUpperLimit(value, &(format_upper_limit->width));
+  } else if (constraint.key == MediaConstraintsInterface::kMaxHeight) {
+    int value = rtc::FromString<int>(constraint.value);
+    SetUpperLimit(value, &(format_upper_limit->height));
+  }
+}
+
+// Fills |format_out| with the max width and height allowed by |constraints|.
+void FromConstraintsForScreencast(
+    const MediaConstraintsInterface::Constraints& constraints,
+    cricket::VideoFormat* format_out) {
+  typedef MediaConstraintsInterface::Constraints::const_iterator
+      ConstraintsIterator;
+
+  cricket::VideoFormat upper_limit(-1, -1, 0, 0);
+  for (ConstraintsIterator constraints_it = constraints.begin();
+       constraints_it != constraints.end(); ++constraints_it)
+    SetUpperLimitFromConstraint(*constraints_it, &upper_limit);
+
+  if (upper_limit.width >= 0)
+    format_out->width = upper_limit.width;
+  if (upper_limit.height >= 0)
+    format_out->height = upper_limit.height;
+}
+
+// Returns true if |constraint| is fulfilled. |format_out| can differ from
+// |format_in| if the format is changed by the constraint, e.g. the frame rate
+// can be changed by setting maxFrameRate.
+bool NewFormatWithConstraints(
+    const MediaConstraintsInterface::Constraint& constraint,
+    const cricket::VideoFormat& format_in,
+    bool mandatory,
+    cricket::VideoFormat* format_out) {
+  ASSERT(format_out != NULL);
+  *format_out = format_in;
+
+  if (constraint.key == MediaConstraintsInterface::kMinWidth) {
+    int value = rtc::FromString<int>(constraint.value);
+    return (value <= format_in.width);
+  } else if (constraint.key == MediaConstraintsInterface::kMaxWidth) {
+    int value = rtc::FromString<int>(constraint.value);
+    return (value >= format_in.width);
+  } else if (constraint.key == MediaConstraintsInterface::kMinHeight) {
+    int value = rtc::FromString<int>(constraint.value);
+    return (value <= format_in.height);
+  } else if (constraint.key == MediaConstraintsInterface::kMaxHeight) {
+    int value = rtc::FromString<int>(constraint.value);
+    return (value >= format_in.height);
+  } else if (constraint.key == MediaConstraintsInterface::kMinFrameRate) {
+    int value = rtc::FromString<int>(constraint.value);
+    return (value <= cricket::VideoFormat::IntervalToFps(format_in.interval));
+  } else if (constraint.key == MediaConstraintsInterface::kMaxFrameRate) {
+    int value = rtc::FromString<int>(constraint.value);
+    if (value == 0) {
+      if (mandatory) {
+        // TODO(ronghuawu): Convert the constraint value to float when sub-1fps
+        // is supported by the capturer.
+        return false;
+      } else {
+        value = 1;
+      }
+    }
+    if (value <= cricket::VideoFormat::IntervalToFps(format_in.interval))
+      format_out->interval = cricket::VideoFormat::FpsToInterval(value);
+    return true;
+  } else if (constraint.key == MediaConstraintsInterface::kMinAspectRatio) {
+    double value = rtc::FromString<double>(constraint.value);
+    // The aspect ratio in |constraint.value| has been converted to a string
+    // and back to a double, so it may have a rounding error. E.g. if the
+    // value 1/3 is converted to a string, the string has finite length and
+    // precision is lost. We add a margin of 0.0005, which is large enough to
+    // detect the same aspect ratio but small enough to avoid matching wrong
+    // aspect ratios.
+    double ratio = static_cast<double>(format_in.width) / format_in.height;
+    return  (value <= ratio + kRoundingTruncation);
+  } else if (constraint.key == MediaConstraintsInterface::kMaxAspectRatio) {
+    double value = rtc::FromString<double>(constraint.value);
+    double ratio = static_cast<double>(format_in.width) / format_in.height;
+    // Subtract kRoundingTruncation to avoid rounding problems; same margin as
+    // above.
+    return  (value >= ratio - kRoundingTruncation);
+  } else if (constraint.key == MediaConstraintsInterface::kNoiseReduction) {
+    // These are actually options, not constraints, so they can be satisfied
+    // regardless of the format.
+    return true;
+  }
+  LOG(LS_WARNING) << "Found unknown MediaStream constraint. Name:"
+      <<  constraint.key << " Value:" << constraint.value;
+  return false;
+}
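+
+// For example (a sketch): a 640x480@30fps |format_in| checked against a
+// non-mandatory maxFrameRate of 15 yields a 640x480@15fps |format_out| and
+// returns true, while a mandatory minWidth of 1280 simply returns false.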
+
+// Removes cricket::VideoFormats from |formats| that don't meet |constraint|.
+void FilterFormatsByConstraint(
+    const MediaConstraintsInterface::Constraint& constraint,
+    bool mandatory,
+    std::vector<cricket::VideoFormat>* formats) {
+  std::vector<cricket::VideoFormat>::iterator format_it =
+      formats->begin();
+  while (format_it != formats->end()) {
+    // Modify the format_it to fulfill the constraint if possible.
+    // Delete it otherwise.
+    if (!NewFormatWithConstraints(constraint, (*format_it),
+                                  mandatory, &(*format_it))) {
+      format_it = formats->erase(format_it);
+    } else {
+      ++format_it;
+    }
+  }
+}
+
+// Returns a vector of cricket::VideoFormat that best match |constraints|.
+std::vector<cricket::VideoFormat> FilterFormats(
+    const MediaConstraintsInterface::Constraints& mandatory,
+    const MediaConstraintsInterface::Constraints& optional,
+    const std::vector<cricket::VideoFormat>& supported_formats) {
+  typedef MediaConstraintsInterface::Constraints::const_iterator
+      ConstraintsIterator;
+  std::vector<cricket::VideoFormat> candidates = supported_formats;
+
+  for (ConstraintsIterator constraints_it = mandatory.begin();
+       constraints_it != mandatory.end(); ++constraints_it)
+    FilterFormatsByConstraint(*constraints_it, true, &candidates);
+
+  if (candidates.size() == 0)
+    return candidates;
+
+  // OK - all mandatory constraints are checked and we still have candidates.
+  // Let's try filtering using the optional constraints.
+  for (ConstraintsIterator constraints_it = optional.begin();
+       constraints_it != optional.end(); ++constraints_it) {
+    std::vector<cricket::VideoFormat> current_candidates = candidates;
+    FilterFormatsByConstraint(*constraints_it, false, &current_candidates);
+    if (current_candidates.size() > 0) {
+      candidates = current_candidates;
+    }
+  }
+
+  // We have done as well as we can to filter the supported resolutions.
+  return candidates;
+}
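+
+// For example (a sketch): with supported formats {1280x720, 640x480} and a
+// mandatory maxWidth of 640, only 640x480 survives; an optional constraint
+// that would empty the list is simply ignored.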
+
+// Finds the format that best matches the default video size.
+// Since constraints are optional and the performance of a video call may
+// suffer from bitrate limitations, CPU load, and camera performance, it is
+// better to select a resolution that is as close as possible to our default
+// while still meeting the constraints.
+const cricket::VideoFormat& GetBestCaptureFormat(
+    const std::vector<cricket::VideoFormat>& formats) {
+  ASSERT(formats.size() > 0);
+
+  int default_area = kDefaultFormat.width * kDefaultFormat.height;
+
+  std::vector<cricket::VideoFormat>::const_iterator it = formats.begin();
+  std::vector<cricket::VideoFormat>::const_iterator best_it = formats.begin();
+  int best_diff_area = std::abs(default_area - it->width * it->height);
+  int64_t best_diff_interval = kDefaultFormat.interval;
+  for (; it != formats.end(); ++it) {
+    int diff_area = std::abs(default_area - it->width * it->height);
+    int64_t diff_interval = std::abs(kDefaultFormat.interval - it->interval);
+    if (diff_area < best_diff_area ||
+        (diff_area == best_diff_area && diff_interval < best_diff_interval)) {
+      best_diff_area = diff_area;
+      best_diff_interval = diff_interval;
+      best_it = it;
+    }
+  }
+  return *best_it;
+}
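+
+// For example (a sketch): given {1280x720, 640x480, 320x240} this picks
+// 640x480, whose area exactly matches the 640x480 default.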
+
+// Set |option| to the highest-priority value of |key| in the constraints.
+// Return false if the key is mandatory, and the value is invalid.
+bool ExtractOption(const MediaConstraintsInterface* all_constraints,
+                   const std::string& key,
+                   rtc::Optional<bool>* option) {
+  size_t mandatory = 0;
+  bool value;
+  if (FindConstraint(all_constraints, key, &value, &mandatory)) {
+    *option = rtc::Optional<bool>(value);
+    return true;
+  }
+
+  return mandatory == 0;
+}
+
+// Search |all_constraints| for known video options.  Apply all options that are
+// found with valid values, and return false if any mandatory video option was
+// found with an invalid value.
+bool ExtractVideoOptions(const MediaConstraintsInterface* all_constraints,
+                         cricket::VideoOptions* options) {
+  bool all_valid = true;
+
+  all_valid &= ExtractOption(all_constraints,
+      MediaConstraintsInterface::kNoiseReduction,
+      &(options->video_noise_reduction));
+
+  return all_valid;
+}
+
+}  // anonymous namespace
+
+namespace webrtc {
+
+rtc::scoped_refptr<VideoSource> VideoSource::Create(
+    cricket::ChannelManager* channel_manager,
+    cricket::VideoCapturer* capturer,
+    const webrtc::MediaConstraintsInterface* constraints,
+    bool remote) {
+  ASSERT(channel_manager != NULL);
+  ASSERT(capturer != NULL);
+  rtc::scoped_refptr<VideoSource> source(new rtc::RefCountedObject<VideoSource>(
+      channel_manager, capturer, remote));
+  source->Initialize(constraints);
+  return source;
+}
+
+VideoSource::VideoSource(cricket::ChannelManager* channel_manager,
+                         cricket::VideoCapturer* capturer,
+                         bool remote)
+    : channel_manager_(channel_manager),
+      video_capturer_(capturer),
+      state_(kInitializing),
+      remote_(remote) {
+  channel_manager_->SignalVideoCaptureStateChange.connect(
+      this, &VideoSource::OnStateChange);
+}
+
+VideoSource::~VideoSource() {
+  channel_manager_->StopVideoCapture(video_capturer_.get(), format_);
+  channel_manager_->SignalVideoCaptureStateChange.disconnect(this);
+}
+
+void VideoSource::Initialize(
+    const webrtc::MediaConstraintsInterface* constraints) {
+  std::vector<cricket::VideoFormat> formats =
+      channel_manager_->GetSupportedFormats(video_capturer_.get());
+  if (formats.empty()) {
+    if (video_capturer_->IsScreencast()) {
+      // The screen capturer can accept any resolution and we will derive the
+      // format from the constraints if any.
+      // Note that this only affects tab capturing, not desktop capturing,
+      // since the desktop capturer does not respect the VideoFormat passed in.
+      formats.push_back(cricket::VideoFormat(kDefaultFormat));
+    } else {
+      // The VideoCapturer implementation doesn't support capability
+      // enumeration. We need to guess what the camera supports.
+      for (int i = 0; i < arraysize(kVideoFormats); ++i) {
+        formats.push_back(cricket::VideoFormat(kVideoFormats[i]));
+      }
+    }
+  }
+
+  if (constraints) {
+    MediaConstraintsInterface::Constraints mandatory_constraints =
+        constraints->GetMandatory();
+    MediaConstraintsInterface::Constraints optional_constraints =
+        constraints->GetOptional();
+
+    if (video_capturer_->IsScreencast()) {
+      // Use the maxWidth and maxHeight allowed by constraints for screencast.
+      FromConstraintsForScreencast(mandatory_constraints, &(formats[0]));
+    }
+
+    formats = FilterFormats(mandatory_constraints, optional_constraints,
+                            formats);
+  }
+
+  if (formats.size() == 0) {
+    LOG(LS_WARNING) << "Failed to find a suitable video format.";
+    SetState(kEnded);
+    return;
+  }
+
+  cricket::VideoOptions options;
+  if (!ExtractVideoOptions(constraints, &options)) {
+    LOG(LS_WARNING) << "Could not satisfy mandatory options.";
+    SetState(kEnded);
+    return;
+  }
+  options_.SetAll(options);
+
+  format_ = GetBestCaptureFormat(formats);
+  // Start the camera with our best guess.
+  // TODO(perkj): Should we try again with another format if it turns out that
+  // the camera doesn't produce frames with the correct format? Or will
+  // cricket::VideoCapturer be able to re-scale / crop to the requested
+  // resolution?
+  if (!channel_manager_->StartVideoCapture(video_capturer_.get(), format_)) {
+    SetState(kEnded);
+    return;
+  }
+  // Initialize hasn't succeeded until a successful state change has occurred.
+}
+
+void VideoSource::Stop() {
+  channel_manager_->StopVideoCapture(video_capturer_.get(), format_);
+}
+
+void VideoSource::Restart() {
+  if (!channel_manager_->StartVideoCapture(video_capturer_.get(), format_)) {
+    SetState(kEnded);
+    return;
+  }
+  for (auto* sink : sinks_) {
+    channel_manager_->AddVideoSink(video_capturer_.get(), sink);
+  }
+}
+
+void VideoSource::AddSink(
+    rtc::VideoSinkInterface<cricket::VideoFrame>* output) {
+  sinks_.push_back(output);
+  channel_manager_->AddVideoSink(video_capturer_.get(), output);
+}
+
+void VideoSource::RemoveSink(
+    rtc::VideoSinkInterface<cricket::VideoFrame>* output) {
+  sinks_.remove(output);
+  channel_manager_->RemoveVideoSink(video_capturer_.get(), output);
+}
+
+// OnStateChange listens to the ChannelManager::SignalVideoCaptureStateChange.
+// This signal is triggered for all video capturers, not only the one we are
+// interested in.
+void VideoSource::OnStateChange(cricket::VideoCapturer* capturer,
+                                cricket::CaptureState capture_state) {
+  if (capturer == video_capturer_.get()) {
+    SetState(GetReadyState(capture_state));
+  }
+}
+
+void VideoSource::SetState(SourceState new_state) {
+  // TODO(hbos): Temporarily disabled VERIFY due to webrtc:4776.
+  // if (VERIFY(state_ != new_state)) {
+  if (state_ != new_state) {
+    state_ = new_state;
+    FireOnChanged();
+  }
+}
+
+}  // namespace webrtc
diff --git a/webrtc/api/videosource.h b/webrtc/api/videosource.h
new file mode 100644
index 0000000..262bc44
--- /dev/null
+++ b/webrtc/api/videosource.h
@@ -0,0 +1,116 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_VIDEOSOURCE_H_
+#define WEBRTC_API_VIDEOSOURCE_H_
+
+#include <list>
+
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/api/notifier.h"
+#include "webrtc/api/videosourceinterface.h"
+#include "webrtc/api/videotrackrenderers.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/sigslot.h"
+#include "webrtc/media/base/videosinkinterface.h"
+#include "webrtc/media/base/videocapturer.h"
+#include "webrtc/media/base/videocommon.h"
+
+// VideoSource implements VideoSourceInterface. It owns a
+// cricket::VideoCapturer and makes sure the camera is started at a resolution
+// that honors the constraints.
+// The state is set depending on the result of starting the capturer.
+// If the constraints can't be met or the capturer fails to start, the state
+// transitions to kEnded; otherwise it transitions to kLive.
+
+namespace cricket {
+
+class ChannelManager;
+
+}  // namespace cricket
+
+namespace webrtc {
+
+class MediaConstraintsInterface;
+
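+// Typical creation, as a sketch (|channel_manager| and |capturer| are assumed
+// to exist; NULL constraints open the camera at a default resolution):
+//   rtc::scoped_refptr<webrtc::VideoSource> source =
+//       webrtc::VideoSource::Create(channel_manager, capturer,
+//                                   nullptr /* constraints */,
+//                                   false /* remote */);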
+class VideoSource : public Notifier<VideoSourceInterface>,
+                    public sigslot::has_slots<> {
+ public:
+  // Creates an instance of VideoSource.
+  // VideoSource takes ownership of |capturer|.
+  // |constraints| can be NULL, in which case the camera is opened using a
+  // default resolution.
+  static rtc::scoped_refptr<VideoSource> Create(
+      cricket::ChannelManager* channel_manager,
+      cricket::VideoCapturer* capturer,
+      const webrtc::MediaConstraintsInterface* constraints,
+      bool remote);
+
+  SourceState state() const override { return state_; }
+  bool remote() const override { return remote_; }
+
+  virtual const cricket::VideoOptions* options() const { return &options_; }
+
+  virtual cricket::VideoCapturer* GetVideoCapturer() {
+    return video_capturer_.get();
+  }
+
+  void Stop() override;
+  void Restart() override;
+
+  // |output| will be served video frames as long as the underlying capturer
+  // is running.
+  virtual void AddSink(rtc::VideoSinkInterface<cricket::VideoFrame>* output);
+  virtual void RemoveSink(rtc::VideoSinkInterface<cricket::VideoFrame>* output);
+
+ protected:
+  VideoSource(cricket::ChannelManager* channel_manager,
+              cricket::VideoCapturer* capturer,
+              bool remote);
+  virtual ~VideoSource();
+  void Initialize(const webrtc::MediaConstraintsInterface* constraints);
+
+ private:
+  void OnStateChange(cricket::VideoCapturer* capturer,
+                     cricket::CaptureState capture_state);
+  void SetState(SourceState new_state);
+
+  cricket::ChannelManager* channel_manager_;
+  rtc::scoped_ptr<cricket::VideoCapturer> video_capturer_;
+  rtc::scoped_ptr<cricket::VideoRenderer> frame_input_;
+
+  std::list<rtc::VideoSinkInterface<cricket::VideoFrame>*> sinks_;
+
+  cricket::VideoFormat format_;
+  cricket::VideoOptions options_;
+  SourceState state_;
+  const bool remote_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_VIDEOSOURCE_H_
diff --git a/webrtc/api/videosource_unittest.cc b/webrtc/api/videosource_unittest.cc
new file mode 100644
index 0000000..26543ad
--- /dev/null
+++ b/webrtc/api/videosource_unittest.cc
@@ -0,0 +1,546 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <string>
+#include <vector>
+
+#include "talk/session/media/channelmanager.h"
+#include "webrtc/api/remotevideocapturer.h"
+#include "webrtc/api/test/fakeconstraints.h"
+#include "webrtc/api/videosource.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/media/base/fakemediaengine.h"
+#include "webrtc/media/base/fakevideocapturer.h"
+#include "webrtc/media/base/fakevideorenderer.h"
+#include "webrtc/media/webrtc/webrtcvideoframe.h"
+
+using webrtc::FakeConstraints;
+using webrtc::VideoSource;
+using webrtc::MediaConstraintsInterface;
+using webrtc::MediaSourceInterface;
+using webrtc::ObserverInterface;
+using webrtc::VideoSourceInterface;
+
+namespace {
+
+// Max wait time for a test.
+const int kMaxWaitMs = 100;
+
+}  // anonymous namespace
+
+
+// TestVideoCapturer extends cricket::FakeVideoCapturer so it can be used for
+// testing without known camera formats.
+// It keeps its own lists of cricket::VideoFormats for the unit tests in this
+// file.
+class TestVideoCapturer : public cricket::FakeVideoCapturer {
+ public:
+  TestVideoCapturer() : test_without_formats_(false) {
+    std::vector<cricket::VideoFormat> formats;
+    formats.push_back(cricket::VideoFormat(1280, 720,
+        cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+    formats.push_back(cricket::VideoFormat(640, 480,
+        cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+    formats.push_back(cricket::VideoFormat(640, 400,
+        cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+    formats.push_back(cricket::VideoFormat(320, 240,
+        cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+    formats.push_back(cricket::VideoFormat(352, 288,
+        cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+    ResetSupportedFormats(formats);
+  }
+
+  // Resets the supported capture formats and simulates a
+  // cricket::VideoCapturer implementation that doesn't support capture
+  // format enumeration, mirroring the current Chrome implementation.
+  void TestWithoutCameraFormats() {
+    test_without_formats_ = true;
+    std::vector<cricket::VideoFormat> formats;
+    ResetSupportedFormats(formats);
+  }
+
+  virtual cricket::CaptureState Start(
+      const cricket::VideoFormat& capture_format) {
+    if (test_without_formats_) {
+      std::vector<cricket::VideoFormat> formats;
+      formats.push_back(capture_format);
+      ResetSupportedFormats(formats);
+    }
+    return FakeVideoCapturer::Start(capture_format);
+  }
+
+  virtual bool GetBestCaptureFormat(const cricket::VideoFormat& desired,
+                                    cricket::VideoFormat* best_format) {
+    if (test_without_formats_) {
+      *best_format = desired;
+      return true;
+    }
+    return FakeVideoCapturer::GetBestCaptureFormat(desired,
+                                                   best_format);
+  }
+
+ private:
+  bool test_without_formats_;
+};
+
+class StateObserver : public ObserverInterface {
+ public:
+  explicit StateObserver(VideoSourceInterface* source)
+     : state_(source->state()),
+       source_(source) {
+  }
+  virtual void OnChanged() {
+    state_ = source_->state();
+  }
+  MediaSourceInterface::SourceState state() const { return state_; }
+
+ private:
+  MediaSourceInterface::SourceState state_;
+  rtc::scoped_refptr<VideoSourceInterface> source_;
+};
+
+class VideoSourceTest : public testing::Test {
+ protected:
+  VideoSourceTest()
+      : capturer_cleanup_(new TestVideoCapturer()),
+        capturer_(capturer_cleanup_.get()),
+        channel_manager_(new cricket::ChannelManager(
+          new cricket::FakeMediaEngine(), rtc::Thread::Current())) {
+  }
+
+  void SetUp() {
+    ASSERT_TRUE(channel_manager_->Init());
+  }
+
+  void CreateVideoSource() {
+    CreateVideoSource(NULL);
+  }
+
+  void CreateVideoSource(
+      const webrtc::MediaConstraintsInterface* constraints) {
+    // VideoSource takes ownership of |capturer_|.
+    source_ =
+        VideoSource::Create(channel_manager_.get(), capturer_cleanup_.release(),
+                            constraints, false);
+
+    ASSERT_TRUE(source_.get() != NULL);
+    EXPECT_EQ(capturer_, source_->GetVideoCapturer());
+
+    state_observer_.reset(new StateObserver(source_));
+    source_->RegisterObserver(state_observer_.get());
+    source_->AddSink(&renderer_);
+  }
+
+  rtc::scoped_ptr<TestVideoCapturer> capturer_cleanup_;
+  TestVideoCapturer* capturer_;
+  cricket::FakeVideoRenderer renderer_;
+  rtc::scoped_ptr<cricket::ChannelManager> channel_manager_;
+  rtc::scoped_ptr<StateObserver> state_observer_;
+  rtc::scoped_refptr<VideoSource> source_;
+};
+
+
+// Test that a VideoSource transitions to the kLive state when the capture
+// device has started and to kEnded when it is stopped.
+// It also tests that an output can receive video frames.
+TEST_F(VideoSourceTest, CapturerStartStop) {
+  // Initialize without constraints.
+  CreateVideoSource();
+  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+                 kMaxWaitMs);
+
+  ASSERT_TRUE(capturer_->CaptureFrame());
+  EXPECT_EQ(1, renderer_.num_rendered_frames());
+
+  capturer_->Stop();
+  EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
+                 kMaxWaitMs);
+}
+
+// Test that a VideoSource can be stopped and restarted.
+TEST_F(VideoSourceTest, StopRestart) {
+  // Initialize without constraints.
+  CreateVideoSource();
+  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+                 kMaxWaitMs);
+
+  ASSERT_TRUE(capturer_->CaptureFrame());
+  EXPECT_EQ(1, renderer_.num_rendered_frames());
+
+  source_->Stop();
+  EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
+                 kMaxWaitMs);
+
+  source_->Restart();
+  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+                 kMaxWaitMs);
+
+  ASSERT_TRUE(capturer_->CaptureFrame());
+  EXPECT_EQ(2, renderer_.num_rendered_frames());
+
+  source_->Stop();
+}
+
+// Test start/stop with a remote VideoSource - a video source that has a
+// RemoteVideoCapturer and takes video frames from FrameInput.
+TEST_F(VideoSourceTest, StartStopRemote) {
+  source_ = VideoSource::Create(channel_manager_.get(),
+                                new webrtc::RemoteVideoCapturer(), NULL, true);
+
+  ASSERT_TRUE(source_.get() != NULL);
+  EXPECT_TRUE(NULL != source_->GetVideoCapturer());
+
+  state_observer_.reset(new StateObserver(source_));
+  source_->RegisterObserver(state_observer_.get());
+  source_->AddSink(&renderer_);
+
+  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+                 kMaxWaitMs);
+
+  source_->GetVideoCapturer()->Stop();
+  EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
+                 kMaxWaitMs);
+}
+
+// Test that a VideoSource transitions to kEnded if the capture device
+// fails.
+TEST_F(VideoSourceTest, CameraFailed) {
+  CreateVideoSource();
+  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+                 kMaxWaitMs);
+
+  capturer_->SignalStateChange(capturer_, cricket::CS_FAILED);
+  EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
+                 kMaxWaitMs);
+}
+
+// Test that the capture output is CIF if we set the max constraints to CIF
+// and the capture device supports CIF.
+TEST_F(VideoSourceTest, MandatoryConstraintCif5Fps) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(MediaConstraintsInterface::kMaxWidth, 352);
+  constraints.AddMandatory(MediaConstraintsInterface::kMaxHeight, 288);
+  constraints.AddMandatory(MediaConstraintsInterface::kMaxFrameRate, 5);
+
+  CreateVideoSource(&constraints);
+  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+                 kMaxWaitMs);
+  const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
+  ASSERT_TRUE(format != NULL);
+  EXPECT_EQ(352, format->width);
+  EXPECT_EQ(288, format->height);
+  EXPECT_EQ(30, format->framerate());
+}
+
+// Test that the capture output is 720P if the camera supports it and the
+// optional constraint is set to 720P.
+TEST_F(VideoSourceTest, MandatoryMinVgaOptional720P) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(MediaConstraintsInterface::kMinWidth, 640);
+  constraints.AddMandatory(MediaConstraintsInterface::kMinHeight, 480);
+  constraints.AddOptional(MediaConstraintsInterface::kMinWidth, 1280);
+  constraints.AddOptional(MediaConstraintsInterface::kMinAspectRatio,
+                          1280.0 / 720);
+
+  CreateVideoSource(&constraints);
+  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+                 kMaxWaitMs);
+  const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
+  ASSERT_TRUE(format != NULL);
+  EXPECT_EQ(1280, format->width);
+  EXPECT_EQ(720, format->height);
+  EXPECT_EQ(30, format->framerate());
+}
+
+// Test that the capture output has a 4:3 aspect ratio if a mandatory
+// constraint requires it, even if an optional constraint requests a higher
+// resolution that doesn't have this aspect ratio.
+TEST_F(VideoSourceTest, MandatoryAspectRatio4To3) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(MediaConstraintsInterface::kMinWidth, 640);
+  constraints.AddMandatory(MediaConstraintsInterface::kMinHeight, 480);
+  constraints.AddMandatory(MediaConstraintsInterface::kMaxAspectRatio,
+                           640.0 / 480);
+  constraints.AddOptional(MediaConstraintsInterface::kMinWidth, 1280);
+
+  CreateVideoSource(&constraints);
+  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+                 kMaxWaitMs);
+  const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
+  ASSERT_TRUE(format != NULL);
+  EXPECT_EQ(640, format->width);
+  EXPECT_EQ(480, format->height);
+  EXPECT_EQ(30, format->framerate());
+}
+
+
+// Test that the source state transitions to kEnded if the mandatory aspect
+// ratio is set higher than supported.
+TEST_F(VideoSourceTest, MandatoryAspectRatioTooHigh) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(MediaConstraintsInterface::kMinAspectRatio, 2);
+  CreateVideoSource(&constraints);
+  EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
+                 kMaxWaitMs);
+}
+
+// Test that the source ignores an optional aspect ratio that is higher than
+// supported.
+TEST_F(VideoSourceTest, OptionalAspectRatioTooHigh) {
+  FakeConstraints constraints;
+  constraints.AddOptional(MediaConstraintsInterface::kMinAspectRatio, 2);
+  CreateVideoSource(&constraints);
+  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+                 kMaxWaitMs);
+  const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
+  ASSERT_TRUE(format != NULL);
+  double aspect_ratio = static_cast<double>(format->width) / format->height;
+  EXPECT_LT(aspect_ratio, 2);
+}
+
+// Test that the source starts video with the default resolution if the
+// camera doesn't support capability enumeration and there are no constraints.
+TEST_F(VideoSourceTest, NoCameraCapability) {
+  capturer_->TestWithoutCameraFormats();
+
+  CreateVideoSource();
+  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+                 kMaxWaitMs);
+  const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
+  ASSERT_TRUE(format != NULL);
+  EXPECT_EQ(640, format->width);
+  EXPECT_EQ(480, format->height);
+  EXPECT_EQ(30, format->framerate());
+}
+
+// Test that the source can start the video and get the requested aspect ratio
+// if the camera doesn't support capability enumeration and the aspect ratio is
+// set.
+TEST_F(VideoSourceTest, NoCameraCapability16To9Ratio) {
+  capturer_->TestWithoutCameraFormats();
+
+  FakeConstraints constraints;
+  double requested_aspect_ratio = 640.0 / 360;
+  constraints.AddMandatory(MediaConstraintsInterface::kMinWidth, 640);
+  constraints.AddMandatory(MediaConstraintsInterface::kMinAspectRatio,
+                           requested_aspect_ratio);
+
+  CreateVideoSource(&constraints);
+  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+                 kMaxWaitMs);
+  const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
+  double aspect_ratio = static_cast<double>(format->width) / format->height;
+  EXPECT_LE(requested_aspect_ratio, aspect_ratio);
+}
+
+// Test that the source state transitions to kEnded if an unknown mandatory
+// constraint is found.
+TEST_F(VideoSourceTest, InvalidMandatoryConstraint) {
+  FakeConstraints constraints;
+  constraints.AddMandatory("weird key", 640);
+
+  CreateVideoSource(&constraints);
+  EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
+                 kMaxWaitMs);
+}
+
+// Test that the source ignores an unknown optional constraint.
+TEST_F(VideoSourceTest, InvalidOptionalConstraint) {
+  FakeConstraints constraints;
+  constraints.AddOptional("weird key", 640);
+
+  CreateVideoSource(&constraints);
+  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+                 kMaxWaitMs);
+}
+
+TEST_F(VideoSourceTest, SetValidOptionValues) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(MediaConstraintsInterface::kNoiseReduction, "false");
+
+  CreateVideoSource(&constraints);
+
+  EXPECT_EQ(rtc::Optional<bool>(false),
+            source_->options()->video_noise_reduction);
+}
+
+TEST_F(VideoSourceTest, OptionNotSet) {
+  FakeConstraints constraints;
+  CreateVideoSource(&constraints);
+  EXPECT_EQ(rtc::Optional<bool>(), source_->options()->video_noise_reduction);
+}
+
+TEST_F(VideoSourceTest, MandatoryOptionOverridesOptional) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(
+      MediaConstraintsInterface::kNoiseReduction, true);
+  constraints.AddOptional(
+      MediaConstraintsInterface::kNoiseReduction, false);
+
+  CreateVideoSource(&constraints);
+
+  EXPECT_EQ(rtc::Optional<bool>(true),
+            source_->options()->video_noise_reduction);
+}
+
+TEST_F(VideoSourceTest, InvalidOptionKeyOptional) {
+  FakeConstraints constraints;
+  constraints.AddOptional(
+      MediaConstraintsInterface::kNoiseReduction, false);
+  constraints.AddOptional("invalidKey", false);
+
+  CreateVideoSource(&constraints);
+
+  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+      kMaxWaitMs);
+  EXPECT_EQ(rtc::Optional<bool>(false),
+            source_->options()->video_noise_reduction);
+}
+
+TEST_F(VideoSourceTest, InvalidOptionKeyMandatory) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(
+      MediaConstraintsInterface::kNoiseReduction, false);
+  constraints.AddMandatory("invalidKey", false);
+
+  CreateVideoSource(&constraints);
+
+  EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
+      kMaxWaitMs);
+  EXPECT_EQ(rtc::Optional<bool>(), source_->options()->video_noise_reduction);
+}
+
+TEST_F(VideoSourceTest, InvalidOptionValueOptional) {
+  FakeConstraints constraints;
+  constraints.AddOptional(
+      MediaConstraintsInterface::kNoiseReduction, "not a boolean");
+
+  CreateVideoSource(&constraints);
+
+  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+      kMaxWaitMs);
+  EXPECT_EQ(rtc::Optional<bool>(), source_->options()->video_noise_reduction);
+}
+
+TEST_F(VideoSourceTest, InvalidOptionValueMandatory) {
+  FakeConstraints constraints;
+  // Optional constraints should be ignored if the mandatory constraints fail.
+  constraints.AddOptional(
+      MediaConstraintsInterface::kNoiseReduction, "false");
+  // Values are case-sensitive and must be all lower-case.
+  constraints.AddMandatory(
+      MediaConstraintsInterface::kNoiseReduction, "True");
+
+  CreateVideoSource(&constraints);
+
+  EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
+      kMaxWaitMs);
+  EXPECT_EQ(rtc::Optional<bool>(), source_->options()->video_noise_reduction);
+}
+
+TEST_F(VideoSourceTest, MixedOptionsAndConstraints) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(MediaConstraintsInterface::kMaxWidth, 352);
+  constraints.AddMandatory(MediaConstraintsInterface::kMaxHeight, 288);
+  constraints.AddOptional(MediaConstraintsInterface::kMaxFrameRate, 5);
+
+  constraints.AddMandatory(
+      MediaConstraintsInterface::kNoiseReduction, false);
+  constraints.AddOptional(
+      MediaConstraintsInterface::kNoiseReduction, true);
+
+  CreateVideoSource(&constraints);
+  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+                 kMaxWaitMs);
+  const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
+  ASSERT_TRUE(format != NULL);
+  EXPECT_EQ(352, format->width);
+  EXPECT_EQ(288, format->height);
+  EXPECT_EQ(30, format->framerate());
+
+  EXPECT_EQ(rtc::Optional<bool>(false),
+            source_->options()->video_noise_reduction);
+}
+
+// Tests that the source starts video with the default resolution for
+// screencast if no constraint is set.
+TEST_F(VideoSourceTest, ScreencastResolutionNoConstraint) {
+  capturer_->TestWithoutCameraFormats();
+  capturer_->SetScreencast(true);
+
+  CreateVideoSource();
+  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+                 kMaxWaitMs);
+  const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
+  ASSERT_TRUE(format != NULL);
+  EXPECT_EQ(640, format->width);
+  EXPECT_EQ(480, format->height);
+  EXPECT_EQ(30, format->framerate());
+}
+
+// Tests that the source starts video with the max width and height set by
+// constraints for screencast.
+TEST_F(VideoSourceTest, ScreencastResolutionWithConstraint) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(MediaConstraintsInterface::kMaxWidth, 480);
+  constraints.AddMandatory(MediaConstraintsInterface::kMaxHeight, 270);
+
+  capturer_->TestWithoutCameraFormats();
+  capturer_->SetScreencast(true);
+
+  CreateVideoSource(&constraints);
+  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+                 kMaxWaitMs);
+  const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
+  ASSERT_TRUE(format != NULL);
+  EXPECT_EQ(480, format->width);
+  EXPECT_EQ(270, format->height);
+  EXPECT_EQ(30, format->framerate());
+}
+
+TEST_F(VideoSourceTest, MandatorySubOneFpsConstraints) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(MediaConstraintsInterface::kMaxFrameRate, 0.5);
+
+  CreateVideoSource(&constraints);
+  EXPECT_EQ_WAIT(MediaSourceInterface::kEnded, state_observer_->state(),
+                 kMaxWaitMs);
+  ASSERT_TRUE(capturer_->GetCaptureFormat() == NULL);
+}
+
+TEST_F(VideoSourceTest, OptionalSubOneFpsConstraints) {
+  FakeConstraints constraints;
+  constraints.AddOptional(MediaConstraintsInterface::kMaxFrameRate, 0.5);
+
+  CreateVideoSource(&constraints);
+  EXPECT_EQ_WAIT(MediaSourceInterface::kLive, state_observer_->state(),
+                 kMaxWaitMs);
+  const cricket::VideoFormat* format = capturer_->GetCaptureFormat();
+  ASSERT_TRUE(format != NULL);
+  EXPECT_EQ(30, format->framerate());
+}
diff --git a/webrtc/api/videosourceinterface.h b/webrtc/api/videosourceinterface.h
new file mode 100644
index 0000000..5491576
--- /dev/null
+++ b/webrtc/api/videosourceinterface.h
@@ -0,0 +1,68 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_VIDEOSOURCEINTERFACE_H_
+#define WEBRTC_API_VIDEOSOURCEINTERFACE_H_
+
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/media/base/mediachannel.h"
+#include "webrtc/media/base/videorenderer.h"
+
+namespace webrtc {
+
+// VideoSourceInterface is a reference-counted source used for VideoTracks.
+// The same source can be used in multiple VideoTracks.
+// The methods are only supposed to be called by the PeerConnection
+// implementation.
+class VideoSourceInterface : public MediaSourceInterface {
+ public:
+  // Get access to the source implementation of cricket::VideoCapturer.
+  // This can be used for receiving frames and state notifications, but it
+  // should not be used for starting or stopping capturing.
+  virtual cricket::VideoCapturer* GetVideoCapturer() = 0;
+
+  // Stop the video capturer.
+  virtual void Stop() = 0;
+  virtual void Restart() = 0;
+
+  // Adds |output| to the source to receive frames.
+  virtual void AddSink(
+      rtc::VideoSinkInterface<cricket::VideoFrame>* output) = 0;
+  virtual void RemoveSink(
+      rtc::VideoSinkInterface<cricket::VideoFrame>* output) = 0;
+  virtual const cricket::VideoOptions* options() const = 0;
+  // TODO(nisse): Dummy implementation. Delete as soon as chrome's
+  // MockVideoSource is updated.
+  virtual cricket::VideoRenderer* FrameInput() { return nullptr; }
+
+ protected:
+  virtual ~VideoSourceInterface() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_VIDEOSOURCEINTERFACE_H_
diff --git a/webrtc/api/videosourceproxy.h b/webrtc/api/videosourceproxy.h
new file mode 100644
index 0000000..99a3b1e
--- /dev/null
+++ b/webrtc/api/videosourceproxy.h
@@ -0,0 +1,54 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_VIDEOSOURCEPROXY_H_
+#define WEBRTC_API_VIDEOSOURCEPROXY_H_
+
+#include "webrtc/api/proxy.h"
+#include "webrtc/api/videosourceinterface.h"
+
+namespace webrtc {
+
+// VideoSourceProxy makes sure the real VideoSourceInterface implementation is
+// destroyed on the signaling thread and marshals all method calls to the
+// signaling thread.
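+//
+// Conceptually (a rough sketch, not the literal macro expansion from
+// webrtc/api/proxy.h), an entry such as PROXY_METHOD0(void, Stop) generates
+// an override that blocks while the call runs on the signaling thread:
+//   void Stop() override {
+//     signaling_thread_->Invoke<void>(
+//         rtc::Bind(&VideoSourceInterface::Stop, c_.get()));
+//   }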
+BEGIN_PROXY_MAP(VideoSource)
+  PROXY_CONSTMETHOD0(SourceState, state)
+  PROXY_CONSTMETHOD0(bool, remote)
+  PROXY_METHOD0(cricket::VideoCapturer*, GetVideoCapturer)
+  PROXY_METHOD0(void, Stop)
+  PROXY_METHOD0(void, Restart)
+  PROXY_METHOD1(void, AddSink, rtc::VideoSinkInterface<cricket::VideoFrame>*)
+  PROXY_METHOD1(void, RemoveSink, rtc::VideoSinkInterface<cricket::VideoFrame>*)
+  PROXY_CONSTMETHOD0(const cricket::VideoOptions*, options)
+  PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
+  PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
+END_PROXY()
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_VIDEOSOURCEPROXY_H_
diff --git a/webrtc/api/videotrack.cc b/webrtc/api/videotrack.cc
new file mode 100644
index 0000000..4c87c39
--- /dev/null
+++ b/webrtc/api/videotrack.cc
@@ -0,0 +1,77 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/videotrack.h"
+
+#include <string>
+
+namespace webrtc {
+
+const char MediaStreamTrackInterface::kVideoKind[] = "video";
+
+VideoTrack::VideoTrack(const std::string& label,
+                       VideoSourceInterface* video_source)
+    : MediaStreamTrack<VideoTrackInterface>(label),
+      video_source_(video_source) {
+  if (video_source_)
+    video_source_->AddSink(&renderers_);
+}
+
+VideoTrack::~VideoTrack() {
+  if (video_source_)
+    video_source_->RemoveSink(&renderers_);
+}
+
+std::string VideoTrack::kind() const {
+  return kVideoKind;
+}
+
+void VideoTrack::AddRenderer(VideoRendererInterface* renderer) {
+  renderers_.AddRenderer(renderer);
+}
+
+void VideoTrack::RemoveRenderer(VideoRendererInterface* renderer) {
+  renderers_.RemoveRenderer(renderer);
+}
+
+rtc::VideoSinkInterface<cricket::VideoFrame>* VideoTrack::GetSink() {
+  return &renderers_;
+}
+
+bool VideoTrack::set_enabled(bool enable) {
+  renderers_.SetEnabled(enable);
+  return MediaStreamTrack<VideoTrackInterface>::set_enabled(enable);
+}
+
+rtc::scoped_refptr<VideoTrack> VideoTrack::Create(
+    const std::string& id, VideoSourceInterface* source) {
+  rtc::RefCountedObject<VideoTrack>* track =
+      new rtc::RefCountedObject<VideoTrack>(id, source);
+  return track;
+}
+
+}  // namespace webrtc
diff --git a/webrtc/api/videotrack.h b/webrtc/api/videotrack.h
new file mode 100644
index 0000000..399e513
--- /dev/null
+++ b/webrtc/api/videotrack.h
@@ -0,0 +1,65 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_VIDEOTRACK_H_
+#define WEBRTC_API_VIDEOTRACK_H_
+
+#include <string>
+
+#include "webrtc/api/mediastreamtrack.h"
+#include "webrtc/api/videosourceinterface.h"
+#include "webrtc/api/videotrackrenderers.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+
+namespace webrtc {
+
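+// VideoTrack is a MediaStreamTrack<VideoTrackInterface> implementation backed
+// by a VideoSourceInterface. A minimal creation sketch (mirroring
+// videotrack_unittest.cc):
+//   rtc::scoped_refptr<webrtc::VideoTrack> track =
+//       webrtc::VideoTrack::Create("track_id", source);
+//   track->AddRenderer(renderer);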
+class VideoTrack : public MediaStreamTrack<VideoTrackInterface> {
+ public:
+  static rtc::scoped_refptr<VideoTrack> Create(
+      const std::string& label, VideoSourceInterface* source);
+
+  virtual void AddRenderer(VideoRendererInterface* renderer);
+  virtual void RemoveRenderer(VideoRendererInterface* renderer);
+  virtual VideoSourceInterface* GetSource() const {
+    return video_source_.get();
+  }
+  rtc::VideoSinkInterface<cricket::VideoFrame>* GetSink() override;
+  virtual bool set_enabled(bool enable);
+  virtual std::string kind() const;
+
+ protected:
+  VideoTrack(const std::string& id, VideoSourceInterface* video_source);
+  ~VideoTrack();
+
+ private:
+  VideoTrackRenderers renderers_;
+  rtc::scoped_refptr<VideoSourceInterface> video_source_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_VIDEOTRACK_H_
diff --git a/webrtc/api/videotrack_unittest.cc b/webrtc/api/videotrack_unittest.cc
new file mode 100644
index 0000000..717cba6
--- /dev/null
+++ b/webrtc/api/videotrack_unittest.cc
@@ -0,0 +1,146 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <string>
+
+#include "talk/session/media/channelmanager.h"
+#include "webrtc/api/remotevideocapturer.h"
+#include "webrtc/api/test/fakevideotrackrenderer.h"
+#include "webrtc/api/videosource.h"
+#include "webrtc/api/videotrack.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/media/base/fakemediaengine.h"
+#include "webrtc/media/webrtc/webrtcvideoframe.h"
+
+using webrtc::FakeVideoTrackRenderer;
+using webrtc::VideoSource;
+using webrtc::VideoTrack;
+using webrtc::VideoTrackInterface;
+
+namespace {
+
+class WebRtcVideoTestFrame : public cricket::WebRtcVideoFrame {
+ public:
+  using cricket::WebRtcVideoFrame::SetRotation;
+};
+
+}  // namespace
+
+class VideoTrackTest : public testing::Test {
+ public:
+  VideoTrackTest() {
+    static const char kVideoTrackId[] = "track_id";
+
+    channel_manager_.reset(new cricket::ChannelManager(
+        new cricket::FakeMediaEngine(), rtc::Thread::Current()));
+    EXPECT_TRUE(channel_manager_->Init());
+    video_track_ = VideoTrack::Create(
+        kVideoTrackId,
+        VideoSource::Create(channel_manager_.get(),
+                            new webrtc::RemoteVideoCapturer(), NULL, true));
+  }
+
+ protected:
+  rtc::scoped_ptr<cricket::ChannelManager> channel_manager_;
+  rtc::scoped_refptr<VideoTrackInterface> video_track_;
+};
+
+// Test adding renderers to a video track and render to them by providing
+// frames to the source.
+TEST_F(VideoTrackTest, RenderVideo) {
+  // FakeVideoTrackRenderer registers itself with |video_track_|.
+  rtc::scoped_ptr<FakeVideoTrackRenderer> renderer_1(
+      new FakeVideoTrackRenderer(video_track_.get()));
+
+  rtc::VideoSinkInterface<cricket::VideoFrame>* renderer_input =
+      video_track_->GetSink();
+  ASSERT_FALSE(renderer_input == NULL);
+
+  cricket::WebRtcVideoFrame frame;
+  frame.InitToBlack(123, 123, 0);
+  renderer_input->OnFrame(frame);
+  EXPECT_EQ(1, renderer_1->num_rendered_frames());
+
+  EXPECT_EQ(123, renderer_1->width());
+  EXPECT_EQ(123, renderer_1->height());
+
+  // FakeVideoTrackRenderer registers itself with |video_track_|.
+  rtc::scoped_ptr<FakeVideoTrackRenderer> renderer_2(
+      new FakeVideoTrackRenderer(video_track_.get()));
+
+  renderer_input->OnFrame(frame);
+
+  EXPECT_EQ(123, renderer_1->width());
+  EXPECT_EQ(123, renderer_1->height());
+  EXPECT_EQ(123, renderer_2->width());
+  EXPECT_EQ(123, renderer_2->height());
+
+  EXPECT_EQ(2, renderer_1->num_rendered_frames());
+  EXPECT_EQ(1, renderer_2->num_rendered_frames());
+
+  video_track_->RemoveRenderer(renderer_1.get());
+  renderer_input->OnFrame(frame);
+
+  EXPECT_EQ(2, renderer_1->num_rendered_frames());
+  EXPECT_EQ(2, renderer_2->num_rendered_frames());
+}
+
+// Test that disabling the track results in blacked out frames.
+TEST_F(VideoTrackTest, DisableTrackBlackout) {
+  rtc::scoped_ptr<FakeVideoTrackRenderer> renderer(
+      new FakeVideoTrackRenderer(video_track_.get()));
+
+  rtc::VideoSinkInterface<cricket::VideoFrame>* renderer_input =
+      video_track_->GetSink();
+  ASSERT_FALSE(renderer_input == NULL);
+
+  cricket::WebRtcVideoFrame frame;
+  frame.InitToBlack(100, 200, 0);
+  // Make it not all-black
+  frame.GetUPlane()[0] = 0;
+
+  renderer_input->OnFrame(frame);
+  EXPECT_EQ(1, renderer->num_rendered_frames());
+  EXPECT_FALSE(renderer->black_frame());
+  EXPECT_EQ(100, renderer->width());
+  EXPECT_EQ(200, renderer->height());
+
+  video_track_->set_enabled(false);
+  renderer_input->OnFrame(frame);
+  EXPECT_EQ(2, renderer->num_rendered_frames());
+  EXPECT_TRUE(renderer->black_frame());
+  EXPECT_EQ(100, renderer->width());
+  EXPECT_EQ(200, renderer->height());
+
+  video_track_->set_enabled(true);
+  renderer_input->OnFrame(frame);
+  EXPECT_EQ(3, renderer->num_rendered_frames());
+  EXPECT_FALSE(renderer->black_frame());
+  EXPECT_EQ(100, renderer->width());
+  EXPECT_EQ(200, renderer->height());
+}
diff --git a/webrtc/api/videotrackrenderers.cc b/webrtc/api/videotrackrenderers.cc
new file mode 100644
index 0000000..83615d4
--- /dev/null
+++ b/webrtc/api/videotrackrenderers.cc
@@ -0,0 +1,98 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/videotrackrenderers.h"
+#include "webrtc/media/webrtc/webrtcvideoframe.h"
+
+namespace webrtc {
+
+VideoTrackRenderers::VideoTrackRenderers() : enabled_(true) {
+}
+
+VideoTrackRenderers::~VideoTrackRenderers() {
+}
+
+void VideoTrackRenderers::AddRenderer(VideoRendererInterface* renderer) {
+  if (!renderer) {
+    return;
+  }
+  rtc::CritScope cs(&critical_section_);
+  renderers_.insert(renderer);
+}
+
+void VideoTrackRenderers::RemoveRenderer(VideoRendererInterface* renderer) {
+  rtc::CritScope cs(&critical_section_);
+  renderers_.erase(renderer);
+}
+
+void VideoTrackRenderers::SetEnabled(bool enable) {
+  rtc::CritScope cs(&critical_section_);
+  enabled_ = enable;
+}
+
+bool VideoTrackRenderers::RenderFrame(const cricket::VideoFrame* frame) {
+  {
+    rtc::CritScope cs(&critical_section_);
+    if (enabled_) {
+      RenderFrameToRenderers(frame);
+      return true;
+    }
+  }
+
+  // Generate the black frame outside of the critical section. Note
+  // that this may result in unexpected frame order, in the unlikely
+  // case that RenderFrame is called from multiple threads without
+  // proper serialization, and the track is switched from disabled to
+  // enabled in the middle of the first call.
+  cricket::WebRtcVideoFrame black(new rtc::RefCountedObject<I420Buffer>(
+                                      static_cast<int>(frame->GetWidth()),
+                                      static_cast<int>(frame->GetHeight())),
+                                  frame->GetTimeStamp(),
+                                  frame->GetVideoRotation());
+  black.SetToBlack();
+
+  {
+    rtc::CritScope cs(&critical_section_);
+    // Check enabled_ flag again, since the track might have been
+    // enabled while we generated the black frame. I think the
+    // enabled-ness ought to be applied at the track output, and hence
+    // an enabled track shouldn't send any blacked out frames.
+    RenderFrameToRenderers(enabled_ ? frame : &black);
+
+    return true;
+  }
+}
+
+// Called with critical_section_ already locked
+void VideoTrackRenderers::RenderFrameToRenderers(
+    const cricket::VideoFrame* frame) {
+  for (VideoRendererInterface* renderer : renderers_) {
+    renderer->RenderFrame(frame);
+  }
+}
+
+}  // namespace webrtc
diff --git a/webrtc/api/videotrackrenderers.h b/webrtc/api/videotrackrenderers.h
new file mode 100644
index 0000000..1ce5afa
--- /dev/null
+++ b/webrtc/api/videotrackrenderers.h
@@ -0,0 +1,71 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_VIDEOTRACKRENDERERS_H_
+#define WEBRTC_API_VIDEOTRACKRENDERERS_H_
+
+#include <set>
+
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/media/base/videorenderer.h"
+
+namespace webrtc {
+
+// Class used for rendering cricket::VideoFrames to multiple renderers of type
+// VideoRendererInterface.
+// Each VideoTrack owns a VideoTrackRenderers instance.
+// The class is thread safe. Rendering to the added VideoRendererInterfaces is
+// done on the same thread as the cricket::VideoRenderer.
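+//
+// A minimal usage sketch (as VideoTrack uses it):
+//   VideoTrackRenderers renderers;
+//   renderers.AddRenderer(renderer);  // |renderer| is a VideoRendererInterface*.
+//   renderers.SetEnabled(false);      // Subsequent frames render as black.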
+class VideoTrackRenderers : public cricket::VideoRenderer {
+ public:
+  VideoTrackRenderers();
+  ~VideoTrackRenderers();
+
+  // Implements cricket::VideoRenderer. If the track is disabled,
+  // incoming frames are replaced by black frames.
+  virtual bool RenderFrame(const cricket::VideoFrame* frame);
+
+  void AddRenderer(VideoRendererInterface* renderer);
+  void RemoveRenderer(VideoRendererInterface* renderer);
+  void SetEnabled(bool enable);
+
+ private:
+  // Pass the frame on to each registered renderer. Requires
+  // critical_section_ to already be locked.
+  void RenderFrameToRenderers(const cricket::VideoFrame* frame);
+
+  bool enabled_;
+  std::set<VideoRendererInterface*> renderers_;
+
+  rtc::CriticalSection critical_section_;  // Protects the above variables
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_VIDEOTRACKRENDERERS_H_
diff --git a/webrtc/api/webrtcsdp.cc b/webrtc/api/webrtcsdp.cc
new file mode 100644
index 0000000..1f06b69
--- /dev/null
+++ b/webrtc/api/webrtcsdp.cc
@@ -0,0 +1,3083 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/webrtcsdp.h"
+
+#include <ctype.h>
+#include <limits.h>
+#include <stdio.h>
+#include <algorithm>
+#include <string>
+#include <vector>
+
+#include "talk/session/media/mediasession.h"
+#include "webrtc/api/jsepicecandidate.h"
+#include "webrtc/api/jsepsessiondescription.h"
+#include "webrtc/base/arraysize.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/messagedigest.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/media/base/codec.h"
+#include "webrtc/media/base/constants.h"
+#include "webrtc/media/base/cryptoparams.h"
+#include "webrtc/media/base/rtputils.h"
+#include "webrtc/media/sctp/sctpdataengine.h"
+#include "webrtc/p2p/base/candidate.h"
+#include "webrtc/p2p/base/constants.h"
+#include "webrtc/p2p/base/port.h"
+
+using cricket::AudioContentDescription;
+using cricket::Candidate;
+using cricket::Candidates;
+using cricket::ContentDescription;
+using cricket::ContentInfo;
+using cricket::CryptoParams;
+using cricket::DataContentDescription;
+using cricket::ICE_CANDIDATE_COMPONENT_RTP;
+using cricket::ICE_CANDIDATE_COMPONENT_RTCP;
+using cricket::kCodecParamMaxBitrate;
+using cricket::kCodecParamMaxPTime;
+using cricket::kCodecParamMaxQuantization;
+using cricket::kCodecParamMinBitrate;
+using cricket::kCodecParamMinPTime;
+using cricket::kCodecParamPTime;
+using cricket::kCodecParamSPropStereo;
+using cricket::kCodecParamStartBitrate;
+using cricket::kCodecParamStereo;
+using cricket::kCodecParamUseInbandFec;
+using cricket::kCodecParamUseDtx;
+using cricket::kCodecParamSctpProtocol;
+using cricket::kCodecParamSctpStreams;
+using cricket::kCodecParamMaxAverageBitrate;
+using cricket::kCodecParamMaxPlaybackRate;
+using cricket::kCodecParamAssociatedPayloadType;
+using cricket::MediaContentDescription;
+using cricket::MediaType;
+using cricket::RtpHeaderExtension;
+using cricket::SsrcGroup;
+using cricket::StreamParams;
+using cricket::StreamParamsVec;
+using cricket::TransportDescription;
+using cricket::TransportInfo;
+using cricket::VideoContentDescription;
+using rtc::SocketAddress;
+
+typedef std::vector<RtpHeaderExtension> RtpHeaderExtensions;
+
+namespace cricket {
+class SessionDescription;
+}
+
+namespace webrtc {
+
+// Line type
+// RFC 4566
+// An SDP session description consists of a number of lines of text of
+// the form:
+// <type>=<value>
+// where <type> MUST be exactly one case-significant character.
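+// For example, a minimal session description built from the constants below
+// begins:
+//   v=0
+//   o=- 0 0 IN IP4 127.0.0.1
+//   s=-
+//   t=0 0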
+static const int kLinePrefixLength = 2;  // Length of <type>=
+static const char kLineTypeVersion = 'v';
+static const char kLineTypeOrigin = 'o';
+static const char kLineTypeSessionName = 's';
+static const char kLineTypeSessionInfo = 'i';
+static const char kLineTypeSessionUri = 'u';
+static const char kLineTypeSessionEmail = 'e';
+static const char kLineTypeSessionPhone = 'p';
+static const char kLineTypeSessionBandwidth = 'b';
+static const char kLineTypeTiming = 't';
+static const char kLineTypeRepeatTimes = 'r';
+static const char kLineTypeTimeZone = 'z';
+static const char kLineTypeEncryptionKey = 'k';
+static const char kLineTypeMedia = 'm';
+static const char kLineTypeConnection = 'c';
+static const char kLineTypeAttributes = 'a';
+
+// Attributes
+static const char kAttributeGroup[] = "group";
+static const char kAttributeMid[] = "mid";
+static const char kAttributeRtcpMux[] = "rtcp-mux";
+static const char kAttributeRtcpReducedSize[] = "rtcp-rsize";
+static const char kAttributeSsrc[] = "ssrc";
+static const char kSsrcAttributeCname[] = "cname";
+static const char kAttributeExtmap[] = "extmap";
+// draft-alvestrand-mmusic-msid-01
+// a=msid-semantic: WMS
+static const char kAttributeMsidSemantics[] = "msid-semantic";
+static const char kMediaStreamSemantic[] = "WMS";
+static const char kSsrcAttributeMsid[] = "msid";
+static const char kDefaultMsid[] = "default";
+static const char kSsrcAttributeMslabel[] = "mslabel";
+static const char kSSrcAttributeLabel[] = "label";
+static const char kAttributeSsrcGroup[] = "ssrc-group";
+static const char kAttributeCrypto[] = "crypto";
+static const char kAttributeCandidate[] = "candidate";
+static const char kAttributeCandidateTyp[] = "typ";
+static const char kAttributeCandidateRaddr[] = "raddr";
+static const char kAttributeCandidateRport[] = "rport";
+static const char kAttributeCandidateUfrag[] = "ufrag";
+static const char kAttributeCandidatePwd[] = "pwd";
+static const char kAttributeCandidateGeneration[] = "generation";
+static const char kAttributeFingerprint[] = "fingerprint";
+static const char kAttributeSetup[] = "setup";
+static const char kAttributeFmtp[] = "fmtp";
+static const char kAttributeRtpmap[] = "rtpmap";
+static const char kAttributeSctpmap[] = "sctpmap";
+static const char kAttributeRtcp[] = "rtcp";
+static const char kAttributeIceUfrag[] = "ice-ufrag";
+static const char kAttributeIcePwd[] = "ice-pwd";
+static const char kAttributeIceLite[] = "ice-lite";
+static const char kAttributeIceOption[] = "ice-options";
+static const char kAttributeSendOnly[] = "sendonly";
+static const char kAttributeRecvOnly[] = "recvonly";
+static const char kAttributeRtcpFb[] = "rtcp-fb";
+static const char kAttributeSendRecv[] = "sendrecv";
+static const char kAttributeInactive[] = "inactive";
+// draft-ietf-mmusic-sctp-sdp-07
+// a=sctp-port
+static const char kAttributeSctpPort[] = "sctp-port";
+
+// Experimental flags
+static const char kAttributeXGoogleFlag[] = "x-google-flag";
+static const char kValueConference[] = "conference";
+
+// Candidate
+static const char kCandidateHost[] = "host";
+static const char kCandidateSrflx[] = "srflx";
+// TODO: How to map prflx to the cricket candidate type.
+// static const char kCandidatePrflx[] = "prflx";
+static const char kCandidateRelay[] = "relay";
+static const char kTcpCandidateType[] = "tcptype";
+
+static const char kSdpDelimiterEqual = '=';
+static const char kSdpDelimiterSpace = ' ';
+static const char kSdpDelimiterColon = ':';
+static const char kSdpDelimiterSemicolon = ';';
+static const char kSdpDelimiterSlash = '/';
+static const char kNewLine = '\n';
+static const char kReturn = '\r';
+static const char kLineBreak[] = "\r\n";
+
+// TODO: Generate the Session and Time description
+// instead of hardcoding.
+static const char kSessionVersion[] = "v=0";
+// RFC 4566
+static const char kSessionOriginUsername[] = "-";
+static const char kSessionOriginSessionId[] = "0";
+static const char kSessionOriginSessionVersion[] = "0";
+static const char kSessionOriginNettype[] = "IN";
+static const char kSessionOriginAddrtype[] = "IP4";
+static const char kSessionOriginAddress[] = "127.0.0.1";
+static const char kSessionName[] = "s=-";
+static const char kTimeDescription[] = "t=0 0";
+static const char kAttrGroup[] = "a=group:BUNDLE";
+static const char kConnectionNettype[] = "IN";
+static const char kConnectionIpv4Addrtype[] = "IP4";
+static const char kConnectionIpv6Addrtype[] = "IP6";
+static const char kMediaTypeVideo[] = "video";
+static const char kMediaTypeAudio[] = "audio";
+static const char kMediaTypeData[] = "application";
+static const char kMediaPortRejected[] = "0";
+// draft-ietf-mmusic-trickle-ice-01
+// When no candidates have been gathered, set the connection
+// address to IP6 ::.
+// TODO(perkj): FF cannot parse "IP6 ::". See http://crbug/430333
+// Use IPv4 by default.
+static const char kDummyAddress[] = "0.0.0.0";
+static const char kDummyPort[] = "9";
+// RFC 3556
+static const char kApplicationSpecificMaximum[] = "AS";
+
+static const int kDefaultVideoClockrate = 90000;
+
+// ISAC special-case.
+static const char kIsacCodecName[] = "ISAC";  // From webrtcvoiceengine.cc
+static const int kIsacWbDefaultRate = 32000;  // From acm_common_defs.h
+static const int kIsacSwbDefaultRate = 56000;  // From acm_common_defs.h
+
+static const char kDefaultSctpmapProtocol[] = "webrtc-datachannel";
+
+// RTP payload type is in the 0-127 range. Use -1 to indicate "all" payload
+// types.
+const int kWildcardPayloadType = -1;
+
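+// An SsrcInfo is populated from "a=ssrc:" lines, for example (illustrative
+// values only):
+//   a=ssrc:1 cname:stream_1_cname
+//   a=ssrc:1 msid:local_stream_1 video_track_id_1
+// where the msid value provides the identifier and appdata fields below.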
+struct SsrcInfo {
+  SsrcInfo()
+      : msid_identifier(kDefaultMsid),
+        // TODO(ronghuawu): What should we do if the appdata doesn't appear?
+        // Create a random string (used as the track label later)?
+        msid_appdata(rtc::CreateRandomString(8)) {
+  }
+  uint32_t ssrc_id;
+  std::string cname;
+  std::string msid_identifier;
+  std::string msid_appdata;
+
+  // For backward compatibility.
+  // TODO(ronghuawu): Remove the two fields below once all clients support
+  // msid.
+  std::string label;
+  std::string mslabel;
+};
+typedef std::vector<SsrcInfo> SsrcInfoVec;
+typedef std::vector<SsrcGroup> SsrcGroupVec;
+
+template <class T>
+static void AddFmtpLine(const T& codec, std::string* message);
+static void BuildMediaDescription(const ContentInfo* content_info,
+                                  const TransportInfo* transport_info,
+                                  const MediaType media_type,
+                                  const std::vector<Candidate>& candidates,
+                                  std::string* message);
+static void BuildSctpContentAttributes(std::string* message, int sctp_port);
+static void BuildRtpContentAttributes(
+    const MediaContentDescription* media_desc,
+    const MediaType media_type,
+    std::string* message);
+static void BuildRtpMap(const MediaContentDescription* media_desc,
+                        const MediaType media_type,
+                        std::string* message);
+static void BuildCandidate(const std::vector<Candidate>& candidates,
+                           bool include_ufrag,
+                           std::string* message);
+static void BuildIceOptions(const std::vector<std::string>& transport_options,
+                            std::string* message);
+static bool IsRtp(const std::string& protocol);
+static bool IsDtlsSctp(const std::string& protocol);
+static bool ParseSessionDescription(const std::string& message, size_t* pos,
+                                    std::string* session_id,
+                                    std::string* session_version,
+                                    TransportDescription* session_td,
+                                    RtpHeaderExtensions* session_extmaps,
+                                    cricket::SessionDescription* desc,
+                                    SdpParseError* error);
+static bool ParseGroupAttribute(const std::string& line,
+                                cricket::SessionDescription* desc,
+                                SdpParseError* error);
+static bool ParseMediaDescription(
+    const std::string& message,
+    const TransportDescription& session_td,
+    const RtpHeaderExtensions& session_extmaps,
+    size_t* pos, cricket::SessionDescription* desc,
+    std::vector<JsepIceCandidate*>* candidates,
+    SdpParseError* error);
+static bool ParseContent(const std::string& message,
+                         const MediaType media_type,
+                         int mline_index,
+                         const std::string& protocol,
+                         const std::vector<int>& codec_preference,
+                         size_t* pos,
+                         std::string* content_name,
+                         MediaContentDescription* media_desc,
+                         TransportDescription* transport,
+                         std::vector<JsepIceCandidate*>* candidates,
+                         SdpParseError* error);
+static bool ParseSsrcAttribute(const std::string& line,
+                               SsrcInfoVec* ssrc_infos,
+                               SdpParseError* error);
+static bool ParseSsrcGroupAttribute(const std::string& line,
+                                    SsrcGroupVec* ssrc_groups,
+                                    SdpParseError* error);
+static bool ParseCryptoAttribute(const std::string& line,
+                                 MediaContentDescription* media_desc,
+                                 SdpParseError* error);
+static bool ParseRtpmapAttribute(const std::string& line,
+                                 const MediaType media_type,
+                                 const std::vector<int>& codec_preference,
+                                 MediaContentDescription* media_desc,
+                                 SdpParseError* error);
+static bool ParseFmtpAttributes(const std::string& line,
+                                const MediaType media_type,
+                                MediaContentDescription* media_desc,
+                                SdpParseError* error);
+static bool ParseFmtpParam(const std::string& line, std::string* parameter,
+                           std::string* value, SdpParseError* error);
+static bool ParseCandidate(const std::string& message, Candidate* candidate,
+                           SdpParseError* error, bool is_raw);
+static bool ParseRtcpFbAttribute(const std::string& line,
+                                 const MediaType media_type,
+                                 MediaContentDescription* media_desc,
+                                 SdpParseError* error);
+static bool ParseIceOptions(const std::string& line,
+                            std::vector<std::string>* transport_options,
+                            SdpParseError* error);
+static bool ParseExtmap(const std::string& line,
+                        RtpHeaderExtension* extmap,
+                        SdpParseError* error);
+static bool ParseFingerprintAttribute(const std::string& line,
+                                      rtc::SSLFingerprint** fingerprint,
+                                      SdpParseError* error);
+static bool ParseDtlsSetup(const std::string& line,
+                           cricket::ConnectionRole* role,
+                           SdpParseError* error);
+
+// Helper functions
+
+// The ParseFailed*** functions below output the line that caused the parsing
+// failure and the detailed reason (|description|) of the failure to |error|.
+// They always return false so that they can be used directly when an error
+// occurs, as in:
+// "return ParseFailed***(...);"
+
+// The line starting at |line_start| of |message| is the failing line.
+// The reason for the failure should be provided in the |description|.
+// An example of a description could be "unknown character".
+static bool ParseFailed(const std::string& message,
+                        size_t line_start,
+                        const std::string& description,
+                        SdpParseError* error) {
+  // Get the first line of |message| from |line_start|.
+  std::string first_line;
+  size_t line_end = message.find(kNewLine, line_start);
+  if (line_end != std::string::npos) {
+    if (line_end > 0 && (message.at(line_end - 1) == kReturn)) {
+      --line_end;
+    }
+    first_line = message.substr(line_start, (line_end - line_start));
+  } else {
+    first_line = message.substr(line_start);
+  }
+
+  if (error) {
+    error->line = first_line;
+    error->description = description;
+  }
+  LOG(LS_ERROR) << "Failed to parse: \"" << first_line
+                << "\". Reason: " << description;
+  return false;
+}
+
+// |line| is the failing line. The reason for the failure should be
+// provided in the |description|.
+static bool ParseFailed(const std::string& line,
+                        const std::string& description,
+                        SdpParseError* error) {
+  return ParseFailed(line, 0, description, error);
+}
+
+// Parses a failure where the failing SDP line isn't known or there are multiple
+// failing lines.
+static bool ParseFailed(const std::string& description,
+                        SdpParseError* error) {
+  return ParseFailed("", description, error);
+}
+
+// |line| is the failing line. The failure is due to the fact that |line|
+// doesn't have |expected_fields| fields.
+static bool ParseFailedExpectFieldNum(const std::string& line,
+                                      int expected_fields,
+                                      SdpParseError* error) {
+  std::ostringstream description;
+  description << "Expects " << expected_fields << " fields.";
+  return ParseFailed(line, description.str(), error);
+}
+
+// |line| is the failing line. The failure is due to the fact that |line| has
+// less than |expected_min_fields| fields.
+static bool ParseFailedExpectMinFieldNum(const std::string& line,
+                                         int expected_min_fields,
+                                         SdpParseError* error) {
+  std::ostringstream description;
+  description << "Expects at least " << expected_min_fields << " fields.";
+  return ParseFailed(line, description.str(), error);
+}
+
+// |line| is the failing line. The failure is due to the fact that it failed to
+// get the value of |attribute|.
+static bool ParseFailedGetValue(const std::string& line,
+                                const std::string& attribute,
+                                SdpParseError* error) {
+  std::ostringstream description;
+  description << "Failed to get the value of attribute: " << attribute;
+  return ParseFailed(line, description.str(), error);
+}
+
+// The line starting at |line_start| of |message| is the failing line. The
+// failure is due to the line type (e.g. the "m" part of the "m-line")
+// not matching what is expected. The expected line type should be
+// provided as |line_type|.
+static bool ParseFailedExpectLine(const std::string& message,
+                                  size_t line_start,
+                                  const char line_type,
+                                  const std::string& line_value,
+                                  SdpParseError* error) {
+  std::ostringstream description;
+  description << "Expect line: " << line_type << "=" << line_value;
+  return ParseFailed(message, line_start, description.str(), error);
+}
+
+static bool AddLine(const std::string& line, std::string* message) {
+  if (!message)
+    return false;
+
+  message->append(line);
+  message->append(kLineBreak);
+  return true;
+}
+
+static bool GetLine(const std::string& message,
+                    size_t* pos,
+                    std::string* line) {
+  size_t line_begin = *pos;
+  size_t line_end = message.find(kNewLine, line_begin);
+  if (line_end == std::string::npos) {
+    return false;
+  }
+  // Update the new start position
+  *pos = line_end + 1;
+  if (line_end > 0 && (message.at(line_end - 1) == kReturn)) {
+    --line_end;
+  }
+  *line = message.substr(line_begin, (line_end - line_begin));
+  const char* cline = line->c_str();
+  // RFC 4566
+  // An SDP session description consists of a number of lines of text of
+  // the form:
+  // <type>=<value>
+  // where <type> MUST be exactly one case-significant character and
+  // <value> is structured text whose format depends on <type>.
+  // Whitespace MUST NOT be used on either side of the "=" sign.
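+  // e.g. "a=sendrecv" is well formed, while "a =sendrecv" (whitespace around
+  // "=") and "A=sendrecv" (uppercase <type>) are rejected below.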
+  if (line->length() < 3 ||
+      !islower(cline[0]) ||
+      cline[1] != kSdpDelimiterEqual ||
+      cline[2] == kSdpDelimiterSpace) {
+    *pos = line_begin;
+    return false;
+  }
+  return true;
+}
+
+// Init |os| to "|type|=|value|".
+static void InitLine(const char type,
+                     const std::string& value,
+                     std::ostringstream* os) {
+  os->str("");
+  *os << type << kSdpDelimiterEqual << value;
+}
+
+// Init |os| to "a=|attribute|".
+static void InitAttrLine(const std::string& attribute, std::ostringstream* os) {
+  InitLine(kLineTypeAttributes, attribute, os);
+}
+
+// Writes an SDP attribute line based on |attribute| and |value| to |message|.
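+// e.g. AddAttributeLine("maxptime", 60, &message) appends "a=maxptime:60".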
+static void AddAttributeLine(const std::string& attribute, int value,
+                             std::string* message) {
+  std::ostringstream os;
+  InitAttrLine(attribute, &os);
+  os << kSdpDelimiterColon << value;
+  AddLine(os.str(), message);
+}
+
+static bool IsLineType(const std::string& message,
+                       const char type,
+                       size_t line_start) {
+  if (message.size() < line_start + kLinePrefixLength) {
+    return false;
+  }
+  const char* cmessage = message.c_str();
+  return (cmessage[line_start] == type &&
+          cmessage[line_start + 1] == kSdpDelimiterEqual);
+}
+
+static bool IsLineType(const std::string& line,
+                       const char type) {
+  return IsLineType(line, type, 0);
+}
+
+static bool GetLineWithType(const std::string& message, size_t* pos,
+                            std::string* line, const char type) {
+  if (!IsLineType(message, type, *pos)) {
+    return false;
+  }
+
+  if (!GetLine(message, pos, line))
+    return false;
+
+  return true;
+}
+
+static bool HasAttribute(const std::string& line,
+                         const std::string& attribute) {
+  return (line.compare(kLinePrefixLength, attribute.size(), attribute) == 0);
+}
+
+static bool AddSsrcLine(uint32_t ssrc_id,
+                        const std::string& attribute,
+                        const std::string& value,
+                        std::string* message) {
+  // RFC 5576
+  // a=ssrc:<ssrc-id> <attribute>:<value>
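+  // e.g. "a=ssrc:1234 cname:user@example.com".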
+  std::ostringstream os;
+  InitAttrLine(kAttributeSsrc, &os);
+  os << kSdpDelimiterColon << ssrc_id << kSdpDelimiterSpace
+     << attribute << kSdpDelimiterColon << value;
+  return AddLine(os.str(), message);
+}
+
+// Get value only from <attribute>:<value>.
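+// e.g. GetValue("a=mid:audio", "mid", &value, error) sets |value| to "audio".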
+static bool GetValue(const std::string& message, const std::string& attribute,
+                     std::string* value, SdpParseError* error) {
+  std::string leftpart;
+  if (!rtc::tokenize_first(message, kSdpDelimiterColon, &leftpart, value)) {
+    return ParseFailedGetValue(message, attribute, error);
+  }
+  // The left part should end with the expected attribute.
+  if (leftpart.length() < attribute.length() ||
+      leftpart.compare(leftpart.length() - attribute.length(),
+                       attribute.length(), attribute) != 0) {
+    return ParseFailedGetValue(message, attribute, error);
+  }
+  return true;
+}
+
+static bool CaseInsensitiveFind(std::string str1, std::string str2) {
+  std::transform(str1.begin(), str1.end(), str1.begin(),
+                 ::tolower);
+  std::transform(str2.begin(), str2.end(), str2.begin(),
+                 ::tolower);
+  return str1.find(str2) != std::string::npos;
+}
+
+template <class T>
+static bool GetValueFromString(const std::string& line,
+                               const std::string& s,
+                               T* t,
+                               SdpParseError* error) {
+  if (!rtc::FromString(s, t)) {
+    std::ostringstream description;
+    description << "Invalid value: " << s << ".";
+    return ParseFailed(line, description.str(), error);
+  }
+  return true;
+}
+
+static bool GetPayloadTypeFromString(const std::string& line,
+                                     const std::string& s,
+                                     int* payload_type,
+                                     SdpParseError* error) {
+  return GetValueFromString(line, s, payload_type, error) &&
+      cricket::IsValidRtpPayloadType(*payload_type);
+}
+
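+// Creates tracks from the |ssrc_infos|. For example, the pair of lines
+// "a=ssrc:1 cname:user@example.com" and "a=ssrc:1 msid:stream_label track_id"
+// yields a StreamParams with ssrc 1, id "track_id" and sync_label
+// "stream_label".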
+void CreateTracksFromSsrcInfos(const SsrcInfoVec& ssrc_infos,
+                               StreamParamsVec* tracks) {
+  ASSERT(tracks != NULL);
+  for (SsrcInfoVec::const_iterator ssrc_info = ssrc_infos.begin();
+       ssrc_info != ssrc_infos.end(); ++ssrc_info) {
+    if (ssrc_info->cname.empty()) {
+      continue;
+    }
+
+    std::string sync_label;
+    std::string track_id;
+    if (ssrc_info->msid_identifier == kDefaultMsid &&
+        !ssrc_info->mslabel.empty()) {
+      // If there's no msid but there is an mslabel, we consider this to be
+      // SDP from an older client that doesn't support msid.
+      // In that case, we use the mslabel and label to construct the track.
+      sync_label = ssrc_info->mslabel;
+      track_id = ssrc_info->label;
+    } else {
+      sync_label = ssrc_info->msid_identifier;
+      // The appdata consists of the "id" attribute of a MediaStreamTrack,
+      // which corresponds to the "id" attribute of StreamParams.
+      track_id = ssrc_info->msid_appdata;
+    }
+    if (sync_label.empty() || track_id.empty()) {
+      ASSERT(false);
+      continue;
+    }
+
+    StreamParamsVec::iterator track = tracks->begin();
+    for (; track != tracks->end(); ++track) {
+      if (track->id == track_id) {
+        break;
+      }
+    }
+    if (track == tracks->end()) {
+      // If we don't find an existing track, create a new one.
+      tracks->push_back(StreamParams());
+      track = tracks->end() - 1;
+    }
+    track->add_ssrc(ssrc_info->ssrc_id);
+    track->cname = ssrc_info->cname;
+    track->sync_label = sync_label;
+    track->id = track_id;
+  }
+}
+
+void GetMediaStreamLabels(const ContentInfo* content,
+                          std::set<std::string>* labels) {
+  const MediaContentDescription* media_desc =
+      static_cast<const MediaContentDescription*>(
+          content->description);
+  const cricket::StreamParamsVec& streams = media_desc->streams();
+  for (cricket::StreamParamsVec::const_iterator it = streams.begin();
+       it != streams.end(); ++it) {
+    labels->insert(it->sync_label);
+  }
+}
+
+// RFC 5245
+// It is RECOMMENDED that default candidates be chosen based on the
+// likelihood of those candidates to work with the peer that is being
+// contacted.  It is RECOMMENDED that relayed > reflexive > host.
+static const int kPreferenceUnknown = 0;
+static const int kPreferenceHost = 1;
+static const int kPreferenceReflexive = 2;
+static const int kPreferenceRelayed = 3;
+
+static int GetCandidatePreferenceFromType(const std::string& type) {
+  int preference = kPreferenceUnknown;
+  if (type == cricket::LOCAL_PORT_TYPE) {
+    preference = kPreferenceHost;
+  } else if (type == cricket::STUN_PORT_TYPE) {
+    preference = kPreferenceReflexive;
+  } else if (type == cricket::RELAY_PORT_TYPE) {
+    preference = kPreferenceRelayed;
+  } else {
+    ASSERT(false);
+  }
+  return preference;
+}
+
+// Get ip and port of the default destination from the |candidates| with the
+// given value of |component_id|. The default candidate should be the one most
+// likely to work, typically IPv4 relay.
+// RFC 5245
+// The values of |component_id| currently supported are 1 (RTP) and 2 (RTCP).
+// TODO: Decide the default destination in webrtcsession and
+// pass it down via SessionDescription.
+static void GetDefaultDestination(
+    const std::vector<Candidate>& candidates,
+    int component_id, std::string* port,
+    std::string* ip, std::string* addr_type) {
+  *addr_type = kConnectionIpv4Addrtype;
+  *port = kDummyPort;
+  *ip = kDummyAddress;
+  int current_preference = kPreferenceUnknown;
+  int current_family = AF_UNSPEC;
+  for (std::vector<Candidate>::const_iterator it = candidates.begin();
+       it != candidates.end(); ++it) {
+    if (it->component() != component_id) {
+      continue;
+    }
+    // Default destination should be UDP only.
+    if (it->protocol() != cricket::UDP_PROTOCOL_NAME) {
+      continue;
+    }
+    const int preference = GetCandidatePreferenceFromType(it->type());
+    const int family = it->address().ipaddr().family();
+    // Skip this candidate if it's not more preferable than the current one
+    // within the same family, or if the current family is already IPv4 so we
+    // can safely ignore all IPv6 ones. WebRTC bug 4269.
+    // http://code.google.com/p/webrtc/issues/detail?id=4269
+    if ((preference <= current_preference && current_family == family) ||
+        (current_family == AF_INET && family == AF_INET6)) {
+      continue;
+    }
+    if (family == AF_INET) {
+      addr_type->assign(kConnectionIpv4Addrtype);
+    } else if (family == AF_INET6) {
+      addr_type->assign(kConnectionIpv6Addrtype);
+    }
+    current_preference = preference;
+    current_family = family;
+    *port = it->address().PortAsString();
+    *ip = it->address().ipaddr().ToString();
+  }
+}
+
+// Update |mline|'s default destination and append a c line after it.
+static void UpdateMediaDefaultDestination(
+    const std::vector<Candidate>& candidates,
+    const std::string& mline,
+    std::string* message) {
+  std::string new_lines;
+  AddLine(mline, &new_lines);
+  // RFC 4566
+  // m=<media> <port> <proto> <fmt> ...
+  std::vector<std::string> fields;
+  rtc::split(mline, kSdpDelimiterSpace, &fields);
+  if (fields.size() < 3) {
+    return;
+  }
+
+  std::ostringstream os;
+  std::string rtp_port, rtp_ip, addr_type;
+  GetDefaultDestination(candidates, ICE_CANDIDATE_COMPONENT_RTP,
+                        &rtp_port, &rtp_ip, &addr_type);
+  // Found default RTP candidate.
+  // RFC 5245
+  // The default candidates are added to the SDP as the default
+  // destination for media.  For streams based on RTP, this is done by
+  // placing the IP address and port of the RTP candidate into the c and m
+  // lines, respectively.
+  // Update the port in the m line.
+  // If this is an m-line with port equal to 0, we don't change it.
+  if (fields[1] != kMediaPortRejected) {
+    new_lines.replace(fields[0].size() + 1,
+                      fields[1].size(),
+                      rtp_port);
+  }
+  // Add the c line.
+  // RFC 4566
+  // c=<nettype> <addrtype> <connection-address>
+  InitLine(kLineTypeConnection, kConnectionNettype, &os);
+  os << " " << addr_type << " " << rtp_ip;
+  AddLine(os.str(), &new_lines);
+  message->append(new_lines);
+}
+
+// Gets "a=rtcp" line if found default RTCP candidate from |candidates|.
+static std::string GetRtcpLine(const std::vector<Candidate>& candidates) {
+  std::string rtcp_line, rtcp_port, rtcp_ip, addr_type;
+  GetDefaultDestination(candidates, ICE_CANDIDATE_COMPONENT_RTCP,
+                        &rtcp_port, &rtcp_ip, &addr_type);
+  // Found default RTCP candidate.
+  // RFC 5245
+  // If the agent is utilizing RTCP, it MUST encode the RTCP candidate
+  // using the a=rtcp attribute as defined in RFC 3605.
+
+  // RFC 3605
+  // rtcp-attribute =  "a=rtcp:" port  [nettype space addrtype space
+  // connection-address] CRLF
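+  // e.g. "a=rtcp:53020 IN IP4 126.16.64.4" (example from RFC 3605).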
+  std::ostringstream os;
+  InitAttrLine(kAttributeRtcp, &os);
+  os << kSdpDelimiterColon
+     << rtcp_port << " "
+     << kConnectionNettype << " "
+     << addr_type << " "
+     << rtcp_ip;
+  rtcp_line = os.str();
+  return rtcp_line;
+}
+
+// Gets candidates for the given mline index from SessionDescriptionInterface.
+static void GetCandidatesByMindex(const SessionDescriptionInterface& desci,
+                                  int mline_index,
+                                  std::vector<Candidate>* candidates) {
+  if (!candidates) {
+    return;
+  }
+  const IceCandidateCollection* cc = desci.candidates(mline_index);
+  for (size_t i = 0; i < cc->count(); ++i) {
+    const IceCandidateInterface* candidate = cc->at(i);
+    candidates->push_back(candidate->candidate());
+  }
+}
+
+std::string SdpSerialize(const JsepSessionDescription& jdesc) {
+  const cricket::SessionDescription* desc = jdesc.description();
+  if (!desc) {
+    return "";
+  }
+
+  std::string message;
+
+  // Session Description.
+  AddLine(kSessionVersion, &message);
+  // Session Origin
+  // RFC 4566
+  // o=<username> <sess-id> <sess-version> <nettype> <addrtype>
+  // <unicast-address>
+  std::ostringstream os;
+  InitLine(kLineTypeOrigin, kSessionOriginUsername, &os);
+  const std::string& session_id = jdesc.session_id().empty() ?
+      kSessionOriginSessionId : jdesc.session_id();
+  const std::string& session_version = jdesc.session_version().empty() ?
+      kSessionOriginSessionVersion : jdesc.session_version();
+  os << " " << session_id << " " << session_version << " "
+     << kSessionOriginNettype << " " << kSessionOriginAddrtype << " "
+     << kSessionOriginAddress;
+  AddLine(os.str(), &message);
+  AddLine(kSessionName, &message);
+
+  // Time Description.
+  AddLine(kTimeDescription, &message);
+
+  // Group
+  if (desc->HasGroup(cricket::GROUP_TYPE_BUNDLE)) {
+    std::string group_line = kAttrGroup;
+    const cricket::ContentGroup* group =
+        desc->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
+    ASSERT(group != NULL);
+    const cricket::ContentNames& content_names = group->content_names();
+    for (cricket::ContentNames::const_iterator it = content_names.begin();
+         it != content_names.end(); ++it) {
+      group_line.append(" ");
+      group_line.append(*it);
+    }
+    AddLine(group_line, &message);
+  }
+
+  // MediaStream semantics
+  InitAttrLine(kAttributeMsidSemantics, &os);
+  os << kSdpDelimiterColon << " " << kMediaStreamSemantic;
+
+  std::set<std::string> media_stream_labels;
+  const ContentInfo* audio_content = GetFirstAudioContent(desc);
+  if (audio_content)
+    GetMediaStreamLabels(audio_content, &media_stream_labels);
+
+  const ContentInfo* video_content = GetFirstVideoContent(desc);
+  if (video_content)
+    GetMediaStreamLabels(video_content, &media_stream_labels);
+
+  for (std::set<std::string>::const_iterator it =
+      media_stream_labels.begin(); it != media_stream_labels.end(); ++it) {
+    os << " " << *it;
+  }
+  AddLine(os.str(), &message);
+
+  // Preserve the order of the media contents.
+  int mline_index = -1;
+  for (cricket::ContentInfos::const_iterator it = desc->contents().begin();
+       it != desc->contents().end(); ++it) {
+    const MediaContentDescription* mdesc =
+      static_cast<const MediaContentDescription*>(it->description);
+    std::vector<Candidate> candidates;
+    GetCandidatesByMindex(jdesc, ++mline_index, &candidates);
+    BuildMediaDescription(&*it,
+                          desc->GetTransportInfoByName(it->name),
+                          mdesc->type(),
+                          candidates,
+                          &message);
+  }
+  return message;
+}
+
+// Serializes the passed in IceCandidateInterface to an SDP string.
+// candidate - The candidate to be serialized.
+std::string SdpSerializeCandidate(
+    const IceCandidateInterface& candidate) {
+  std::string message;
+  std::vector<cricket::Candidate> candidates;
+  candidates.push_back(candidate.candidate());
+  BuildCandidate(candidates, true, &message);
+  // Per WebRTC draft section 4.8.1.1, the candidate-attribute is just
+  // "candidate:<candidate>", not "a=candidate:<blah>CRLF".
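+  // e.g. "candidate:1 1 udp 2130706432 10.0.1.1 54321 typ host generation 0".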
+  ASSERT(message.find("a=") == 0);
+  message.erase(0, 2);
+  ASSERT(message.find(kLineBreak) == message.size() - 2);
+  message.resize(message.size() - 2);
+  return message;
+}
+
+bool SdpDeserialize(const std::string& message,
+                    JsepSessionDescription* jdesc,
+                    SdpParseError* error) {
+  std::string session_id;
+  std::string session_version;
+  TransportDescription session_td("", "");
+  RtpHeaderExtensions session_extmaps;
+  cricket::SessionDescription* desc = new cricket::SessionDescription();
+  std::vector<JsepIceCandidate*> candidates;
+  size_t current_pos = 0;
+
+  // Session Description
+  if (!ParseSessionDescription(message, &current_pos, &session_id,
+                               &session_version, &session_td, &session_extmaps,
+                               desc, error)) {
+    delete desc;
+    return false;
+  }
+
+  // Media Description
+  if (!ParseMediaDescription(message, session_td, session_extmaps, &current_pos,
+                             desc, &candidates, error)) {
+    delete desc;
+    for (std::vector<JsepIceCandidate*>::const_iterator
+         it = candidates.begin(); it != candidates.end(); ++it) {
+      delete *it;
+    }
+    return false;
+  }
+
+  jdesc->Initialize(desc, session_id, session_version);
+
+  for (std::vector<JsepIceCandidate*>::const_iterator
+       it = candidates.begin(); it != candidates.end(); ++it) {
+    jdesc->AddCandidate(*it);
+    delete *it;
+  }
+  return true;
+}
+
+bool SdpDeserializeCandidate(const std::string& message,
+                             JsepIceCandidate* jcandidate,
+                             SdpParseError* error) {
+  ASSERT(jcandidate != NULL);
+  Candidate candidate;
+  if (!ParseCandidate(message, &candidate, error, true)) {
+    return false;
+  }
+  jcandidate->SetCandidate(candidate);
+  return true;
+}
+
+bool ParseCandidate(const std::string& message, Candidate* candidate,
+                    SdpParseError* error, bool is_raw) {
+  ASSERT(candidate != NULL);
+
+  // Get the first line from |message|.
+  std::string first_line = message;
+  size_t pos = 0;
+  GetLine(message, &pos, &first_line);
+
+  // Makes sure |message| contains only one line.
+  if (message.size() > first_line.size()) {
+    std::string left, right;
+    if (rtc::tokenize_first(message, kNewLine, &left, &right) &&
+        !right.empty()) {
+      return ParseFailed(message, 0, "Expect one line only", error);
+    }
+  }
+
+  // Per WebRTC draft section 4.8.1.1, the candidate-attribute should be
+  // candidate:<candidate> when trickled, but we still support
+  // a=candidate:<blah>CRLF for backward compatibility and for parsing a line
+  // from the SDP.
+  if (IsLineType(first_line, kLineTypeAttributes)) {
+    first_line = first_line.substr(kLinePrefixLength);
+  }
+
+  std::string attribute_candidate;
+  std::string candidate_value;
+
+  // |first_line| must be in the form of "candidate:<value>".
+  if (!rtc::tokenize_first(first_line, kSdpDelimiterColon, &attribute_candidate,
+                           &candidate_value) ||
+      attribute_candidate != kAttributeCandidate) {
+    if (is_raw) {
+      std::ostringstream description;
+      description << "Expect line: " << kAttributeCandidate
+                  << ":" << "<candidate-str>";
+      return ParseFailed(first_line, 0, description.str(), error);
+    } else {
+      return ParseFailedExpectLine(first_line, 0, kLineTypeAttributes,
+                                   kAttributeCandidate, error);
+    }
+  }
+
+  std::vector<std::string> fields;
+  rtc::split(candidate_value, kSdpDelimiterSpace, &fields);
+
+  // RFC 5245
+  // a=candidate:<foundation> <component-id> <transport> <priority>
+  // <connection-address> <port> typ <candidate-types>
+  // [raddr <connection-address>] [rport <port>]
+  // *(SP extension-att-name SP extension-att-value)
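+  // e.g. "candidate:4234997325 1 udp 2043278322 192.168.0.56 44323 typ host"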
+  const size_t expected_min_fields = 8;
+  if (fields.size() < expected_min_fields ||
+      (fields[6] != kAttributeCandidateTyp)) {
+    return ParseFailedExpectMinFieldNum(first_line, expected_min_fields, error);
+  }
+  const std::string& foundation = fields[0];
+
+  int component_id = 0;
+  if (!GetValueFromString(first_line, fields[1], &component_id, error)) {
+    return false;
+  }
+  const std::string& transport = fields[2];
+  uint32_t priority = 0;
+  if (!GetValueFromString(first_line, fields[3], &priority, error)) {
+    return false;
+  }
+  const std::string& connection_address = fields[4];
+  int port = 0;
+  if (!GetValueFromString(first_line, fields[5], &port, error)) {
+    return false;
+  }
+  SocketAddress address(connection_address, port);
+
+  cricket::ProtocolType protocol;
+  if (!StringToProto(transport.c_str(), &protocol)) {
+    return ParseFailed(first_line, "Unsupported transport type.", error);
+  }
+
+  std::string candidate_type;
+  const std::string& type = fields[7];
+  if (type == kCandidateHost) {
+    candidate_type = cricket::LOCAL_PORT_TYPE;
+  } else if (type == kCandidateSrflx) {
+    candidate_type = cricket::STUN_PORT_TYPE;
+  } else if (type == kCandidateRelay) {
+    candidate_type = cricket::RELAY_PORT_TYPE;
+  } else {
+    return ParseFailed(first_line, "Unsupported candidate type.", error);
+  }
+
+  size_t current_position = expected_min_fields;
+  SocketAddress related_address;
+  // The 2 optional fields for related address
+  // [raddr <connection-address>] [rport <port>]
+  if (fields.size() >= (current_position + 2) &&
+      fields[current_position] == kAttributeCandidateRaddr) {
+    related_address.SetIP(fields[++current_position]);
+    ++current_position;
+  }
+  if (fields.size() >= (current_position + 2) &&
+      fields[current_position] == kAttributeCandidateRport) {
+    int port = 0;
+    if (!GetValueFromString(
+        first_line, fields[++current_position], &port, error)) {
+      return false;
+    }
+    related_address.SetPort(port);
+    ++current_position;
+  }
+
+  // If this is a TCP candidate, it has an additional extension as defined in
+  // RFC 6544.
+  std::string tcptype;
+  if (fields.size() >= (current_position + 2) &&
+      fields[current_position] == kTcpCandidateType) {
+    tcptype = fields[++current_position];
+    ++current_position;
+
+    if (tcptype != cricket::TCPTYPE_ACTIVE_STR &&
+        tcptype != cricket::TCPTYPE_PASSIVE_STR &&
+        tcptype != cricket::TCPTYPE_SIMOPEN_STR) {
+      return ParseFailed(first_line, "Invalid TCP candidate type.", error);
+    }
+
+    if (protocol != cricket::PROTO_TCP) {
+      return ParseFailed(first_line, "Invalid non-TCP candidate", error);
+    }
+  }
+
+  // Extension
+  // Though non-standard, we support the ICE ufrag and pwd being signaled on
+  // the candidate to avoid issues with confusing which generation a candidate
+  // belongs to when trickling multiple generations at the same time.
+  std::string username;
+  std::string password;
+  uint32_t generation = 0;
+  for (size_t i = current_position; i + 1 < fields.size(); ++i) {
+    // RFC 5245
+    // *(SP extension-att-name SP extension-att-value)
+    if (fields[i] == kAttributeCandidateGeneration) {
+      if (!GetValueFromString(first_line, fields[++i], &generation, error)) {
+        return false;
+      }
+    } else if (fields[i] == kAttributeCandidateUfrag) {
+      username = fields[++i];
+    } else if (fields[i] == kAttributeCandidatePwd) {
+      password = fields[++i];
+    } else {
+      // Skip the unknown extension.
+      ++i;
+    }
+  }
+
+  *candidate = Candidate(component_id, cricket::ProtoToString(protocol),
+                         address, priority, username, password, candidate_type,
+                         generation, foundation);
+  candidate->set_related_address(related_address);
+  candidate->set_tcptype(tcptype);
+  return true;
+}
+
+bool ParseIceOptions(const std::string& line,
+                     std::vector<std::string>* transport_options,
+                     SdpParseError* error) {
+  std::string ice_options;
+  if (!GetValue(line, kAttributeIceOption, &ice_options, error)) {
+    return false;
+  }
+  std::vector<std::string> fields;
+  rtc::split(ice_options, kSdpDelimiterSpace, &fields);
+  for (size_t i = 0; i < fields.size(); ++i) {
+    transport_options->push_back(fields[i]);
+  }
+  return true;
+}
+
+bool ParseSctpPort(const std::string& line,
+                   int* sctp_port,
+                   SdpParseError* error) {
+  // draft-ietf-mmusic-sctp-sdp-07
+  // a=sctp-port
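+  // e.g. "a=sctp-port:5000"; a space-separated form ("a=sctp-port 5000") is
+  // also accepted below.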
+  std::vector<std::string> fields;
+  const size_t expected_min_fields = 2;
+  rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterColon, &fields);
+  if (fields.size() < expected_min_fields) {
+    fields.resize(0);
+    rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpace, &fields);
+  }
+  if (fields.size() < expected_min_fields) {
+    return ParseFailedExpectMinFieldNum(line, expected_min_fields, error);
+  }
+  if (!rtc::FromString(fields[1], sctp_port)) {
+    return ParseFailed(line, "Invalid sctp port value.", error);
+  }
+  return true;
+}
+
+bool ParseExtmap(const std::string& line, RtpHeaderExtension* extmap,
+                 SdpParseError* error) {
+  // RFC 5285
+  // a=extmap:<value>["/"<direction>] <URI> <extensionattributes>
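+  // e.g. "a=extmap:1 urn:ietf:params:rtp-hdrext:ssrc-audio-level"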
+  std::vector<std::string> fields;
+  rtc::split(line.substr(kLinePrefixLength),
+                   kSdpDelimiterSpace, &fields);
+  const size_t expected_min_fields = 2;
+  if (fields.size() < expected_min_fields) {
+    return ParseFailedExpectMinFieldNum(line, expected_min_fields, error);
+  }
+  std::string uri = fields[1];
+
+  std::string value_direction;
+  if (!GetValue(fields[0], kAttributeExtmap, &value_direction, error)) {
+    return false;
+  }
+  std::vector<std::string> sub_fields;
+  rtc::split(value_direction, kSdpDelimiterSlash, &sub_fields);
+  int value = 0;
+  if (!GetValueFromString(line, sub_fields[0], &value, error)) {
+    return false;
+  }
+
+  *extmap = RtpHeaderExtension(uri, value);
+  return true;
+}
+
+void BuildMediaDescription(const ContentInfo* content_info,
+                           const TransportInfo* transport_info,
+                           const MediaType media_type,
+                           const std::vector<Candidate>& candidates,
+                           std::string* message) {
+  ASSERT(message != NULL);
+  if (content_info == NULL || message == NULL) {
+    return;
+  }
+  // TODO: Rethink if we should use sprintfn instead of stringstream.
+  // According to the style guide, streams should only be used for logging.
+  // http://google-styleguide.googlecode.com/svn/
+  // trunk/cppguide.xml?showone=Streams#Streams
+  std::ostringstream os;
+  const MediaContentDescription* media_desc =
+      static_cast<const MediaContentDescription*>(
+          content_info->description);
+  ASSERT(media_desc != NULL);
+
+  int sctp_port = cricket::kSctpDefaultPort;
+
+  // RFC 4566
+  // m=<media> <port> <proto> <fmt>
+  // fmt is a list of payload type numbers that MAY be used in the session.
+  const char* type = NULL;
+  if (media_type == cricket::MEDIA_TYPE_AUDIO)
+    type = kMediaTypeAudio;
+  else if (media_type == cricket::MEDIA_TYPE_VIDEO)
+    type = kMediaTypeVideo;
+  else if (media_type == cricket::MEDIA_TYPE_DATA)
+    type = kMediaTypeData;
+  else
+    ASSERT(false);
+
+  std::string fmt;
+  if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+    const VideoContentDescription* video_desc =
+        static_cast<const VideoContentDescription*>(media_desc);
+    for (std::vector<cricket::VideoCodec>::const_iterator it =
+             video_desc->codecs().begin();
+         it != video_desc->codecs().end(); ++it) {
+      fmt.append(" ");
+      fmt.append(rtc::ToString<int>(it->id));
+    }
+  } else if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+    const AudioContentDescription* audio_desc =
+        static_cast<const AudioContentDescription*>(media_desc);
+    for (std::vector<cricket::AudioCodec>::const_iterator it =
+             audio_desc->codecs().begin();
+         it != audio_desc->codecs().end(); ++it) {
+      fmt.append(" ");
+      fmt.append(rtc::ToString<int>(it->id));
+    }
+  } else if (media_type == cricket::MEDIA_TYPE_DATA) {
+    const DataContentDescription* data_desc =
+          static_cast<const DataContentDescription*>(media_desc);
+    if (IsDtlsSctp(media_desc->protocol())) {
+      fmt.append(" ");
+
+      for (std::vector<cricket::DataCodec>::const_iterator it =
+           data_desc->codecs().begin();
+           it != data_desc->codecs().end(); ++it) {
+        if (it->id == cricket::kGoogleSctpDataCodecId &&
+            it->GetParam(cricket::kCodecParamPort, &sctp_port)) {
+          break;
+        }
+      }
+
+      fmt.append(rtc::ToString<int>(sctp_port));
+    } else {
+      for (std::vector<cricket::DataCodec>::const_iterator it =
+           data_desc->codecs().begin();
+           it != data_desc->codecs().end(); ++it) {
+        fmt.append(" ");
+        fmt.append(rtc::ToString<int>(it->id));
+      }
+    }
+  }
+  // The fmt must never be empty. If no codecs are found, set the fmt attribute
+  // to 0.
+  if (fmt.empty()) {
+    fmt = " 0";
+  }
+
+  // The port number in the m line will be updated later when associated with
+  // the candidates.
+  // RFC 3264
+  // To reject an offered stream, the port number in the corresponding stream in
+  // the answer MUST be set to zero.
+  const std::string& port = content_info->rejected ?
+      kMediaPortRejected : kDummyPort;
+
+  rtc::SSLFingerprint* fp = (transport_info) ?
+      transport_info->description.identity_fingerprint.get() : NULL;
+
+  // Add the m and c lines.
+  InitLine(kLineTypeMedia, type, &os);
+  os << " " << port << " " << media_desc->protocol() << fmt;
+  std::string mline = os.str();
+  UpdateMediaDefaultDestination(candidates, mline, message);
+
+  // RFC 4566
+  // b=AS:<bandwidth>
+  if (media_desc->bandwidth() >= 1000) {
+    InitLine(kLineTypeSessionBandwidth, kApplicationSpecificMaximum, &os);
+    os << kSdpDelimiterColon << (media_desc->bandwidth() / 1000);
+    AddLine(os.str(), message);
+  }
+
+  // Add the a=rtcp line.
+  if (IsRtp(media_desc->protocol())) {
+    std::string rtcp_line = GetRtcpLine(candidates);
+    if (!rtcp_line.empty()) {
+      AddLine(rtcp_line, message);
+    }
+  }
+
+  // Build the a=candidate lines. We don't include ufrag and pwd in the
+  // candidates in the SDP to avoid redundancy.
+  BuildCandidate(candidates, false, message);
+
+  // Use the transport_info to build the media level ice-ufrag and ice-pwd.
+  if (transport_info) {
+    // RFC 5245
+    // ice-pwd-att           = "ice-pwd" ":" password
+    // ice-ufrag-att         = "ice-ufrag" ":" ufrag
+    // ice-ufrag
+    if (!transport_info->description.ice_ufrag.empty()) {
+      InitAttrLine(kAttributeIceUfrag, &os);
+      os << kSdpDelimiterColon << transport_info->description.ice_ufrag;
+      AddLine(os.str(), message);
+    }
+    // ice-pwd
+    if (!transport_info->description.ice_pwd.empty()) {
+      InitAttrLine(kAttributeIcePwd, &os);
+      os << kSdpDelimiterColon << transport_info->description.ice_pwd;
+      AddLine(os.str(), message);
+    }
+
+    // draft-petithuguenin-mmusic-ice-attributes-level-03
+    BuildIceOptions(transport_info->description.transport_options, message);
+
+    // RFC 4572
+    // fingerprint-attribute  =
+    //   "fingerprint" ":" hash-func SP fingerprint
+    if (fp) {
+      // Insert the fingerprint attribute.
+      InitAttrLine(kAttributeFingerprint, &os);
+      os << kSdpDelimiterColon
+         << fp->algorithm << kSdpDelimiterSpace
+         << fp->GetRfc4572Fingerprint();
+      AddLine(os.str(), message);
+
+      // Inserting setup attribute.
+      if (transport_info->description.connection_role !=
+              cricket::CONNECTIONROLE_NONE) {
+        // Making sure we are not using "passive" mode.
+        cricket::ConnectionRole role =
+            transport_info->description.connection_role;
+        std::string dtls_role_str;
+        VERIFY(cricket::ConnectionRoleToString(role, &dtls_role_str));
+        InitAttrLine(kAttributeSetup, &os);
+        os << kSdpDelimiterColon << dtls_role_str;
+        AddLine(os.str(), message);
+      }
+    }
+  }
+
+  // RFC 3388
+  // mid-attribute      = "a=mid:" identification-tag
+  // identification-tag = token
+  // Use the content name as the mid identification-tag.
+  InitAttrLine(kAttributeMid, &os);
+  os << kSdpDelimiterColon << content_info->name;
+  AddLine(os.str(), message);
+
+  if (IsDtlsSctp(media_desc->protocol())) {
+    BuildSctpContentAttributes(message, sctp_port);
+  } else if (IsRtp(media_desc->protocol())) {
+    BuildRtpContentAttributes(media_desc, media_type, message);
+  }
+}
+
+void BuildSctpContentAttributes(std::string* message, int sctp_port) {
+  // draft-ietf-mmusic-sctp-sdp-04
+  // a=sctpmap:sctpmap-number  protocol  [streams]
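+  // e.g. a line like "a=sctpmap:5000 webrtc-datachannel 1024".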
+  // TODO(lally): switch this over to mmusic-sctp-sdp-12 (or later), with
+  // 'a=sctp-port:'
+  std::ostringstream os;
+  InitAttrLine(kAttributeSctpmap, &os);
+  os << kSdpDelimiterColon << sctp_port << kSdpDelimiterSpace
+     << kDefaultSctpmapProtocol << kSdpDelimiterSpace
+     << (cricket::kMaxSctpSid + 1);
+  AddLine(os.str(), message);
+}
+
+void BuildRtpContentAttributes(
+    const MediaContentDescription* media_desc,
+    const MediaType media_type,
+    std::string* message) {
+  std::ostringstream os;
+  // RFC 5285
+  // a=extmap:<value>["/"<direction>] <URI> <extensionattributes>
+  // The definitions MUST be either all session level or all media level. This
+  // implementation uses all media level.
+  for (size_t i = 0; i < media_desc->rtp_header_extensions().size(); ++i) {
+    InitAttrLine(kAttributeExtmap, &os);
+    os << kSdpDelimiterColon << media_desc->rtp_header_extensions()[i].id
+       << kSdpDelimiterSpace << media_desc->rtp_header_extensions()[i].uri;
+    AddLine(os.str(), message);
+  }
+
+  // RFC 3264
+  // a=sendrecv || a=sendonly || a=recvonly || a=inactive
+  switch (media_desc->direction()) {
+    case cricket::MD_INACTIVE:
+      InitAttrLine(kAttributeInactive, &os);
+      break;
+    case cricket::MD_SENDONLY:
+      InitAttrLine(kAttributeSendOnly, &os);
+      break;
+    case cricket::MD_RECVONLY:
+      InitAttrLine(kAttributeRecvOnly, &os);
+      break;
+    case cricket::MD_SENDRECV:
+    default:
+      InitAttrLine(kAttributeSendRecv, &os);
+      break;
+  }
+  AddLine(os.str(), message);
+
+  // RFC 5761
+  // a=rtcp-mux
+  if (media_desc->rtcp_mux()) {
+    InitAttrLine(kAttributeRtcpMux, &os);
+    AddLine(os.str(), message);
+  }
+
+  // RFC 5506
+  // a=rtcp-rsize
+  if (media_desc->rtcp_reduced_size()) {
+    InitAttrLine(kAttributeRtcpReducedSize, &os);
+    AddLine(os.str(), message);
+  }
+
+  // RFC 4568
+  // a=crypto:<tag> <crypto-suite> <key-params> [<session-params>]
+  for (std::vector<CryptoParams>::const_iterator it =
+           media_desc->cryptos().begin();
+       it != media_desc->cryptos().end(); ++it) {
+    InitAttrLine(kAttributeCrypto, &os);
+    os << kSdpDelimiterColon << it->tag << " " << it->cipher_suite << " "
+       << it->key_params;
+    if (!it->session_params.empty()) {
+      os << " " << it->session_params;
+    }
+    AddLine(os.str(), message);
+  }
+
+  // RFC 4566
+  // a=rtpmap:<payload type> <encoding name>/<clock rate>
+  // [/<encodingparameters>]
+  BuildRtpMap(media_desc, media_type, message);
+
+  for (StreamParamsVec::const_iterator track = media_desc->streams().begin();
+       track != media_desc->streams().end(); ++track) {
+    // Require that the track belongs to a media stream,
+    // i.e. the sync_label is set. This extra check is necessary since the
+    // MediaContentDescription always contains a streamparam with an ssrc even
+    // if no track or media stream has been created.
+    if (track->sync_label.empty()) continue;
+
+    // Build the ssrc-group lines.
+    for (size_t i = 0; i < track->ssrc_groups.size(); ++i) {
+      // RFC 5576
+      // a=ssrc-group:<semantics> <ssrc-id> ...
+      if (track->ssrc_groups[i].ssrcs.empty()) {
+        continue;
+      }
+      std::ostringstream os;
+      InitAttrLine(kAttributeSsrcGroup, &os);
+      os << kSdpDelimiterColon << track->ssrc_groups[i].semantics;
+      std::vector<uint32_t>::const_iterator ssrc =
+          track->ssrc_groups[i].ssrcs.begin();
+      for (; ssrc != track->ssrc_groups[i].ssrcs.end(); ++ssrc) {
+        os << kSdpDelimiterSpace << rtc::ToString<uint32_t>(*ssrc);
+      }
+      AddLine(os.str(), message);
+    }
+    // Build the ssrc lines for each ssrc.
+    for (size_t i = 0; i < track->ssrcs.size(); ++i) {
+      uint32_t ssrc = track->ssrcs[i];
+      // RFC 5576
+      // a=ssrc:<ssrc-id> cname:<value>
+      AddSsrcLine(ssrc, kSsrcAttributeCname,
+                  track->cname, message);
+
+      // draft-alvestrand-mmusic-msid-00
+      // a=ssrc:<ssrc-id> msid:identifier [appdata]
+      // The appdata consists of the "id" attribute of a MediaStreamTrack,
+      // which corresponds to the "name" attribute of StreamParams.
+      std::string appdata = track->id;
+      std::ostringstream os;
+      InitAttrLine(kAttributeSsrc, &os);
+      os << kSdpDelimiterColon << ssrc << kSdpDelimiterSpace
+         << kSsrcAttributeMsid << kSdpDelimiterColon << track->sync_label
+         << kSdpDelimiterSpace << appdata;
+      AddLine(os.str(), message);
+
+      // TODO(ronghuawu): Remove below code which is for backward compatibility.
+      // draft-alvestrand-rtcweb-mid-01
+      // a=ssrc:<ssrc-id> mslabel:<value>
+      // The label isn't yet defined.
+      // a=ssrc:<ssrc-id> label:<value>
+      AddSsrcLine(ssrc, kSsrcAttributeMslabel, track->sync_label, message);
+      AddSsrcLine(ssrc, kSSrcAttributeLabel, track->id, message);
+    }
+  }
+}
+
+void WriteFmtpHeader(int payload_type, std::ostringstream* os) {
+  // fmtp header: a=fmtp:|payload_type| <parameters>
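+  // e.g. "a=fmtp:111 minptime=10; useinbandfec=1"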
+  // Add a=fmtp
+  InitAttrLine(kAttributeFmtp, os);
+  // Add :|payload_type|
+  *os << kSdpDelimiterColon << payload_type;
+}
+
+void WriteRtcpFbHeader(int payload_type, std::ostringstream* os) {
+  // rtcp-fb header: a=rtcp-fb:|payload_type|
+  // <parameters>/<ccm <ccm_parameters>>
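+  // e.g. "a=rtcp-fb:100 ccm fir", or "a=rtcp-fb:* nack" for all payload types.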
+  // Add a=rtcp-fb
+  InitAttrLine(kAttributeRtcpFb, os);
+  // Add :
+  *os << kSdpDelimiterColon;
+  if (payload_type == kWildcardPayloadType) {
+    *os << "*";
+  } else {
+    *os << payload_type;
+  }
+}
+
+void WriteFmtpParameter(const std::string& parameter_name,
+                        const std::string& parameter_value,
+                        std::ostringstream* os) {
+  // fmtp parameters: |parameter_name|=|parameter_value|
+  *os << parameter_name << kSdpDelimiterEqual << parameter_value;
+}
+
+void WriteFmtpParameters(const cricket::CodecParameterMap& parameters,
+                         std::ostringstream* os) {
+  for (cricket::CodecParameterMap::const_iterator fmtp = parameters.begin();
+       fmtp != parameters.end(); ++fmtp) {
+    // Each new parameter, except the first one, starts with ";" and " ".
+    if (fmtp != parameters.begin()) {
+      *os << kSdpDelimiterSemicolon;
+    }
+    *os << kSdpDelimiterSpace;
+    WriteFmtpParameter(fmtp->first, fmtp->second, os);
+  }
+}
+
+bool IsFmtpParam(const std::string& name) {
+  const char* kFmtpParams[] = {
+    kCodecParamMinPTime, kCodecParamSPropStereo,
+    kCodecParamStereo, kCodecParamUseInbandFec, kCodecParamUseDtx,
+    kCodecParamStartBitrate, kCodecParamMaxBitrate, kCodecParamMinBitrate,
+    kCodecParamMaxQuantization, kCodecParamSctpProtocol, kCodecParamSctpStreams,
+    kCodecParamMaxAverageBitrate, kCodecParamMaxPlaybackRate,
+    kCodecParamAssociatedPayloadType
+  };
+  for (size_t i = 0; i < arraysize(kFmtpParams); ++i) {
+    if (_stricmp(name.c_str(), kFmtpParams[i]) == 0) {
+      return true;
+    }
+  }
+  return false;
+}
+
+// Retrieves fmtp parameters from |params|, which may contain other parameters
+// as well, and puts them in |fmtp_parameters|.
+void GetFmtpParams(const cricket::CodecParameterMap& params,
+                   cricket::CodecParameterMap* fmtp_parameters) {
+  for (cricket::CodecParameterMap::const_iterator iter = params.begin();
+       iter != params.end(); ++iter) {
+    if (IsFmtpParam(iter->first)) {
+      (*fmtp_parameters)[iter->first] = iter->second;
+    }
+  }
+}
+
+template <class T>
+void AddFmtpLine(const T& codec, std::string* message) {
+  cricket::CodecParameterMap fmtp_parameters;
+  GetFmtpParams(codec.params, &fmtp_parameters);
+  if (fmtp_parameters.empty()) {
+    // No need to add an fmtp if it will have no (optional) parameters.
+    return;
+  }
+  std::ostringstream os;
+  WriteFmtpHeader(codec.id, &os);
+  WriteFmtpParameters(fmtp_parameters, &os);
+  AddLine(os.str(), message);
+  return;
+}
+
+template <class T>
+void AddRtcpFbLines(const T& codec, std::string* message) {
+  for (std::vector<cricket::FeedbackParam>::const_iterator iter =
+           codec.feedback_params.params().begin();
+       iter != codec.feedback_params.params().end(); ++iter) {
+    std::ostringstream os;
+    WriteRtcpFbHeader(codec.id, &os);
+    os << " " << iter->id();
+    if (!iter->param().empty()) {
+      os << " " << iter->param();
+    }
+    AddLine(os.str(), message);
+  }
+}
+
+bool AddSctpDataCodec(DataContentDescription* media_desc,
+                      int sctp_port) {
+  if (media_desc->HasCodec(cricket::kGoogleSctpDataCodecId)) {
+    return ParseFailed("",
+                       "Can't have multiple sctp port attributes.",
+                       NULL);
+  }
+  // Add the SCTP Port number as a pseudo-codec "port" parameter
+  cricket::DataCodec codec_port(
+      cricket::kGoogleSctpDataCodecId, cricket::kGoogleSctpDataCodecName,
+      0);
+  codec_port.SetParam(cricket::kCodecParamPort, sctp_port);
+  LOG(INFO) << "AddSctpDataCodec: Got SCTP Port Number "
+            << sctp_port;
+  media_desc->AddCodec(codec_port);
+  return true;
+}
+
+bool GetMinValue(const std::vector<int>& values, int* value) {
+  if (values.empty()) {
+    return false;
+  }
+  std::vector<int>::const_iterator found =
+      std::min_element(values.begin(), values.end());
+  *value = *found;
+  return true;
+}
+
+bool GetParameter(const std::string& name,
+                  const cricket::CodecParameterMap& params, int* value) {
+  std::map<std::string, std::string>::const_iterator found =
+      params.find(name);
+  if (found == params.end()) {
+    return false;
+  }
+  if (!rtc::FromString(found->second, value)) {
+    return false;
+  }
+  return true;
+}
+
+void BuildRtpMap(const MediaContentDescription* media_desc,
+                 const MediaType media_type,
+                 std::string* message) {
+  ASSERT(message != NULL);
+  ASSERT(media_desc != NULL);
+  std::ostringstream os;
+  if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+    const VideoContentDescription* video_desc =
+        static_cast<const VideoContentDescription*>(media_desc);
+    for (std::vector<cricket::VideoCodec>::const_iterator it =
+             video_desc->codecs().begin();
+         it != video_desc->codecs().end(); ++it) {
+      // RFC 4566
+      // a=rtpmap:<payload type> <encoding name>/<clock rate>
+      // [/<encodingparameters>]
+      if (it->id != kWildcardPayloadType) {
+        InitAttrLine(kAttributeRtpmap, &os);
+        os << kSdpDelimiterColon << it->id << " " << it->name
+         << "/" << kDefaultVideoClockrate;
+        AddLine(os.str(), message);
+      }
+      AddRtcpFbLines(*it, message);
+      AddFmtpLine(*it, message);
+    }
+  } else if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+    const AudioContentDescription* audio_desc =
+        static_cast<const AudioContentDescription*>(media_desc);
+    std::vector<int> ptimes;
+    std::vector<int> maxptimes;
+    int max_minptime = 0;
+    for (std::vector<cricket::AudioCodec>::const_iterator it =
+             audio_desc->codecs().begin();
+         it != audio_desc->codecs().end(); ++it) {
+      ASSERT(!it->name.empty());
+      // RFC 4566
+      // a=rtpmap:<payload type> <encoding name>/<clock rate>
+      // [/<encodingparameters>]
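+      // e.g. "a=rtpmap:111 opus/48000/2"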
+      InitAttrLine(kAttributeRtpmap, &os);
+      os << kSdpDelimiterColon << it->id << " ";
+      os << it->name << "/" << it->clockrate;
+      if (it->channels != 1) {
+        os << "/" << it->channels;
+      }
+      AddLine(os.str(), message);
+      AddRtcpFbLines(*it, message);
+      AddFmtpLine(*it, message);
+      int minptime = 0;
+      if (GetParameter(kCodecParamMinPTime, it->params, &minptime)) {
+        max_minptime = std::max(minptime, max_minptime);
+      }
+      int ptime;
+      if (GetParameter(kCodecParamPTime, it->params, &ptime)) {
+        ptimes.push_back(ptime);
+      }
+      int maxptime;
+      if (GetParameter(kCodecParamMaxPTime, it->params, &maxptime)) {
+        maxptimes.push_back(maxptime);
+      }
+    }
+    // Populate the maxptime attribute with the smallest maxptime of all codecs
+    // under the same m-line.
+    int min_maxptime = INT_MAX;
+    if (GetMinValue(maxptimes, &min_maxptime)) {
+      AddAttributeLine(kCodecParamMaxPTime, min_maxptime, message);
+    }
+    ASSERT(min_maxptime > max_minptime);
+    // Populate the ptime attribute with the smallest ptime or the largest
+    // minptime, whichever is the largest, for all codecs under the same m-line.
+    int ptime = INT_MAX;
+    if (GetMinValue(ptimes, &ptime)) {
+      ptime = std::min(ptime, min_maxptime);
+      ptime = std::max(ptime, max_minptime);
+      AddAttributeLine(kCodecParamPTime, ptime, message);
+    }
+  } else if (media_type == cricket::MEDIA_TYPE_DATA) {
+    const DataContentDescription* data_desc =
+        static_cast<const DataContentDescription*>(media_desc);
+    for (std::vector<cricket::DataCodec>::const_iterator it =
+         data_desc->codecs().begin();
+         it != data_desc->codecs().end(); ++it) {
+      // RFC 4566
+      // a=rtpmap:<payload type> <encoding name>/<clock rate>
+      // [/<encodingparameters>]
+      InitAttrLine(kAttributeRtpmap, &os);
+      os << kSdpDelimiterColon << it->id << " "
+         << it->name << "/" << it->clockrate;
+      AddLine(os.str(), message);
+    }
+  }
+}
+
+void BuildCandidate(const std::vector<Candidate>& candidates,
+                    bool include_ufrag,
+                    std::string* message) {
+  std::ostringstream os;
+
+  for (std::vector<Candidate>::const_iterator it = candidates.begin();
+       it != candidates.end(); ++it) {
+    // RFC 5245
+    // a=candidate:<foundation> <component-id> <transport> <priority>
+    // <connection-address> <port> typ <candidate-types>
+    // [raddr <connection-address>] [rport <port>]
+    // *(SP extension-att-name SP extension-att-value)
+    std::string type;
+    // Map the cricket candidate type to "host" / "srflx" / "prflx" / "relay"
+    if (it->type() == cricket::LOCAL_PORT_TYPE) {
+      type = kCandidateHost;
+    } else if (it->type() == cricket::STUN_PORT_TYPE) {
+      type = kCandidateSrflx;
+    } else if (it->type() == cricket::RELAY_PORT_TYPE) {
+      type = kCandidateRelay;
+    } else {
+      ASSERT(false);
+      // Never write out candidates if we don't know the type.
+      continue;
+    }
+
+    InitAttrLine(kAttributeCandidate, &os);
+    os << kSdpDelimiterColon
+       << it->foundation() << " "
+       << it->component() << " "
+       << it->protocol() << " "
+       << it->priority() << " "
+       << it->address().ipaddr().ToString() << " "
+       << it->address().PortAsString() << " "
+       << kAttributeCandidateTyp << " "
+       << type << " ";
+
+    // Related address
+    if (!it->related_address().IsNil()) {
+      os << kAttributeCandidateRaddr << " "
+         << it->related_address().ipaddr().ToString() << " "
+         << kAttributeCandidateRport << " "
+         << it->related_address().PortAsString() << " ";
+    }
+
+    if (it->protocol() == cricket::TCP_PROTOCOL_NAME) {
+      os << kTcpCandidateType << " " << it->tcptype() << " ";
+    }
+
+    // Extensions
+    os << kAttributeCandidateGeneration << " " << it->generation();
+    if (include_ufrag && !it->username().empty()) {
+      os << " " << kAttributeCandidateUfrag << " " << it->username();
+    }
+
+    AddLine(os.str(), message);
+  }
+}
+
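+// Builds the "a=ice-options" line, e.g. "a=ice-options:google-ice trickle"
+// for |transport_options| {"google-ice", "trickle"}.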
+void BuildIceOptions(const std::vector<std::string>& transport_options,
+                     std::string* message) {
+  if (!transport_options.empty()) {
+    std::ostringstream os;
+    InitAttrLine(kAttributeIceOption, &os);
+    os << kSdpDelimiterColon << transport_options[0];
+    for (size_t i = 1; i < transport_options.size(); ++i) {
+      os << kSdpDelimiterSpace << transport_options[i];
+    }
+    AddLine(os.str(), message);
+  }
+}
+
+bool IsRtp(const std::string& protocol) {
+  return protocol.empty() ||
+      (protocol.find(cricket::kMediaProtocolRtpPrefix) != std::string::npos);
+}
+
+bool IsDtlsSctp(const std::string& protocol) {
+  // This intentionally excludes "SCTP" and "SCTP/DTLS".
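+  // e.g. true for "DTLS/SCTP", false for plain "SCTP".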
+  return protocol.find(cricket::kMediaProtocolDtlsSctp) != std::string::npos;
+}
+
+bool ParseSessionDescription(const std::string& message, size_t* pos,
+                             std::string* session_id,
+                             std::string* session_version,
+                             TransportDescription* session_td,
+                             RtpHeaderExtensions* session_extmaps,
+                             cricket::SessionDescription* desc,
+                             SdpParseError* error) {
+  std::string line;
+
+  desc->set_msid_supported(false);
+
+  // RFC 4566
+  // v=  (protocol version)
+  if (!GetLineWithType(message, pos, &line, kLineTypeVersion)) {
+    return ParseFailedExpectLine(message, *pos, kLineTypeVersion,
+                                 std::string(), error);
+  }
+  // RFC 4566
+  // o=<username> <sess-id> <sess-version> <nettype> <addrtype>
+  // <unicast-address>
+  if (!GetLineWithType(message, pos, &line, kLineTypeOrigin)) {
+    return ParseFailedExpectLine(message, *pos, kLineTypeOrigin,
+                                 std::string(), error);
+  }
+  std::vector<std::string> fields;
+  rtc::split(line.substr(kLinePrefixLength),
+                   kSdpDelimiterSpace, &fields);
+  const size_t expected_fields = 6;
+  if (fields.size() != expected_fields) {
+    return ParseFailedExpectFieldNum(line, expected_fields, error);
+  }
+  *session_id = fields[1];
+  *session_version = fields[2];
+
+  // RFC 4566
+  // s=  (session name)
+  if (!GetLineWithType(message, pos, &line, kLineTypeSessionName)) {
+    return ParseFailedExpectLine(message, *pos, kLineTypeSessionName,
+                                 std::string(), error);
+  }
+
+  // Optional lines
+  // These lines are optional, so don't return false if they're not present.
+  // RFC 4566
+  // i=* (session information)
+  GetLineWithType(message, pos, &line, kLineTypeSessionInfo);
+
+  // RFC 4566
+  // u=* (URI of description)
+  GetLineWithType(message, pos, &line, kLineTypeSessionUri);
+
+  // RFC 4566
+  // e=* (email address)
+  GetLineWithType(message, pos, &line, kLineTypeSessionEmail);
+
+  // RFC 4566
+  // p=* (phone number)
+  GetLineWithType(message, pos, &line, kLineTypeSessionPhone);
+
+  // RFC 4566
+  // c=* (connection information -- not required if included in
+  //      all media)
+  GetLineWithType(message, pos, &line, kLineTypeConnection);
+
+  // RFC 4566
+  // b=* (zero or more bandwidth information lines)
+  while (GetLineWithType(message, pos, &line, kLineTypeSessionBandwidth)) {
+    // Bypass zero or more b lines.
+  }
+
+  // RFC 4566
+  // One or more time descriptions ("t=" and "r=" lines; see below)
+  // t=  (time the session is active)
+  // r=* (zero or more repeat times)
+  // Ensure there's at least one time description
+  if (!GetLineWithType(message, pos, &line, kLineTypeTiming)) {
+    return ParseFailedExpectLine(message, *pos, kLineTypeTiming, std::string(),
+                                 error);
+  }
+
+  while (GetLineWithType(message, pos, &line, kLineTypeRepeatTimes)) {
+    // Skip over zero or more r lines.
+  }
+
+  // Go through the rest of the time descriptions
+  while (GetLineWithType(message, pos, &line, kLineTypeTiming)) {
+    while (GetLineWithType(message, pos, &line, kLineTypeRepeatTimes)) {
+      // Skip over zero or more r lines.
+    }
+  }
+
+  // RFC 4566
+  // z=* (time zone adjustments)
+  GetLineWithType(message, pos, &line, kLineTypeTimeZone);
+
+  // RFC 4566
+  // k=* (encryption key)
+  GetLineWithType(message, pos, &line, kLineTypeEncryptionKey);
+
+  // RFC 4566
+  // a=* (zero or more session attribute lines)
+  while (GetLineWithType(message, pos, &line, kLineTypeAttributes)) {
+    if (HasAttribute(line, kAttributeGroup)) {
+      if (!ParseGroupAttribute(line, desc, error)) {
+        return false;
+      }
+    } else if (HasAttribute(line, kAttributeIceUfrag)) {
+      if (!GetValue(line, kAttributeIceUfrag,
+                    &(session_td->ice_ufrag), error)) {
+        return false;
+      }
+    } else if (HasAttribute(line, kAttributeIcePwd)) {
+      if (!GetValue(line, kAttributeIcePwd, &(session_td->ice_pwd), error)) {
+        return false;
+      }
+    } else if (HasAttribute(line, kAttributeIceLite)) {
+      session_td->ice_mode = cricket::ICEMODE_LITE;
+    } else if (HasAttribute(line, kAttributeIceOption)) {
+      if (!ParseIceOptions(line, &(session_td->transport_options), error)) {
+        return false;
+      }
+    } else if (HasAttribute(line, kAttributeFingerprint)) {
+      if (session_td->identity_fingerprint.get()) {
+        return ParseFailed(
+            line,
+            "Can't have multiple fingerprint attributes at the same level.",
+            error);
+      }
+      rtc::SSLFingerprint* fingerprint = NULL;
+      if (!ParseFingerprintAttribute(line, &fingerprint, error)) {
+        return false;
+      }
+      session_td->identity_fingerprint.reset(fingerprint);
+    } else if (HasAttribute(line, kAttributeSetup)) {
+      if (!ParseDtlsSetup(line, &(session_td->connection_role), error)) {
+        return false;
+      }
+    } else if (HasAttribute(line, kAttributeMsidSemantics)) {
+      std::string semantics;
+      if (!GetValue(line, kAttributeMsidSemantics, &semantics, error)) {
+        return false;
+      }
+      desc->set_msid_supported(
+          CaseInsensitiveFind(semantics, kMediaStreamSemantic));
+    } else if (HasAttribute(line, kAttributeExtmap)) {
+      RtpHeaderExtension extmap;
+      if (!ParseExtmap(line, &extmap, error)) {
+        return false;
+      }
+      session_extmaps->push_back(extmap);
+    }
+  }
+
+  return true;
+}
+
+bool ParseGroupAttribute(const std::string& line,
+                         cricket::SessionDescription* desc,
+                         SdpParseError* error) {
+  ASSERT(desc != NULL);
+
+  // RFC 5888 and draft-holmberg-mmusic-sdp-bundle-negotiation-00
+  // a=group:BUNDLE video voice
+  std::vector<std::string> fields;
+  rtc::split(line.substr(kLinePrefixLength),
+                   kSdpDelimiterSpace, &fields);
+  std::string semantics;
+  if (!GetValue(fields[0], kAttributeGroup, &semantics, error)) {
+    return false;
+  }
+  cricket::ContentGroup group(semantics);
+  for (size_t i = 1; i < fields.size(); ++i) {
+    group.AddContentName(fields[i]);
+  }
+  desc->AddGroup(group);
+  return true;
+}
+
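+// Parses a fingerprint attribute, e.g.
+// "a=fingerprint:sha-1 4A:AD:B9:B1:3F:82:...", into an rtc::SSLFingerprint
+// with the lower-cased algorithm name and the de-hexified digest.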
+static bool ParseFingerprintAttribute(const std::string& line,
+                                      rtc::SSLFingerprint** fingerprint,
+                                      SdpParseError* error) {
+  if (!IsLineType(line, kLineTypeAttributes) ||
+      !HasAttribute(line, kAttributeFingerprint)) {
+    return ParseFailedExpectLine(line, 0, kLineTypeAttributes,
+                                 kAttributeFingerprint, error);
+  }
+
+  std::vector<std::string> fields;
+  rtc::split(line.substr(kLinePrefixLength),
+                   kSdpDelimiterSpace, &fields);
+  const size_t expected_fields = 2;
+  if (fields.size() != expected_fields) {
+    return ParseFailedExpectFieldNum(line, expected_fields, error);
+  }
+
+  // The first field here is "fingerprint:<hash>".
+  std::string algorithm;
+  if (!GetValue(fields[0], kAttributeFingerprint, &algorithm, error)) {
+    return false;
+  }
+
+  // Downcase the algorithm. Note that we don't need to downcase the
+  // fingerprint because hex_decode can handle upper-case.
+  std::transform(algorithm.begin(), algorithm.end(), algorithm.begin(),
+                 ::tolower);
+
+  // The second field is the digest value. De-hexify it.
+  *fingerprint = rtc::SSLFingerprint::CreateFromRfc4572(
+      algorithm, fields[1]);
+  if (!*fingerprint) {
+    return ParseFailed(line,
+                       "Failed to create fingerprint from the digest.",
+                       error);
+  }
+
+  return true;
+}
+
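+// Parses a DTLS setup attribute (RFC 4145, as used by RFC 5763),
+// e.g. "a=setup:actpass", into a cricket::ConnectionRole.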
+static bool ParseDtlsSetup(const std::string& line,
+                           cricket::ConnectionRole* role,
+                           SdpParseError* error) {
+  // setup-attr           =  "a=setup:" role
+  // role                 =  "active" / "passive" / "actpass" / "holdconn"
+  std::vector<std::string> fields;
+  rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterColon, &fields);
+  const size_t expected_fields = 2;
+  if (fields.size() != expected_fields) {
+    return ParseFailedExpectFieldNum(line, expected_fields, error);
+  }
+  std::string role_str = fields[1];
+  if (!cricket::StringToConnectionRole(role_str, role)) {
+    return ParseFailed(line, "Invalid attribute value.", error);
+  }
+  return true;
+}
+
+// RFC 3551
+//  PT   encoding    media type  clock rate   channels
+//                      name                    (Hz)
+//  0    PCMU        A            8,000       1
+//  1    reserved    A
+//  2    reserved    A
+//  3    GSM         A            8,000       1
+//  4    G723        A            8,000       1
+//  5    DVI4        A            8,000       1
+//  6    DVI4        A           16,000       1
+//  7    LPC         A            8,000       1
+//  8    PCMA        A            8,000       1
+//  9    G722        A            8,000       1
+//  10   L16         A           44,100       2
+//  11   L16         A           44,100       1
+//  12   QCELP       A            8,000       1
+//  13   CN          A            8,000       1
+//  14   MPA         A           90,000       (see text)
+//  15   G728        A            8,000       1
+//  16   DVI4        A           11,025       1
+//  17   DVI4        A           22,050       1
+//  18   G729        A            8,000       1
+struct StaticPayloadAudioCodec {
+  const char* name;
+  int clockrate;
+  size_t channels;
+};
+static const StaticPayloadAudioCodec kStaticPayloadAudioCodecs[] = {
+  { "PCMU", 8000, 1 },
+  { "reserved", 0, 0 },
+  { "reserved", 0, 0 },
+  { "GSM", 8000, 1 },
+  { "G723", 8000, 1 },
+  { "DVI4", 8000, 1 },
+  { "DVI4", 16000, 1 },
+  { "LPC", 8000, 1 },
+  { "PCMA", 8000, 1 },
+  { "G722", 8000, 1 },
+  { "L16", 44100, 2 },
+  { "L16", 44100, 1 },
+  { "QCELP", 8000, 1 },
+  { "CN", 8000, 1 },
+  { "MPA", 90000, 1 },
+  { "G728", 8000, 1 },
+  { "DVI4", 11025, 1 },
+  { "DVI4", 22050, 1 },
+  { "G729", 8000, 1 },
+};
+
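+// Pre-populates the audio description with the static payload types (0-18,
+// per the RFC 3551 table above) listed in the m-line <fmt> list. E.g.
+// "m=audio 49232 RTP/AVP 0 8" implies PCMU/8000 and PCMA/8000 even when no
+// "a=rtpmap" lines are present; later "a=rtpmap" lines may still update
+// these entries.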
+void MaybeCreateStaticPayloadAudioCodecs(
+    const std::vector<int>& fmts, AudioContentDescription* media_desc) {
+  if (!media_desc) {
+    return;
+  }
+  int preference = static_cast<int>(fmts.size());
+  std::vector<int>::const_iterator it = fmts.begin();
+  bool add_new_codec = false;
+  for (; it != fmts.end(); ++it) {
+    int payload_type = *it;
+    if (!media_desc->HasCodec(payload_type) &&
+        payload_type >= 0 &&
+        payload_type < arraysize(kStaticPayloadAudioCodecs)) {
+      std::string encoding_name = kStaticPayloadAudioCodecs[payload_type].name;
+      int clock_rate = kStaticPayloadAudioCodecs[payload_type].clockrate;
+      size_t channels = kStaticPayloadAudioCodecs[payload_type].channels;
+      media_desc->AddCodec(cricket::AudioCodec(payload_type, encoding_name,
+                                               clock_rate, 0, channels,
+                                               preference));
+      add_new_codec = true;
+    }
+    --preference;
+  }
+  if (add_new_codec) {
+    media_desc->SortCodecs();
+  }
+}
+
+template <class C>
+static C* ParseContentDescription(const std::string& message,
+                                  const MediaType media_type,
+                                  int mline_index,
+                                  const std::string& protocol,
+                                  const std::vector<int>& codec_preference,
+                                  size_t* pos,
+                                  std::string* content_name,
+                                  TransportDescription* transport,
+                                  std::vector<JsepIceCandidate*>* candidates,
+                                  webrtc::SdpParseError* error) {
+  C* media_desc = new C();
+  switch (media_type) {
+    case cricket::MEDIA_TYPE_AUDIO:
+      *content_name = cricket::CN_AUDIO;
+      break;
+    case cricket::MEDIA_TYPE_VIDEO:
+      *content_name = cricket::CN_VIDEO;
+      break;
+    case cricket::MEDIA_TYPE_DATA:
+      *content_name = cricket::CN_DATA;
+      break;
+    default:
+      ASSERT(false);
+      break;
+  }
+  if (!ParseContent(message, media_type, mline_index, protocol,
+                    codec_preference, pos, content_name,
+                    media_desc, transport, candidates, error)) {
+    delete media_desc;
+    return NULL;
+  }
+  // Sort the codecs according to the m-line fmt list.
+  media_desc->SortCodecs();
+  return media_desc;
+}
+
+bool ParseMediaDescription(const std::string& message,
+                           const TransportDescription& session_td,
+                           const RtpHeaderExtensions& session_extmaps,
+                           size_t* pos,
+                           cricket::SessionDescription* desc,
+                           std::vector<JsepIceCandidate*>* candidates,
+                           SdpParseError* error) {
+  ASSERT(desc != NULL);
+  std::string line;
+  int mline_index = -1;
+
+  // Zero or more media descriptions
+  // RFC 4566
+  // m=<media> <port> <proto> <fmt>
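+  // e.g. "m=audio 2345 RTP/SAVPF 111 103 104" (see kSdpFullString in the
+  // unit tests).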
+  while (GetLineWithType(message, pos, &line, kLineTypeMedia)) {
+    ++mline_index;
+
+    std::vector<std::string> fields;
+    rtc::split(line.substr(kLinePrefixLength),
+                     kSdpDelimiterSpace, &fields);
+    const size_t expected_min_fields = 4;
+    if (fields.size() < expected_min_fields) {
+      return ParseFailedExpectMinFieldNum(line, expected_min_fields, error);
+    }
+    bool rejected = false;
+    // RFC 3264
+    // To reject an offered stream, the port number in the corresponding stream
+    // in the answer MUST be set to zero.
+    if (fields[1] == kMediaPortRejected) {
+      rejected = true;
+    }
+
+    std::string protocol = fields[2];
+
+    // <fmt>
+    std::vector<int> codec_preference;
+    if (IsRtp(protocol)) {
+      for (size_t j = 3; j < fields.size(); ++j) {
+        // TODO(wu): Remove when below bug is fixed.
+        // https://bugzilla.mozilla.org/show_bug.cgi?id=996329
+        if (fields[j].empty() && j == fields.size() - 1) {
+          continue;
+        }
+
+        int pl = 0;
+        if (!GetPayloadTypeFromString(line, fields[j], &pl, error)) {
+          return false;
+        }
+        codec_preference.push_back(pl);
+      }
+    }
+
+    // Make a temporary TransportDescription based on |session_td|.
+    // Some of this gets overwritten by ParseContent.
+    TransportDescription transport(
+        session_td.transport_options, session_td.ice_ufrag, session_td.ice_pwd,
+        session_td.ice_mode, session_td.connection_role,
+        session_td.identity_fingerprint.get());
+
+    rtc::scoped_ptr<MediaContentDescription> content;
+    std::string content_name;
+    if (HasAttribute(line, kMediaTypeVideo)) {
+      content.reset(ParseContentDescription<VideoContentDescription>(
+                    message, cricket::MEDIA_TYPE_VIDEO, mline_index, protocol,
+                    codec_preference, pos, &content_name,
+                    &transport, candidates, error));
+    } else if (HasAttribute(line, kMediaTypeAudio)) {
+      content.reset(ParseContentDescription<AudioContentDescription>(
+                    message, cricket::MEDIA_TYPE_AUDIO, mline_index, protocol,
+                    codec_preference, pos, &content_name,
+                    &transport, candidates, error));
+    } else if (HasAttribute(line, kMediaTypeData)) {
+      DataContentDescription* data_desc =
+          ParseContentDescription<DataContentDescription>(
+                    message, cricket::MEDIA_TYPE_DATA, mline_index, protocol,
+                    codec_preference, pos, &content_name,
+                    &transport, candidates, error);
+      content.reset(data_desc);
+
+      int p;
+      if (data_desc && IsDtlsSctp(protocol) && rtc::FromString(fields[3], &p)) {
+        if (!AddSctpDataCodec(data_desc, p))
+          return false;
+      }
+    } else {
+      LOG(LS_WARNING) << "Unsupported media type: " << line;
+      continue;
+    }
+    if (!content.get()) {
+      // ParseContentDescription returns NULL if failed.
+      return false;
+    }
+
+    if (IsRtp(protocol)) {
+      // Set the extmap.
+      if (!session_extmaps.empty() &&
+          !content->rtp_header_extensions().empty()) {
+        return ParseFailed("",
+                           "The a=extmap MUST be either all session level or "
+                           "all media level.",
+                           error);
+      }
+      for (size_t i = 0; i < session_extmaps.size(); ++i) {
+        content->AddRtpHeaderExtension(session_extmaps[i]);
+      }
+    }
+    content->set_protocol(protocol);
+    desc->AddContent(content_name,
+                     IsDtlsSctp(protocol) ? cricket::NS_JINGLE_DRAFT_SCTP :
+                                            cricket::NS_JINGLE_RTP,
+                     rejected,
+                     content.release());
+    // Create TransportInfo with the media level "ice-pwd" and "ice-ufrag".
+    TransportInfo transport_info(content_name, transport);
+
+    if (!desc->AddTransportInfo(transport_info)) {
+      std::ostringstream description;
+      description << "Failed to AddTransportInfo with content name: "
+                  << content_name;
+      return ParseFailed("", description.str(), error);
+    }
+  }
+
+  size_t end_of_message = message.size();
+  if (mline_index == -1 && *pos != end_of_message) {
+    ParseFailed(message, *pos, "Expects m line.", error);
+    return false;
+  }
+  return true;
+}
+
+bool VerifyCodec(const cricket::Codec& codec) {
+  // Codec has not been populated correctly unless the name has been set. This
+  // can happen if an SDP has an fmtp or rtcp-fb with a payload type but doesn't
+  // have a corresponding "rtpmap" line.
+  cricket::Codec default_codec;
+  return default_codec.name != codec.name;
+}
+
+bool VerifyAudioCodecs(const AudioContentDescription* audio_desc) {
+  const std::vector<cricket::AudioCodec>& codecs = audio_desc->codecs();
+  for (std::vector<cricket::AudioCodec>::const_iterator iter = codecs.begin();
+       iter != codecs.end(); ++iter) {
+    if (!VerifyCodec(*iter)) {
+      return false;
+    }
+  }
+  return true;
+}
+
+bool VerifyVideoCodecs(const VideoContentDescription* video_desc) {
+  const std::vector<cricket::VideoCodec>& codecs = video_desc->codecs();
+  for (std::vector<cricket::VideoCodec>::const_iterator iter = codecs.begin();
+       iter != codecs.end(); ++iter) {
+    if (!VerifyCodec(*iter)) {
+      return false;
+    }
+  }
+  return true;
+}
+
+void AddParameters(const cricket::CodecParameterMap& parameters,
+                   cricket::Codec* codec) {
+  for (cricket::CodecParameterMap::const_iterator iter =
+           parameters.begin(); iter != parameters.end(); ++iter) {
+    codec->SetParam(iter->first, iter->second);
+  }
+}
+
+void AddFeedbackParameter(const cricket::FeedbackParam& feedback_param,
+                          cricket::Codec* codec) {
+  codec->AddFeedbackParam(feedback_param);
+}
+
+void AddFeedbackParameters(const cricket::FeedbackParams& feedback_params,
+                           cricket::Codec* codec) {
+  for (std::vector<cricket::FeedbackParam>::const_iterator iter =
+           feedback_params.params().begin();
+       iter != feedback_params.params().end(); ++iter) {
+    codec->AddFeedbackParam(*iter);
+  }
+}
+
+// Gets the current codec setting associated with |payload_type|. If there
+// is no Codec associated with that payload type it returns an empty codec
+// with that payload type.
+template <class T>
+T GetCodecWithPayloadType(const std::vector<T>& codecs, int payload_type) {
+  T ret_val;
+  if (!FindCodecById(codecs, payload_type, &ret_val)) {
+    ret_val.id = payload_type;
+  }
+  return ret_val;
+}
+
+// Updates or creates a new codec entry in the media description.
+template <class T, class U>
+void AddOrReplaceCodec(MediaContentDescription* content_desc, const U& codec) {
+  T* desc = static_cast<T*>(content_desc);
+  std::vector<U> codecs = desc->codecs();
+  bool found = false;
+
+  typename std::vector<U>::iterator iter;
+  for (iter = codecs.begin(); iter != codecs.end(); ++iter) {
+    if (iter->id == codec.id) {
+      *iter = codec;
+      found = true;
+      break;
+    }
+  }
+  if (!found) {
+    desc->AddCodec(codec);
+    return;
+  }
+  desc->set_codecs(codecs);
+}
+
+// Adds or updates existing codec corresponding to |payload_type| according
+// to |parameters|.
+template <class T, class U>
+void UpdateCodec(MediaContentDescription* content_desc, int payload_type,
+                 const cricket::CodecParameterMap& parameters) {
+  // Codec might already have been populated (from rtpmap).
+  U new_codec = GetCodecWithPayloadType(static_cast<T*>(content_desc)->codecs(),
+                                        payload_type);
+  AddParameters(parameters, &new_codec);
+  AddOrReplaceCodec<T, U>(content_desc, new_codec);
+}
+
+// Adds or updates existing codec corresponding to |payload_type| according
+// to |feedback_param|.
+template <class T, class U>
+void UpdateCodec(MediaContentDescription* content_desc, int payload_type,
+                 const cricket::FeedbackParam& feedback_param) {
+  // Codec might already have been populated (from rtpmap).
+  U new_codec = GetCodecWithPayloadType(static_cast<T*>(content_desc)->codecs(),
+                                        payload_type);
+  AddFeedbackParameter(feedback_param, &new_codec);
+  AddOrReplaceCodec<T, U>(content_desc, new_codec);
+}
+
+template <class T>
+bool PopWildcardCodec(std::vector<T>* codecs, T* wildcard_codec) {
+  for (auto iter = codecs->begin(); iter != codecs->end(); ++iter) {
+    if (iter->id == kWildcardPayloadType) {
+      *wildcard_codec = *iter;
+      codecs->erase(iter);
+      return true;
+    }
+  }
+  return false;
+}
+
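+// Copies the feedback parameters of the wildcard codec (payload type "*",
+// e.g. from "a=rtcp-fb:* nack") to every other codec in |desc|, then drops
+// the wildcard entry itself.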
+template<class T>
+void UpdateFromWildcardCodecs(cricket::MediaContentDescriptionImpl<T>* desc) {
+  auto codecs = desc->codecs();
+  T wildcard_codec;
+  if (!PopWildcardCodec(&codecs, &wildcard_codec)) {
+    return;
+  }
+  for (auto& codec : codecs) {
+    AddFeedbackParameters(wildcard_codec.feedback_params, &codec);
+  }
+  desc->set_codecs(codecs);
+}
+
+void AddAudioAttribute(const std::string& name, const std::string& value,
+                       AudioContentDescription* audio_desc) {
+  if (value.empty()) {
+    return;
+  }
+  std::vector<cricket::AudioCodec> codecs = audio_desc->codecs();
+  for (std::vector<cricket::AudioCodec>::iterator iter = codecs.begin();
+       iter != codecs.end(); ++iter) {
+    iter->params[name] = value;
+  }
+  audio_desc->set_codecs(codecs);
+}
+
+bool ParseContent(const std::string& message,
+                  const MediaType media_type,
+                  int mline_index,
+                  const std::string& protocol,
+                  const std::vector<int>& codec_preference,
+                  size_t* pos,
+                  std::string* content_name,
+                  MediaContentDescription* media_desc,
+                  TransportDescription* transport,
+                  std::vector<JsepIceCandidate*>* candidates,
+                  SdpParseError* error) {
+  ASSERT(media_desc != NULL);
+  ASSERT(content_name != NULL);
+  ASSERT(transport != NULL);
+
+  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+    MaybeCreateStaticPayloadAudioCodecs(
+        codec_preference, static_cast<AudioContentDescription*>(media_desc));
+  }
+
+  // Holds the candidates parsed before the media-level "ice-ufrag" and
+  // "ice-pwd" are known; they are updated with those values at the end.
+  Candidates candidates_orig;
+  std::string line;
+  std::string mline_id;
+  // Tracks created out of the ssrc attributes.
+  StreamParamsVec tracks;
+  SsrcInfoVec ssrc_infos;
+  SsrcGroupVec ssrc_groups;
+  std::string maxptime_as_string;
+  std::string ptime_as_string;
+
+  // Loop until the next m line
+  while (!IsLineType(message, kLineTypeMedia, *pos)) {
+    if (!GetLine(message, pos, &line)) {
+      if (*pos >= message.size()) {
+        break;  // Done parsing
+      } else {
+        return ParseFailed(message, *pos, "Invalid SDP line.", error);
+      }
+    }
+
+    // RFC 4566
+    // b=* (zero or more bandwidth information lines)
+    if (IsLineType(line, kLineTypeSessionBandwidth)) {
+      std::string bandwidth;
+      if (HasAttribute(line, kApplicationSpecificMaximum)) {
+        if (!GetValue(line, kApplicationSpecificMaximum, &bandwidth, error)) {
+          return false;
+        } else {
+          int b = 0;
+          if (!GetValueFromString(line, bandwidth, &b, error)) {
+            return false;
+          }
+          // We should never use more than the default bandwidth for RTP-based
+          // data channels. Don't allow SDP to set the bandwidth, because
+          // that would give JS the opportunity to "break the Internet".
+          // See: https://code.google.com/p/chromium/issues/detail?id=280726
+          if (media_type == cricket::MEDIA_TYPE_DATA && IsRtp(protocol) &&
+              b > cricket::kDataMaxBandwidth / 1000) {
+            std::ostringstream description;
+            description << "RTP-based data channels may not send more than "
+                        << cricket::kDataMaxBandwidth / 1000 << "kbps.";
+            return ParseFailed(line, description.str(), error);
+          }
+          media_desc->set_bandwidth(b * 1000);
+        }
+      }
+      continue;
+    }
+
+    if (!IsLineType(line, kLineTypeAttributes)) {
+      // TODO: Handle other lines if needed.
+      LOG(LS_INFO) << "Ignored line: " << line;
+      continue;
+    }
+
+    // Handle attributes common to SCTP and RTP.
+    if (HasAttribute(line, kAttributeMid)) {
+      // RFC 3388
+      // mid-attribute      = "a=mid:" identification-tag
+      // identification-tag = token
+      // Use the mid identification-tag as the content name.
+      if (!GetValue(line, kAttributeMid, &mline_id, error)) {
+        return false;
+      }
+      *content_name = mline_id;
+    } else if (HasAttribute(line, kAttributeCandidate)) {
+      Candidate candidate;
+      if (!ParseCandidate(line, &candidate, error, false)) {
+        return false;
+      }
+      candidates_orig.push_back(candidate);
+    } else if (HasAttribute(line, kAttributeIceUfrag)) {
+      if (!GetValue(line, kAttributeIceUfrag, &transport->ice_ufrag, error)) {
+        return false;
+      }
+    } else if (HasAttribute(line, kAttributeIcePwd)) {
+      if (!GetValue(line, kAttributeIcePwd, &transport->ice_pwd, error)) {
+        return false;
+      }
+    } else if (HasAttribute(line, kAttributeIceOption)) {
+      if (!ParseIceOptions(line, &transport->transport_options, error)) {
+        return false;
+      }
+    } else if (HasAttribute(line, kAttributeFmtp)) {
+      if (!ParseFmtpAttributes(line, media_type, media_desc, error)) {
+        return false;
+      }
+    } else if (HasAttribute(line, kAttributeFingerprint)) {
+      rtc::SSLFingerprint* fingerprint = NULL;
+
+      if (!ParseFingerprintAttribute(line, &fingerprint, error)) {
+        return false;
+      }
+      transport->identity_fingerprint.reset(fingerprint);
+    } else if (HasAttribute(line, kAttributeSetup)) {
+      if (!ParseDtlsSetup(line, &(transport->connection_role), error)) {
+        return false;
+      }
+    } else if (IsDtlsSctp(protocol) && HasAttribute(line, kAttributeSctpPort)) {
+      int sctp_port;
+      if (!ParseSctpPort(line, &sctp_port, error)) {
+        return false;
+      }
+      if (!AddSctpDataCodec(static_cast<DataContentDescription*>(media_desc),
+                            sctp_port)) {
+        return false;
+      }
+    } else if (IsRtp(protocol)) {
+      //
+      // RTP specific attributes
+      //
+      if (HasAttribute(line, kAttributeRtcpMux)) {
+        media_desc->set_rtcp_mux(true);
+      } else if (HasAttribute(line, kAttributeRtcpReducedSize)) {
+        media_desc->set_rtcp_reduced_size(true);
+      } else if (HasAttribute(line, kAttributeSsrcGroup)) {
+        if (!ParseSsrcGroupAttribute(line, &ssrc_groups, error)) {
+          return false;
+        }
+      } else if (HasAttribute(line, kAttributeSsrc)) {
+        if (!ParseSsrcAttribute(line, &ssrc_infos, error)) {
+          return false;
+        }
+      } else if (HasAttribute(line, kAttributeCrypto)) {
+        if (!ParseCryptoAttribute(line, media_desc, error)) {
+          return false;
+        }
+      } else if (HasAttribute(line, kAttributeRtpmap)) {
+        if (!ParseRtpmapAttribute(line, media_type, codec_preference,
+                                  media_desc, error)) {
+          return false;
+        }
+      } else if (HasAttribute(line, kCodecParamMaxPTime)) {
+        if (!GetValue(line, kCodecParamMaxPTime, &maxptime_as_string, error)) {
+          return false;
+        }
+      } else if (HasAttribute(line, kAttributeRtcpFb)) {
+        if (!ParseRtcpFbAttribute(line, media_type, media_desc, error)) {
+          return false;
+        }
+      } else if (HasAttribute(line, kCodecParamPTime)) {
+        if (!GetValue(line, kCodecParamPTime, &ptime_as_string, error)) {
+          return false;
+        }
+      } else if (HasAttribute(line, kAttributeSendOnly)) {
+        media_desc->set_direction(cricket::MD_SENDONLY);
+      } else if (HasAttribute(line, kAttributeRecvOnly)) {
+        media_desc->set_direction(cricket::MD_RECVONLY);
+      } else if (HasAttribute(line, kAttributeInactive)) {
+        media_desc->set_direction(cricket::MD_INACTIVE);
+      } else if (HasAttribute(line, kAttributeSendRecv)) {
+        media_desc->set_direction(cricket::MD_SENDRECV);
+      } else if (HasAttribute(line, kAttributeExtmap)) {
+        RtpHeaderExtension extmap;
+        if (!ParseExtmap(line, &extmap, error)) {
+          return false;
+        }
+        media_desc->AddRtpHeaderExtension(extmap);
+      } else if (HasAttribute(line, kAttributeXGoogleFlag)) {
+        // Experimental attribute.  Conference mode activates more aggressive
+        // AEC and NS settings.
+        // TODO: expose API to set these directly.
+        std::string flag_value;
+        if (!GetValue(line, kAttributeXGoogleFlag, &flag_value, error)) {
+          return false;
+        }
+        if (flag_value.compare(kValueConference) == 0)
+          media_desc->set_conference_mode(true);
+      }
+    } else {
+      // Only parse lines that we are interested in.
+      LOG(LS_INFO) << "Ignored line: " << line;
+      continue;
+    }
+  }
+
+  // Create tracks from the |ssrc_infos|.
+  CreateTracksFromSsrcInfos(ssrc_infos, &tracks);
+
+  // Add the ssrc group to the track.
+  for (SsrcGroupVec::iterator ssrc_group = ssrc_groups.begin();
+       ssrc_group != ssrc_groups.end(); ++ssrc_group) {
+    if (ssrc_group->ssrcs.empty()) {
+      continue;
+    }
+    uint32_t ssrc = ssrc_group->ssrcs.front();
+    for (StreamParamsVec::iterator track = tracks.begin();
+         track != tracks.end(); ++track) {
+      if (track->has_ssrc(ssrc)) {
+        track->ssrc_groups.push_back(*ssrc_group);
+      }
+    }
+  }
+
+  // Add the new tracks to the |media_desc|.
+  for (StreamParamsVec::iterator track = tracks.begin();
+       track != tracks.end(); ++track) {
+    media_desc->AddStream(*track);
+  }
+
+  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+    AudioContentDescription* audio_desc =
+        static_cast<AudioContentDescription*>(media_desc);
+    UpdateFromWildcardCodecs(audio_desc);
+
+    // Verify that no audio codec has been populated with only an fmtp line
+    // (i.e. without a corresponding rtpmap line).
+    if (!VerifyAudioCodecs(audio_desc)) {
+      return ParseFailed("Failed to parse audio codecs correctly.", error);
+    }
+    AddAudioAttribute(kCodecParamMaxPTime, maxptime_as_string, audio_desc);
+    AddAudioAttribute(kCodecParamPTime, ptime_as_string, audio_desc);
+  }
+
+  if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+    VideoContentDescription* video_desc =
+        static_cast<VideoContentDescription*>(media_desc);
+    UpdateFromWildcardCodecs(video_desc);
+    // Verify that no video codec has been populated with only an rtcp-fb
+    // line (i.e. without a corresponding rtpmap line).
+    if (!VerifyVideoCodecs(video_desc)) {
+      return ParseFailed("Failed to parse video codecs correctly.", error);
+    }
+  }
+
+  // RFC 5245
+  // Update the candidates with the media level "ice-pwd" and "ice-ufrag".
+  for (Candidates::iterator it = candidates_orig.begin();
+       it != candidates_orig.end(); ++it) {
+    ASSERT((*it).username().empty() ||
+           (*it).username() == transport->ice_ufrag);
+    (*it).set_username(transport->ice_ufrag);
+    ASSERT((*it).password().empty());
+    (*it).set_password(transport->ice_pwd);
+    candidates->push_back(
+        new JsepIceCandidate(mline_id, mline_index, *it));
+  }
+  return true;
+}
+
+bool ParseSsrcAttribute(const std::string& line, SsrcInfoVec* ssrc_infos,
+                        SdpParseError* error) {
+  ASSERT(ssrc_infos != NULL);
+  // RFC 5576
+  // a=ssrc:<ssrc-id> <attribute>
+  // a=ssrc:<ssrc-id> <attribute>:<value>
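+  // e.g. "a=ssrc:1 cname:stream_1_cname"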
+  std::string field1, field2;
+  if (!rtc::tokenize_first(line.substr(kLinePrefixLength), kSdpDelimiterSpace,
+                           &field1, &field2)) {
+    const size_t expected_fields = 2;
+    return ParseFailedExpectFieldNum(line, expected_fields, error);
+  }
+
+  // ssrc:<ssrc-id>
+  std::string ssrc_id_s;
+  if (!GetValue(field1, kAttributeSsrc, &ssrc_id_s, error)) {
+    return false;
+  }
+  uint32_t ssrc_id = 0;
+  if (!GetValueFromString(line, ssrc_id_s, &ssrc_id, error)) {
+    return false;
+  }
+
+  std::string attribute;
+  std::string value;
+  if (!rtc::tokenize_first(field2, kSdpDelimiterColon, &attribute, &value)) {
+    std::ostringstream description;
+    description << "Failed to get the ssrc attribute value from " << field2
+                << ". Expected format <attribute>:<value>.";
+    return ParseFailed(line, description.str(), error);
+  }
+
+  // Check if there's already an item for this |ssrc_id|. Create a new one if
+  // there isn't.
+  SsrcInfoVec::iterator ssrc_info = ssrc_infos->begin();
+  for (; ssrc_info != ssrc_infos->end(); ++ssrc_info) {
+    if (ssrc_info->ssrc_id == ssrc_id) {
+      break;
+    }
+  }
+  if (ssrc_info == ssrc_infos->end()) {
+    SsrcInfo info;
+    info.ssrc_id = ssrc_id;
+    ssrc_infos->push_back(info);
+    ssrc_info = ssrc_infos->end() - 1;
+  }
+
+  // Store the info to the |ssrc_info|.
+  if (attribute == kSsrcAttributeCname) {
+    // RFC 5576
+    // cname:<value>
+    ssrc_info->cname = value;
+  } else if (attribute == kSsrcAttributeMsid) {
+    // draft-alvestrand-mmusic-msid-00
+    // "msid:" identifier [ " " appdata ]
+    std::vector<std::string> fields;
+    rtc::split(value, kSdpDelimiterSpace, &fields);
+    if (fields.size() < 1 || fields.size() > 2) {
+      return ParseFailed(line,
+                         "Expected format \"msid:<identifier>[ <appdata>]\".",
+                         error);
+    }
+    ssrc_info->msid_identifier = fields[0];
+    if (fields.size() == 2) {
+      ssrc_info->msid_appdata = fields[1];
+    }
+  } else if (attribute == kSsrcAttributeMslabel) {
+    // draft-alvestrand-rtcweb-mid-01
+    // mslabel:<value>
+    ssrc_info->mslabel = value;
+  } else if (attribute == kSSrcAttributeLabel) {
+    // The label isn't defined.
+    // label:<value>
+    ssrc_info->label = value;
+  }
+  return true;
+}
+
+bool ParseSsrcGroupAttribute(const std::string& line,
+                             SsrcGroupVec* ssrc_groups,
+                             SdpParseError* error) {
+  ASSERT(ssrc_groups != NULL);
+  // RFC 5576
+  // a=ssrc-group:<semantics> <ssrc-id> ...
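+  // e.g. "a=ssrc-group:FEC 5 6"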
+  std::vector<std::string> fields;
+  rtc::split(line.substr(kLinePrefixLength),
+                   kSdpDelimiterSpace, &fields);
+  const size_t expected_min_fields = 2;
+  if (fields.size() < expected_min_fields) {
+    return ParseFailedExpectMinFieldNum(line, expected_min_fields, error);
+  }
+  std::string semantics;
+  if (!GetValue(fields[0], kAttributeSsrcGroup, &semantics, error)) {
+    return false;
+  }
+  std::vector<uint32_t> ssrcs;
+  for (size_t i = 1; i < fields.size(); ++i) {
+    uint32_t ssrc = 0;
+    if (!GetValueFromString(line, fields[i], &ssrc, error)) {
+      return false;
+    }
+    ssrcs.push_back(ssrc);
+  }
+  ssrc_groups->push_back(SsrcGroup(semantics, ssrcs));
+  return true;
+}
+
+bool ParseCryptoAttribute(const std::string& line,
+                          MediaContentDescription* media_desc,
+                          SdpParseError* error) {
+  std::vector<std::string> fields;
+  rtc::split(line.substr(kLinePrefixLength),
+                   kSdpDelimiterSpace, &fields);
+  // RFC 4568
+  // a=crypto:<tag> <crypto-suite> <key-params> [<session-params>]
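+  // e.g. "a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:<key||salt>|2^20|1:32"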
+  const size_t expected_min_fields = 3;
+  if (fields.size() < expected_min_fields) {
+    return ParseFailedExpectMinFieldNum(line, expected_min_fields, error);
+  }
+  std::string tag_value;
+  if (!GetValue(fields[0], kAttributeCrypto, &tag_value, error)) {
+    return false;
+  }
+  int tag = 0;
+  if (!GetValueFromString(line, tag_value, &tag, error)) {
+    return false;
+  }
+  const std::string& crypto_suite = fields[1];
+  const std::string& key_params = fields[2];
+  std::string session_params;
+  if (fields.size() > 3) {
+    session_params = fields[3];
+  }
+  media_desc->AddCrypto(CryptoParams(tag, crypto_suite, key_params,
+                                     session_params));
+  return true;
+}
+
+// Updates or creates a new codec entry in the audio description according
+// to |name|, |clockrate|, |bitrate|, |channels| and |preference|.
+void UpdateCodec(int payload_type, const std::string& name, int clockrate,
+                 int bitrate, size_t channels, int preference,
+                 AudioContentDescription* audio_desc) {
+  // Codec may already be populated with (only) optional parameters
+  // (from an fmtp).
+  cricket::AudioCodec codec =
+      GetCodecWithPayloadType(audio_desc->codecs(), payload_type);
+  codec.name = name;
+  codec.clockrate = clockrate;
+  codec.bitrate = bitrate;
+  codec.channels = channels;
+  codec.preference = preference;
+  AddOrReplaceCodec<AudioContentDescription, cricket::AudioCodec>(audio_desc,
+                                                                  codec);
+}
+
+// Updates or creates a new codec entry in the video description according to
+// |name|, |width|, |height|, |framerate| and |preference|.
+void UpdateCodec(int payload_type, const std::string& name, int width,
+                 int height, int framerate, int preference,
+                 VideoContentDescription* video_desc) {
+  // Codec may already be populated with (only) optional parameters
+  // (from an fmtp).
+  cricket::VideoCodec codec =
+      GetCodecWithPayloadType(video_desc->codecs(), payload_type);
+  codec.name = name;
+  codec.width = width;
+  codec.height = height;
+  codec.framerate = framerate;
+  codec.preference = preference;
+  AddOrReplaceCodec<VideoContentDescription, cricket::VideoCodec>(video_desc,
+                                                                  codec);
+}
+
+bool ParseRtpmapAttribute(const std::string& line,
+                          const MediaType media_type,
+                          const std::vector<int>& codec_preference,
+                          MediaContentDescription* media_desc,
+                          SdpParseError* error) {
+  std::vector<std::string> fields;
+  rtc::split(line.substr(kLinePrefixLength),
+                   kSdpDelimiterSpace, &fields);
+  // RFC 4566
+  // a=rtpmap:<payload type> <encoding name>/<clock rate>[/<encodingparameters>]
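+  // e.g. "a=rtpmap:111 opus/48000/2"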
+  const size_t expected_min_fields = 2;
+  if (fields.size() < expected_min_fields) {
+    return ParseFailedExpectMinFieldNum(line, expected_min_fields, error);
+  }
+  std::string payload_type_value;
+  if (!GetValue(fields[0], kAttributeRtpmap, &payload_type_value, error)) {
+    return false;
+  }
+  int payload_type = 0;
+  if (!GetPayloadTypeFromString(line, payload_type_value, &payload_type,
+                                error)) {
+    return false;
+  }
+
+  // Set the preference order depending on the order of the payload type in
+  // the <fmt> of the m-line.
+  const int preference = codec_preference.end() -
+      std::find(codec_preference.begin(), codec_preference.end(),
+                payload_type);
+  if (preference == 0) {
+    LOG(LS_WARNING) << "Ignore rtpmap line that did not appear in the "
+                    << "<fmt> of the m-line: " << line;
+    return true;
+  }
+  const std::string& encoder = fields[1];
+  std::vector<std::string> codec_params;
+  rtc::split(encoder, '/', &codec_params);
+  // <encoding name>/<clock rate>[/<encodingparameters>]
+  // 2 mandatory fields
+  if (codec_params.size() < 2 || codec_params.size() > 3) {
+    return ParseFailed(line,
+                       "Expected format \"<encoding name>/<clock rate>"
+                       "[/<encodingparameters>]\".",
+                       error);
+  }
+  const std::string& encoding_name = codec_params[0];
+  int clock_rate = 0;
+  if (!GetValueFromString(line, codec_params[1], &clock_rate, error)) {
+    return false;
+  }
+  if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+    VideoContentDescription* video_desc =
+        static_cast<VideoContentDescription*>(media_desc);
+    // TODO: We will send resolution in SDP. For now use
+    // JsepSessionDescription::kMaxVideoCodecWidth and kMaxVideoCodecHeight.
+    UpdateCodec(payload_type, encoding_name,
+                JsepSessionDescription::kMaxVideoCodecWidth,
+                JsepSessionDescription::kMaxVideoCodecHeight,
+                JsepSessionDescription::kDefaultVideoCodecFramerate,
+                preference, video_desc);
+  } else if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+    // RFC 4566
+    // For audio streams, <encoding parameters> indicates the number
+    // of audio channels.  This parameter is OPTIONAL and may be
+    // omitted if the number of channels is one, provided that no
+    // additional parameters are needed.
+    size_t channels = 1;
+    if (codec_params.size() == 3) {
+      if (!GetValueFromString(line, codec_params[2], &channels, error)) {
+        return false;
+      }
+    }
+    int bitrate = 0;
+    // The default behavior for ISAC (bitrate == 0) in webrtcvoiceengine.cc
+    // (specifically FindWebRtcCodec) is bandwidth-adaptive variable bitrate.
+    // The bandwidth adaptation doesn't always work well, so this code
+    // sets a fixed target bitrate instead.
+    if (_stricmp(encoding_name.c_str(), kIsacCodecName) == 0) {
+      if (clock_rate <= 16000) {
+        bitrate = kIsacWbDefaultRate;
+      } else {
+        bitrate = kIsacSwbDefaultRate;
+      }
+    }
+    AudioContentDescription* audio_desc =
+        static_cast<AudioContentDescription*>(media_desc);
+    UpdateCodec(payload_type, encoding_name, clock_rate, bitrate, channels,
+                preference, audio_desc);
+  } else if (media_type == cricket::MEDIA_TYPE_DATA) {
+    DataContentDescription* data_desc =
+        static_cast<DataContentDescription*>(media_desc);
+    data_desc->AddCodec(cricket::DataCodec(payload_type, encoding_name,
+                                           preference));
+  }
+  return true;
+}
+
+bool ParseFmtpParam(const std::string& line, std::string* parameter,
+                    std::string* value, SdpParseError* error) {
+  if (!rtc::tokenize_first(line, kSdpDelimiterEqual, parameter, value)) {
+    ParseFailed(line, "Unable to parse fmtp parameter. \'=\' missing.", error);
+    return false;
+  }
+  // a=fmtp:<payload_type> <param1>=<value1>; <param2>=<value2>; ...
+  return true;
+}
+
+bool ParseFmtpAttributes(const std::string& line, const MediaType media_type,
+                         MediaContentDescription* media_desc,
+                         SdpParseError* error) {
+  if (media_type != cricket::MEDIA_TYPE_AUDIO &&
+      media_type != cricket::MEDIA_TYPE_VIDEO) {
+    return true;
+  }
+
+  std::string line_payload;
+  std::string line_params;
+
+  // RFC 4566
+  // a=fmtp:<format> <format specific parameters>
+  // At least two fields, where the second contains the optional parameters.
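+  // e.g. "a=fmtp:111 minptime=10; useinbandfec=1"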
+  if (!rtc::tokenize_first(line.substr(kLinePrefixLength), kSdpDelimiterSpace,
+                           &line_payload, &line_params)) {
+    ParseFailedExpectMinFieldNum(line, 2, error);
+    return false;
+  }
+
+  // Parse out the payload information.
+  std::string payload_type_str;
+  if (!GetValue(line_payload, kAttributeFmtp, &payload_type_str, error)) {
+    return false;
+  }
+
+  int payload_type = 0;
+  if (!GetPayloadTypeFromString(line_payload, payload_type_str, &payload_type,
+                                error)) {
+    return false;
+  }
+
+  // Parse out format specific parameters.
+  std::vector<std::string> fields;
+  rtc::split(line_params, kSdpDelimiterSemicolon, &fields);
+
+  cricket::CodecParameterMap codec_params;
+  for (auto& iter : fields) {
+    if (iter.find(kSdpDelimiterEqual) == std::string::npos) {
+      // Only fmtp parameters of the form <name>=<value> are currently
+      // supported; other forms are ignored and do not constitute an error.
+      continue;
+    }
+
+    std::string name;
+    std::string value;
+    if (!ParseFmtpParam(rtc::string_trim(iter), &name, &value, error)) {
+      return false;
+    }
+    codec_params[name] = value;
+  }
+
+  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+    UpdateCodec<AudioContentDescription, cricket::AudioCodec>(
+        media_desc, payload_type, codec_params);
+  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+    UpdateCodec<VideoContentDescription, cricket::VideoCodec>(
+        media_desc, payload_type, codec_params);
+  }
+  return true;
+}
+
+bool ParseRtcpFbAttribute(const std::string& line, const MediaType media_type,
+                          MediaContentDescription* media_desc,
+                          SdpParseError* error) {
+  if (media_type != cricket::MEDIA_TYPE_AUDIO &&
+      media_type != cricket::MEDIA_TYPE_VIDEO) {
+    return true;
+  }
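+  // RFC 4585
+  // a=rtcp-fb:<payload type|*> <id> [<param>]
+  // e.g. "a=rtcp-fb:96 nack" or "a=rtcp-fb:* ccm fir"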
+  std::vector<std::string> rtcp_fb_fields;
+  rtc::split(line.c_str(), kSdpDelimiterSpace, &rtcp_fb_fields);
+  if (rtcp_fb_fields.size() < 2) {
+    return ParseFailedGetValue(line, kAttributeRtcpFb, error);
+  }
+  std::string payload_type_string;
+  if (!GetValue(rtcp_fb_fields[0], kAttributeRtcpFb, &payload_type_string,
+                error)) {
+    return false;
+  }
+  int payload_type = kWildcardPayloadType;
+  if (payload_type_string != "*") {
+    if (!GetPayloadTypeFromString(line, payload_type_string, &payload_type,
+                                  error)) {
+      return false;
+    }
+  }
+  std::string id = rtcp_fb_fields[1];
+  std::string param = "";
+  for (std::vector<std::string>::iterator iter = rtcp_fb_fields.begin() + 2;
+       iter != rtcp_fb_fields.end(); ++iter) {
+    param.append(*iter);
+  }
+  const cricket::FeedbackParam feedback_param(id, param);
+
+  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+    UpdateCodec<AudioContentDescription, cricket::AudioCodec>(
+        media_desc, payload_type, feedback_param);
+  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+    UpdateCodec<VideoContentDescription, cricket::VideoCodec>(
+        media_desc, payload_type, feedback_param);
+  }
+  return true;
+}
+
+}  // namespace webrtc
diff --git a/webrtc/api/webrtcsdp.h b/webrtc/api/webrtcsdp.h
new file mode 100644
index 0000000..a75f735
--- /dev/null
+++ b/webrtc/api/webrtcsdp.h
@@ -0,0 +1,81 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains functions for parsing and serializing SDP messages.
+// Related RFCs/drafts include:
+// * RFC 4566 - SDP
+// * RFC 5245 - ICE
+// * RFC 3388 - Grouping of Media Lines in SDP
+// * RFC 4568 - SDP Security Descriptions for Media Streams
+// * draft-lennox-mmusic-sdp-source-selection-02 -
+//   Mechanisms for Media Source Selection in SDP
+
+#ifndef WEBRTC_API_WEBRTCSDP_H_
+#define WEBRTC_API_WEBRTCSDP_H_
+
+#include <string>
+
+namespace webrtc {
+
+class IceCandidateInterface;
+class JsepIceCandidate;
+class JsepSessionDescription;
+struct SdpParseError;
+
+// Serializes the passed in JsepSessionDescription, including its candidates
+// if it has any.
+// jdesc - The JsepSessionDescription object to be serialized.
+// return - SDP string serialized from the arguments.
+std::string SdpSerialize(const JsepSessionDescription& jdesc);
+
+// Serializes the passed in IceCandidateInterface to a SDP string.
+// candidate - The candidate to be serialized.
+std::string SdpSerializeCandidate(const IceCandidateInterface& candidate);
+
+// Deserializes the passed in SDP string to a JsepSessionDescription.
+// message - SDP string to be deserialized.
+// jdesc - The JsepSessionDescription deserialized from the SDP string.
+// error - The detailed error information when parsing fails.
+// return - true on success, false on failure.
+bool SdpDeserialize(const std::string& message,
+                    JsepSessionDescription* jdesc,
+                    SdpParseError* error);
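+//
+// A minimal usage sketch (assuming jsep.h for SessionDescriptionInterface
+// and SdpParseError; error handling abbreviated):
+//   JsepSessionDescription jdesc(SessionDescriptionInterface::kOffer);
+//   SdpParseError error;
+//   if (!SdpDeserialize(sdp_string, &jdesc, &error)) {
+//     LOG(LS_ERROR) << "SDP parse failed: " << error.description;
+//   }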
+
+// Deserializes the passed in SDP string to one JsepIceCandidate.
+// The first line must be an a=candidate line and only the first line will be
+// parsed.
+// message - The SDP string to be deserialized.
+// candidate - The JsepIceCandidate from the SDP string.
+// error - The detailed error information when parsing fails.
+// return - true on success, false on failure.
+bool SdpDeserializeCandidate(const std::string& message,
+                             JsepIceCandidate* candidate,
+                             SdpParseError* error);
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_WEBRTCSDP_H_
diff --git a/webrtc/api/webrtcsdp_unittest.cc b/webrtc/api/webrtcsdp_unittest.cc
new file mode 100644
index 0000000..24dbd58
--- /dev/null
+++ b/webrtc/api/webrtcsdp_unittest.cc
@@ -0,0 +1,2741 @@
+/*
+ * libjingle
+ * Copyright 2011 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <set>
+#include <string>
+#include <vector>
+
+#include "talk/session/media/mediasession.h"
+#include "webrtc/api/jsepsessiondescription.h"
+#ifdef WEBRTC_ANDROID
+#include "webrtc/api/test/androidtestinitializer.h"
+#endif
+#include "webrtc/api/webrtcsdp.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/messagedigest.h"
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/sslfingerprint.h"
+#include "webrtc/base/stringencode.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/media/base/constants.h"
+#include "webrtc/p2p/base/constants.h"
+
+using cricket::AudioCodec;
+using cricket::AudioContentDescription;
+using cricket::Candidate;
+using cricket::ContentInfo;
+using cricket::CryptoParams;
+using cricket::ContentGroup;
+using cricket::DataCodec;
+using cricket::DataContentDescription;
+using cricket::ICE_CANDIDATE_COMPONENT_RTCP;
+using cricket::ICE_CANDIDATE_COMPONENT_RTP;
+using cricket::kFecSsrcGroupSemantics;
+using cricket::LOCAL_PORT_TYPE;
+using cricket::NS_JINGLE_DRAFT_SCTP;
+using cricket::NS_JINGLE_RTP;
+using cricket::RtpHeaderExtension;
+using cricket::RELAY_PORT_TYPE;
+using cricket::SessionDescription;
+using cricket::StreamParams;
+using cricket::STUN_PORT_TYPE;
+using cricket::TransportDescription;
+using cricket::TransportInfo;
+using cricket::VideoCodec;
+using cricket::VideoContentDescription;
+using webrtc::IceCandidateCollection;
+using webrtc::IceCandidateInterface;
+using webrtc::JsepIceCandidate;
+using webrtc::JsepSessionDescription;
+using webrtc::SdpParseError;
+using webrtc::SessionDescriptionInterface;
+
+typedef std::vector<AudioCodec> AudioCodecs;
+typedef std::vector<Candidate> Candidates;
+
+static const uint32_t kDefaultSctpPort = 5000;
+static const char kSessionTime[] = "t=0 0\r\n";
+static const uint32_t kCandidatePriority = 2130706432U;  // pref = 1.0
+static const char kCandidateUfragVoice[] = "ufrag_voice";
+static const char kCandidatePwdVoice[] = "pwd_voice";
+static const char kAttributeIceUfragVoice[] = "a=ice-ufrag:ufrag_voice\r\n";
+static const char kAttributeIcePwdVoice[] = "a=ice-pwd:pwd_voice\r\n";
+static const char kCandidateUfragVideo[] = "ufrag_video";
+static const char kCandidatePwdVideo[] = "pwd_video";
+static const char kCandidateUfragData[] = "ufrag_data";
+static const char kCandidatePwdData[] = "pwd_data";
+static const char kAttributeIceUfragVideo[] = "a=ice-ufrag:ufrag_video\r\n";
+static const char kAttributeIcePwdVideo[] = "a=ice-pwd:pwd_video\r\n";
+static const uint32_t kCandidateGeneration = 2;
+static const char kCandidateFoundation1[] = "a0+B/1";
+static const char kCandidateFoundation2[] = "a0+B/2";
+static const char kCandidateFoundation3[] = "a0+B/3";
+static const char kCandidateFoundation4[] = "a0+B/4";
+static const char kAttributeCryptoVoice[] =
+    "a=crypto:1 AES_CM_128_HMAC_SHA1_32 "
+    "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 "
+    "dummy_session_params\r\n";
+static const char kAttributeCryptoVideo[] =
+    "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+    "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n";
+static const char kFingerprint[] = "a=fingerprint:sha-1 "
+    "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n";
+static const int kExtmapId = 1;
+static const char kExtmapUri[] = "http://example.com/082005/ext.htm#ttime";
+static const char kExtmap[] =
+    "a=extmap:1 http://example.com/082005/ext.htm#ttime\r\n";
+static const char kExtmapWithDirectionAndAttribute[] =
+    "a=extmap:1/sendrecv http://example.com/082005/ext.htm#ttime a1 a2\r\n";
+
+static const uint8_t kIdentityDigest[] = {
+    0x4A, 0xAD, 0xB9, 0xB1, 0x3F, 0x82, 0x18, 0x3B, 0x54, 0x02,
+    0x12, 0xDF, 0x3E, 0x5D, 0x49, 0x6B, 0x19, 0xE5, 0x7C, 0xAB};
+
+static const char kDtlsSctp[] = "DTLS/SCTP";
+static const char kUdpDtlsSctp[] = "UDP/DTLS/SCTP";
+static const char kTcpDtlsSctp[] = "TCP/DTLS/SCTP";
+
+struct CodecParams {
+  int max_ptime;
+  int ptime;
+  int min_ptime;
+  int sprop_stereo;
+  int stereo;
+  int useinband;
+  int maxaveragebitrate;
+};
+
+// Reference SDP string
+static const char kSdpFullString[] =
+    "v=0\r\n"
+    "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+    "s=-\r\n"
+    "t=0 0\r\n"
+    "a=msid-semantic: WMS local_stream_1 local_stream_2\r\n"
+    "m=audio 2345 RTP/SAVPF 111 103 104\r\n"
+    "c=IN IP4 74.125.127.126\r\n"
+    "a=rtcp:2347 IN IP4 74.125.127.126\r\n"
+    "a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1234 typ host "
+    "generation 2\r\n"
+    "a=candidate:a0+B/1 2 udp 2130706432 192.168.1.5 1235 typ host "
+    "generation 2\r\n"
+    "a=candidate:a0+B/2 1 udp 2130706432 ::1 1238 typ host "
+    "generation 2\r\n"
+    "a=candidate:a0+B/2 2 udp 2130706432 ::1 1239 typ host "
+    "generation 2\r\n"
+    "a=candidate:a0+B/3 1 udp 2130706432 74.125.127.126 2345 typ srflx "
+    "raddr 192.168.1.5 rport 2346 "
+    "generation 2\r\n"
+    "a=candidate:a0+B/3 2 udp 2130706432 74.125.127.126 2347 typ srflx "
+    "raddr 192.168.1.5 rport 2348 "
+    "generation 2\r\n"
+    "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n"
+    "a=mid:audio_content_name\r\n"
+    "a=sendrecv\r\n"
+    "a=rtcp-mux\r\n"
+    "a=rtcp-rsize\r\n"
+    "a=crypto:1 AES_CM_128_HMAC_SHA1_32 "
+    "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 "
+    "dummy_session_params\r\n"
+    "a=rtpmap:111 opus/48000/2\r\n"
+    "a=rtpmap:103 ISAC/16000\r\n"
+    "a=rtpmap:104 ISAC/32000\r\n"
+    "a=ssrc:1 cname:stream_1_cname\r\n"
+    "a=ssrc:1 msid:local_stream_1 audio_track_id_1\r\n"
+    "a=ssrc:1 mslabel:local_stream_1\r\n"
+    "a=ssrc:1 label:audio_track_id_1\r\n"
+    "a=ssrc:4 cname:stream_2_cname\r\n"
+    "a=ssrc:4 msid:local_stream_2 audio_track_id_2\r\n"
+    "a=ssrc:4 mslabel:local_stream_2\r\n"
+    "a=ssrc:4 label:audio_track_id_2\r\n"
+    "m=video 3457 RTP/SAVPF 120\r\n"
+    "c=IN IP4 74.125.224.39\r\n"
+    "a=rtcp:3456 IN IP4 74.125.224.39\r\n"
+    "a=candidate:a0+B/1 2 udp 2130706432 192.168.1.5 1236 typ host "
+    "generation 2\r\n"
+    "a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1237 typ host "
+    "generation 2\r\n"
+    "a=candidate:a0+B/2 2 udp 2130706432 ::1 1240 typ host "
+    "generation 2\r\n"
+    "a=candidate:a0+B/2 1 udp 2130706432 ::1 1241 typ host "
+    "generation 2\r\n"
+    "a=candidate:a0+B/4 2 udp 2130706432 74.125.224.39 3456 typ relay "
+    "generation 2\r\n"
+    "a=candidate:a0+B/4 1 udp 2130706432 74.125.224.39 3457 typ relay "
+    "generation 2\r\n"
+    "a=ice-ufrag:ufrag_video\r\na=ice-pwd:pwd_video\r\n"
+    "a=mid:video_content_name\r\n"
+    "a=sendrecv\r\n"
+    "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+    "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n"
+    "a=rtpmap:120 VP8/90000\r\n"
+    "a=ssrc:2 cname:stream_1_cname\r\n"
+    "a=ssrc:2 msid:local_stream_1 video_track_id_1\r\n"
+    "a=ssrc:2 mslabel:local_stream_1\r\n"
+    "a=ssrc:2 label:video_track_id_1\r\n"
+    "a=ssrc:3 cname:stream_1_cname\r\n"
+    "a=ssrc:3 msid:local_stream_1 video_track_id_2\r\n"
+    "a=ssrc:3 mslabel:local_stream_1\r\n"
+    "a=ssrc:3 label:video_track_id_2\r\n"
+    "a=ssrc-group:FEC 5 6\r\n"
+    "a=ssrc:5 cname:stream_2_cname\r\n"
+    "a=ssrc:5 msid:local_stream_2 video_track_id_3\r\n"
+    "a=ssrc:5 mslabel:local_stream_2\r\n"
+    "a=ssrc:5 label:video_track_id_3\r\n"
+    "a=ssrc:6 cname:stream_2_cname\r\n"
+    "a=ssrc:6 msid:local_stream_2 video_track_id_3\r\n"
+    "a=ssrc:6 mslabel:local_stream_2\r\n"
+    "a=ssrc:6 label:video_track_id_3\r\n";
+
+// SDP reference string without the candidates.
+static const char kSdpString[] =
+    "v=0\r\n"
+    "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+    "s=-\r\n"
+    "t=0 0\r\n"
+    "a=msid-semantic: WMS local_stream_1 local_stream_2\r\n"
+    "m=audio 9 RTP/SAVPF 111 103 104\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
+    "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+    "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n"
+    "a=mid:audio_content_name\r\n"
+    "a=sendrecv\r\n"
+    "a=rtcp-mux\r\n"
+    "a=rtcp-rsize\r\n"
+    "a=crypto:1 AES_CM_128_HMAC_SHA1_32 "
+    "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 "
+    "dummy_session_params\r\n"
+    "a=rtpmap:111 opus/48000/2\r\n"
+    "a=rtpmap:103 ISAC/16000\r\n"
+    "a=rtpmap:104 ISAC/32000\r\n"
+    "a=ssrc:1 cname:stream_1_cname\r\n"
+    "a=ssrc:1 msid:local_stream_1 audio_track_id_1\r\n"
+    "a=ssrc:1 mslabel:local_stream_1\r\n"
+    "a=ssrc:1 label:audio_track_id_1\r\n"
+    "a=ssrc:4 cname:stream_2_cname\r\n"
+    "a=ssrc:4 msid:local_stream_2 audio_track_id_2\r\n"
+    "a=ssrc:4 mslabel:local_stream_2\r\n"
+    "a=ssrc:4 label:audio_track_id_2\r\n"
+    "m=video 9 RTP/SAVPF 120\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
+    "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+    "a=ice-ufrag:ufrag_video\r\na=ice-pwd:pwd_video\r\n"
+    "a=mid:video_content_name\r\n"
+    "a=sendrecv\r\n"
+    "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+    "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n"
+    "a=rtpmap:120 VP8/90000\r\n"
+    "a=ssrc:2 cname:stream_1_cname\r\n"
+    "a=ssrc:2 msid:local_stream_1 video_track_id_1\r\n"
+    "a=ssrc:2 mslabel:local_stream_1\r\n"
+    "a=ssrc:2 label:video_track_id_1\r\n"
+    "a=ssrc:3 cname:stream_1_cname\r\n"
+    "a=ssrc:3 msid:local_stream_1 video_track_id_2\r\n"
+    "a=ssrc:3 mslabel:local_stream_1\r\n"
+    "a=ssrc:3 label:video_track_id_2\r\n"
+    "a=ssrc-group:FEC 5 6\r\n"
+    "a=ssrc:5 cname:stream_2_cname\r\n"
+    "a=ssrc:5 msid:local_stream_2 video_track_id_3\r\n"
+    "a=ssrc:5 mslabel:local_stream_2\r\n"
+    "a=ssrc:5 label:video_track_id_3\r\n"
+    "a=ssrc:6 cname:stream_2_cname\r\n"
+    "a=ssrc:6 msid:local_stream_2 video_track_id_3\r\n"
+    "a=ssrc:6 mslabel:local_stream_2\r\n"
+    "a=ssrc:6 label:video_track_id_3\r\n";
+
+static const char kSdpRtpDataChannelString[] =
+    "m=application 9 RTP/SAVPF 101\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
+    "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+    "a=ice-ufrag:ufrag_data\r\n"
+    "a=ice-pwd:pwd_data\r\n"
+    "a=mid:data_content_name\r\n"
+    "a=sendrecv\r\n"
+    "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+    "inline:FvLcvU2P3ZWmQxgPAgcDu7Zl9vftYElFOjEzhWs5\r\n"
+    "a=rtpmap:101 google-data/90000\r\n"
+    "a=ssrc:10 cname:data_channel_cname\r\n"
+    "a=ssrc:10 msid:data_channel data_channeld0\r\n"
+    "a=ssrc:10 mslabel:data_channel\r\n"
+    "a=ssrc:10 label:data_channeld0\r\n";
+
+static const char kSdpSctpDataChannelString[] =
+    "m=application 9 DTLS/SCTP 5000\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
+    "a=ice-ufrag:ufrag_data\r\n"
+    "a=ice-pwd:pwd_data\r\n"
+    "a=mid:data_content_name\r\n"
+    "a=sctpmap:5000 webrtc-datachannel 1024\r\n";
+
+// draft-ietf-mmusic-sctp-sdp-12
+static const char kSdpSctpDataChannelStringWithSctpPort[] =
+    "m=application 9 DTLS/SCTP webrtc-datachannel\r\n"
+    "a=max-message-size=100000\r\n"
+    "a=sctp-port 5000\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
+    "a=ice-ufrag:ufrag_data\r\n"
+    "a=ice-pwd:pwd_data\r\n"
+    "a=mid:data_content_name\r\n";
+
+static const char kSdpSctpDataChannelStringWithSctpColonPort[] =
+    "m=application 9 DTLS/SCTP webrtc-datachannel\r\n"
+    "a=max-message-size=100000\r\n"
+    "a=sctp-port:5000\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
+    "a=ice-ufrag:ufrag_data\r\n"
+    "a=ice-pwd:pwd_data\r\n"
+    "a=mid:data_content_name\r\n";
+
+static const char kSdpSctpDataChannelWithCandidatesString[] =
+    "m=application 2345 DTLS/SCTP 5000\r\n"
+    "c=IN IP4 74.125.127.126\r\n"
+    "a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1234 typ host "
+    "generation 2\r\n"
+    "a=candidate:a0+B/2 1 udp 2130706432 ::1 1238 typ host "
+    "generation 2\r\n"
+    "a=candidate:a0+B/3 1 udp 2130706432 74.125.127.126 2345 typ srflx "
+    "raddr 192.168.1.5 rport 2346 "
+    "generation 2\r\n"
+    "a=ice-ufrag:ufrag_data\r\n"
+    "a=ice-pwd:pwd_data\r\n"
+    "a=mid:data_content_name\r\n"
+    "a=sctpmap:5000 webrtc-datachannel 1024\r\n";
+
+static const char kSdpConferenceString[] =
+    "v=0\r\n"
+    "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+    "s=-\r\n"
+    "t=0 0\r\n"
+    "a=msid-semantic: WMS\r\n"
+    "m=audio 9 RTP/SAVPF 111 103 104\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
+    "a=x-google-flag:conference\r\n"
+    "m=video 9 RTP/SAVPF 120\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
+    "a=x-google-flag:conference\r\n";
+
+static const char kSdpSessionString[] =
+    "v=0\r\n"
+    "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+    "s=-\r\n"
+    "t=0 0\r\n"
+    "a=msid-semantic: WMS local_stream\r\n";
+
+static const char kSdpAudioString[] =
+    "m=audio 9 RTP/SAVPF 111\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
+    "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+    "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n"
+    "a=mid:audio_content_name\r\n"
+    "a=sendrecv\r\n"
+    "a=rtpmap:111 opus/48000/2\r\n"
+    "a=ssrc:1 cname:stream_1_cname\r\n"
+    "a=ssrc:1 msid:local_stream audio_track_id_1\r\n"
+    "a=ssrc:1 mslabel:local_stream\r\n"
+    "a=ssrc:1 label:audio_track_id_1\r\n";
+
+static const char kSdpVideoString[] =
+    "m=video 9 RTP/SAVPF 120\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
+    "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+    "a=ice-ufrag:ufrag_video\r\na=ice-pwd:pwd_video\r\n"
+    "a=mid:video_content_name\r\n"
+    "a=sendrecv\r\n"
+    "a=rtpmap:120 VP8/90000\r\n"
+    "a=ssrc:2 cname:stream_1_cname\r\n"
+    "a=ssrc:2 msid:local_stream video_track_id_1\r\n"
+    "a=ssrc:2 mslabel:local_stream\r\n"
+    "a=ssrc:2 label:video_track_id_1\r\n";
+
+// One candidate reference string as per the W3C spec:
+// candidate:<blah>, not a=candidate:<blah>CRLF.
+static const char kRawCandidate[] =
+    "candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1234 typ host generation 2";
+// One candidate reference string.
+static const char kSdpOneCandidate[] =
+    "a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1234 typ host "
+    "generation 2\r\n";
+
+static const char kSdpTcpActiveCandidate[] =
+    "candidate:a0+B/1 1 tcp 2130706432 192.168.1.5 9 typ host "
+    "tcptype active generation 2";
+static const char kSdpTcpPassiveCandidate[] =
+    "candidate:a0+B/1 1 tcp 2130706432 192.168.1.5 9 typ host "
+    "tcptype passive generation 2";
+static const char kSdpTcpSOCandidate[] =
+    "candidate:a0+B/1 1 tcp 2130706432 192.168.1.5 9 typ host "
+    "tcptype so generation 2";
+static const char kSdpTcpInvalidCandidate[] =
+    "candidate:a0+B/1 1 tcp 2130706432 192.168.1.5 9 typ host "
+    "tcptype invalid generation 2";
+
+// One candidate reference string with an IPv6 address.
+static const char kRawIPV6Candidate[] =
+    "candidate:a0+B/1 1 udp 2130706432 "
+    "abcd::abcd::abcd::abcd::abcd::abcd::abcd::abcd 1234 typ host generation 2";
+
+// One candidate reference string with ufrag and pwd.
+static const char kSdpOneCandidateWithUfragPwd[] =
+    "a=candidate:a0+B/1 1 udp 2130706432 192.168.1.5 1234 typ host network_name"
+    " eth0 ufrag user_rtp pwd password_rtp generation 2\r\n";
+
+// Session id and version
+static const char kSessionId[] = "18446744069414584320";
+static const char kSessionVersion[] = "18446462598732840960";
+
+// Ice options
+static const char kIceOption1[] = "iceoption1";
+static const char kIceOption2[] = "iceoption2";
+static const char kIceOption3[] = "iceoption3";
+
+// Content name
+static const char kAudioContentName[] = "audio_content_name";
+static const char kVideoContentName[] = "video_content_name";
+static const char kDataContentName[] = "data_content_name";
+
+// MediaStream 1
+static const char kStreamLabel1[] = "local_stream_1";
+static const char kStream1Cname[] = "stream_1_cname";
+static const char kAudioTrackId1[] = "audio_track_id_1";
+static const uint32_t kAudioTrack1Ssrc = 1;
+static const char kVideoTrackId1[] = "video_track_id_1";
+static const uint32_t kVideoTrack1Ssrc = 2;
+static const char kVideoTrackId2[] = "video_track_id_2";
+static const uint32_t kVideoTrack2Ssrc = 3;
+
+// MediaStream 2
+static const char kStreamLabel2[] = "local_stream_2";
+static const char kStream2Cname[] = "stream_2_cname";
+static const char kAudioTrackId2[] = "audio_track_id_2";
+static const uint32_t kAudioTrack2Ssrc = 4;
+static const char kVideoTrackId3[] = "video_track_id_3";
+static const uint32_t kVideoTrack3Ssrc = 5;
+static const uint32_t kVideoTrack4Ssrc = 6;
+
+// DataChannel
+static const char kDataChannelLabel[] = "data_channel";
+static const char kDataChannelMsid[] = "data_channeld0";
+static const char kDataChannelCname[] = "data_channel_cname";
+static const uint32_t kDataChannelSsrc = 10;
+
+// Candidate
+static const char kDummyMid[] = "dummy_mid";
+static const int kDummyIndex = 123;
+
+// Misc
+static const char kDummyString[] = "dummy";
+
+// Helper functions
+
+static bool SdpDeserialize(const std::string& message,
+                           JsepSessionDescription* jdesc) {
+  return webrtc::SdpDeserialize(message, jdesc, NULL);
+}
+
+static bool SdpDeserializeCandidate(const std::string& message,
+                                    JsepIceCandidate* candidate) {
+  return webrtc::SdpDeserializeCandidate(message, candidate, NULL);
+}
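+// (Both wrappers pass NULL for the SdpParseError out-param; tests that need
+// the error text, like ExpectParseFailure() below, call
+// webrtc::SdpDeserialize directly.)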
+
+// Add some extra |newlines| to the |message| after |line|.
+static void InjectAfter(const std::string& line,
+                        const std::string& newlines,
+                        std::string* message) {
+  const std::string tmp = line + newlines;
+  rtc::replace_substrs(line.c_str(), line.length(),
+                       tmp.c_str(), tmp.length(), message);
+}
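+// For example, an illustrative (hypothetical) call that appends the extmap
+// attribute right after the session timing line in a copy of the reference
+// SDP:
+//   std::string sdp = kSdpFullString;
+//   InjectAfter("t=0 0\r\n", kExtmap, &sdp);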
+
+static void Replace(const std::string& line,
+                    const std::string& newlines,
+                    std::string* message) {
+  rtc::replace_substrs(line.c_str(), line.length(),
+                       newlines.c_str(), newlines.length(), message);
+}
+
+// Expects a failure to parse |bad_sdp| and expects |bad_part| to be part of
+// the error message.
+static void ExpectParseFailure(const std::string& bad_sdp,
+                               const std::string& bad_part) {
+  JsepSessionDescription desc(kDummyString);
+  SdpParseError error;
+  bool ret = webrtc::SdpDeserialize(bad_sdp, &desc, &error);
+  EXPECT_FALSE(ret);
+  EXPECT_NE(std::string::npos, error.line.find(bad_part.c_str()));
+}
+
+// Expects a failure to parse kSdpFullString if |good_part| is replaced with
+// |bad_part|.
+static void ExpectParseFailure(const char* good_part, const char* bad_part) {
+  std::string bad_sdp = kSdpFullString;
+  Replace(good_part, bad_part, &bad_sdp);
+  ExpectParseFailure(bad_sdp, bad_part);
+}
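+// For example, an illustrative (hypothetical) call that corrupts the audio
+// m line of kSdpFullString and expects the corrupted token to be reported in
+// the parse error:
+//   ExpectParseFailure("m=audio", "m=audiox");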
+
+// Expects a failure to parse kSdpFullString if |newlines| are added after
+// |injectpoint|.
+static void ExpectParseFailureWithNewLines(const std::string& injectpoint,
+                                           const std::string& newlines,
+                                           const std::string& bad_part) {
+  std::string bad_sdp = kSdpFullString;
+  InjectAfter(injectpoint, newlines, &bad_sdp);
+  ExpectParseFailure(bad_sdp, bad_part);
+}
+
+static void ReplaceDirection(cricket::MediaContentDirection direction,
+                             std::string* message) {
+  std::string new_direction;
+  switch (direction) {
+    case cricket::MD_INACTIVE:
+      new_direction = "a=inactive";
+      break;
+    case cricket::MD_SENDONLY:
+      new_direction = "a=sendonly";
+      break;
+    case cricket::MD_RECVONLY:
+      new_direction = "a=recvonly";
+      break;
+    case cricket::MD_SENDRECV:
+    default:
+      new_direction = "a=sendrecv";
+      break;
+  }
+  Replace("a=sendrecv", new_direction, message);
+}
+
+static void ReplaceRejected(bool audio_rejected, bool video_rejected,
+                            std::string* message) {
+  if (audio_rejected) {
+    Replace("m=audio 9", "m=audio 0", message);
+    Replace(kAttributeIceUfragVoice, "", message);
+    Replace(kAttributeIcePwdVoice, "", message);
+  }
+  if (video_rejected) {
+    Replace("m=video 9", "m=video 0", message);
+    Replace(kAttributeIceUfragVideo, "", message);
+    Replace(kAttributeIcePwdVideo, "", message);
+  }
+}
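+// (Per RFC 3264, an m= line with port 0 marks that media section as
+// rejected, which is why the helper above swaps port 9 for port 0.)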
+
+// WebRtcSdpTest
+
+class WebRtcSdpTest : public testing::Test {
+ public:
+  WebRtcSdpTest()
+     : jdesc_(kDummyString) {
+#ifdef WEBRTC_ANDROID
+    webrtc::InitializeAndroidObjects();
+#endif
+    // AudioContentDescription
+    audio_desc_ = CreateAudioContentDescription();
+    AudioCodec opus(111, "opus", 48000, 0, 2, 3);
+    audio_desc_->AddCodec(opus);
+    audio_desc_->AddCodec(AudioCodec(103, "ISAC", 16000, 32000, 1, 2));
+    audio_desc_->AddCodec(AudioCodec(104, "ISAC", 32000, 56000, 1, 1));
+    desc_.AddContent(kAudioContentName, NS_JINGLE_RTP, audio_desc_);
+
+    // VideoContentDescription
+    rtc::scoped_ptr<VideoContentDescription> video(
+        new VideoContentDescription());
+    video_desc_ = video.get();
+    StreamParams video_stream1;
+    video_stream1.id = kVideoTrackId1;
+    video_stream1.cname = kStream1Cname;
+    video_stream1.sync_label = kStreamLabel1;
+    video_stream1.ssrcs.push_back(kVideoTrack1Ssrc);
+    video->AddStream(video_stream1);
+    StreamParams video_stream2;
+    video_stream2.id = kVideoTrackId2;
+    video_stream2.cname = kStream1Cname;
+    video_stream2.sync_label = kStreamLabel1;
+    video_stream2.ssrcs.push_back(kVideoTrack2Ssrc);
+    video->AddStream(video_stream2);
+    StreamParams video_stream3;
+    video_stream3.id = kVideoTrackId3;
+    video_stream3.cname = kStream2Cname;
+    video_stream3.sync_label = kStreamLabel2;
+    video_stream3.ssrcs.push_back(kVideoTrack3Ssrc);
+    video_stream3.ssrcs.push_back(kVideoTrack4Ssrc);
+    cricket::SsrcGroup ssrc_group(kFecSsrcGroupSemantics, video_stream3.ssrcs);
+    video_stream3.ssrc_groups.push_back(ssrc_group);
+    video->AddStream(video_stream3);
+    video->AddCrypto(CryptoParams(1, "AES_CM_128_HMAC_SHA1_80",
+        "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32", ""));
+    video->set_protocol(cricket::kMediaProtocolSavpf);
+    video->AddCodec(VideoCodec(
+        120,
+        JsepSessionDescription::kDefaultVideoCodecName,
+        JsepSessionDescription::kMaxVideoCodecWidth,
+        JsepSessionDescription::kMaxVideoCodecHeight,
+        JsepSessionDescription::kDefaultVideoCodecFramerate,
+        JsepSessionDescription::kDefaultVideoCodecPreference));
+
+    desc_.AddContent(kVideoContentName, NS_JINGLE_RTP,
+                     video.release());
+
+    // TransportInfo
+    EXPECT_TRUE(desc_.AddTransportInfo(
+        TransportInfo(kAudioContentName,
+                      TransportDescription(kCandidateUfragVoice,
+                                           kCandidatePwdVoice))));
+    EXPECT_TRUE(desc_.AddTransportInfo(
+        TransportInfo(kVideoContentName,
+                      TransportDescription(kCandidateUfragVideo,
+                                           kCandidatePwdVideo))));
+
+    // v4 host
+    int port = 1234;
+    rtc::SocketAddress address("192.168.1.5", port++);
+    Candidate candidate1(ICE_CANDIDATE_COMPONENT_RTP, "udp", address,
+                         kCandidatePriority, "", "", LOCAL_PORT_TYPE,
+                         kCandidateGeneration, kCandidateFoundation1);
+    address.SetPort(port++);
+    Candidate candidate2(ICE_CANDIDATE_COMPONENT_RTCP, "udp", address,
+                         kCandidatePriority, "", "", LOCAL_PORT_TYPE,
+                         kCandidateGeneration, kCandidateFoundation1);
+    address.SetPort(port++);
+    Candidate candidate3(ICE_CANDIDATE_COMPONENT_RTCP, "udp", address,
+                         kCandidatePriority, "", "", LOCAL_PORT_TYPE,
+                         kCandidateGeneration, kCandidateFoundation1);
+    address.SetPort(port++);
+    Candidate candidate4(ICE_CANDIDATE_COMPONENT_RTP, "udp", address,
+                         kCandidatePriority, "", "", LOCAL_PORT_TYPE,
+                         kCandidateGeneration, kCandidateFoundation1);
+
+    // v6 host
+    rtc::SocketAddress v6_address("::1", port++);
+    cricket::Candidate candidate5(cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+                                  v6_address, kCandidatePriority, "", "",
+                                  cricket::LOCAL_PORT_TYPE,
+                                  kCandidateGeneration, kCandidateFoundation2);
+    v6_address.SetPort(port++);
+    cricket::Candidate candidate6(cricket::ICE_CANDIDATE_COMPONENT_RTCP, "udp",
+                                  v6_address, kCandidatePriority, "", "",
+                                  cricket::LOCAL_PORT_TYPE,
+                                  kCandidateGeneration, kCandidateFoundation2);
+    v6_address.SetPort(port++);
+    cricket::Candidate candidate7(cricket::ICE_CANDIDATE_COMPONENT_RTCP, "udp",
+                                  v6_address, kCandidatePriority, "", "",
+                                  cricket::LOCAL_PORT_TYPE,
+                                  kCandidateGeneration, kCandidateFoundation2);
+    v6_address.SetPort(port++);
+    cricket::Candidate candidate8(cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+                                  v6_address, kCandidatePriority, "", "",
+                                  cricket::LOCAL_PORT_TYPE,
+                                  kCandidateGeneration, kCandidateFoundation2);
+
+    // stun
+    int port_stun = 2345;
+    rtc::SocketAddress address_stun("74.125.127.126", port_stun++);
+    rtc::SocketAddress rel_address_stun("192.168.1.5", port_stun++);
+    cricket::Candidate candidate9(cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+                                  address_stun, kCandidatePriority, "", "",
+                                  STUN_PORT_TYPE, kCandidateGeneration,
+                                  kCandidateFoundation3);
+    candidate9.set_related_address(rel_address_stun);
+
+    address_stun.SetPort(port_stun++);
+    rel_address_stun.SetPort(port_stun++);
+    cricket::Candidate candidate10(cricket::ICE_CANDIDATE_COMPONENT_RTCP, "udp",
+                                   address_stun, kCandidatePriority, "", "",
+                                   STUN_PORT_TYPE, kCandidateGeneration,
+                                   kCandidateFoundation3);
+    candidate10.set_related_address(rel_address_stun);
+
+    // relay
+    int port_relay = 3456;
+    rtc::SocketAddress address_relay("74.125.224.39", port_relay++);
+    cricket::Candidate candidate11(cricket::ICE_CANDIDATE_COMPONENT_RTCP, "udp",
+                                   address_relay, kCandidatePriority, "", "",
+                                   cricket::RELAY_PORT_TYPE,
+                                   kCandidateGeneration, kCandidateFoundation4);
+    address_relay.SetPort(port_relay++);
+    cricket::Candidate candidate12(cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+                                   address_relay, kCandidatePriority, "", "",
+                                   RELAY_PORT_TYPE, kCandidateGeneration,
+                                   kCandidateFoundation4);
+
+    // voice
+    candidates_.push_back(candidate1);
+    candidates_.push_back(candidate2);
+    candidates_.push_back(candidate5);
+    candidates_.push_back(candidate6);
+    candidates_.push_back(candidate9);
+    candidates_.push_back(candidate10);
+
+    // video
+    candidates_.push_back(candidate3);
+    candidates_.push_back(candidate4);
+    candidates_.push_back(candidate7);
+    candidates_.push_back(candidate8);
+    candidates_.push_back(candidate11);
+    candidates_.push_back(candidate12);
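+    // (Six voice candidates followed by six video candidates; the loop below
+    // relies on this ordering: indices 0-5 map to the audio m line and
+    // indices 6-11 to the video m line.)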
+
+    jcandidate_.reset(new JsepIceCandidate(std::string("audio_content_name"),
+                                           0, candidate1));
+
+    // Set up JsepSessionDescription.
+    jdesc_.Initialize(desc_.Copy(), kSessionId, kSessionVersion);
+    std::string mline_id;
+    int mline_index = 0;
+    for (size_t i = 0; i < candidates_.size(); ++i) {
+      // In this test, the audio m line index will be 0, and the video m line
+      // index will be 1.
+      bool is_video = (i > 5);
+      mline_id = is_video ? "video_content_name" : "audio_content_name";
+      mline_index = is_video ? 1 : 0;
+      JsepIceCandidate jice(mline_id,
+                            mline_index,
+                            candidates_.at(i));
+      jdesc_.AddCandidate(&jice);
+    }
+  }
+
+  AudioContentDescription* CreateAudioContentDescription() {
+    AudioContentDescription* audio = new AudioContentDescription();
+    audio->set_rtcp_mux(true);
+    audio->set_rtcp_reduced_size(true);
+    StreamParams audio_stream1;
+    audio_stream1.id = kAudioTrackId1;
+    audio_stream1.cname = kStream1Cname;
+    audio_stream1.sync_label = kStreamLabel1;
+    audio_stream1.ssrcs.push_back(kAudioTrack1Ssrc);
+    audio->AddStream(audio_stream1);
+    StreamParams audio_stream2;
+    audio_stream2.id = kAudioTrackId2;
+    audio_stream2.cname = kStream2Cname;
+    audio_stream2.sync_label = kStreamLabel2;
+    audio_stream2.ssrcs.push_back(kAudioTrack2Ssrc);
+    audio->AddStream(audio_stream2);
+    audio->AddCrypto(CryptoParams(1, "AES_CM_128_HMAC_SHA1_32",
+        "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32",
+        "dummy_session_params"));
+    audio->set_protocol(cricket::kMediaProtocolSavpf);
+    return audio;
+  }
+
+  template <class MCD>
+  void CompareMediaContentDescription(const MCD* cd1,
+                                      const MCD* cd2) {
+    // type
+    EXPECT_EQ(cd1->type(), cd2->type());
+
+    // content direction
+    EXPECT_EQ(cd1->direction(), cd2->direction());
+
+    // rtcp_mux
+    EXPECT_EQ(cd1->rtcp_mux(), cd2->rtcp_mux());
+
+    // rtcp_reduced_size
+    EXPECT_EQ(cd1->rtcp_reduced_size(), cd2->rtcp_reduced_size());
+
+    // cryptos
+    EXPECT_EQ(cd1->cryptos().size(), cd2->cryptos().size());
+    if (cd1->cryptos().size() != cd2->cryptos().size()) {
+      ADD_FAILURE();
+      return;
+    }
+    for (size_t i = 0; i < cd1->cryptos().size(); ++i) {
+      const CryptoParams c1 = cd1->cryptos().at(i);
+      const CryptoParams c2 = cd2->cryptos().at(i);
+      EXPECT_TRUE(c1.Matches(c2));
+      EXPECT_EQ(c1.key_params, c2.key_params);
+      EXPECT_EQ(c1.session_params, c2.session_params);
+    }
+
+    // protocol
+    // Use an equivalence class here, for old and new versions of the
+    // protocol description.
+    if (cd1->protocol() == cricket::kMediaProtocolDtlsSctp
+        || cd1->protocol() == cricket::kMediaProtocolUdpDtlsSctp
+        || cd1->protocol() == cricket::kMediaProtocolTcpDtlsSctp) {
+      const bool cd2_is_also_dtls_sctp =
+        cd2->protocol() == cricket::kMediaProtocolDtlsSctp
+        || cd2->protocol() == cricket::kMediaProtocolUdpDtlsSctp
+        || cd2->protocol() == cricket::kMediaProtocolTcpDtlsSctp;
+      EXPECT_TRUE(cd2_is_also_dtls_sctp);
+    } else {
+      EXPECT_EQ(cd1->protocol(), cd2->protocol());
+    }
+
+    // codecs
+    EXPECT_EQ(cd1->codecs(), cd2->codecs());
+
+    // bandwidth
+    EXPECT_EQ(cd1->bandwidth(), cd2->bandwidth());
+
+    // streams
+    EXPECT_EQ(cd1->streams(), cd2->streams());
+
+    // extmap
+    ASSERT_EQ(cd1->rtp_header_extensions().size(),
+              cd2->rtp_header_extensions().size());
+    for (size_t i = 0; i < cd1->rtp_header_extensions().size(); ++i) {
+      const RtpHeaderExtension ext1 = cd1->rtp_header_extensions().at(i);
+      const RtpHeaderExtension ext2 = cd2->rtp_header_extensions().at(i);
+      EXPECT_EQ(ext1.uri, ext2.uri);
+      EXPECT_EQ(ext1.id, ext2.id);
+    }
+  }
+
+  void CompareSessionDescription(const SessionDescription& desc1,
+                                 const SessionDescription& desc2) {
+    // Compare content descriptions.
+    if (desc1.contents().size() != desc2.contents().size()) {
+      ADD_FAILURE();
+      return;
+    }
+    for (size_t i = 0; i < desc1.contents().size(); ++i) {
+      const cricket::ContentInfo& c1 = desc1.contents().at(i);
+      const cricket::ContentInfo& c2 = desc2.contents().at(i);
+      // content name
+      EXPECT_EQ(c1.name, c2.name);
+      // content type
+      // Note: ASSERT will return from the function, but will not stop the
+      // test.
+      ASSERT_EQ(c1.type, c2.type);
+
+      ASSERT_EQ(IsAudioContent(&c1), IsAudioContent(&c2));
+      if (IsAudioContent(&c1)) {
+        const AudioContentDescription* acd1 =
+            static_cast<const AudioContentDescription*>(c1.description);
+        const AudioContentDescription* acd2 =
+            static_cast<const AudioContentDescription*>(c2.description);
+        CompareMediaContentDescription<AudioContentDescription>(acd1, acd2);
+      }
+
+      ASSERT_EQ(IsVideoContent(&c1), IsVideoContent(&c2));
+      if (IsVideoContent(&c1)) {
+        const VideoContentDescription* vcd1 =
+            static_cast<const VideoContentDescription*>(c1.description);
+        const VideoContentDescription* vcd2 =
+            static_cast<const VideoContentDescription*>(c2.description);
+        CompareMediaContentDescription<VideoContentDescription>(vcd1, vcd2);
+      }
+
+      ASSERT_EQ(IsDataContent(&c1), IsDataContent(&c2));
+      if (IsDataContent(&c1)) {
+        const DataContentDescription* dcd1 =
+            static_cast<const DataContentDescription*>(c1.description);
+        const DataContentDescription* dcd2 =
+            static_cast<const DataContentDescription*>(c2.description);
+        CompareMediaContentDescription<DataContentDescription>(dcd1, dcd2);
+      }
+    }
+
+    // group
+    const cricket::ContentGroups groups1 = desc1.groups();
+    const cricket::ContentGroups groups2 = desc2.groups();
+    EXPECT_EQ(groups1.size(), groups2.size());
+    if (groups1.size() != groups2.size()) {
+      ADD_FAILURE();
+      return;
+    }
+    for (size_t i = 0; i < groups1.size(); ++i) {
+      const cricket::ContentGroup group1 = groups1.at(i);
+      const cricket::ContentGroup group2 = groups2.at(i);
+      EXPECT_EQ(group1.semantics(), group2.semantics());
+      const cricket::ContentNames names1 = group1.content_names();
+      const cricket::ContentNames names2 = group2.content_names();
+      EXPECT_EQ(names1.size(), names2.size());
+      if (names1.size() != names2.size()) {
+        ADD_FAILURE();
+        return;
+      }
+      cricket::ContentNames::const_iterator iter1 = names1.begin();
+      cricket::ContentNames::const_iterator iter2 = names2.begin();
+      while (iter1 != names1.end()) {
+        EXPECT_EQ(*iter1++, *iter2++);
+      }
+    }
+
+    // transport info
+    const cricket::TransportInfos transports1 = desc1.transport_infos();
+    const cricket::TransportInfos transports2 = desc2.transport_infos();
+    EXPECT_EQ(transports1.size(), transports2.size());
+    if (transports1.size() != transports2.size()) {
+      ADD_FAILURE();
+      return;
+    }
+    for (size_t i = 0; i < transports1.size(); ++i) {
+      const cricket::TransportInfo transport1 = transports1.at(i);
+      const cricket::TransportInfo transport2 = transports2.at(i);
+      EXPECT_EQ(transport1.content_name, transport2.content_name);
+      EXPECT_EQ(transport1.description.ice_ufrag,
+                transport2.description.ice_ufrag);
+      EXPECT_EQ(transport1.description.ice_pwd,
+                transport2.description.ice_pwd);
+      if (transport1.description.identity_fingerprint) {
+        EXPECT_EQ(*transport1.description.identity_fingerprint,
+                  *transport2.description.identity_fingerprint);
+      } else {
+        EXPECT_EQ(transport1.description.identity_fingerprint.get(),
+                  transport2.description.identity_fingerprint.get());
+      }
+      EXPECT_EQ(transport1.description.transport_options,
+                transport2.description.transport_options);
+    }
+
+    // global attributes
+    EXPECT_EQ(desc1.msid_supported(), desc2.msid_supported());
+  }
+
+  bool CompareSessionDescription(
+      const JsepSessionDescription& desc1,
+      const JsepSessionDescription& desc2) {
+    EXPECT_EQ(desc1.session_id(), desc2.session_id());
+    EXPECT_EQ(desc1.session_version(), desc2.session_version());
+    CompareSessionDescription(*desc1.description(), *desc2.description());
+    if (desc1.number_of_mediasections() != desc2.number_of_mediasections())
+      return false;
+    for (size_t i = 0; i < desc1.number_of_mediasections(); ++i) {
+      const IceCandidateCollection* cc1 = desc1.candidates(i);
+      const IceCandidateCollection* cc2 = desc2.candidates(i);
+      if (cc1->count() != cc2->count())
+        return false;
+      for (size_t j = 0; j < cc1->count(); ++j) {
+        const IceCandidateInterface* c1 = cc1->at(j);
+        const IceCandidateInterface* c2 = cc2->at(j);
+        EXPECT_EQ(c1->sdp_mid(), c2->sdp_mid());
+        EXPECT_EQ(c1->sdp_mline_index(), c2->sdp_mline_index());
+        EXPECT_TRUE(c1->candidate().IsEquivalent(c2->candidate()));
+      }
+    }
+    return true;
+  }
+
+  // Disable the ice-ufrag and ice-pwd in the given |sdp| message by replacing
+  // them with invalid keywords so that the parser will just ignore them.
+  bool RemoveCandidateUfragPwd(std::string* sdp) {
+    const char ice_ufrag[] = "a=ice-ufrag";
+    const char ice_ufragx[] = "a=xice-ufrag";
+    const char ice_pwd[] = "a=ice-pwd";
+    const char ice_pwdx[] = "a=xice-pwd";
+    rtc::replace_substrs(ice_ufrag, strlen(ice_ufrag),
+        ice_ufragx, strlen(ice_ufragx), sdp);
+    rtc::replace_substrs(ice_pwd, strlen(ice_pwd),
+        ice_pwdx, strlen(ice_pwdx), sdp);
+    return true;
+  }
+
+  // Update the candidates in |jdesc| to use the given |ufrag| and |pwd|.
+  bool UpdateCandidateUfragPwd(JsepSessionDescription* jdesc, int mline_index,
+      const std::string& ufrag, const std::string& pwd) {
+    std::string content_name;
+    if (mline_index == 0) {
+      content_name = kAudioContentName;
+    } else if (mline_index == 1) {
+      content_name = kVideoContentName;
+    } else {
+      ASSERT(false);
+    }
+    TransportInfo transport_info(
+        content_name, TransportDescription(ufrag, pwd));
+    SessionDescription* desc =
+        const_cast<SessionDescription*>(jdesc->description());
+    desc->RemoveTransportInfoByName(content_name);
+    EXPECT_TRUE(desc->AddTransportInfo(transport_info));
+    for (size_t i = 0; i < jdesc_.number_of_mediasections(); ++i) {
+      const IceCandidateCollection* cc = jdesc_.candidates(i);
+      for (size_t j = 0; j < cc->count(); ++j) {
+        if (cc->at(j)->sdp_mline_index() == mline_index) {
+          const_cast<Candidate&>(cc->at(j)->candidate()).set_username(
+              ufrag);
+          const_cast<Candidate&>(cc->at(j)->candidate()).set_password(
+              pwd);
+        }
+      }
+    }
+    return true;
+  }
+
+  void AddIceOptions(const std::string& content_name,
+                     const std::vector<std::string>& transport_options) {
+    ASSERT_TRUE(desc_.GetTransportInfoByName(content_name) != NULL);
+    cricket::TransportInfo transport_info =
+        *(desc_.GetTransportInfoByName(content_name));
+    desc_.RemoveTransportInfoByName(content_name);
+    transport_info.description.transport_options = transport_options;
+    desc_.AddTransportInfo(transport_info);
+  }
+
+  void SetIceUfragPwd(const std::string& content_name,
+                      const std::string& ice_ufrag,
+                      const std::string& ice_pwd) {
+    ASSERT_TRUE(desc_.GetTransportInfoByName(content_name) != NULL);
+    cricket::TransportInfo transport_info =
+        *(desc_.GetTransportInfoByName(content_name));
+    desc_.RemoveTransportInfoByName(content_name);
+    transport_info.description.ice_ufrag = ice_ufrag;
+    transport_info.description.ice_pwd = ice_pwd;
+    desc_.AddTransportInfo(transport_info);
+  }
+
+  void AddFingerprint() {
+    desc_.RemoveTransportInfoByName(kAudioContentName);
+    desc_.RemoveTransportInfoByName(kVideoContentName);
+    rtc::SSLFingerprint fingerprint(rtc::DIGEST_SHA_1,
+                                          kIdentityDigest,
+                                          sizeof(kIdentityDigest));
+    EXPECT_TRUE(desc_.AddTransportInfo(TransportInfo(
+        kAudioContentName,
+        TransportDescription(std::vector<std::string>(), kCandidateUfragVoice,
+                             kCandidatePwdVoice, cricket::ICEMODE_FULL,
+                             cricket::CONNECTIONROLE_NONE, &fingerprint))));
+    EXPECT_TRUE(desc_.AddTransportInfo(TransportInfo(
+        kVideoContentName,
+        TransportDescription(std::vector<std::string>(), kCandidateUfragVideo,
+                             kCandidatePwdVideo, cricket::ICEMODE_FULL,
+                             cricket::CONNECTIONROLE_NONE, &fingerprint))));
+  }
+
+  void AddExtmap() {
+    audio_desc_ = static_cast<AudioContentDescription*>(
+        audio_desc_->Copy());
+    video_desc_ = static_cast<VideoContentDescription*>(
+        video_desc_->Copy());
+    audio_desc_->AddRtpHeaderExtension(
+        RtpHeaderExtension(kExtmapUri, kExtmapId));
+    video_desc_->AddRtpHeaderExtension(
+        RtpHeaderExtension(kExtmapUri, kExtmapId));
+    desc_.RemoveContentByName(kAudioContentName);
+    desc_.RemoveContentByName(kVideoContentName);
+    desc_.AddContent(kAudioContentName, NS_JINGLE_RTP, audio_desc_);
+    desc_.AddContent(kVideoContentName, NS_JINGLE_RTP, video_desc_);
+  }
+
+  void RemoveCryptos() {
+    audio_desc_->set_cryptos(std::vector<CryptoParams>());
+    video_desc_->set_cryptos(std::vector<CryptoParams>());
+  }
+
+  bool TestSerializeDirection(cricket::MediaContentDirection direction) {
+    audio_desc_->set_direction(direction);
+    video_desc_->set_direction(direction);
+    std::string new_sdp = kSdpFullString;
+    ReplaceDirection(direction, &new_sdp);
+
+    if (!jdesc_.Initialize(desc_.Copy(),
+                           jdesc_.session_id(),
+                           jdesc_.session_version())) {
+      return false;
+    }
+    std::string message = webrtc::SdpSerialize(jdesc_);
+    EXPECT_EQ(new_sdp, message);
+    return true;
+  }
+
+  bool TestSerializeRejected(bool audio_rejected, bool video_rejected) {
+    audio_desc_ = static_cast<AudioContentDescription*>(
+        audio_desc_->Copy());
+    video_desc_ = static_cast<VideoContentDescription*>(
+        video_desc_->Copy());
+    desc_.RemoveContentByName(kAudioContentName);
+    desc_.RemoveContentByName(kVideoContentName);
+    desc_.AddContent(kAudioContentName, NS_JINGLE_RTP, audio_rejected,
+                     audio_desc_);
+    desc_.AddContent(kVideoContentName, NS_JINGLE_RTP, video_rejected,
+                     video_desc_);
+    SetIceUfragPwd(kAudioContentName,
+                   audio_rejected ? "" : kCandidateUfragVoice,
+                   audio_rejected ? "" : kCandidatePwdVoice);
+    SetIceUfragPwd(kVideoContentName,
+                   video_rejected ? "" : kCandidateUfragVideo,
+                   video_rejected ? "" : kCandidatePwdVideo);
+
+    std::string new_sdp = kSdpString;
+    ReplaceRejected(audio_rejected, video_rejected, &new_sdp);
+
+    JsepSessionDescription jdesc_no_candidates(kDummyString);
+    if (!jdesc_no_candidates.Initialize(desc_.Copy(), kSessionId,
+                                        kSessionVersion)) {
+      return false;
+    }
+    std::string message = webrtc::SdpSerialize(jdesc_no_candidates);
+    EXPECT_EQ(new_sdp, message);
+    return true;
+  }
+
+  void AddSctpDataChannel() {
+    rtc::scoped_ptr<DataContentDescription> data(
+        new DataContentDescription());
+    data_desc_ = data.get();
+    data_desc_->set_protocol(cricket::kMediaProtocolDtlsSctp);
+    DataCodec codec(cricket::kGoogleSctpDataCodecId,
+                    cricket::kGoogleSctpDataCodecName, 0);
+    codec.SetParam(cricket::kCodecParamPort, kDefaultSctpPort);
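+    // (The port parameter set above is presumably what the serializer emits
+    // as the port in the a=sctpmap line; cf. kSdpSctpDataChannelString.)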
+    data_desc_->AddCodec(codec);
+    desc_.AddContent(kDataContentName, NS_JINGLE_DRAFT_SCTP, data.release());
+    EXPECT_TRUE(desc_.AddTransportInfo(
+           TransportInfo(kDataContentName,
+                         TransportDescription(kCandidateUfragData,
+                                              kCandidatePwdData))));
+  }
+
+  void AddRtpDataChannel() {
+    rtc::scoped_ptr<DataContentDescription> data(
+        new DataContentDescription());
+    data_desc_ = data.get();
+
+    data_desc_->AddCodec(DataCodec(101, "google-data", 1));
+    StreamParams data_stream;
+    data_stream.id = kDataChannelMsid;
+    data_stream.cname = kDataChannelCname;
+    data_stream.sync_label = kDataChannelLabel;
+    data_stream.ssrcs.push_back(kDataChannelSsrc);
+    data_desc_->AddStream(data_stream);
+    data_desc_->AddCrypto(CryptoParams(
+        1, "AES_CM_128_HMAC_SHA1_80",
+        "inline:FvLcvU2P3ZWmQxgPAgcDu7Zl9vftYElFOjEzhWs5", ""));
+    data_desc_->set_protocol(cricket::kMediaProtocolSavpf);
+    desc_.AddContent(kDataContentName, NS_JINGLE_RTP, data.release());
+    EXPECT_TRUE(desc_.AddTransportInfo(
+           TransportInfo(kDataContentName,
+                         TransportDescription(kCandidateUfragData,
+                                              kCandidatePwdData))));
+  }
+
+  bool TestDeserializeDirection(cricket::MediaContentDirection direction) {
+    std::string new_sdp = kSdpFullString;
+    ReplaceDirection(direction, &new_sdp);
+    JsepSessionDescription new_jdesc(kDummyString);
+
+    EXPECT_TRUE(SdpDeserialize(new_sdp, &new_jdesc));
+
+    audio_desc_->set_direction(direction);
+    video_desc_->set_direction(direction);
+    if (!jdesc_.Initialize(desc_.Copy(),
+                           jdesc_.session_id(),
+                           jdesc_.session_version())) {
+      return false;
+    }
+    EXPECT_TRUE(CompareSessionDescription(jdesc_, new_jdesc));
+    return true;
+  }
+
+  bool TestDeserializeRejected(bool audio_rejected, bool video_rejected) {
+    std::string new_sdp = kSdpString;
+    ReplaceRejected(audio_rejected, video_rejected, &new_sdp);
+    JsepSessionDescription new_jdesc(JsepSessionDescription::kOffer);
+    EXPECT_TRUE(SdpDeserialize(new_sdp, &new_jdesc));
+
+    audio_desc_ = static_cast<AudioContentDescription*>(
+        audio_desc_->Copy());
+    video_desc_ = static_cast<VideoContentDescription*>(
+        video_desc_->Copy());
+    desc_.RemoveContentByName(kAudioContentName);
+    desc_.RemoveContentByName(kVideoContentName);
+    desc_.AddContent(kAudioContentName, NS_JINGLE_RTP, audio_rejected,
+                     audio_desc_);
+    desc_.AddContent(kVideoContentName, NS_JINGLE_RTP, video_rejected,
+                     video_desc_);
+    SetIceUfragPwd(kAudioContentName,
+                   audio_rejected ? "" : kCandidateUfragVoice,
+                   audio_rejected ? "" : kCandidatePwdVoice);
+    SetIceUfragPwd(kVideoContentName,
+                   video_rejected ? "" : kCandidateUfragVideo,
+                   video_rejected ? "" : kCandidatePwdVideo);
+    JsepSessionDescription jdesc_no_candidates(kDummyString);
+    if (!jdesc_no_candidates.Initialize(desc_.Copy(), jdesc_.session_id(),
+                                        jdesc_.session_version())) {
+      return false;
+    }
+    EXPECT_TRUE(CompareSessionDescription(jdesc_no_candidates, new_jdesc));
+    return true;
+  }
+
+  void TestDeserializeExtmap(bool session_level, bool media_level) {
+    AddExtmap();
+    JsepSessionDescription new_jdesc("dummy");
+    ASSERT_TRUE(new_jdesc.Initialize(desc_.Copy(),
+                                     jdesc_.session_id(),
+                                     jdesc_.session_version()));
+    JsepSessionDescription jdesc_with_extmap("dummy");
+    std::string sdp_with_extmap = kSdpString;
+    if (session_level) {
+      InjectAfter(kSessionTime, kExtmapWithDirectionAndAttribute,
+                  &sdp_with_extmap);
+    }
+    if (media_level) {
+      InjectAfter(kAttributeIcePwdVoice, kExtmapWithDirectionAndAttribute,
+                  &sdp_with_extmap);
+      InjectAfter(kAttributeIcePwdVideo, kExtmapWithDirectionAndAttribute,
+                  &sdp_with_extmap);
+    }
+    // The extmap can't be present at both the session level and the media
+    // level at the same time.
+    if (session_level && media_level) {
+      SdpParseError error;
+      EXPECT_FALSE(webrtc::SdpDeserialize(sdp_with_extmap,
+                   &jdesc_with_extmap, &error));
+      EXPECT_NE(std::string::npos, error.description.find("a=extmap"));
+    } else {
+      EXPECT_TRUE(SdpDeserialize(sdp_with_extmap, &jdesc_with_extmap));
+      EXPECT_TRUE(CompareSessionDescription(jdesc_with_extmap, new_jdesc));
+    }
+  }
+
+  void VerifyCodecParameter(const cricket::CodecParameterMap& params,
+      const std::string& name, int expected_value) {
+    cricket::CodecParameterMap::const_iterator found = params.find(name);
+    ASSERT_TRUE(found != params.end());
+    EXPECT_EQ(found->second, rtc::ToString<int>(expected_value));
+  }
+
+  void TestDeserializeCodecParams(const CodecParams& params,
+                                  JsepSessionDescription* jdesc_output) {
+    std::string sdp =
+        "v=0\r\n"
+        "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+        "s=-\r\n"
+        "t=0 0\r\n"
+        // Include semantics for WebRTC Media Streams since it is supported by
+        // this parser, and will be added to the SDP when serializing a session
+        // description.
+        "a=msid-semantic: WMS\r\n"
+        // Payload type 111 preferred.
+        "m=audio 9 RTP/SAVPF 111 104 103\r\n"
+        // Payload type 111 listed before 103 and 104 in the map.
+        "a=rtpmap:111 opus/48000/2\r\n"
+        // Payload type 103 listed before 104.
+        "a=rtpmap:103 ISAC/16000\r\n"
+        "a=rtpmap:104 ISAC/32000\r\n"
+        "a=fmtp:111 0-15,66,70\r\n"
+        "a=fmtp:111 ";
+    std::ostringstream os;
+    os << "minptime=" << params.min_ptime << "; stereo=" << params.stereo
+       << "; sprop-stereo=" << params.sprop_stereo
+       << "; useinbandfec=" << params.useinband
+       << "; maxaveragebitrate=" << params.maxaveragebitrate << "\r\n"
+       << "a=ptime:" << params.ptime << "\r\n"
+       << "a=maxptime:" << params.max_ptime << "\r\n";
+    sdp += os.str();
+
+    os.clear();
+    os.str("");
+    // Payload type 99 preferred.
+    os << "m=video 9 RTP/SAVPF 99 95\r\n"
+       << "a=rtpmap:99 VP8/90000\r\n"
+       << "a=rtpmap:95 RTX/90000\r\n"
+       << "a=fmtp:95 apt=99;\r\n";
+    sdp += os.str();
+
+    // Deserialize
+    SdpParseError error;
+    EXPECT_TRUE(webrtc::SdpDeserialize(sdp, jdesc_output, &error));
+
+    const ContentInfo* ac = GetFirstAudioContent(jdesc_output->description());
+    ASSERT_TRUE(ac != NULL);
+    const AudioContentDescription* acd =
+        static_cast<const AudioContentDescription*>(ac->description);
+    ASSERT_FALSE(acd->codecs().empty());
+    cricket::AudioCodec opus = acd->codecs()[0];
+    EXPECT_EQ("opus", opus.name);
+    EXPECT_EQ(111, opus.id);
+    VerifyCodecParameter(opus.params, "minptime", params.min_ptime);
+    VerifyCodecParameter(opus.params, "stereo", params.stereo);
+    VerifyCodecParameter(opus.params, "sprop-stereo", params.sprop_stereo);
+    VerifyCodecParameter(opus.params, "useinbandfec", params.useinband);
+    VerifyCodecParameter(opus.params, "maxaveragebitrate",
+                         params.maxaveragebitrate);
+    for (size_t i = 0; i < acd->codecs().size(); ++i) {
+      cricket::AudioCodec codec = acd->codecs()[i];
+      VerifyCodecParameter(codec.params, "ptime", params.ptime);
+      VerifyCodecParameter(codec.params, "maxptime", params.max_ptime);
+      if (codec.name == "ISAC") {
+        if (codec.clockrate == 16000) {
+          EXPECT_EQ(32000, codec.bitrate);
+        } else {
+          EXPECT_EQ(56000, codec.bitrate);
+        }
+      }
+    }
+
+    const ContentInfo* vc = GetFirstVideoContent(jdesc_output->description());
+    ASSERT_TRUE(vc != NULL);
+    const VideoContentDescription* vcd =
+        static_cast<const VideoContentDescription*>(vc->description);
+    ASSERT_FALSE(vcd->codecs().empty());
+    cricket::VideoCodec vp8 = vcd->codecs()[0];
+    EXPECT_EQ("VP8", vp8.name);
+    EXPECT_EQ(99, vp8.id);
+    cricket::VideoCodec rtx = vcd->codecs()[1];
+    EXPECT_EQ("RTX", rtx.name);
+    EXPECT_EQ(95, rtx.id);
+    VerifyCodecParameter(rtx.params, "apt", vp8.id);
+  }
+
+  void TestDeserializeRtcpFb(JsepSessionDescription* jdesc_output,
+                             bool use_wildcard) {
+    std::string sdp_session_and_audio =
+        "v=0\r\n"
+        "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+        "s=-\r\n"
+        "t=0 0\r\n"
+        // Include semantics for WebRTC Media Streams since it is supported by
+        // this parser, and will be added to the SDP when serializing a session
+        // description.
+        "a=msid-semantic: WMS\r\n"
+        "m=audio 9 RTP/SAVPF 111\r\n"
+        "a=rtpmap:111 opus/48000/2\r\n";
+    std::string sdp_video =
+        "m=video 3457 RTP/SAVPF 101\r\n"
+        "a=rtpmap:101 VP8/90000\r\n"
+        "a=rtcp-fb:101 nack\r\n"
+        "a=rtcp-fb:101 nack pli\r\n"
+        "a=rtcp-fb:101 goog-remb\r\n";
+    std::ostringstream os;
+    os << sdp_session_and_audio;
+    os << "a=rtcp-fb:" << (use_wildcard ? "*" : "111") <<  " nack\r\n";
+    os << sdp_video;
+    os << "a=rtcp-fb:" << (use_wildcard ? "*" : "101") <<  " ccm fir\r\n";
+    std::string sdp = os.str();
+    // Deserialize
+    SdpParseError error;
+    EXPECT_TRUE(webrtc::SdpDeserialize(sdp, jdesc_output, &error));
+    const ContentInfo* ac = GetFirstAudioContent(jdesc_output->description());
+    ASSERT_TRUE(ac != NULL);
+    const AudioContentDescription* acd =
+        static_cast<const AudioContentDescription*>(ac->description);
+    ASSERT_FALSE(acd->codecs().empty());
+    cricket::AudioCodec opus = acd->codecs()[0];
+    EXPECT_EQ(111, opus.id);
+    EXPECT_TRUE(opus.HasFeedbackParam(
+        cricket::FeedbackParam(cricket::kRtcpFbParamNack,
+                               cricket::kParamValueEmpty)));
+
+    const ContentInfo* vc = GetFirstVideoContent(jdesc_output->description());
+    ASSERT_TRUE(vc != NULL);
+    const VideoContentDescription* vcd =
+        static_cast<const VideoContentDescription*>(vc->description);
+    ASSERT_FALSE(vcd->codecs().empty());
+    cricket::VideoCodec vp8 = vcd->codecs()[0];
+    EXPECT_STREQ(webrtc::JsepSessionDescription::kDefaultVideoCodecName,
+                 vp8.name.c_str());
+    EXPECT_EQ(101, vp8.id);
+    EXPECT_TRUE(vp8.HasFeedbackParam(
+        cricket::FeedbackParam(cricket::kRtcpFbParamNack,
+                               cricket::kParamValueEmpty)));
+    EXPECT_TRUE(vp8.HasFeedbackParam(
+        cricket::FeedbackParam(cricket::kRtcpFbParamNack,
+                               cricket::kRtcpFbNackParamPli)));
+    EXPECT_TRUE(vp8.HasFeedbackParam(
+        cricket::FeedbackParam(cricket::kRtcpFbParamRemb,
+                               cricket::kParamValueEmpty)));
+    EXPECT_TRUE(vp8.HasFeedbackParam(
+        cricket::FeedbackParam(cricket::kRtcpFbParamCcm,
+                               cricket::kRtcpFbCcmParamFir)));
+  }
+
+  // Two SDP messages can mean the same thing but be different strings, e.g.
+  // some of the lines can be serialized in a different order.
+  // However, a deserialized description can be compared field by field and
+  // has no order. If the deserializer has already been tested, serializing
+  // and then deserializing and comparing JsepSessionDescriptions tests the
+  // serializer sufficiently.
+  void TestSerialize(const JsepSessionDescription& jdesc) {
+    std::string message = webrtc::SdpSerialize(jdesc);
+    JsepSessionDescription jdesc_output_des(kDummyString);
+    SdpParseError error;
+    EXPECT_TRUE(webrtc::SdpDeserialize(message, &jdesc_output_des, &error));
+    EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output_des));
+  }
+
+ protected:
+  SessionDescription desc_;
+  AudioContentDescription* audio_desc_;
+  VideoContentDescription* video_desc_;
+  DataContentDescription* data_desc_;
+  Candidates candidates_;
+  rtc::scoped_ptr<IceCandidateInterface> jcandidate_;
+  JsepSessionDescription jdesc_;
+};
+
+void TestMismatch(const std::string& string1, const std::string& string2) {
+  int position = 0;
+  for (size_t i = 0; i < string1.length() && i < string2.length(); ++i) {
+    if (string1.c_str()[i] != string2.c_str()[i]) {
+      position = static_cast<int>(i);
+      break;
+    }
+  }
+  EXPECT_EQ(0, position) << "Strings mismatch at the " << position
+                         << " character\n"
+                         << " 1: " << string1.substr(position, 20) << "\n"
+                         << " 2: " << string2.substr(position, 20) << "\n";
+}
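+// (TestMismatch reports the index of the first character at which the two
+// strings diverge, plus a 20-character window from each, which is easier to
+// read for long SDP blobs than a plain EXPECT_EQ failure.)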
+
+TEST_F(WebRtcSdpTest, SerializeSessionDescription) {
+  // SessionDescription with desc and candidates.
+  std::string message = webrtc::SdpSerialize(jdesc_);
+  TestMismatch(std::string(kSdpFullString), message);
+}
+
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionEmpty) {
+  JsepSessionDescription jdesc_empty(kDummyString);
+  EXPECT_EQ("", webrtc::SdpSerialize(jdesc_empty));
+}
+
+// This tests serialization of SDP with only IPv6 candidates and verifies that
+// IPv6 is used as the default address in the c line, according to preference.
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithIPv6Only) {
+  // Only test 1 m line.
+  desc_.RemoveContentByName("video_content_name");
+  // STUN has a higher preference than local host.
+  cricket::Candidate candidate1(
+      cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+      rtc::SocketAddress("::1", 1234), kCandidatePriority, "", "",
+      cricket::STUN_PORT_TYPE, kCandidateGeneration, kCandidateFoundation1);
+  cricket::Candidate candidate2(
+      cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+      rtc::SocketAddress("::2", 1235), kCandidatePriority, "", "",
+      cricket::LOCAL_PORT_TYPE, kCandidateGeneration, kCandidateFoundation1);
+  JsepSessionDescription jdesc(kDummyString);
+  ASSERT_TRUE(jdesc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));
+
+  // Only add the candidates to audio m line.
+  JsepIceCandidate jice1("audio_content_name", 0, candidate1);
+  JsepIceCandidate jice2("audio_content_name", 0, candidate2);
+  ASSERT_TRUE(jdesc.AddCandidate(&jice1));
+  ASSERT_TRUE(jdesc.AddCandidate(&jice2));
+  std::string message = webrtc::SdpSerialize(jdesc);
+
+  // Audio line should have a c line like this one.
+  EXPECT_NE(message.find("c=IN IP6 ::1"), std::string::npos);
+  // Shouldn't have an IP4 c line.
+  EXPECT_EQ(message.find("c=IN IP4"), std::string::npos);
+}
+
+// This tests serialization of SDP with both IPv4 and IPv6 candidates and
+// verifies that IPv4 is used as the default address in the c line even if
+// the preference of IPv4 is lower.
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithBothIPFamilies) {
+  // Only test 1 m line.
+  desc_.RemoveContentByName("video_content_name");
+  cricket::Candidate candidate_v4(
+      cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+      rtc::SocketAddress("192.168.1.5", 1234), kCandidatePriority, "", "",
+      cricket::STUN_PORT_TYPE, kCandidateGeneration, kCandidateFoundation1);
+  cricket::Candidate candidate_v6(
+      cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+      rtc::SocketAddress("::1", 1234), kCandidatePriority, "", "",
+      cricket::LOCAL_PORT_TYPE, kCandidateGeneration, kCandidateFoundation1);
+  JsepSessionDescription jdesc(kDummyString);
+  ASSERT_TRUE(jdesc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));
+
+  // Only add the candidates to audio m line.
+  JsepIceCandidate jice_v4("audio_content_name", 0, candidate_v4);
+  JsepIceCandidate jice_v6("audio_content_name", 0, candidate_v6);
+  ASSERT_TRUE(jdesc.AddCandidate(&jice_v4));
+  ASSERT_TRUE(jdesc.AddCandidate(&jice_v6));
+  std::string message = webrtc::SdpSerialize(jdesc);
+
+  // Audio line should have a c line like this one.
+  EXPECT_NE(message.find("c=IN IP4 192.168.1.5"), std::string::npos);
+  // Shouldn't have an IP6 c line.
+  EXPECT_EQ(message.find("c=IN IP6"), std::string::npos);
+}
+
+// This tests serialization of SDP with both UDP and TCP candidates and
+// verifies that the UDP candidate is used for the default address in the
+// c line even if the preference of UDP is lower.
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithBothProtocols) {
+  // Only test 1 m line.
+  desc_.RemoveContentByName("video_content_name");
+  // STUN has a higher preference than local host.
+  cricket::Candidate candidate1(
+      cricket::ICE_CANDIDATE_COMPONENT_RTP, "tcp",
+      rtc::SocketAddress("::1", 1234), kCandidatePriority, "", "",
+      cricket::STUN_PORT_TYPE, kCandidateGeneration, kCandidateFoundation1);
+  cricket::Candidate candidate2(
+      cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp",
+      rtc::SocketAddress("fe80::1234:5678:abcd:ef12", 1235), kCandidatePriority,
+      "", "", cricket::LOCAL_PORT_TYPE, kCandidateGeneration,
+      kCandidateFoundation1);
+  JsepSessionDescription jdesc(kDummyString);
+  ASSERT_TRUE(jdesc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));
+
+  // Only add the candidates to audio m line.
+  JsepIceCandidate jice1("audio_content_name", 0, candidate1);
+  JsepIceCandidate jice2("audio_content_name", 0, candidate2);
+  ASSERT_TRUE(jdesc.AddCandidate(&jice1));
+  ASSERT_TRUE(jdesc.AddCandidate(&jice2));
+  std::string message = webrtc::SdpSerialize(jdesc);
+
+  // Audio line should have a c line like this one.
+  EXPECT_NE(message.find("c=IN IP6 fe80::1234:5678:abcd:ef12"),
+            std::string::npos);
+  // Shouldn't have an IP4 c line.
+  EXPECT_EQ(message.find("c=IN IP4"), std::string::npos);
+}
+
+// This tests serialization of SDP with only TCP candidates and verifies that
+// the null IPv4 address is used as the default address in the c line.
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithTCPOnly) {
+  // Only test 1 m line.
+  desc_.RemoveContentByName("video_content_name");
+  // STUN has a higher preference than local host.
+  cricket::Candidate candidate1(
+      cricket::ICE_CANDIDATE_COMPONENT_RTP, "tcp",
+      rtc::SocketAddress("::1", 1234), kCandidatePriority, "", "",
+      cricket::STUN_PORT_TYPE, kCandidateGeneration, kCandidateFoundation1);
+  cricket::Candidate candidate2(
+      cricket::ICE_CANDIDATE_COMPONENT_RTP, "tcp",
+      rtc::SocketAddress("::2", 1235), kCandidatePriority, "", "",
+      cricket::LOCAL_PORT_TYPE, kCandidateGeneration, kCandidateFoundation1);
+  JsepSessionDescription jdesc(kDummyString);
+  ASSERT_TRUE(jdesc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));
+
+  // Only add the candidates to audio m line.
+  JsepIceCandidate jice1("audio_content_name", 0, candidate1);
+  JsepIceCandidate jice2("audio_content_name", 0, candidate2);
+  ASSERT_TRUE(jdesc.AddCandidate(&jice1));
+  ASSERT_TRUE(jdesc.AddCandidate(&jice2));
+  std::string message = webrtc::SdpSerialize(jdesc);
+
+  // Audio line should have a c line like this one when no default exists.
+  EXPECT_NE(message.find("c=IN IP4 0.0.0.0"), std::string::npos);
+}
+
+// This tests serialization of SDP with a=crypto and a=fingerprint, as would be
+// the case in a DTLS offer.
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithFingerprint) {
+  AddFingerprint();
+  JsepSessionDescription jdesc_with_fingerprint(kDummyString);
+  ASSERT_TRUE(jdesc_with_fingerprint.Initialize(desc_.Copy(),
+                                                kSessionId, kSessionVersion));
+  std::string message = webrtc::SdpSerialize(jdesc_with_fingerprint);
+
+  std::string sdp_with_fingerprint = kSdpString;
+  InjectAfter(kAttributeIcePwdVoice,
+              kFingerprint, &sdp_with_fingerprint);
+  InjectAfter(kAttributeIcePwdVideo,
+              kFingerprint, &sdp_with_fingerprint);
+
+  EXPECT_EQ(sdp_with_fingerprint, message);
+}
+
+// This tests serialization of SDP with a=fingerprint with no a=crypto, as would
+// be the case in a DTLS answer.
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithFingerprintNoCryptos) {
+  AddFingerprint();
+  RemoveCryptos();
+  JsepSessionDescription jdesc_with_fingerprint(kDummyString);
+  ASSERT_TRUE(jdesc_with_fingerprint.Initialize(desc_.Copy(),
+                                                kSessionId, kSessionVersion));
+  std::string message = webrtc::SdpSerialize(jdesc_with_fingerprint);
+
+  std::string sdp_with_fingerprint = kSdpString;
+  Replace(kAttributeCryptoVoice, "", &sdp_with_fingerprint);
+  Replace(kAttributeCryptoVideo, "", &sdp_with_fingerprint);
+  InjectAfter(kAttributeIcePwdVoice,
+              kFingerprint, &sdp_with_fingerprint);
+  InjectAfter(kAttributeIcePwdVideo,
+              kFingerprint, &sdp_with_fingerprint);
+
+  EXPECT_EQ(sdp_with_fingerprint, message);
+}
+
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithoutCandidates) {
+  // JsepSessionDescription with desc but without candidates.
+  JsepSessionDescription jdesc_no_candidates(kDummyString);
+  ASSERT_TRUE(jdesc_no_candidates.Initialize(desc_.Copy(), kSessionId,
+                                             kSessionVersion));
+  std::string message = webrtc::SdpSerialize(jdesc_no_candidates);
+  EXPECT_EQ(std::string(kSdpString), message);
+}
+
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithBundle) {
+  ContentGroup group(cricket::GROUP_TYPE_BUNDLE);
+  group.AddContentName(kAudioContentName);
+  group.AddContentName(kVideoContentName);
+  desc_.AddGroup(group);
+  ASSERT_TRUE(jdesc_.Initialize(desc_.Copy(),
+                                jdesc_.session_id(),
+                                jdesc_.session_version()));
+  std::string message = webrtc::SdpSerialize(jdesc_);
+  std::string sdp_with_bundle = kSdpFullString;
+  InjectAfter(kSessionTime,
+              "a=group:BUNDLE audio_content_name video_content_name\r\n",
+              &sdp_with_bundle);
+  EXPECT_EQ(sdp_with_bundle, message);
+}
+
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithBandwidth) {
+  VideoContentDescription* vcd = static_cast<VideoContentDescription*>(
+      GetFirstVideoContent(&desc_)->description);
+  vcd->set_bandwidth(100 * 1000);
+  AudioContentDescription* acd = static_cast<AudioContentDescription*>(
+      GetFirstAudioContent(&desc_)->description);
+  acd->set_bandwidth(50 * 1000);
+  ASSERT_TRUE(jdesc_.Initialize(desc_.Copy(),
+                                jdesc_.session_id(),
+                                jdesc_.session_version()));
+  std::string message = webrtc::SdpSerialize(jdesc_);
+  std::string sdp_with_bandwidth = kSdpFullString;
+  InjectAfter("c=IN IP4 74.125.224.39\r\n",
+              "b=AS:100\r\n",
+              &sdp_with_bandwidth);
+  InjectAfter("c=IN IP4 74.125.127.126\r\n",
+              "b=AS:50\r\n",
+              &sdp_with_bandwidth);
+  EXPECT_EQ(sdp_with_bandwidth, message);
+}
+
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithIceOptions) {
+  std::vector<std::string> transport_options;
+  transport_options.push_back(kIceOption1);
+  transport_options.push_back(kIceOption3);
+  AddIceOptions(kAudioContentName, transport_options);
+  transport_options.clear();
+  transport_options.push_back(kIceOption2);
+  transport_options.push_back(kIceOption3);
+  AddIceOptions(kVideoContentName, transport_options);
+  ASSERT_TRUE(jdesc_.Initialize(desc_.Copy(),
+                                jdesc_.session_id(),
+                                jdesc_.session_version()));
+  std::string message = webrtc::SdpSerialize(jdesc_);
+  std::string sdp_with_ice_options = kSdpFullString;
+  InjectAfter(kAttributeIcePwdVoice,
+              "a=ice-options:iceoption1 iceoption3\r\n",
+              &sdp_with_ice_options);
+  InjectAfter(kAttributeIcePwdVideo,
+              "a=ice-options:iceoption2 iceoption3\r\n",
+              &sdp_with_ice_options);
+  EXPECT_EQ(sdp_with_ice_options, message);
+}
+
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithRecvOnlyContent) {
+  EXPECT_TRUE(TestSerializeDirection(cricket::MD_RECVONLY));
+}
+
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithSendOnlyContent) {
+  EXPECT_TRUE(TestSerializeDirection(cricket::MD_SENDONLY));
+}
+
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithInactiveContent) {
+  EXPECT_TRUE(TestSerializeDirection(cricket::MD_INACTIVE));
+}
+
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithAudioRejected) {
+  EXPECT_TRUE(TestSerializeRejected(true, false));
+}
+
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithVideoRejected) {
+  EXPECT_TRUE(TestSerializeRejected(false, true));
+}
+
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithAudioVideoRejected) {
+  EXPECT_TRUE(TestSerializeRejected(true, true));
+}
+
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithRtpDataChannel) {
+  AddRtpDataChannel();
+  JsepSessionDescription jsep_desc(kDummyString);
+
+  ASSERT_TRUE(jsep_desc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));
+  std::string message = webrtc::SdpSerialize(jsep_desc);
+
+  std::string expected_sdp = kSdpString;
+  expected_sdp.append(kSdpRtpDataChannelString);
+  EXPECT_EQ(expected_sdp, message);
+}
+
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithSctpDataChannel) {
+  AddSctpDataChannel();
+  JsepSessionDescription jsep_desc(kDummyString);
+
+  ASSERT_TRUE(jsep_desc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));
+  std::string message = webrtc::SdpSerialize(jsep_desc);
+
+  std::string expected_sdp = kSdpString;
+  expected_sdp.append(kSdpSctpDataChannelString);
+  EXPECT_EQ(message, expected_sdp);
+}
+
+TEST_F(WebRtcSdpTest, SerializeWithSctpDataChannelAndNewPort) {
+  AddSctpDataChannel();
+  JsepSessionDescription jsep_desc(kDummyString);
+
+  ASSERT_TRUE(jsep_desc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));
+  DataContentDescription* dcdesc = static_cast<DataContentDescription*>(
+      jsep_desc.description()->GetContentDescriptionByName(kDataContentName));
+
+  const int kNewPort = 1234;
+  cricket::DataCodec codec(
+        cricket::kGoogleSctpDataCodecId, cricket::kGoogleSctpDataCodecName, 0);
+  codec.SetParam(cricket::kCodecParamPort, kNewPort);
+  dcdesc->AddOrReplaceCodec(codec);
+
+  std::string message = webrtc::SdpSerialize(jsep_desc);
+
+  std::string expected_sdp = kSdpString;
+  expected_sdp.append(kSdpSctpDataChannelString);
+
+  char default_portstr[16];
+  char new_portstr[16];
+  rtc::sprintfn(default_portstr, sizeof(default_portstr), "%d",
+                      kDefaultSctpPort);
+  rtc::sprintfn(new_portstr, sizeof(new_portstr), "%d", kNewPort);
+  rtc::replace_substrs(default_portstr, strlen(default_portstr),
+                             new_portstr, strlen(new_portstr),
+                             &expected_sdp);
+
+  EXPECT_EQ(expected_sdp, message);
+}
+
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithDataChannelAndBandwidth) {
+  AddRtpDataChannel();
+  data_desc_->set_bandwidth(100*1000);
+  JsepSessionDescription jsep_desc(kDummyString);
+
+  ASSERT_TRUE(jsep_desc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));
+  std::string message = webrtc::SdpSerialize(jsep_desc);
+
+  std::string expected_sdp = kSdpString;
+  expected_sdp.append(kSdpRtpDataChannelString);
+  // Serializing data content shouldn't ignore bandwidth settings.
+  InjectAfter("m=application 9 RTP/SAVPF 101\r\nc=IN IP4 0.0.0.0\r\n",
+              "b=AS:100\r\n",
+              &expected_sdp);
+  EXPECT_EQ(expected_sdp, message);
+}
+
+TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithExtmap) {
+  AddExtmap();
+  JsepSessionDescription desc_with_extmap("dummy");
+  ASSERT_TRUE(desc_with_extmap.Initialize(desc_.Copy(),
+                                          kSessionId, kSessionVersion));
+  std::string message = webrtc::SdpSerialize(desc_with_extmap);
+
+  std::string sdp_with_extmap = kSdpString;
+  InjectAfter("a=mid:audio_content_name\r\n",
+              kExtmap, &sdp_with_extmap);
+  InjectAfter("a=mid:video_content_name\r\n",
+              kExtmap, &sdp_with_extmap);
+
+  EXPECT_EQ(sdp_with_extmap, message);
+}
+
+TEST_F(WebRtcSdpTest, SerializeCandidates) {
+  std::string message = webrtc::SdpSerializeCandidate(*jcandidate_);
+  EXPECT_EQ(std::string(kRawCandidate), message);
+
+  Candidate candidate_with_ufrag(candidates_.front());
+  candidate_with_ufrag.set_username("ABC");
+  jcandidate_.reset(new JsepIceCandidate(std::string("audio_content_name"), 0,
+                                         candidate_with_ufrag));
+  message = webrtc::SdpSerializeCandidate(*jcandidate_);
+  EXPECT_EQ(std::string(kRawCandidate) + " ufrag ABC", message);
+}
+
+// TODO(mallinath): Enable this test once WebRTCSdp is capable of parsing
+// RFC 6544.
+TEST_F(WebRtcSdpTest, SerializeTcpCandidates) {
+  Candidate candidate(ICE_CANDIDATE_COMPONENT_RTP, "tcp",
+                      rtc::SocketAddress("192.168.1.5", 9), kCandidatePriority,
+                      "", "", LOCAL_PORT_TYPE, kCandidateGeneration,
+                      kCandidateFoundation1);
+  candidate.set_tcptype(cricket::TCPTYPE_ACTIVE_STR);
+  rtc::scoped_ptr<IceCandidateInterface> jcandidate(
+    new JsepIceCandidate(std::string("audio_content_name"), 0, candidate));
+
+  std::string message = webrtc::SdpSerializeCandidate(*jcandidate);
+  EXPECT_EQ(std::string(kSdpTcpActiveCandidate), message);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescription) {
+  JsepSessionDescription jdesc(kDummyString);
+  // Deserialize
+  EXPECT_TRUE(SdpDeserialize(kSdpFullString, &jdesc));
+  // Verify
+  EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutMline) {
+  JsepSessionDescription jdesc(kDummyString);
+  const char kSdpWithoutMline[] =
+    "v=0\r\n"
+    "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+    "s=-\r\n"
+    "t=0 0\r\n"
+    "a=msid-semantic: WMS local_stream_1 local_stream_2\r\n";
+  // Deserialize
+  EXPECT_TRUE(SdpDeserialize(kSdpWithoutMline, &jdesc));
+  EXPECT_EQ(0u, jdesc.description()->contents().size());
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutCarriageReturn) {
+  JsepSessionDescription jdesc(kDummyString);
+  std::string sdp_without_carriage_return = kSdpFullString;
+  Replace("\r\n", "\n", &sdp_without_carriage_return);
+  // Deserialize
+  EXPECT_TRUE(SdpDeserialize(sdp_without_carriage_return, &jdesc));
+  // Verify
+  EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutCandidates) {
+  // SessionDescription with desc but without candidates.
+  JsepSessionDescription jdesc_no_candidates(kDummyString);
+  ASSERT_TRUE(jdesc_no_candidates.Initialize(desc_.Copy(),
+                                             kSessionId, kSessionVersion));
+  JsepSessionDescription new_jdesc(kDummyString);
+  EXPECT_TRUE(SdpDeserialize(kSdpString, &new_jdesc));
+  EXPECT_TRUE(CompareSessionDescription(jdesc_no_candidates, new_jdesc));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutRtpmap) {
+  static const char kSdpNoRtpmapString[] =
+      "v=0\r\n"
+      "o=- 11 22 IN IP4 127.0.0.1\r\n"
+      "s=-\r\n"
+      "t=0 0\r\n"
+      "m=audio 49232 RTP/AVP 0 18 103\r\n"
+      // A codec that doesn't appear in the m= line will be ignored.
+      "a=rtpmap:104 ISAC/32000\r\n"
+      // The rtpmap line for a static payload codec is optional.
+      "a=rtpmap:18 G729/16000\r\n"
+      "a=rtpmap:103 ISAC/16000\r\n";
+
+  JsepSessionDescription jdesc(kDummyString);
+  EXPECT_TRUE(SdpDeserialize(kSdpNoRtpmapString, &jdesc));
+  cricket::AudioContentDescription* audio =
+    static_cast<AudioContentDescription*>(
+        jdesc.description()->GetContentDescriptionByName(cricket::CN_AUDIO));
+  AudioCodecs ref_codecs;
+  // The codecs in the AudioContentDescription will be sorted by preference.
+  ref_codecs.push_back(AudioCodec(0, "PCMU", 8000, 0, 1, 3));
+  ref_codecs.push_back(AudioCodec(18, "G729", 16000, 0, 1, 2));
+  ref_codecs.push_back(AudioCodec(103, "ISAC", 16000, 32000, 1, 1));
+  EXPECT_EQ(ref_codecs, audio->codecs());
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutRtpmapButWithFmtp) {
+  static const char kSdpNoRtpmapString[] =
+      "v=0\r\n"
+      "o=- 11 22 IN IP4 127.0.0.1\r\n"
+      "s=-\r\n"
+      "t=0 0\r\n"
+      "m=audio 49232 RTP/AVP 18 103\r\n"
+      "a=fmtp:18 annexb=yes\r\n"
+      "a=rtpmap:103 ISAC/16000\r\n";
+
+  JsepSessionDescription jdesc(kDummyString);
+  EXPECT_TRUE(SdpDeserialize(kSdpNoRtpmapString, &jdesc));
+  cricket::AudioContentDescription* audio =
+    static_cast<AudioContentDescription*>(
+        jdesc.description()->GetContentDescriptionByName(cricket::CN_AUDIO));
+
+  cricket::AudioCodec g729 = audio->codecs()[0];
+  EXPECT_EQ("G729", g729.name);
+  EXPECT_EQ(8000, g729.clockrate);
+  EXPECT_EQ(18, g729.id);
+  cricket::CodecParameterMap::iterator found =
+      g729.params.find("annexb");
+  ASSERT_TRUE(found != g729.params.end());
+  EXPECT_EQ(found->second, "yes");
+
+  cricket::AudioCodec isac = audio->codecs()[1];
+  EXPECT_EQ("ISAC", isac.name);
+  EXPECT_EQ(103, isac.id);
+  EXPECT_EQ(16000, isac.clockrate);
+}
+
+// Ensure that we can deserialize SDP with a=fingerprint properly.
+TEST_F(WebRtcSdpTest, DeserializeJsepSessionDescriptionWithFingerprint) {
+  // Add a DTLS a=fingerprint attribute to our session description.
+  AddFingerprint();
+  JsepSessionDescription new_jdesc(kDummyString);
+  ASSERT_TRUE(new_jdesc.Initialize(desc_.Copy(),
+                                   jdesc_.session_id(),
+                                   jdesc_.session_version()));
+
+  JsepSessionDescription jdesc_with_fingerprint(kDummyString);
+  std::string sdp_with_fingerprint = kSdpString;
+  InjectAfter(kAttributeIcePwdVoice, kFingerprint, &sdp_with_fingerprint);
+  InjectAfter(kAttributeIcePwdVideo, kFingerprint, &sdp_with_fingerprint);
+  EXPECT_TRUE(SdpDeserialize(sdp_with_fingerprint, &jdesc_with_fingerprint));
+  EXPECT_TRUE(CompareSessionDescription(jdesc_with_fingerprint, new_jdesc));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithBundle) {
+  JsepSessionDescription jdesc_with_bundle(kDummyString);
+  std::string sdp_with_bundle = kSdpFullString;
+  InjectAfter(kSessionTime,
+              "a=group:BUNDLE audio_content_name video_content_name\r\n",
+              &sdp_with_bundle);
+  EXPECT_TRUE(SdpDeserialize(sdp_with_bundle, &jdesc_with_bundle));
+  ContentGroup group(cricket::GROUP_TYPE_BUNDLE);
+  group.AddContentName(kAudioContentName);
+  group.AddContentName(kVideoContentName);
+  desc_.AddGroup(group);
+  ASSERT_TRUE(jdesc_.Initialize(desc_.Copy(),
+                                jdesc_.session_id(),
+                                jdesc_.session_version()));
+  EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc_with_bundle));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithBandwidth) {
+  JsepSessionDescription jdesc_with_bandwidth(kDummyString);
+  std::string sdp_with_bandwidth = kSdpFullString;
+  InjectAfter("a=mid:video_content_name\r\na=sendrecv\r\n",
+              "b=AS:100\r\n",
+              &sdp_with_bandwidth);
+  InjectAfter("a=mid:audio_content_name\r\na=sendrecv\r\n",
+              "b=AS:50\r\n",
+              &sdp_with_bandwidth);
+  EXPECT_TRUE(
+      SdpDeserialize(sdp_with_bandwidth, &jdesc_with_bandwidth));
+  VideoContentDescription* vcd = static_cast<VideoContentDescription*>(
+      GetFirstVideoContent(&desc_)->description);
+  vcd->set_bandwidth(100 * 1000);
+  AudioContentDescription* acd = static_cast<AudioContentDescription*>(
+      GetFirstAudioContent(&desc_)->description);
+  acd->set_bandwidth(50 * 1000);
+  ASSERT_TRUE(jdesc_.Initialize(desc_.Copy(),
+                                jdesc_.session_id(),
+                                jdesc_.session_version()));
+  EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc_with_bandwidth));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithIceOptions) {
+  JsepSessionDescription jdesc_with_ice_options(kDummyString);
+  std::string sdp_with_ice_options = kSdpFullString;
+  InjectAfter(kSessionTime,
+              "a=ice-options:iceoption3\r\n",
+              &sdp_with_ice_options);
+  InjectAfter(kAttributeIcePwdVoice,
+              "a=ice-options:iceoption1\r\n",
+              &sdp_with_ice_options);
+  InjectAfter(kAttributeIcePwdVideo,
+              "a=ice-options:iceoption2\r\n",
+              &sdp_with_ice_options);
+  EXPECT_TRUE(SdpDeserialize(sdp_with_ice_options, &jdesc_with_ice_options));
+  std::vector<std::string> transport_options;
+  transport_options.push_back(kIceOption3);
+  transport_options.push_back(kIceOption1);
+  AddIceOptions(kAudioContentName, transport_options);
+  transport_options.clear();
+  transport_options.push_back(kIceOption3);
+  transport_options.push_back(kIceOption2);
+  AddIceOptions(kVideoContentName, transport_options);
+  ASSERT_TRUE(jdesc_.Initialize(desc_.Copy(),
+                                jdesc_.session_id(),
+                                jdesc_.session_version()));
+  EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc_with_ice_options));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithUfragPwd) {
+  // Remove the original ice-ufrag and ice-pwd
+  JsepSessionDescription jdesc_with_ufrag_pwd(kDummyString);
+  std::string sdp_with_ufrag_pwd = kSdpFullString;
+  EXPECT_TRUE(RemoveCandidateUfragPwd(&sdp_with_ufrag_pwd));
+  // Add session level ufrag and pwd
+  InjectAfter(kSessionTime,
+      "a=ice-pwd:session+level+icepwd\r\n"
+      "a=ice-ufrag:session+level+iceufrag\r\n",
+      &sdp_with_ufrag_pwd);
+  // Add media level ufrag and pwd for audio
+  InjectAfter("a=mid:audio_content_name\r\n",
+      "a=ice-pwd:media+level+icepwd\r\na=ice-ufrag:media+level+iceufrag\r\n",
+      &sdp_with_ufrag_pwd);
+  // Update the candidate ufrag and pwd to the expected ones.
+  EXPECT_TRUE(UpdateCandidateUfragPwd(&jdesc_, 0,
+      "media+level+iceufrag", "media+level+icepwd"));
+  EXPECT_TRUE(UpdateCandidateUfragPwd(&jdesc_, 1,
+      "session+level+iceufrag", "session+level+icepwd"));
+  EXPECT_TRUE(SdpDeserialize(sdp_with_ufrag_pwd, &jdesc_with_ufrag_pwd));
+  EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc_with_ufrag_pwd));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithRecvOnlyContent) {
+  EXPECT_TRUE(TestDeserializeDirection(cricket::MD_RECVONLY));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithSendOnlyContent) {
+  EXPECT_TRUE(TestDeserializeDirection(cricket::MD_SENDONLY));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithInactiveContent) {
+  EXPECT_TRUE(TestDeserializeDirection(cricket::MD_INACTIVE));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithRejectedAudio) {
+  EXPECT_TRUE(TestDeserializeRejected(true, false));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithRejectedVideo) {
+  EXPECT_TRUE(TestDeserializeRejected(false, true));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithRejectedAudioVideo) {
+  EXPECT_TRUE(TestDeserializeRejected(true, true));
+}
+
+// Tests that we can still handle SDP that uses mslabel and label instead of
+// msid, for backward compatibility.
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutMsid) {
+  jdesc_.description()->set_msid_supported(false);
+  JsepSessionDescription jdesc(kDummyString);
+  std::string sdp_without_msid = kSdpFullString;
+  Replace("msid", "xmsid", &sdp_without_msid);
+  // Deserialize
+  EXPECT_TRUE(SdpDeserialize(sdp_without_msid, &jdesc));
+  // Verify
+  EXPECT_TRUE(CompareSessionDescription(jdesc_, jdesc));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeCandidate) {
+  JsepIceCandidate jcandidate(kDummyMid, kDummyIndex);
+
+  std::string sdp = kSdpOneCandidate;
+  EXPECT_TRUE(SdpDeserializeCandidate(sdp, &jcandidate));
+  EXPECT_EQ(kDummyMid, jcandidate.sdp_mid());
+  EXPECT_EQ(kDummyIndex, jcandidate.sdp_mline_index());
+  EXPECT_TRUE(jcandidate.candidate().IsEquivalent(jcandidate_->candidate()));
+
+  // Candidate line without generation extension.
+  sdp = kSdpOneCandidate;
+  Replace(" generation 2", "", &sdp);
+  EXPECT_TRUE(SdpDeserializeCandidate(sdp, &jcandidate));
+  EXPECT_EQ(kDummyMid, jcandidate.sdp_mid());
+  EXPECT_EQ(kDummyIndex, jcandidate.sdp_mline_index());
+  Candidate expected = jcandidate_->candidate();
+  expected.set_generation(0);
+  EXPECT_TRUE(jcandidate.candidate().IsEquivalent(expected));
+
+  sdp = kSdpTcpActiveCandidate;
+  EXPECT_TRUE(SdpDeserializeCandidate(sdp, &jcandidate));
+  // Make a cricket::Candidate equivalent to the kSdpTcpCandidate string.
+  Candidate candidate(ICE_CANDIDATE_COMPONENT_RTP, "tcp",
+                      rtc::SocketAddress("192.168.1.5", 9), kCandidatePriority,
+                      "", "", LOCAL_PORT_TYPE, kCandidateGeneration,
+                      kCandidateFoundation1);
+  rtc::scoped_ptr<IceCandidateInterface> jcandidate_template(
+    new JsepIceCandidate(std::string("audio_content_name"), 0, candidate));
+  EXPECT_TRUE(jcandidate.candidate().IsEquivalent(
+                    jcandidate_template->candidate()));
+  sdp = kSdpTcpPassiveCandidate;
+  EXPECT_TRUE(SdpDeserializeCandidate(sdp, &jcandidate));
+  sdp = kSdpTcpSOCandidate;
+  EXPECT_TRUE(SdpDeserializeCandidate(sdp, &jcandidate));
+}
+
+// This test verifies the deserialization of candidate-attribute
+// as per RFC 5245. Candidate-attribute will be of the format
+// candidate:<blah>. This format will be used when candidates
+// are trickled.
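+// An illustrative candidate-attribute in that format (not necessarily the
+// exact string this test uses) would be:
+//   candidate:1 1 udp 2130706432 192.168.1.5 1234 typ host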
+TEST_F(WebRtcSdpTest, DeserializeRawCandidateAttribute) {
+  JsepIceCandidate jcandidate(kDummyMid, kDummyIndex);
+
+  std::string candidate_attribute = kRawCandidate;
+  EXPECT_TRUE(SdpDeserializeCandidate(candidate_attribute, &jcandidate));
+  EXPECT_EQ(kDummyMid, jcandidate.sdp_mid());
+  EXPECT_EQ(kDummyIndex, jcandidate.sdp_mline_index());
+  EXPECT_TRUE(jcandidate.candidate().IsEquivalent(jcandidate_->candidate()));
+  EXPECT_EQ(2u, jcandidate.candidate().generation());
+
+  // Candidate line without generation extension.
+  candidate_attribute = kRawCandidate;
+  Replace(" generation 2", "", &candidate_attribute);
+  EXPECT_TRUE(SdpDeserializeCandidate(candidate_attribute, &jcandidate));
+  EXPECT_EQ(kDummyMid, jcandidate.sdp_mid());
+  EXPECT_EQ(kDummyIndex, jcandidate.sdp_mline_index());
+  Candidate expected = jcandidate_->candidate();
+  expected.set_generation(0);
+  EXPECT_TRUE(jcandidate.candidate().IsEquivalent(expected));
+
+  // Candidate line without candidate:
+  candidate_attribute = kRawCandidate;
+  Replace("candidate:", "", &candidate_attribute);
+  EXPECT_FALSE(SdpDeserializeCandidate(candidate_attribute, &jcandidate));
+
+  // Candidate line with IPV6 address.
+  EXPECT_TRUE(SdpDeserializeCandidate(kRawIPV6Candidate, &jcandidate));
+}
+
+// This test verifies that the deserialization of an invalid candidate string
+// fails.
+TEST_F(WebRtcSdpTest, DeserializeInvalidCandidate) {
+  JsepIceCandidate jcandidate(kDummyMid, kDummyIndex);
+
+  std::string candidate_attribute = kRawCandidate;
+  candidate_attribute.replace(0, 1, "x");
+  EXPECT_FALSE(SdpDeserializeCandidate(candidate_attribute, &jcandidate));
+
+  candidate_attribute = kSdpOneCandidate;
+  candidate_attribute.replace(0, 1, "x");
+  EXPECT_FALSE(SdpDeserializeCandidate(candidate_attribute, &jcandidate));
+
+  candidate_attribute = kRawCandidate;
+  candidate_attribute.append("\r\n");
+  candidate_attribute.append(kRawCandidate);
+  EXPECT_FALSE(SdpDeserializeCandidate(candidate_attribute, &jcandidate));
+
+  EXPECT_FALSE(SdpDeserializeCandidate(kSdpTcpInvalidCandidate, &jcandidate));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithRtpDataChannels) {
+  AddRtpDataChannel();
+  JsepSessionDescription jdesc(kDummyString);
+  ASSERT_TRUE(jdesc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));
+
+  std::string sdp_with_data = kSdpString;
+  sdp_with_data.append(kSdpRtpDataChannelString);
+  JsepSessionDescription jdesc_output(kDummyString);
+
+  // Deserialize
+  EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+  // Verify
+  EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannels) {
+  AddSctpDataChannel();
+  JsepSessionDescription jdesc(kDummyString);
+  ASSERT_TRUE(jdesc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));
+
+  std::string sdp_with_data = kSdpString;
+  sdp_with_data.append(kSdpSctpDataChannelString);
+  JsepSessionDescription jdesc_output(kDummyString);
+
+  // Verify with DTLS/SCTP (already in kSdpSctpDataChannelString).
+  EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+  EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+
+  // Verify with UDP/DTLS/SCTP.
+  sdp_with_data.replace(sdp_with_data.find(kDtlsSctp),
+                        strlen(kDtlsSctp), kUdpDtlsSctp);
+  EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+  EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+
+  // Verify with TCP/DTLS/SCTP.
+  sdp_with_data.replace(sdp_with_data.find(kUdpDtlsSctp),
+                        strlen(kUdpDtlsSctp), kTcpDtlsSctp);
+  EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+  EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannelsWithSctpPort) {
+  AddSctpDataChannel();
+  JsepSessionDescription jdesc(kDummyString);
+  ASSERT_TRUE(jdesc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));
+
+  std::string sdp_with_data = kSdpString;
+  sdp_with_data.append(kSdpSctpDataChannelStringWithSctpPort);
+  JsepSessionDescription jdesc_output(kDummyString);
+
+  // Verify with DTLS/SCTP (already in kSdpSctpDataChannelStringWithSctpPort).
+  EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+  EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+
+  // Verify with UDP/DTLS/SCTP.
+  sdp_with_data.replace(sdp_with_data.find(kDtlsSctp),
+                        strlen(kDtlsSctp), kUdpDtlsSctp);
+  EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+  EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+
+  // Verify with TCP/DTLS/SCTP.
+  sdp_with_data.replace(sdp_with_data.find(kUdpDtlsSctp),
+                        strlen(kUdpDtlsSctp), kTcpDtlsSctp);
+  EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+  EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannelsWithSctpColonPort) {
+  AddSctpDataChannel();
+  JsepSessionDescription jdesc(kDummyString);
+  ASSERT_TRUE(jdesc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));
+
+  std::string sdp_with_data = kSdpString;
+  sdp_with_data.append(kSdpSctpDataChannelStringWithSctpColonPort);
+  JsepSessionDescription jdesc_output(kDummyString);
+
+  // Verify with DTLS/SCTP.
+  EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+  EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+
+  // Verify with UDP/DTLS/SCTP.
+  sdp_with_data.replace(sdp_with_data.find(kDtlsSctp),
+                        strlen(kDtlsSctp), kUdpDtlsSctp);
+  EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+  EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+
+  // Verify with TCP/DTLS/SCTP.
+  sdp_with_data.replace(sdp_with_data.find(kUdpDtlsSctp),
+                        strlen(kUdpDtlsSctp), kTcpDtlsSctp);
+  EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+  EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+}
+
+// Test to check the behaviour if sctp-port is specified
+// on the m= line and in a=sctp-port.
+TEST_F(WebRtcSdpTest, DeserializeSdpWithMultiSctpPort) {
+  AddSctpDataChannel();
+  JsepSessionDescription jdesc(kDummyString);
+  ASSERT_TRUE(jdesc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));
+
+  std::string sdp_with_data = kSdpString;
+  // Append m= attributes
+  sdp_with_data.append(kSdpSctpDataChannelString);
+  // Append a=sctp-port attribute
+  sdp_with_data.append("a=sctp-port 5000\r\n");
+  JsepSessionDescription jdesc_output(kDummyString);
+
+  EXPECT_FALSE(SdpDeserialize(sdp_with_data, &jdesc_output));
+}
+
+// For crbug/344475.
+TEST_F(WebRtcSdpTest, DeserializeSdpWithCorruptedSctpDataChannels) {
+  std::string sdp_with_data = kSdpString;
+  sdp_with_data.append(kSdpSctpDataChannelString);
+  // Remove the "\n" at the end.
+  sdp_with_data = sdp_with_data.substr(0, sdp_with_data.size() - 1);
+  JsepSessionDescription jdesc_output(kDummyString);
+
+  EXPECT_FALSE(SdpDeserialize(sdp_with_data, &jdesc_output));
+  // No crash is a pass.
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannelAndNewPort) {
+  AddSctpDataChannel();
+  const uint16_t kUnusualSctpPort = 9556;
+  char default_portstr[16];
+  char unusual_portstr[16];
+  rtc::sprintfn(default_portstr, sizeof(default_portstr), "%d",
+                      kDefaultSctpPort);
+  rtc::sprintfn(unusual_portstr, sizeof(unusual_portstr), "%d",
+                      kUnusualSctpPort);
+
+  // First setup the expected JsepSessionDescription.
+  JsepSessionDescription jdesc(kDummyString);
+  // Take our pre-built session description and change the SCTP port.
+  cricket::SessionDescription* mutant = desc_.Copy();
+  DataContentDescription* dcdesc = static_cast<DataContentDescription*>(
+      mutant->GetContentDescriptionByName(kDataContentName));
+  std::vector<cricket::DataCodec> codecs(dcdesc->codecs());
+  EXPECT_EQ(1U, codecs.size());
+  EXPECT_EQ(cricket::kGoogleSctpDataCodecId, codecs[0].id);
+  codecs[0].SetParam(cricket::kCodecParamPort, kUnusualSctpPort);
+  dcdesc->set_codecs(codecs);
+
+  // Note: |mutant| is owned by |jdesc| now.
+  ASSERT_TRUE(jdesc.Initialize(mutant, kSessionId, kSessionVersion));
+  mutant = NULL;
+
+  // Then get the deserialized JsepSessionDescription.
+  std::string sdp_with_data = kSdpString;
+  sdp_with_data.append(kSdpSctpDataChannelString);
+  rtc::replace_substrs(default_portstr, strlen(default_portstr),
+                             unusual_portstr, strlen(unusual_portstr),
+                             &sdp_with_data);
+  JsepSessionDescription jdesc_output(kDummyString);
+
+  EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+  EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+
+  // We need to test the deserialized JsepSessionDescription from
+  // kSdpSctpDataChannelStringWithSctpPort for
+  // draft-ietf-mmusic-sctp-sdp-07
+  // a=sctp-port
+  sdp_with_data = kSdpString;
+  sdp_with_data.append(kSdpSctpDataChannelStringWithSctpPort);
+  rtc::replace_substrs(default_portstr, strlen(default_portstr),
+                             unusual_portstr, strlen(unusual_portstr),
+                             &sdp_with_data);
+
+  EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+  EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_output));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithRtpDataChannelsAndBandwidth) {
+  // We want to test that deserializing data content limits bandwidth
+  // settings (it should never be greater than the default).
+  // This should prevent someone from using unlimited data bandwidth through
+  // JS and "breaking the Internet".
+  // See: https://code.google.com/p/chromium/issues/detail?id=280726
+  std::string sdp_with_bandwidth = kSdpString;
+  sdp_with_bandwidth.append(kSdpRtpDataChannelString);
+  InjectAfter("a=mid:data_content_name\r\n",
+              "b=AS:100\r\n",
+              &sdp_with_bandwidth);
+  JsepSessionDescription jdesc_with_bandwidth(kDummyString);
+
+  EXPECT_FALSE(SdpDeserialize(sdp_with_bandwidth, &jdesc_with_bandwidth));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannelsAndBandwidth) {
+  AddSctpDataChannel();
+  JsepSessionDescription jdesc(kDummyString);
+  DataContentDescription* dcd = static_cast<DataContentDescription*>(
+     GetFirstDataContent(&desc_)->description);
+  dcd->set_bandwidth(100 * 1000);
+  ASSERT_TRUE(jdesc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));
+
+  std::string sdp_with_bandwidth = kSdpString;
+  sdp_with_bandwidth.append(kSdpSctpDataChannelString);
+  InjectAfter("a=mid:data_content_name\r\n",
+              "b=AS:100\r\n",
+              &sdp_with_bandwidth);
+  JsepSessionDescription jdesc_with_bandwidth(kDummyString);
+
+  // SCTP has congestion control, so we shouldn't limit the bandwidth
+  // as we do for RTP.
+  EXPECT_TRUE(SdpDeserialize(sdp_with_bandwidth, &jdesc_with_bandwidth));
+  EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_with_bandwidth));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithSessionLevelExtmap) {
+  TestDeserializeExtmap(true, false);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithMediaLevelExtmap) {
+  TestDeserializeExtmap(false, true);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithInvalidExtmap) {
+  TestDeserializeExtmap(true, true);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutEndLineBreak) {
+  JsepSessionDescription jdesc(kDummyString);
+  std::string sdp = kSdpFullString;
+  sdp = sdp.substr(0, sdp.size() - 2);  // Remove \r\n at the end.
+  // Deserialize
+  SdpParseError error;
+  EXPECT_FALSE(webrtc::SdpDeserialize(sdp, &jdesc, &error));
+  const std::string lastline = "a=ssrc:6 label:video_track_id_3";
+  EXPECT_EQ(lastline, error.line);
+  EXPECT_EQ("Invalid SDP line.", error.description);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeCandidateWithDifferentTransport) {
+  JsepIceCandidate jcandidate(kDummyMid, kDummyIndex);
+  std::string new_sdp = kSdpOneCandidate;
+  Replace("udp", "unsupported_transport", &new_sdp);
+  EXPECT_FALSE(SdpDeserializeCandidate(new_sdp, &jcandidate));
+  new_sdp = kSdpOneCandidate;
+  Replace("udp", "uDP", &new_sdp);
+  EXPECT_TRUE(SdpDeserializeCandidate(new_sdp, &jcandidate));
+  EXPECT_EQ(kDummyMid, jcandidate.sdp_mid());
+  EXPECT_EQ(kDummyIndex, jcandidate.sdp_mline_index());
+  EXPECT_TRUE(jcandidate.candidate().IsEquivalent(jcandidate_->candidate()));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeCandidateWithUfragPwd) {
+  JsepIceCandidate jcandidate(kDummyMid, kDummyIndex);
+  EXPECT_TRUE(
+      SdpDeserializeCandidate(kSdpOneCandidateWithUfragPwd, &jcandidate));
+  EXPECT_EQ(kDummyMid, jcandidate.sdp_mid());
+  EXPECT_EQ(kDummyIndex, jcandidate.sdp_mline_index());
+  Candidate ref_candidate = jcandidate_->candidate();
+  ref_candidate.set_username("user_rtp");
+  ref_candidate.set_password("password_rtp");
+  EXPECT_TRUE(jcandidate.candidate().IsEquivalent(ref_candidate));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithConferenceFlag) {
+  JsepSessionDescription jdesc(kDummyString);
+
+  // Deserialize
+  EXPECT_TRUE(SdpDeserialize(kSdpConferenceString, &jdesc));
+
+  // Verify
+  cricket::AudioContentDescription* audio =
+    static_cast<AudioContentDescription*>(
+      jdesc.description()->GetContentDescriptionByName(cricket::CN_AUDIO));
+  EXPECT_TRUE(audio->conference_mode());
+
+  cricket::VideoContentDescription* video =
+    static_cast<VideoContentDescription*>(
+      jdesc.description()->GetContentDescriptionByName(cricket::CN_VIDEO));
+  EXPECT_TRUE(video->conference_mode());
+}
+
+TEST_F(WebRtcSdpTest, DeserializeBrokenSdp) {
+  const char kSdpDestroyer[] = "!@#$%^&";
+  const char kSdpEmptyType[] = " =candidate";
+  const char kSdpEqualAsPlus[] = "a+candidate";
+  const char kSdpSpaceAfterEqual[] = "a= candidate";
+  const char kSdpUpperType[] = "A=candidate";
+  const char kSdpEmptyLine[] = "";
+  const char kSdpMissingValue[] = "a=";
+
+  const char kSdpBrokenFingerprint[] = "a=fingerprint:sha-1 "
+      "4AAD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB";
+  const char kSdpExtraField[] = "a=fingerprint:sha-1 "
+      "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB XXX";
+  const char kSdpMissingSpace[] = "a=fingerprint:sha-1"
+      "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB";
+  // MD5 is not allowed in fingerprints.
+  const char kSdpMd5[] = "a=fingerprint:md5 "
+      "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B";
+
+  // Broken session description
+  ExpectParseFailure("v=", kSdpDestroyer);
+  ExpectParseFailure("o=", kSdpDestroyer);
+  ExpectParseFailure("s=-", kSdpDestroyer);
+  // Broken time description
+  ExpectParseFailure("t=", kSdpDestroyer);
+
+  // Broken media description
+  ExpectParseFailure("m=audio", "c=IN IP4 74.125.224.39");
+  ExpectParseFailure("m=video", kSdpDestroyer);
+
+  // Invalid lines
+  ExpectParseFailure("a=candidate", kSdpEmptyType);
+  ExpectParseFailure("a=candidate", kSdpEqualAsPlus);
+  ExpectParseFailure("a=candidate", kSdpSpaceAfterEqual);
+  ExpectParseFailure("a=candidate", kSdpUpperType);
+
+  // Bogus fingerprint replacing a=sendrecv. We selected this attribute
+  // because it's orthogonal to what we are replacing and hence
+  // safe.
+  ExpectParseFailure("a=sendrecv", kSdpBrokenFingerprint);
+  ExpectParseFailure("a=sendrecv", kSdpExtraField);
+  ExpectParseFailure("a=sendrecv", kSdpMissingSpace);
+  ExpectParseFailure("a=sendrecv", kSdpMd5);
+
+  // Empty Line
+  ExpectParseFailure("a=rtcp:2347 IN IP4 74.125.127.126", kSdpEmptyLine);
+  ExpectParseFailure("a=rtcp:2347 IN IP4 74.125.127.126", kSdpMissingValue);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithInvalidAttributeValue) {
+  // ssrc
+  ExpectParseFailure("a=ssrc:1", "a=ssrc:badvalue");
+  ExpectParseFailure("a=ssrc-group:FEC 5 6", "a=ssrc-group:FEC badvalue 6");
+  // crypto
+  ExpectParseFailure("a=crypto:1 ", "a=crypto:badvalue ");
+  // rtpmap
+  ExpectParseFailure("a=rtpmap:111 ", "a=rtpmap:badvalue ");
+  ExpectParseFailure("opus/48000/2", "opus/badvalue/2");
+  ExpectParseFailure("opus/48000/2", "opus/48000/badvalue");
+  // candidate
+  ExpectParseFailure("1 udp 2130706432", "badvalue udp 2130706432");
+  ExpectParseFailure("1 udp 2130706432", "1 udp badvalue");
+  ExpectParseFailure("192.168.1.5 1234", "192.168.1.5 badvalue");
+  ExpectParseFailure("rport 2346", "rport badvalue");
+  ExpectParseFailure("rport 2346 generation 2",
+                     "rport 2346 generation badvalue");
+  // m line
+  ExpectParseFailure("m=audio 2345 RTP/SAVPF 111 103 104",
+                     "m=audio 2345 RTP/SAVPF 111 badvalue 104");
+
+  // bandwidth
+  ExpectParseFailureWithNewLines("a=mid:video_content_name\r\n",
+                                 "b=AS:badvalue\r\n",
+                                 "b=AS:badvalue");
+  // rtcp-fb
+  ExpectParseFailureWithNewLines("a=mid:video_content_name\r\n",
+                                 "a=rtcp-fb:badvalue nack\r\n",
+                                 "a=rtcp-fb:badvalue nack");
+  // extmap
+  ExpectParseFailureWithNewLines("a=mid:video_content_name\r\n",
+                                 "a=extmap:badvalue http://example.com\r\n",
+                                 "a=extmap:badvalue http://example.com");
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithReorderedPltypes) {
+  JsepSessionDescription jdesc_output(kDummyString);
+
+  const char kSdpWithReorderedPlTypesString[] =
+      "v=0\r\n"
+      "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+      "s=-\r\n"
+      "t=0 0\r\n"
+      "m=audio 9 RTP/SAVPF 104 103\r\n"  // Pl type 104 preferred.
+      "a=rtpmap:111 opus/48000/2\r\n"  // Pltype 111 listed before 103 and 104
+                                       // in the map.
+      "a=rtpmap:103 ISAC/16000\r\n"  // Pltype 103 listed before 104 in the map.
+      "a=rtpmap:104 ISAC/32000\r\n";
+
+  // Deserialize
+  EXPECT_TRUE(SdpDeserialize(kSdpWithReorderedPlTypesString, &jdesc_output));
+
+  const ContentInfo* ac = GetFirstAudioContent(jdesc_output.description());
+  ASSERT_TRUE(ac != NULL);
+  const AudioContentDescription* acd =
+      static_cast<const AudioContentDescription*>(ac->description);
+  ASSERT_FALSE(acd->codecs().empty());
+  EXPECT_EQ("ISAC", acd->codecs()[0].name);
+  EXPECT_EQ(32000, acd->codecs()[0].clockrate);
+  EXPECT_EQ(104, acd->codecs()[0].id);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSerializeCodecParams) {
+  JsepSessionDescription jdesc_output(kDummyString);
+  CodecParams params;
+  params.max_ptime = 40;
+  params.ptime = 30;
+  params.min_ptime = 10;
+  params.sprop_stereo = 1;
+  params.stereo = 1;
+  params.useinband = 1;
+  params.maxaveragebitrate = 128000;
+  TestDeserializeCodecParams(params, &jdesc_output);
+  TestSerialize(jdesc_output);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSerializeRtcpFb) {
+  const bool kUseWildcard = false;
+  JsepSessionDescription jdesc_output(kDummyString);
+  TestDeserializeRtcpFb(&jdesc_output, kUseWildcard);
+  TestSerialize(jdesc_output);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSerializeRtcpFbWildcard) {
+  const bool kUseWildcard = true;
+  JsepSessionDescription jdesc_output(kDummyString);
+  TestDeserializeRtcpFb(&jdesc_output, kUseWildcard);
+  TestSerialize(jdesc_output);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeVideoFmtp) {
+  JsepSessionDescription jdesc_output(kDummyString);
+
+  const char kSdpWithFmtpString[] =
+      "v=0\r\n"
+      "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+      "s=-\r\n"
+      "t=0 0\r\n"
+      "m=video 3457 RTP/SAVPF 120\r\n"
+      "a=rtpmap:120 VP8/90000\r\n"
+      "a=fmtp:120 x-google-min-bitrate=10;x-google-max-quantization=40\r\n";
+
+  // Deserialize
+  SdpParseError error;
+  EXPECT_TRUE(
+      webrtc::SdpDeserialize(kSdpWithFmtpString, &jdesc_output, &error));
+
+  const ContentInfo* vc = GetFirstVideoContent(jdesc_output.description());
+  ASSERT_TRUE(vc != NULL);
+  const VideoContentDescription* vcd =
+      static_cast<const VideoContentDescription*>(vc->description);
+  ASSERT_FALSE(vcd->codecs().empty());
+  cricket::VideoCodec vp8 = vcd->codecs()[0];
+  EXPECT_EQ("VP8", vp8.name);
+  EXPECT_EQ(120, vp8.id);
+  cricket::CodecParameterMap::iterator found =
+      vp8.params.find("x-google-min-bitrate");
+  ASSERT_TRUE(found != vp8.params.end());
+  EXPECT_EQ(found->second, "10");
+  found = vp8.params.find("x-google-max-quantization");
+  ASSERT_TRUE(found != vp8.params.end());
+  EXPECT_EQ(found->second, "40");
+}
+
+TEST_F(WebRtcSdpTest, DeserializeVideoFmtpWithSpace) {
+  JsepSessionDescription jdesc_output(kDummyString);
+
+  const char kSdpWithFmtpString[] =
+      "v=0\r\n"
+      "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+      "s=-\r\n"
+      "t=0 0\r\n"
+      "m=video 3457 RTP/SAVPF 120\r\n"
+      "a=rtpmap:120 VP8/90000\r\n"
+      "a=fmtp:120   x-google-min-bitrate=10;  x-google-max-quantization=40\r\n";
+
+  // Deserialize
+  SdpParseError error;
+  EXPECT_TRUE(webrtc::SdpDeserialize(kSdpWithFmtpString, &jdesc_output,
+                                     &error));
+
+  const ContentInfo* vc = GetFirstVideoContent(jdesc_output.description());
+  ASSERT_TRUE(vc != NULL);
+  const VideoContentDescription* vcd =
+      static_cast<const VideoContentDescription*>(vc->description);
+  ASSERT_FALSE(vcd->codecs().empty());
+  cricket::VideoCodec vp8 = vcd->codecs()[0];
+  EXPECT_EQ("VP8", vp8.name);
+  EXPECT_EQ(120, vp8.id);
+  cricket::CodecParameterMap::iterator found =
+      vp8.params.find("x-google-min-bitrate");
+  ASSERT_TRUE(found != vp8.params.end());
+  EXPECT_EQ(found->second, "10");
+  found = vp8.params.find("x-google-max-quantization");
+  ASSERT_TRUE(found != vp8.params.end());
+  EXPECT_EQ(found->second, "40");
+}
+
+TEST_F(WebRtcSdpTest, SerializeVideoFmtp) {
+  VideoContentDescription* vcd = static_cast<VideoContentDescription*>(
+      GetFirstVideoContent(&desc_)->description);
+
+  cricket::VideoCodecs codecs = vcd->codecs();
+  codecs[0].params["x-google-min-bitrate"] = "10";
+  vcd->set_codecs(codecs);
+
+  ASSERT_TRUE(jdesc_.Initialize(desc_.Copy(),
+                                jdesc_.session_id(),
+                                jdesc_.session_version()));
+  std::string message = webrtc::SdpSerialize(jdesc_);
+  std::string sdp_with_fmtp = kSdpFullString;
+  InjectAfter("a=rtpmap:120 VP8/90000\r\n",
+              "a=fmtp:120 x-google-min-bitrate=10\r\n",
+              &sdp_with_fmtp);
+  EXPECT_EQ(sdp_with_fmtp, message);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithIceLite) {
+  JsepSessionDescription jdesc_with_icelite(kDummyString);
+  std::string sdp_with_icelite = kSdpFullString;
+  EXPECT_TRUE(SdpDeserialize(sdp_with_icelite, &jdesc_with_icelite));
+  cricket::SessionDescription* desc = jdesc_with_icelite.description();
+  const cricket::TransportInfo* tinfo1 =
+      desc->GetTransportInfoByName("audio_content_name");
+  EXPECT_EQ(cricket::ICEMODE_FULL, tinfo1->description.ice_mode);
+  const cricket::TransportInfo* tinfo2 =
+      desc->GetTransportInfoByName("video_content_name");
+  EXPECT_EQ(cricket::ICEMODE_FULL, tinfo2->description.ice_mode);
+  InjectAfter(kSessionTime,
+              "a=ice-lite\r\n",
+              &sdp_with_icelite);
+  EXPECT_TRUE(SdpDeserialize(sdp_with_icelite, &jdesc_with_icelite));
+  desc = jdesc_with_icelite.description();
+  const cricket::TransportInfo* atinfo =
+      desc->GetTransportInfoByName("audio_content_name");
+  EXPECT_EQ(cricket::ICEMODE_LITE, atinfo->description.ice_mode);
+  const cricket::TransportInfo* vtinfo =
+        desc->GetTransportInfoByName("video_content_name");
+  EXPECT_EQ(cricket::ICEMODE_LITE, vtinfo->description.ice_mode);
+}
+
+// Verifies that the candidates in the input SDP are parsed and serialized
+// correctly in the output SDP.
+TEST_F(WebRtcSdpTest, RoundTripSdpWithSctpDataChannelsWithCandidates) {
+  std::string sdp_with_data = kSdpString;
+  sdp_with_data.append(kSdpSctpDataChannelWithCandidatesString);
+  JsepSessionDescription jdesc_output(kDummyString);
+
+  EXPECT_TRUE(SdpDeserialize(sdp_with_data, &jdesc_output));
+  EXPECT_EQ(sdp_with_data, webrtc::SdpSerialize(jdesc_output));
+}
+
+TEST_F(WebRtcSdpTest, SerializeDtlsSetupAttribute) {
+  AddFingerprint();
+  TransportInfo audio_transport_info =
+      *(desc_.GetTransportInfoByName(kAudioContentName));
+  EXPECT_EQ(cricket::CONNECTIONROLE_NONE,
+            audio_transport_info.description.connection_role);
+  audio_transport_info.description.connection_role =
+        cricket::CONNECTIONROLE_ACTIVE;
+
+  TransportInfo video_transport_info =
+      *(desc_.GetTransportInfoByName(kVideoContentName));
+  EXPECT_EQ(cricket::CONNECTIONROLE_NONE,
+            video_transport_info.description.connection_role);
+  video_transport_info.description.connection_role =
+        cricket::CONNECTIONROLE_ACTIVE;
+
+  desc_.RemoveTransportInfoByName(kAudioContentName);
+  desc_.RemoveTransportInfoByName(kVideoContentName);
+
+  desc_.AddTransportInfo(audio_transport_info);
+  desc_.AddTransportInfo(video_transport_info);
+
+  ASSERT_TRUE(jdesc_.Initialize(desc_.Copy(),
+                                jdesc_.session_id(),
+                                jdesc_.session_version()));
+  std::string message = webrtc::SdpSerialize(jdesc_);
+  std::string sdp_with_dtlssetup = kSdpFullString;
+
+  // The fingerprint attribute is necessary to add the DTLS setup attribute.
+  InjectAfter(kAttributeIcePwdVoice,
+              kFingerprint, &sdp_with_dtlssetup);
+  InjectAfter(kAttributeIcePwdVideo,
+              kFingerprint, &sdp_with_dtlssetup);
+  // Now add the |setup| attribute.
+  InjectAfter(kFingerprint,
+              "a=setup:active\r\n", &sdp_with_dtlssetup);
+  EXPECT_EQ(sdp_with_dtlssetup, message);
+}
+
+TEST_F(WebRtcSdpTest, DeserializeDtlsSetupAttribute) {
+  JsepSessionDescription jdesc_with_dtlssetup(kDummyString);
+  std::string sdp_with_dtlssetup = kSdpFullString;
+  InjectAfter(kSessionTime,
+              "a=setup:actpass\r\n",
+              &sdp_with_dtlssetup);
+  EXPECT_TRUE(SdpDeserialize(sdp_with_dtlssetup, &jdesc_with_dtlssetup));
+  cricket::SessionDescription* desc = jdesc_with_dtlssetup.description();
+  const cricket::TransportInfo* atinfo =
+      desc->GetTransportInfoByName("audio_content_name");
+  EXPECT_EQ(cricket::CONNECTIONROLE_ACTPASS,
+            atinfo->description.connection_role);
+  const cricket::TransportInfo* vtinfo =
+        desc->GetTransportInfoByName("video_content_name");
+  EXPECT_EQ(cricket::CONNECTIONROLE_ACTPASS,
+            vtinfo->description.connection_role);
+}
+
+// Verifies that the order of the serialized m-lines follows the order of the
+// ContentInfo in SessionDescription, and vice versa for deserialization.
+TEST_F(WebRtcSdpTest, MediaContentOrderMaintainedRoundTrip) {
+  JsepSessionDescription jdesc(kDummyString);
+  const std::string media_content_sdps[3] = {
+    kSdpAudioString,
+    kSdpVideoString,
+    kSdpSctpDataChannelString
+  };
+  const cricket::MediaType media_types[3] = {
+    cricket::MEDIA_TYPE_AUDIO,
+    cricket::MEDIA_TYPE_VIDEO,
+    cricket::MEDIA_TYPE_DATA
+  };
+
+  // Verifies all 6 permutations.
+  for (size_t i = 0; i < 6; ++i) {
+    size_t media_content_in_sdp[3];
+    // The index of the first media content.
+    media_content_in_sdp[0] = i / 2;
+    // The index of the second media content.
+    media_content_in_sdp[1] = (media_content_in_sdp[0] + i % 2 + 1) % 3;
+    // The index of the third media content.
+    media_content_in_sdp[2] = (media_content_in_sdp[0] + (i + 1) % 2 + 1) % 3;
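+    // For reference, i = 0..5 yields the orders (0,1,2), (0,2,1), (1,2,0),
+    // (1,0,2), (2,0,1) and (2,1,0), i.e. all six permutations.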
+
+    std::string sdp_string = kSdpSessionString;
+    for (size_t j = 0; j < 3; ++j)
+      sdp_string += media_content_sdps[media_content_in_sdp[j]];
+
+    EXPECT_TRUE(SdpDeserialize(sdp_string, &jdesc));
+    cricket::SessionDescription* desc = jdesc.description();
+    EXPECT_EQ(3u, desc->contents().size());
+
+    for (size_t j = 0; j < 3; ++j) {
+      const cricket::MediaContentDescription* mdesc =
+          static_cast<const cricket::MediaContentDescription*>(
+              desc->contents()[j].description);
+      EXPECT_EQ(media_types[media_content_in_sdp[j]], mdesc->type());
+    }
+
+    std::string serialized_sdp = webrtc::SdpSerialize(jdesc);
+    EXPECT_EQ(sdp_string, serialized_sdp);
+  }
+}
diff --git a/webrtc/api/webrtcsession.cc b/webrtc/api/webrtcsession.cc
new file mode 100644
index 0000000..15feb53
--- /dev/null
+++ b/webrtc/api/webrtcsession.cc
@@ -0,0 +1,2185 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/webrtcsession.h"
+
+#include <limits.h>
+
+#include <algorithm>
+#include <set>
+#include <utility>
+#include <vector>
+
+#include "talk/session/media/channel.h"
+#include "talk/session/media/channelmanager.h"
+#include "talk/session/media/mediasession.h"
+#include "webrtc/api/jsepicecandidate.h"
+#include "webrtc/api/jsepsessiondescription.h"
+#include "webrtc/api/mediaconstraintsinterface.h"
+#include "webrtc/api/peerconnectioninterface.h"
+#include "webrtc/api/sctputils.h"
+#include "webrtc/api/webrtcsessiondescriptionfactory.h"
+#include "webrtc/audio/audio_sink.h"
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/helpers.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/stringencode.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/call.h"
+#include "webrtc/media/base/constants.h"
+#include "webrtc/media/base/videocapturer.h"
+#include "webrtc/p2p/base/portallocator.h"
+#include "webrtc/p2p/base/transportchannel.h"
+
+using cricket::ContentInfo;
+using cricket::ContentInfos;
+using cricket::MediaContentDescription;
+using cricket::SessionDescription;
+using cricket::TransportInfo;
+
+using cricket::LOCAL_PORT_TYPE;
+using cricket::STUN_PORT_TYPE;
+using cricket::RELAY_PORT_TYPE;
+using cricket::PRFLX_PORT_TYPE;
+
+namespace webrtc {
+
+// Error messages
+const char kBundleWithoutRtcpMux[] = "RTCP-MUX must be enabled when BUNDLE "
+                                     "is enabled.";
+const char kCreateChannelFailed[] = "Failed to create channels.";
+const char kInvalidCandidates[] = "Description contains invalid candidates.";
+const char kInvalidSdp[] = "Invalid session description.";
+const char kMlineMismatch[] =
+    "Offer and answer descriptions m-lines are not matching. Rejecting answer.";
+const char kPushDownTDFailed[] =
+    "Failed to push down transport description:";
+const char kSdpWithoutDtlsFingerprint[] =
+    "Called with SDP without DTLS fingerprint.";
+const char kSdpWithoutSdesCrypto[] =
+    "Called with SDP without SDES crypto.";
+const char kSdpWithoutIceUfragPwd[] =
+    "Called with SDP without ice-ufrag and ice-pwd.";
+const char kSessionError[] = "Session error code: ";
+const char kSessionErrorDesc[] = "Session error description: ";
+const char kDtlsSetupFailureRtp[] =
+    "Couldn't set up DTLS-SRTP on RTP channel.";
+const char kDtlsSetupFailureRtcp[] =
+    "Couldn't set up DTLS-SRTP on RTCP channel.";
+const char kEnableBundleFailed[] = "Failed to enable BUNDLE.";
+
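+// Maps a (local, remote) candidate type pair to the corresponding
+// IceCandidatePairType counter bucket. For example, a srflx local candidate
+// paired with a relay remote candidate yields kIceCandidatePairSrflxRelay;
+// host/host pairs are further split by private vs. public IP address.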
+IceCandidatePairType GetIceCandidatePairCounter(
+    const cricket::Candidate& local,
+    const cricket::Candidate& remote) {
+  const auto& l = local.type();
+  const auto& r = remote.type();
+  const auto& host = LOCAL_PORT_TYPE;
+  const auto& srflx = STUN_PORT_TYPE;
+  const auto& relay = RELAY_PORT_TYPE;
+  const auto& prflx = PRFLX_PORT_TYPE;
+  if (l == host && r == host) {
+    bool local_private = IPIsPrivate(local.address().ipaddr());
+    bool remote_private = IPIsPrivate(remote.address().ipaddr());
+    if (local_private) {
+      if (remote_private) {
+        return kIceCandidatePairHostPrivateHostPrivate;
+      } else {
+        return kIceCandidatePairHostPrivateHostPublic;
+      }
+    } else {
+      if (remote_private) {
+        return kIceCandidatePairHostPublicHostPrivate;
+      } else {
+        return kIceCandidatePairHostPublicHostPublic;
+      }
+    }
+  }
+  if (l == host && r == srflx)
+    return kIceCandidatePairHostSrflx;
+  if (l == host && r == relay)
+    return kIceCandidatePairHostRelay;
+  if (l == host && r == prflx)
+    return kIceCandidatePairHostPrflx;
+  if (l == srflx && r == host)
+    return kIceCandidatePairSrflxHost;
+  if (l == srflx && r == srflx)
+    return kIceCandidatePairSrflxSrflx;
+  if (l == srflx && r == relay)
+    return kIceCandidatePairSrflxRelay;
+  if (l == srflx && r == prflx)
+    return kIceCandidatePairSrflxPrflx;
+  if (l == relay && r == host)
+    return kIceCandidatePairRelayHost;
+  if (l == relay && r == srflx)
+    return kIceCandidatePairRelaySrflx;
+  if (l == relay && r == relay)
+    return kIceCandidatePairRelayRelay;
+  if (l == relay && r == prflx)
+    return kIceCandidatePairRelayPrflx;
+  if (l == prflx && r == host)
+    return kIceCandidatePairPrflxHost;
+  if (l == prflx && r == srflx)
+    return kIceCandidatePairPrflxSrflx;
+  if (l == prflx && r == relay)
+    return kIceCandidatePairPrflxRelay;
+  return kIceCandidatePairMax;
+}
+
+// Compares |answer| against |offer|. Checks that both descriptions have the
+// same number of m-lines, with matching content names and media types, in
+// the same order. Returns true if they match, false otherwise.
+static bool VerifyMediaDescriptions(
+    const SessionDescription* answer, const SessionDescription* offer) {
+  if (offer->contents().size() != answer->contents().size())
+    return false;
+
+  for (size_t i = 0; i < offer->contents().size(); ++i) {
+    if ((offer->contents()[i].name) != answer->contents()[i].name) {
+      return false;
+    }
+    const MediaContentDescription* offer_mdesc =
+        static_cast<const MediaContentDescription*>(
+            offer->contents()[i].description);
+    const MediaContentDescription* answer_mdesc =
+        static_cast<const MediaContentDescription*>(
+            answer->contents()[i].description);
+    if (offer_mdesc->type() != answer_mdesc->type()) {
+      return false;
+    }
+  }
+  return true;
+}
+
+// Checks that each non-rejected content has SDES crypto keys or a DTLS
+// fingerprint. Mismatches, such as replying with a DTLS fingerprint to SDES
+// keys, will be caught in Transport negotiation, and backstopped by Channel's
+// |secure_required| check.
+static bool VerifyCrypto(const SessionDescription* desc,
+                         bool dtls_enabled,
+                         std::string* error) {
+  const ContentInfos& contents = desc->contents();
+  for (size_t index = 0; index < contents.size(); ++index) {
+    const ContentInfo* cinfo = &contents[index];
+    if (cinfo->rejected) {
+      continue;
+    }
+
+    // If the content isn't rejected, crypto must be present.
+    const MediaContentDescription* media =
+        static_cast<const MediaContentDescription*>(cinfo->description);
+    const TransportInfo* tinfo = desc->GetTransportInfoByName(cinfo->name);
+    if (!media || !tinfo) {
+      // Something is not right.
+      LOG(LS_ERROR) << kInvalidSdp;
+      *error = kInvalidSdp;
+      return false;
+    }
+    if (dtls_enabled) {
+      if (!tinfo->description.identity_fingerprint) {
+        LOG(LS_WARNING) <<
+            "Session description must have DTLS fingerprint if DTLS enabled.";
+        *error = kSdpWithoutDtlsFingerprint;
+        return false;
+      }
+    } else {
+      if (media->cryptos().empty()) {
+        LOG(LS_WARNING) <<
+            "Session description must have SDES when DTLS disabled.";
+        *error = kSdpWithoutSdesCrypto;
+        return false;
+      }
+    }
+  }
+
+  return true;
+}
+
+// Checks that each non-rejected content has ice-ufrag and ice-pwd set.
+static bool VerifyIceUfragPwdPresent(const SessionDescription* desc) {
+  const ContentInfos& contents = desc->contents();
+  for (size_t index = 0; index < contents.size(); ++index) {
+    const ContentInfo* cinfo = &contents[index];
+    if (cinfo->rejected) {
+      continue;
+    }
+
+    // If the content isn't rejected, ice-ufrag and ice-pwd must be present.
+    const TransportInfo* tinfo = desc->GetTransportInfoByName(cinfo->name);
+    if (!tinfo) {
+      // Something is not right.
+      LOG(LS_ERROR) << kInvalidSdp;
+      return false;
+    }
+    if (tinfo->description.ice_ufrag.empty() ||
+        tinfo->description.ice_pwd.empty()) {
+      LOG(LS_ERROR) << "Session description must have ice ufrag and pwd.";
+      return false;
+    }
+  }
+  return true;
+}
+
+// Forces the |crypto_required| flag of each media description in |sdesc| to
+// the appropriate state based on the current security policy, to ensure a
+// failure occurs if there is an error in crypto negotiation.
+// Called when processing the local session description.
+static void UpdateSessionDescriptionSecurePolicy(cricket::CryptoType type,
+                                                 SessionDescription* sdesc) {
+  if (!sdesc) {
+    return;
+  }
+
+  // Update the |crypto_required_| in each MediaContentDescription to the
+  // appropriate state based on the current security policy.
+  for (cricket::ContentInfos::iterator iter = sdesc->contents().begin();
+       iter != sdesc->contents().end(); ++iter) {
+    if (cricket::IsMediaContent(&*iter)) {
+      MediaContentDescription* mdesc =
+          static_cast<MediaContentDescription*> (iter->description);
+      if (mdesc) {
+        mdesc->set_crypto_required(type);
+      }
+    }
+  }
+}
+
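+// Returns, via |ssrc|, the first SSRC of the audio stream identified by
+// |track_id| in |session_description|. Returns false if audio is not used in
+// the call or no such stream exists.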
+static bool GetAudioSsrcByTrackId(const SessionDescription* session_description,
+                                  const std::string& track_id,
+                                  uint32_t* ssrc) {
+  const cricket::ContentInfo* audio_info =
+      cricket::GetFirstAudioContent(session_description);
+  if (!audio_info) {
+    LOG(LS_ERROR) << "Audio not used in this call";
+    return false;
+  }
+
+  const cricket::MediaContentDescription* audio_content =
+      static_cast<const cricket::MediaContentDescription*>(
+          audio_info->description);
+  const cricket::StreamParams* stream =
+      cricket::GetStreamByIds(audio_content->streams(), "", track_id);
+  if (!stream) {
+    return false;
+  }
+
+  *ssrc = stream->first_ssrc();
+  return true;
+}
+
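+// Looks up the track id associated with |ssrc|, searching first the audio and
+// then the video streams of |session_description|.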
+static bool GetTrackIdBySsrc(const SessionDescription* session_description,
+                             uint32_t ssrc,
+                             std::string* track_id) {
+  ASSERT(track_id != NULL);
+
+  const cricket::ContentInfo* audio_info =
+      cricket::GetFirstAudioContent(session_description);
+  if (audio_info) {
+    const cricket::MediaContentDescription* audio_content =
+        static_cast<const cricket::MediaContentDescription*>(
+            audio_info->description);
+
+    const auto* found =
+        cricket::GetStreamBySsrc(audio_content->streams(), ssrc);
+    if (found) {
+      *track_id = found->id;
+      return true;
+    }
+  }
+
+  const cricket::ContentInfo* video_info =
+      cricket::GetFirstVideoContent(session_description);
+  if (video_info) {
+    const cricket::MediaContentDescription* video_content =
+        static_cast<const cricket::MediaContentDescription*>(
+            video_info->description);
+
+    const auto* found =
+        cricket::GetStreamBySsrc(video_content->streams(), ssrc);
+    if (found) {
+      *track_id = found->id;
+      return true;
+    }
+  }
+  return false;
+}
+
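+// Logs a human-readable description of an SDP failure and, if |err_desc| is
+// non-null, stores it there. Always returns false so callers can simply write
+// "return BadSdp(...);".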
+static bool BadSdp(const std::string& source,
+                   const std::string& type,
+                   const std::string& reason,
+                   std::string* err_desc) {
+  std::ostringstream desc;
+  desc << "Failed to set " << source;
+  if (!type.empty()) {
+    desc << " " << type;
+  }
+  desc << " sdp: " << reason;
+
+  if (err_desc) {
+    *err_desc = desc.str();
+  }
+  LOG(LS_ERROR) << desc.str();
+  return false;
+}
+
+static bool BadSdp(cricket::ContentSource source,
+                   const std::string& type,
+                   const std::string& reason,
+                   std::string* err_desc) {
+  if (source == cricket::CS_LOCAL) {
+    return BadSdp("local", type, reason, err_desc);
+  } else {
+    return BadSdp("remote", type, reason, err_desc);
+  }
+}
+
+static bool BadLocalSdp(const std::string& type,
+                        const std::string& reason,
+                        std::string* err_desc) {
+  return BadSdp(cricket::CS_LOCAL, type, reason, err_desc);
+}
+
+static bool BadRemoteSdp(const std::string& type,
+                         const std::string& reason,
+                         std::string* err_desc) {
+  return BadSdp(cricket::CS_REMOTE, type, reason, err_desc);
+}
+
+static bool BadOfferSdp(cricket::ContentSource source,
+                        const std::string& reason,
+                        std::string* err_desc) {
+  return BadSdp(source, SessionDescriptionInterface::kOffer, reason, err_desc);
+}
+
+static bool BadPranswerSdp(cricket::ContentSource source,
+                           const std::string& reason,
+                           std::string* err_desc) {
+  return BadSdp(source, SessionDescriptionInterface::kPrAnswer,
+                reason, err_desc);
+}
+
+static bool BadAnswerSdp(cricket::ContentSource source,
+                         const std::string& reason,
+                         std::string* err_desc) {
+  return BadSdp(source, SessionDescriptionInterface::kAnswer, reason, err_desc);
+}
+
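+// Stringifies a WebRtcSession state enum value for logging; expanded inside
+// the switch in GetStateString() below.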
+#define GET_STRING_OF_STATE(state)   \
+  case webrtc::WebRtcSession::state: \
+    result = #state;                 \
+    break;
+
+static std::string GetStateString(webrtc::WebRtcSession::State state) {
+  std::string result;
+  switch (state) {
+    GET_STRING_OF_STATE(STATE_INIT)
+    GET_STRING_OF_STATE(STATE_SENTOFFER)
+    GET_STRING_OF_STATE(STATE_RECEIVEDOFFER)
+    GET_STRING_OF_STATE(STATE_SENTPRANSWER)
+    GET_STRING_OF_STATE(STATE_RECEIVEDPRANSWER)
+    GET_STRING_OF_STATE(STATE_INPROGRESS)
+    GET_STRING_OF_STATE(STATE_CLOSED)
+    default:
+      ASSERT(false);
+      break;
+  }
+  return result;
+}
+
+#define GET_STRING_OF_ERROR_CODE(err) \
+  case webrtc::WebRtcSession::err:    \
+    result = #err;                    \
+    break;
+
+static std::string GetErrorCodeString(webrtc::WebRtcSession::Error err) {
+  std::string result;
+  switch (err) {
+    GET_STRING_OF_ERROR_CODE(ERROR_NONE)
+    GET_STRING_OF_ERROR_CODE(ERROR_CONTENT)
+    GET_STRING_OF_ERROR_CODE(ERROR_TRANSPORT)
+    default:
+      RTC_DCHECK(false);
+      break;
+  }
+  return result;
+}
+
+static std::string MakeErrorString(const std::string& error,
+                                   const std::string& desc) {
+  std::ostringstream ret;
+  ret << error << " " << desc;
+  return ret.str();
+}
+
+static std::string MakeTdErrorString(const std::string& desc) {
+  return MakeErrorString(kPushDownTDFailed, desc);
+}
+
+// Set |option| to the highest-priority value of |key| in the optional
+// constraints if the key is found and has a valid value.
+template <typename T>
+static void SetOptionFromOptionalConstraint(
+    const MediaConstraintsInterface* constraints,
+    const std::string& key,
+    rtc::Optional<T>* option) {
+  if (!constraints) {
+    return;
+  }
+  std::string string_value;
+  T value;
+  if (constraints->GetOptional().FindFirst(key, &string_value)) {
+    if (rtc::FromString(string_value, &value)) {
+      *option = rtc::Optional<T>(value);
+    }
+  }
+}
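+// A typical use, as seen in Initialize() below:
+//   SetOptionFromOptionalConstraint(constraints,
+//       MediaConstraintsInterface::kCpuOveruseDetection,
+//       &video_options_.cpu_overuse_detection);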
+
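+// Maps the PeerConnection ICE transports policy onto the port allocator's
+// candidate filter flags.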
+uint32_t ConvertIceTransportTypeToCandidateFilter(
+    PeerConnectionInterface::IceTransportsType type) {
+  switch (type) {
+    case PeerConnectionInterface::kNone:
+      return cricket::CF_NONE;
+    case PeerConnectionInterface::kRelay:
+      return cricket::CF_RELAY;
+    case PeerConnectionInterface::kNoHost:
+      return (cricket::CF_ALL & ~cricket::CF_HOST);
+    case PeerConnectionInterface::kAll:
+      return cricket::CF_ALL;
+    default:
+      ASSERT(false);
+  }
+  return cricket::CF_NONE;
+}
+
+// Helper class used to remember whether a remote peer has requested an ICE
+// restart by sending a description with a new ICE ufrag and password.
+class IceRestartAnswerLatch {
+ public:
+  IceRestartAnswerLatch() : ice_restart_(false) { }
+
+  // Returns true if CheckForRemoteIceRestart has been called with a new
+  // session description where the ICE ufrag and password have changed since
+  // the last time Reset() was called.
+  bool Get() const {
+    return ice_restart_;
+  }
+
+  void Reset() {
+    if (ice_restart_) {
+      ice_restart_ = false;
+    }
+  }
+
+  // This method has two purposes: 1. Return whether |new_desc| requests
+  // an ICE restart (i.e., new ufrag/pwd). 2. If it requests an ICE restart
+  // and it is an OFFER, remember this in |ice_restart_| so that the next
+  // Local Answer will be created with new ufrag and pwd.
+  bool CheckForRemoteIceRestart(const SessionDescriptionInterface* old_desc,
+                                const SessionDescriptionInterface* new_desc) {
+    if (!old_desc) {
+      return false;
+    }
+    const SessionDescription* new_sd = new_desc->description();
+    const SessionDescription* old_sd = old_desc->description();
+    const ContentInfos& contents = new_sd->contents();
+    for (size_t index = 0; index < contents.size(); ++index) {
+      const ContentInfo* cinfo = &contents[index];
+      if (cinfo->rejected) {
+        continue;
+      }
+      // If the content isn't rejected, check if ufrag and password has
+      // changed.
+      const cricket::TransportDescription* new_transport_desc =
+          new_sd->GetTransportDescriptionByName(cinfo->name);
+      const cricket::TransportDescription* old_transport_desc =
+          old_sd->GetTransportDescriptionByName(cinfo->name);
+      if (!new_transport_desc || !old_transport_desc) {
+        // No transport description exists. This is not an ICE restart.
+        continue;
+      }
+      if (cricket::IceCredentialsChanged(old_transport_desc->ice_ufrag,
+                                         old_transport_desc->ice_pwd,
+                                         new_transport_desc->ice_ufrag,
+                                         new_transport_desc->ice_pwd)) {
+        LOG(LS_INFO) << "Remote peer requested an ICE restart.";
+        if (new_desc->type() == SessionDescriptionInterface::kOffer) {
+          ice_restart_ = true;
+        }
+        return true;
+      }
+    }
+    return false;
+  }
+
+ private:
+  bool ice_restart_;
+};
+
+WebRtcSession::WebRtcSession(webrtc::MediaControllerInterface* media_controller,
+                             rtc::Thread* signaling_thread,
+                             rtc::Thread* worker_thread,
+                             cricket::PortAllocator* port_allocator)
+    : signaling_thread_(signaling_thread),
+      worker_thread_(worker_thread),
+      port_allocator_(port_allocator),
+      // RFC 3264: The numeric value of the session id and version in the
+      // o= line MUST be representable with a "64 bit signed integer".
+      // Due to this constraint, the session id |sid_| is capped at LLONG_MAX.
+      sid_(rtc::ToString(rtc::CreateRandomId64() & LLONG_MAX)),
+      transport_controller_(new cricket::TransportController(signaling_thread,
+                                                             worker_thread,
+                                                             port_allocator)),
+      media_controller_(media_controller),
+      channel_manager_(media_controller_->channel_manager()),
+      ice_observer_(NULL),
+      ice_connection_state_(PeerConnectionInterface::kIceConnectionNew),
+      ice_connection_receiving_(true),
+      older_version_remote_peer_(false),
+      dtls_enabled_(false),
+      data_channel_type_(cricket::DCT_NONE),
+      ice_restart_latch_(new IceRestartAnswerLatch),
+      metrics_observer_(NULL) {
+  transport_controller_->SetIceRole(cricket::ICEROLE_CONTROLLED);
+  transport_controller_->SignalConnectionState.connect(
+      this, &WebRtcSession::OnTransportControllerConnectionState);
+  transport_controller_->SignalReceiving.connect(
+      this, &WebRtcSession::OnTransportControllerReceiving);
+  transport_controller_->SignalGatheringState.connect(
+      this, &WebRtcSession::OnTransportControllerGatheringState);
+  transport_controller_->SignalCandidatesGathered.connect(
+      this, &WebRtcSession::OnTransportControllerCandidatesGathered);
+}
+
+WebRtcSession::~WebRtcSession() {
+  ASSERT(signaling_thread()->IsCurrent());
+  // Destroy video_channel_ first since it may have a pointer to the
+  // voice_channel_.
+  if (video_channel_) {
+    SignalVideoChannelDestroyed();
+    channel_manager_->DestroyVideoChannel(video_channel_.release());
+  }
+  if (voice_channel_) {
+    SignalVoiceChannelDestroyed();
+    channel_manager_->DestroyVoiceChannel(voice_channel_.release());
+  }
+  if (data_channel_) {
+    SignalDataChannelDestroyed();
+    channel_manager_->DestroyDataChannel(data_channel_.release());
+  }
+  SignalDestroyed();
+
+  LOG(LS_INFO) << "Session: " << id() << " is destroyed.";
+}
+
+bool WebRtcSession::Initialize(
+    const PeerConnectionFactoryInterface::Options& options,
+    const MediaConstraintsInterface* constraints,
+    rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+    const PeerConnectionInterface::RTCConfiguration& rtc_configuration) {
+  bundle_policy_ = rtc_configuration.bundle_policy;
+  rtcp_mux_policy_ = rtc_configuration.rtcp_mux_policy;
+  video_options_.disable_prerenderer_smoothing =
+      rtc::Optional<bool>(rtc_configuration.disable_prerenderer_smoothing);
+  transport_controller_->SetSslMaxProtocolVersion(options.ssl_max_version);
+
+  // Obtain a certificate from RTCConfiguration if any were provided (optional).
+  rtc::scoped_refptr<rtc::RTCCertificate> certificate;
+  if (!rtc_configuration.certificates.empty()) {
+    // TODO(hbos,torbjorng): Decide on certificate-selection strategy instead of
+    // just picking the first one. The decision should be made based on the DTLS
+    // handshake. The DTLS negotiations need to know about all certificates.
+    certificate = rtc_configuration.certificates[0];
+  }
+
+  SetIceConfig(ParseIceConfig(rtc_configuration));
+
+  // TODO(perkj): Take |constraints| into consideration. Return false if not all
+  // mandatory constraints can be fulfilled. Note that |constraints|
+  // can be null.
+  bool value;
+
+  if (options.disable_encryption) {
+    dtls_enabled_ = false;
+  } else {
+    // Enable DTLS by default if we have an identity store or a certificate.
+    dtls_enabled_ = (dtls_identity_store || certificate);
+    // |constraints| can override the default |dtls_enabled_| value.
+    if (FindConstraint(constraints, MediaConstraintsInterface::kEnableDtlsSrtp,
+                       &value, nullptr)) {
+      dtls_enabled_ = value;
+    }
+  }
+
+  // Enable creation of RTP data channels if the kEnableRtpDataChannels
+  // constraint is set. It takes precedence over the
+  // disable_sctp_data_channels option in PeerConnectionFactoryInterface::Options.
+  if (FindConstraint(
+      constraints, MediaConstraintsInterface::kEnableRtpDataChannels,
+      &value, NULL) && value) {
+    LOG(LS_INFO) << "Allowing RTP data engine.";
+    data_channel_type_ = cricket::DCT_RTP;
+  } else {
+    // DTLS has to be enabled to use SCTP.
+    if (!options.disable_sctp_data_channels && dtls_enabled_) {
+      LOG(LS_INFO) << "Allowing SCTP data engine.";
+      data_channel_type_ = cricket::DCT_SCTP;
+    }
+  }
+
+  // Find DSCP constraint.
+  if (FindConstraint(
+        constraints,
+        MediaConstraintsInterface::kEnableDscp,
+        &value, NULL)) {
+    audio_options_.dscp = rtc::Optional<bool>(value);
+    video_options_.dscp = rtc::Optional<bool>(value);
+  }
+
+  // Find Suspend Below Min Bitrate constraint.
+  if (FindConstraint(
+          constraints,
+          MediaConstraintsInterface::kEnableVideoSuspendBelowMinBitrate,
+          &value,
+          NULL)) {
+    video_options_.suspend_below_min_bitrate = rtc::Optional<bool>(value);
+  }
+
+  SetOptionFromOptionalConstraint(constraints,
+      MediaConstraintsInterface::kScreencastMinBitrate,
+      &video_options_.screencast_min_bitrate_kbps);
+
+  // Find constraints for cpu overuse detection.
+  SetOptionFromOptionalConstraint(constraints,
+      MediaConstraintsInterface::kCpuOveruseDetection,
+      &video_options_.cpu_overuse_detection);
+
+  SetOptionFromOptionalConstraint(constraints,
+      MediaConstraintsInterface::kCombinedAudioVideoBwe,
+      &audio_options_.combined_audio_video_bwe);
+
+  audio_options_.audio_jitter_buffer_max_packets =
+      rtc::Optional<int>(rtc_configuration.audio_jitter_buffer_max_packets);
+
+  audio_options_.audio_jitter_buffer_fast_accelerate = rtc::Optional<bool>(
+      rtc_configuration.audio_jitter_buffer_fast_accelerate);
+
+  if (!dtls_enabled_) {
+    // Construct with DTLS disabled.
+    webrtc_session_desc_factory_.reset(new WebRtcSessionDescriptionFactory(
+        signaling_thread(), channel_manager_, this, id()));
+  } else {
+    // Construct with DTLS enabled.
+    if (!certificate) {
+      // Use the |dtls_identity_store| to generate a certificate.
+      RTC_DCHECK(dtls_identity_store);
+      webrtc_session_desc_factory_.reset(new WebRtcSessionDescriptionFactory(
+          signaling_thread(), channel_manager_, std::move(dtls_identity_store),
+          this, id()));
+    } else {
+      // Use the already generated certificate.
+      webrtc_session_desc_factory_.reset(new WebRtcSessionDescriptionFactory(
+          signaling_thread(), channel_manager_, certificate, this, id()));
+    }
+  }
+
+  webrtc_session_desc_factory_->SignalCertificateReady.connect(
+      this, &WebRtcSession::OnCertificateReady);
+
+  if (options.disable_encryption) {
+    webrtc_session_desc_factory_->SetSdesPolicy(cricket::SEC_DISABLED);
+  }
+  port_allocator()->set_candidate_filter(
+      ConvertIceTransportTypeToCandidateFilter(rtc_configuration.type));
+
+  return true;
+}
+
+void WebRtcSession::Close() {
+  SetState(STATE_CLOSED);
+  RemoveUnusedChannels(nullptr);
+  ASSERT(!voice_channel_);
+  ASSERT(!video_channel_);
+  ASSERT(!data_channel_);
+}
+
+void WebRtcSession::SetSdesPolicy(cricket::SecurePolicy secure_policy) {
+  webrtc_session_desc_factory_->SetSdesPolicy(secure_policy);
+}
+
+cricket::SecurePolicy WebRtcSession::SdesPolicy() const {
+  return webrtc_session_desc_factory_->SdesPolicy();
+}
+
+bool WebRtcSession::GetSslRole(const std::string& transport_name,
+                               rtc::SSLRole* role) {
+  if (!local_desc_ || !remote_desc_) {
+    LOG(LS_INFO) << "Local and Remote descriptions must be applied to get "
+                 << "SSL Role of the session.";
+    return false;
+  }
+
+  return transport_controller_->GetSslRole(transport_name, role);
+}
+
+bool WebRtcSession::GetSslRole(const cricket::BaseChannel* channel,
+                               rtc::SSLRole* role) {
+  return channel && GetSslRole(channel->transport_name(), role);
+}
+
+void WebRtcSession::CreateOffer(
+    CreateSessionDescriptionObserver* observer,
+    const PeerConnectionInterface::RTCOfferAnswerOptions& options,
+    const cricket::MediaSessionOptions& session_options) {
+  webrtc_session_desc_factory_->CreateOffer(observer, options, session_options);
+}
+
+void WebRtcSession::CreateAnswer(
+    CreateSessionDescriptionObserver* observer,
+    const MediaConstraintsInterface* constraints,
+    const cricket::MediaSessionOptions& session_options) {
+  webrtc_session_desc_factory_->CreateAnswer(observer, constraints,
+                                             session_options);
+}
+
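+// Takes ownership of |desc|. Validates the description, applies the current
+// crypto policy to it, creates transport and media channels if it is an
+// offer, updates the session state, and pushes down any previously received
+// remote candidates.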
+bool WebRtcSession::SetLocalDescription(SessionDescriptionInterface* desc,
+                                        std::string* err_desc) {
+  ASSERT(signaling_thread()->IsCurrent());
+
+  // Takes the ownership of |desc| regardless of the result.
+  rtc::scoped_ptr<SessionDescriptionInterface> desc_temp(desc);
+
+  // Validate SDP.
+  if (!ValidateSessionDescription(desc, cricket::CS_LOCAL, err_desc)) {
+    return false;
+  }
+
+  // If this session is sending the initial offer, set the initial_offerer
+  // flag and take the controlling ICE role.
+  Action action = GetAction(desc->type());
+  if (state() == STATE_INIT && action == kOffer) {
+    initial_offerer_ = true;
+    transport_controller_->SetIceRole(cricket::ICEROLE_CONTROLLING);
+  }
+
+  cricket::SecurePolicy sdes_policy =
+      webrtc_session_desc_factory_->SdesPolicy();
+  cricket::CryptoType crypto_required = dtls_enabled_ ?
+      cricket::CT_DTLS : (sdes_policy == cricket::SEC_REQUIRED ?
+          cricket::CT_SDES : cricket::CT_NONE);
+  // Update the MediaContentDescription crypto settings as per the policy set.
+  UpdateSessionDescriptionSecurePolicy(crypto_required, desc->description());
+
+  local_desc_.reset(desc_temp.release());
+
+  // Transport and media channels will be created only when an offer is set.
+  if (action == kOffer && !CreateChannels(local_desc_->description())) {
+    // TODO(mallinath) - Handle CreateChannel failure, as the new local
+    // description is applied. Restore the old description.
+    return BadLocalSdp(desc->type(), kCreateChannelFailed, err_desc);
+  }
+
+  // Remove unused channels if MediaContentDescription is rejected.
+  RemoveUnusedChannels(local_desc_->description());
+
+  if (!UpdateSessionState(action, cricket::CS_LOCAL, err_desc)) {
+    return false;
+  }
+
+  if (remote_desc_) {
+    // Now that we have a local description, we can push down remote candidates.
+    UseCandidatesInSessionDescription(remote_desc_.get());
+  }
+
+  if (error() != ERROR_NONE) {
+    return BadLocalSdp(desc->type(), GetSessionErrorMsg(), err_desc);
+  }
+  return true;
+}
+
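+// Takes ownership of |desc|. Validates the description, creates transport and
+// media channels if it is an offer, updates the session state, applies the
+// candidates it contains, and detects whether the remote peer requested an
+// ICE restart.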
+bool WebRtcSession::SetRemoteDescription(SessionDescriptionInterface* desc,
+                                         std::string* err_desc) {
+  ASSERT(signaling_thread()->IsCurrent());
+
+  // Takes the ownership of |desc| regardless of the result.
+  rtc::scoped_ptr<SessionDescriptionInterface> desc_temp(desc);
+
+  // Validate SDP.
+  if (!ValidateSessionDescription(desc, cricket::CS_REMOTE, err_desc)) {
+    return false;
+  }
+
+  rtc::scoped_ptr<SessionDescriptionInterface> old_remote_desc(
+      remote_desc_.release());
+  remote_desc_.reset(desc_temp.release());
+
+  // Transport and media channels will be created only when an offer is set.
+  Action action = GetAction(desc->type());
+  if (action == kOffer && !CreateChannels(desc->description())) {
+    // TODO(mallinath) - Handle CreateChannel failure, as the new remote
+    // description is applied. Restore the old description.
+    return BadRemoteSdp(desc->type(), kCreateChannelFailed, err_desc);
+  }
+
+  // Remove unused channels if MediaContentDescription is rejected.
+  RemoveUnusedChannels(desc->description());
+
+  // NOTE: Candidate allocation will be initiated only when
+  // SetLocalDescription is called.
+  if (!UpdateSessionState(action, cricket::CS_REMOTE, err_desc)) {
+    return false;
+  }
+
+  if (local_desc_ && !UseCandidatesInSessionDescription(desc)) {
+    return BadRemoteSdp(desc->type(), kInvalidCandidates, err_desc);
+  }
+
+  // Check if this new SessionDescription contains a new ICE ufrag and
+  // password, which indicates that the remote peer requests an ICE restart.
+  bool ice_restart =
+      ice_restart_latch_->CheckForRemoteIceRestart(old_remote_desc.get(), desc);
+  // We retain all received candidates only if ICE is not restarted.
+  // When ICE is restarted, all previous candidates belong to an old generation
+  // and should not be kept.
+  // TODO(deadbeef): This goes against the W3C spec which says the remote
+  // description should only contain candidates from the last set remote
+  // description plus any candidates added since then. We should remove this
+  // once we're sure it won't break anything.
+  if (!ice_restart) {
+    WebRtcSessionDescriptionFactory::CopyCandidatesFromSessionDescription(
+        old_remote_desc.get(), desc);
+  }
+
+  if (error() != ERROR_NONE) {
+    return BadRemoteSdp(desc->type(), GetSessionErrorMsg(), err_desc);
+  }
+
+  // Set the ICE connection state to checking since the connection may become
+  // writable with peer reflexive candidates before any remote candidate is
+  // signaled.
+  // TODO(pthatcher): This is a short-term solution for crbug/446908. A real
+  // fix is to have a new signal that indicates a change in checking state
+  // from the transport, and to expose a new checking() member from transport
+  // that can be read to determine the current checking state. The existing
+  // SignalConnecting actually means "gathering candidates", so it cannot be
+  // used here.
+  if (desc->type() != SessionDescriptionInterface::kOffer &&
+      ice_connection_state_ == PeerConnectionInterface::kIceConnectionNew) {
+    SetIceConnectionState(PeerConnectionInterface::kIceConnectionChecking);
+  }
+  return true;
+}
+
+void WebRtcSession::LogState(State old_state, State new_state) {
+  LOG(LS_INFO) << "Session:" << id()
+               << " Old state:" << GetStateString(old_state)
+               << " New state:" << GetStateString(new_state);
+}
+
+void WebRtcSession::SetState(State state) {
+  ASSERT(signaling_thread_->IsCurrent());
+  if (state != state_) {
+    LogState(state_, state);
+    state_ = state;
+    SignalState(this, state_);
+  }
+}
+
+void WebRtcSession::SetError(Error error, const std::string& error_desc) {
+  ASSERT(signaling_thread_->IsCurrent());
+  if (error != error_) {
+    error_ = error;
+    error_desc_ = error_desc;
+  }
+}
+
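+// Drives the offer/answer state machine: an offer moves the session to
+// STATE_SENTOFFER/STATE_RECEIVEDOFFER, a provisional answer enables the
+// channels and moves to STATE_SENTPRANSWER/STATE_RECEIVEDPRANSWER, and a
+// final answer enables BUNDLE (when negotiated by both sides), enables the
+// channels, and moves to STATE_INPROGRESS. Transport and media descriptions
+// are pushed down at each step.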
+bool WebRtcSession::UpdateSessionState(
+    Action action, cricket::ContentSource source,
+    std::string* err_desc) {
+  ASSERT(signaling_thread()->IsCurrent());
+
+  // If there's already a pending error then no state transition should happen.
+  // But all call-sites should be verifying this before calling us!
+  ASSERT(error() == ERROR_NONE);
+  std::string td_err;
+  if (action == kOffer) {
+    if (!PushdownTransportDescription(source, cricket::CA_OFFER, &td_err)) {
+      return BadOfferSdp(source, MakeTdErrorString(td_err), err_desc);
+    }
+    SetState(source == cricket::CS_LOCAL ? STATE_SENTOFFER
+                                         : STATE_RECEIVEDOFFER);
+    if (!PushdownMediaDescription(cricket::CA_OFFER, source, err_desc)) {
+      SetError(ERROR_CONTENT, *err_desc);
+    }
+    if (error() != ERROR_NONE) {
+      return BadOfferSdp(source, GetSessionErrorMsg(), err_desc);
+    }
+  } else if (action == kPrAnswer) {
+    if (!PushdownTransportDescription(source, cricket::CA_PRANSWER, &td_err)) {
+      return BadPranswerSdp(source, MakeTdErrorString(td_err), err_desc);
+    }
+    EnableChannels();
+    SetState(source == cricket::CS_LOCAL ? STATE_SENTPRANSWER
+                                         : STATE_RECEIVEDPRANSWER);
+    if (!PushdownMediaDescription(cricket::CA_PRANSWER, source, err_desc)) {
+      SetError(ERROR_CONTENT, *err_desc);
+    }
+    if (error() != ERROR_NONE) {
+      return BadPranswerSdp(source, GetSessionErrorMsg(), err_desc);
+    }
+  } else if (action == kAnswer) {
+    const cricket::ContentGroup* local_bundle =
+        local_desc_->description()->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
+    const cricket::ContentGroup* remote_bundle =
+        remote_desc_->description()->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
+    if (local_bundle && remote_bundle) {
+      // The answerer decides the transport to bundle on.
+      const cricket::ContentGroup* answer_bundle =
+          (source == cricket::CS_LOCAL ? local_bundle : remote_bundle);
+      if (!EnableBundle(*answer_bundle)) {
+        LOG(LS_WARNING) << "Failed to enable BUNDLE.";
+        return BadAnswerSdp(source, kEnableBundleFailed, err_desc);
+      }
+    }
+    // Only push down the transport description after enabling BUNDLE; we don't
+    // want to push down a description on a transport about to be destroyed.
+    if (!PushdownTransportDescription(source, cricket::CA_ANSWER, &td_err)) {
+      return BadAnswerSdp(source, MakeTdErrorString(td_err), err_desc);
+    }
+    EnableChannels();
+    SetState(STATE_INPROGRESS);
+    if (!PushdownMediaDescription(cricket::CA_ANSWER, source, err_desc)) {
+      SetError(ERROR_CONTENT, *err_desc);
+    }
+    if (error() != ERROR_NONE) {
+      return BadAnswerSdp(source, GetSessionErrorMsg(), err_desc);
+    }
+  }
+  return true;
+}
+
+WebRtcSession::Action WebRtcSession::GetAction(const std::string& type) {
+  if (type == SessionDescriptionInterface::kOffer) {
+    return WebRtcSession::kOffer;
+  } else if (type == SessionDescriptionInterface::kPrAnswer) {
+    return WebRtcSession::kPrAnswer;
+  } else if (type == SessionDescriptionInterface::kAnswer) {
+    return WebRtcSession::kAnswer;
+  }
+  ASSERT(false && "unknown action type");
+  return WebRtcSession::kOffer;
+}
+
+bool WebRtcSession::PushdownMediaDescription(
+    cricket::ContentAction action,
+    cricket::ContentSource source,
+    std::string* err) {
+  auto set_content = [this, action, source, err](cricket::BaseChannel* ch) {
+    if (!ch) {
+      return true;
+    } else if (source == cricket::CS_LOCAL) {
+      return ch->PushdownLocalDescription(local_desc_->description(), action,
+                                          err);
+    } else {
+      return ch->PushdownRemoteDescription(remote_desc_->description(), action,
+                                           err);
+    }
+  };
+
+  return (set_content(voice_channel()) &&
+          set_content(video_channel()) &&
+          set_content(data_channel()));
+}
+
+bool WebRtcSession::PushdownTransportDescription(cricket::ContentSource source,
+                                                 cricket::ContentAction action,
+                                                 std::string* error_desc) {
+  RTC_DCHECK(signaling_thread()->IsCurrent());
+
+  if (source == cricket::CS_LOCAL) {
+    return PushdownLocalTransportDescription(local_desc_->description(), action,
+                                             error_desc);
+  }
+  return PushdownRemoteTransportDescription(remote_desc_->description(), action,
+                                            error_desc);
+}
+
+bool WebRtcSession::PushdownLocalTransportDescription(
+    const SessionDescription* sdesc,
+    cricket::ContentAction action,
+    std::string* err) {
+  RTC_DCHECK(signaling_thread()->IsCurrent());
+
+  if (!sdesc) {
+    return false;
+  }
+
+  for (const TransportInfo& tinfo : sdesc->transport_infos()) {
+    if (!transport_controller_->SetLocalTransportDescription(
+            tinfo.content_name, tinfo.description, action, err)) {
+      return false;
+    }
+  }
+
+  return true;
+}
+
+bool WebRtcSession::PushdownRemoteTransportDescription(
+    const SessionDescription* sdesc,
+    cricket::ContentAction action,
+    std::string* err) {
+  RTC_DCHECK(signaling_thread()->IsCurrent());
+
+  if (!sdesc) {
+    return false;
+  }
+
+  for (const TransportInfo& tinfo : sdesc->transport_infos()) {
+    if (!transport_controller_->SetRemoteTransportDescription(
+            tinfo.content_name, tinfo.description, action, err)) {
+      return false;
+    }
+  }
+
+  return true;
+}
+
+bool WebRtcSession::GetTransportDescription(
+    const SessionDescription* description,
+    const std::string& content_name,
+    cricket::TransportDescription* tdesc) {
+  if (!description || !tdesc) {
+    return false;
+  }
+  const TransportInfo* transport_info =
+      description->GetTransportInfoByName(content_name);
+  if (!transport_info) {
+    return false;
+  }
+  *tdesc = transport_info->description;
+  return true;
+}
+
+bool WebRtcSession::GetTransportStats(SessionStats* stats) {
+  ASSERT(signaling_thread()->IsCurrent());
+  return (GetChannelTransportStats(voice_channel(), stats) &&
+          GetChannelTransportStats(video_channel(), stats) &&
+          GetChannelTransportStats(data_channel(), stats));
+}
+
+bool WebRtcSession::GetChannelTransportStats(cricket::BaseChannel* ch,
+                                             SessionStats* stats) {
+  ASSERT(signaling_thread()->IsCurrent());
+  if (!ch) {
+    // Not using this channel.
+    return true;
+  }
+
+  const std::string& content_name = ch->content_name();
+  const std::string& transport_name = ch->transport_name();
+  stats->proxy_to_transport[content_name] = transport_name;
+  if (stats->transport_stats.find(transport_name) !=
+      stats->transport_stats.end()) {
+    // Transport stats already done for this transport.
+    return true;
+  }
+
+  cricket::TransportStats tstats;
+  if (!transport_controller_->GetStats(transport_name, &tstats)) {
+    return false;
+  }
+
+  stats->transport_stats[transport_name] = tstats;
+  return true;
+}
+
+bool WebRtcSession::GetLocalCertificate(
+    const std::string& transport_name,
+    rtc::scoped_refptr<rtc::RTCCertificate>* certificate) {
+  ASSERT(signaling_thread()->IsCurrent());
+  return transport_controller_->GetLocalCertificate(transport_name,
+                                                    certificate);
+}
+
+bool WebRtcSession::GetRemoteSSLCertificate(const std::string& transport_name,
+                                            rtc::SSLCertificate** cert) {
+  ASSERT(signaling_thread()->IsCurrent());
+  return transport_controller_->GetRemoteSSLCertificate(transport_name, cert);
+}
+
+cricket::BaseChannel* WebRtcSession::GetChannel(
+    const std::string& content_name) {
+  if (voice_channel() && voice_channel()->content_name() == content_name) {
+    return voice_channel();
+  }
+  if (video_channel() && video_channel()->content_name() == content_name) {
+    return video_channel();
+  }
+  if (data_channel() && data_channel()->content_name() == content_name) {
+    return data_channel();
+  }
+  return nullptr;
+}
+
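+// Moves every channel named in |bundle| onto the transport of the group's
+// first content, so that all bundled media shares a single transport.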
+bool WebRtcSession::EnableBundle(const cricket::ContentGroup& bundle) {
+  const std::string* first_content_name = bundle.FirstContentName();
+  if (!first_content_name) {
+    LOG(LS_WARNING) << "Tried to BUNDLE with no contents.";
+    return false;
+  }
+  const std::string& transport_name = *first_content_name;
+  cricket::BaseChannel* first_channel = GetChannel(transport_name);
+
+  auto maybe_set_transport = [this, bundle, transport_name,
+                              first_channel](cricket::BaseChannel* ch) {
+    if (!ch || !bundle.HasContentName(ch->content_name())) {
+      return true;
+    }
+
+    if (ch->transport_name() == transport_name) {
+      LOG(LS_INFO) << "BUNDLE already enabled for " << ch->content_name()
+                   << " on " << transport_name << ".";
+      return true;
+    }
+
+    if (!ch->SetTransport(transport_name)) {
+      LOG(LS_WARNING) << "Failed to enable BUNDLE for " << ch->content_name();
+      return false;
+    }
+    LOG(LS_INFO) << "Enabled BUNDLE for " << ch->content_name() << " on "
+                 << transport_name << ".";
+    return true;
+  };
+
+  if (!maybe_set_transport(voice_channel()) ||
+      !maybe_set_transport(video_channel()) ||
+      !maybe_set_transport(data_channel())) {
+    return false;
+  }
+
+  return true;
+}
+
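+// Validates a remote ICE candidate and adds it to the remote description.
+// If the session is ready (broadly, once a local description has also been
+// applied), the candidate is used immediately; otherwise it is kept for
+// later.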
+bool WebRtcSession::ProcessIceMessage(const IceCandidateInterface* candidate) {
+  if (!remote_desc_) {
+    LOG(LS_ERROR) << "ProcessIceMessage: ICE candidates can't be added "
+                  << "without any remote session description.";
+    return false;
+  }
+
+  if (!candidate) {
+    LOG(LS_ERROR) << "ProcessIceMessage: Candidate is NULL.";
+    return false;
+  }
+
+  bool valid = false;
+  bool ready = ReadyToUseRemoteCandidate(candidate, NULL, &valid);
+  if (!valid) {
+    return false;
+  }
+
+  // Add this candidate to the remote session description.
+  if (!remote_desc_->AddCandidate(candidate)) {
+    LOG(LS_ERROR) << "ProcessIceMessage: Candidate cannot be used.";
+    return false;
+  }
+
+  if (ready) {
+    return UseCandidate(candidate);
+  } else {
+    LOG(LS_INFO) << "ProcessIceMessage: Not ready to use candidate.";
+    return true;
+  }
+}
+
+bool WebRtcSession::SetIceTransports(
+    PeerConnectionInterface::IceTransportsType type) {
+  return port_allocator()->set_candidate_filter(
+        ConvertIceTransportTypeToCandidateFilter(type));
+}
+
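+// Translates the relevant RTCConfiguration fields into a cricket::IceConfig.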
+cricket::IceConfig WebRtcSession::ParseIceConfig(
+    const PeerConnectionInterface::RTCConfiguration& config) const {
+  cricket::IceConfig ice_config;
+  ice_config.receiving_timeout_ms = config.ice_connection_receiving_timeout;
+  ice_config.backup_connection_ping_interval =
+      config.ice_backup_candidate_pair_ping_interval;
+  ice_config.gather_continually = (config.continual_gathering_policy ==
+                                   PeerConnectionInterface::GATHER_CONTINUALLY);
+  return ice_config;
+}
+
+void WebRtcSession::SetIceConfig(const cricket::IceConfig& config) {
+  transport_controller_->SetIceConfig(config);
+}
+
+void WebRtcSession::MaybeStartGathering() {
+  transport_controller_->MaybeStartGathering();
+}
+
+bool WebRtcSession::GetLocalTrackIdBySsrc(uint32_t ssrc,
+                                          std::string* track_id) {
+  if (!local_desc_) {
+    return false;
+  }
+  return webrtc::GetTrackIdBySsrc(local_desc_->description(), ssrc, track_id);
+}
+
+bool WebRtcSession::GetRemoteTrackIdBySsrc(uint32_t ssrc,
+                                           std::string* track_id) {
+  if (!remote_desc_) {
+    return false;
+  }
+  return webrtc::GetTrackIdBySsrc(remote_desc_->description(), ssrc, track_id);
+}
+
+std::string WebRtcSession::BadStateErrMsg(State state) {
+  std::ostringstream desc;
+  desc << "Called in wrong state: " << GetStateString(state);
+  return desc.str();
+}
+
+void WebRtcSession::SetAudioPlayout(uint32_t ssrc, bool enable) {
+  ASSERT(signaling_thread()->IsCurrent());
+  if (!voice_channel_) {
+    LOG(LS_ERROR) << "SetAudioPlayout: No audio channel exists.";
+    return;
+  }
+  if (!voice_channel_->SetOutputVolume(ssrc, enable ? 1 : 0)) {
+    // Allow SetOutputVolume to fail if |enable| is false, but assert
+    // otherwise. Failure is the normal case when the underlying media channel
+    // has already been deleted.
+    ASSERT(enable == false);
+  }
+}
+
+void WebRtcSession::SetAudioSend(uint32_t ssrc,
+                                 bool enable,
+                                 const cricket::AudioOptions& options,
+                                 cricket::AudioRenderer* renderer) {
+  ASSERT(signaling_thread()->IsCurrent());
+  if (!voice_channel_) {
+    LOG(LS_ERROR) << "SetAudioSend: No audio channel exists.";
+    return;
+  }
+  if (!voice_channel_->SetAudioSend(ssrc, enable, &options, renderer)) {
+    LOG(LS_ERROR) << "SetAudioSend: ssrc is incorrect: " << ssrc;
+  }
+}
+
+void WebRtcSession::SetAudioPlayoutVolume(uint32_t ssrc, double volume) {
+  ASSERT(signaling_thread()->IsCurrent());
+  ASSERT(volume >= 0 && volume <= 10);
+  if (!voice_channel_) {
+    LOG(LS_ERROR) << "SetAudioPlayoutVolume: No audio channel exists.";
+    return;
+  }
+
+  if (!voice_channel_->SetOutputVolume(ssrc, volume)) {
+    ASSERT(false);
+  }
+}
+
+void WebRtcSession::SetRawAudioSink(uint32_t ssrc,
+                                    rtc::scoped_ptr<AudioSinkInterface> sink) {
+  ASSERT(signaling_thread()->IsCurrent());
+  if (!voice_channel_)
+    return;
+
+  voice_channel_->SetRawAudioSink(ssrc, std::move(sink));
+}
+
+bool WebRtcSession::SetCaptureDevice(uint32_t ssrc,
+                                     cricket::VideoCapturer* camera) {
+  ASSERT(signaling_thread()->IsCurrent());
+
+  if (!video_channel_) {
+    // |video_channel_| doesn't exist, probably because the remote end doesn't
+    // support video.
+    LOG(LS_WARNING) << "Video not used in this call.";
+    return false;
+  }
+  if (!video_channel_->SetCapturer(ssrc, camera)) {
+    // Allow SetCapturer to fail if |camera| is NULL, but assert otherwise.
+    // Failure is the normal case when the underlying media channel has
+    // already been deleted.
+    ASSERT(camera == NULL);
+    return false;
+  }
+  return true;
+}
+
+void WebRtcSession::SetVideoPlayout(
+    uint32_t ssrc,
+    bool enable,
+    rtc::VideoSinkInterface<cricket::VideoFrame>* sink) {
+  ASSERT(signaling_thread()->IsCurrent());
+  if (!video_channel_) {
+    LOG(LS_WARNING) << "SetVideoPlayout: No video channel exists.";
+    return;
+  }
+  if (!video_channel_->SetSink(ssrc, enable ? sink : NULL)) {
+    // Allow SetSink to fail if |sink| is NULL, but assert otherwise.
+    // Failure is the normal case when the underlying media channel has
+    // already been deleted.
+    ASSERT(sink == NULL);
+  }
+}
+
+void WebRtcSession::SetVideoSend(uint32_t ssrc,
+                                 bool enable,
+                                 const cricket::VideoOptions* options) {
+  ASSERT(signaling_thread()->IsCurrent());
+  if (!video_channel_) {
+    LOG(LS_WARNING) << "SetVideoSend: No video channel exists.";
+    return;
+  }
+  if (!video_channel_->SetVideoSend(ssrc, enable, options)) {
+    // Allow SetVideoSend to fail if |enable| is false, but assert otherwise.
+    // Failure is the normal case when the underlying media channel has
+    // already been deleted.
+    ASSERT(enable == false);
+  }
+}
+
+bool WebRtcSession::CanInsertDtmf(const std::string& track_id) {
+  ASSERT(signaling_thread()->IsCurrent());
+  if (!voice_channel_) {
+    LOG(LS_ERROR) << "CanInsertDtmf: No audio channel exists.";
+    return false;
+  }
+  uint32_t send_ssrc = 0;
+  // DTMF is negotiated per channel, not per ssrc, so we only check that the
+  // ssrc exists.
+  if (!local_desc_ ||
+      !GetAudioSsrcByTrackId(local_desc_->description(), track_id,
+                             &send_ssrc)) {
+    LOG(LS_ERROR) << "CanInsertDtmf: Track does not exist: " << track_id;
+    return false;
+  }
+  return voice_channel_->CanInsertDtmf();
+}
+
+bool WebRtcSession::InsertDtmf(const std::string& track_id,
+                               int code, int duration) {
+  ASSERT(signaling_thread()->IsCurrent());
+  if (!voice_channel_) {
+    LOG(LS_ERROR) << "InsertDtmf: No audio channel exists.";
+    return false;
+  }
+  uint32_t send_ssrc = 0;
+  if (!VERIFY(local_desc_ && GetAudioSsrcByTrackId(local_desc_->description(),
+                                                   track_id, &send_ssrc))) {
+    LOG(LS_ERROR) << "InsertDtmf: Track does not exist: " << track_id;
+    return false;
+  }
+  if (!voice_channel_->InsertDtmf(send_ssrc, code, duration)) {
+    LOG(LS_ERROR) << "Failed to insert DTMF to channel.";
+    return false;
+  }
+  return true;
+}
+
+sigslot::signal0<>* WebRtcSession::GetOnDestroyedSignal() {
+  return &SignalDestroyed;
+}
+
+bool WebRtcSession::SendData(const cricket::SendDataParams& params,
+                             const rtc::Buffer& payload,
+                             cricket::SendDataResult* result) {
+  if (!data_channel_) {
+    LOG(LS_ERROR) << "SendData called when data_channel_ is NULL.";
+    return false;
+  }
+  return data_channel_->SendData(params, payload, result);
+}
+
+bool WebRtcSession::ConnectDataChannel(DataChannel* webrtc_data_channel) {
+  if (!data_channel_) {
+    LOG(LS_ERROR) << "ConnectDataChannel called when data_channel_ is NULL.";
+    return false;
+  }
+  data_channel_->SignalReadyToSendData.connect(webrtc_data_channel,
+                                               &DataChannel::OnChannelReady);
+  data_channel_->SignalDataReceived.connect(webrtc_data_channel,
+                                            &DataChannel::OnDataReceived);
+  data_channel_->SignalStreamClosedRemotely.connect(
+      webrtc_data_channel, &DataChannel::OnStreamClosedRemotely);
+  return true;
+}
+
+void WebRtcSession::DisconnectDataChannel(DataChannel* webrtc_data_channel) {
+  if (!data_channel_) {
+    LOG(LS_ERROR) << "DisconnectDataChannel called when data_channel_ is NULL.";
+    return;
+  }
+  data_channel_->SignalReadyToSendData.disconnect(webrtc_data_channel);
+  data_channel_->SignalDataReceived.disconnect(webrtc_data_channel);
+  data_channel_->SignalStreamClosedRemotely.disconnect(webrtc_data_channel);
+}
+
+void WebRtcSession::AddSctpDataStream(int sid) {
+  if (!data_channel_) {
+    LOG(LS_ERROR) << "AddDataChannelStreams called when data_channel_ is NULL.";
+    return;
+  }
+  data_channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(sid));
+  data_channel_->AddSendStream(cricket::StreamParams::CreateLegacy(sid));
+}
+
+void WebRtcSession::RemoveSctpDataStream(int sid) {
+  if (!data_channel_) {
+    LOG(LS_ERROR) << "RemoveDataChannelStreams called when data_channel_ is "
+                  << "NULL.";
+    return;
+  }
+  data_channel_->RemoveRecvStream(sid);
+  data_channel_->RemoveSendStream(sid);
+}
+
+bool WebRtcSession::ReadyToSendData() const {
+  return data_channel_ && data_channel_->ready_to_send_data();
+}
+
+cricket::DataChannelType WebRtcSession::data_channel_type() const {
+  return data_channel_type_;
+}
+
+bool WebRtcSession::IceRestartPending() const {
+  return ice_restart_latch_->Get();
+}
+
+void WebRtcSession::ResetIceRestartLatch() {
+  ice_restart_latch_->Reset();
+}
+
+void WebRtcSession::OnCertificateReady(
+    const rtc::scoped_refptr<rtc::RTCCertificate>& certificate) {
+  transport_controller_->SetLocalCertificate(certificate);
+}
+
+bool WebRtcSession::waiting_for_certificate_for_testing() const {
+  return webrtc_session_desc_factory_->waiting_for_certificate_for_testing();
+}
+
+const rtc::scoped_refptr<rtc::RTCCertificate>&
+WebRtcSession::certificate_for_testing() {
+  return transport_controller_->certificate_for_testing();
+}
+
+void WebRtcSession::SetIceConnectionState(
+      PeerConnectionInterface::IceConnectionState state) {
+  if (ice_connection_state_ == state) {
+    return;
+  }
+
+  // ASSERT that the requested transition is allowed.  Note that
+  // WebRtcSession does not implement "kIceConnectionClosed" (that is handled
+  // within PeerConnection).  This switch statement should compile away when
+  // ASSERTs are disabled.
+  LOG(LS_INFO) << "Changing IceConnectionState " << ice_connection_state_
+               << " => " << state;
+  switch (ice_connection_state_) {
+    case PeerConnectionInterface::kIceConnectionNew:
+      ASSERT(state == PeerConnectionInterface::kIceConnectionChecking);
+      break;
+    case PeerConnectionInterface::kIceConnectionChecking:
+      ASSERT(state == PeerConnectionInterface::kIceConnectionFailed ||
+             state == PeerConnectionInterface::kIceConnectionConnected);
+      break;
+    case PeerConnectionInterface::kIceConnectionConnected:
+      ASSERT(state == PeerConnectionInterface::kIceConnectionDisconnected ||
+             state == PeerConnectionInterface::kIceConnectionChecking ||
+             state == PeerConnectionInterface::kIceConnectionCompleted);
+      break;
+    case PeerConnectionInterface::kIceConnectionCompleted:
+      ASSERT(state == PeerConnectionInterface::kIceConnectionConnected ||
+             state == PeerConnectionInterface::kIceConnectionDisconnected);
+      break;
+    case PeerConnectionInterface::kIceConnectionFailed:
+      ASSERT(state == PeerConnectionInterface::kIceConnectionNew);
+      break;
+    case PeerConnectionInterface::kIceConnectionDisconnected:
+      ASSERT(state == PeerConnectionInterface::kIceConnectionChecking ||
+             state == PeerConnectionInterface::kIceConnectionConnected ||
+             state == PeerConnectionInterface::kIceConnectionCompleted ||
+             state == PeerConnectionInterface::kIceConnectionFailed);
+      break;
+    case PeerConnectionInterface::kIceConnectionClosed:
+      ASSERT(false);
+      break;
+    default:
+      ASSERT(false);
+      break;
+  }
+
+  ice_connection_state_ = state;
+  if (ice_observer_) {
+    ice_observer_->OnIceConnectionChange(ice_connection_state_);
+  }
+}
+
+void WebRtcSession::OnTransportControllerConnectionState(
+    cricket::IceConnectionState state) {
+  switch (state) {
+    case cricket::kIceConnectionConnecting:
+      // If the current state is Connected or Completed, then there were
+      // writable channels but now there are not, so the next state must
+      // be Disconnected.
+      // kIceConnectionConnecting is currently used as the default,
+      // un-connected state by the TransportController, so its only use is
+      // detecting disconnections.
+      if (ice_connection_state_ ==
+              PeerConnectionInterface::kIceConnectionConnected ||
+          ice_connection_state_ ==
+              PeerConnectionInterface::kIceConnectionCompleted) {
+        SetIceConnectionState(
+            PeerConnectionInterface::kIceConnectionDisconnected);
+      }
+      break;
+    case cricket::kIceConnectionFailed:
+      SetIceConnectionState(PeerConnectionInterface::kIceConnectionFailed);
+      break;
+    case cricket::kIceConnectionConnected:
+      LOG(LS_INFO) << "Changing to ICE connected state because "
+                   << "all transports are writable.";
+      SetIceConnectionState(PeerConnectionInterface::kIceConnectionConnected);
+      break;
+    case cricket::kIceConnectionCompleted:
+      LOG(LS_INFO) << "Changing to ICE completed state because "
+                   << "all transports are complete.";
+      if (ice_connection_state_ !=
+          PeerConnectionInterface::kIceConnectionConnected) {
+        // If jumping directly from "checking" to "connected",
+        // signal "connected" first.
+        SetIceConnectionState(PeerConnectionInterface::kIceConnectionConnected);
+      }
+      SetIceConnectionState(PeerConnectionInterface::kIceConnectionCompleted);
+      if (metrics_observer_) {
+        ReportTransportStats();
+      }
+      break;
+    default:
+      ASSERT(false);
+  }
+}
+
+void WebRtcSession::OnTransportControllerReceiving(bool receiving) {
+  SetIceConnectionReceiving(receiving);
+}
+
+void WebRtcSession::SetIceConnectionReceiving(bool receiving) {
+  if (ice_connection_receiving_ == receiving) {
+    return;
+  }
+  ice_connection_receiving_ = receiving;
+  if (ice_observer_) {
+    ice_observer_->OnIceConnectionReceivingChange(receiving);
+  }
+}
+
+void WebRtcSession::OnTransportControllerCandidatesGathered(
+    const std::string& transport_name,
+    const cricket::Candidates& candidates) {
+  ASSERT(signaling_thread()->IsCurrent());
+  int sdp_mline_index;
+  if (!GetLocalCandidateMediaIndex(transport_name, &sdp_mline_index)) {
+    LOG(LS_ERROR) << "OnTransportControllerCandidatesGathered: content name "
+                  << transport_name << " not found";
+    return;
+  }
+
+  for (cricket::Candidates::const_iterator citer = candidates.begin();
+       citer != candidates.end(); ++citer) {
+    // Use transport_name as the candidate media id.
+    JsepIceCandidate candidate(transport_name, sdp_mline_index, *citer);
+    if (ice_observer_) {
+      ice_observer_->OnIceCandidate(&candidate);
+    }
+    if (local_desc_) {
+      local_desc_->AddCandidate(&candidate);
+    }
+  }
+}
+
+// Enables the voice, video, and data channels if they exist.
+void WebRtcSession::EnableChannels() {
+  if (voice_channel_ && !voice_channel_->enabled())
+    voice_channel_->Enable(true);
+
+  if (video_channel_ && !video_channel_->enabled())
+    video_channel_->Enable(true);
+
+  if (data_channel_ && !data_channel_->enabled())
+    data_channel_->Enable(true);
+}
+
+// Returns the media index for a local ice candidate given the content name.
+bool WebRtcSession::GetLocalCandidateMediaIndex(const std::string& content_name,
+                                                int* sdp_mline_index) {
+  if (!local_desc_ || !sdp_mline_index) {
+    return false;
+  }
+
+  bool content_found = false;
+  const ContentInfos& contents = local_desc_->description()->contents();
+  for (size_t index = 0; index < contents.size(); ++index) {
+    if (contents[index].name == content_name) {
+      *sdp_mline_index = static_cast<int>(index);
+      content_found = true;
+      break;
+    }
+  }
+  return content_found;
+}
+
+bool WebRtcSession::UseCandidatesInSessionDescription(
+    const SessionDescriptionInterface* remote_desc) {
+  if (!remote_desc) {
+    return true;
+  }
+  bool ret = true;
+
+  for (size_t m = 0; m < remote_desc->number_of_mediasections(); ++m) {
+    const IceCandidateCollection* candidates = remote_desc->candidates(m);
+    for (size_t n = 0; n < candidates->count(); ++n) {
+      const IceCandidateInterface* candidate = candidates->at(n);
+      bool valid = false;
+      if (!ReadyToUseRemoteCandidate(candidate, remote_desc, &valid)) {
+        if (valid) {
+          LOG(LS_INFO) << "UseCandidatesInSessionDescription: Not ready to use "
+                       << "candidate.";
+        }
+        continue;
+      }
+      ret = UseCandidate(candidate);
+      if (!ret) {
+        break;
+      }
+    }
+  }
+  return ret;
+}
+
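+// Hands a single remote candidate to the TransportController for connectivity
+// checking, and advances the ICE connection state to "checking" when
+// appropriate.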
+bool WebRtcSession::UseCandidate(
+    const IceCandidateInterface* candidate) {
+
+  size_t mediacontent_index = static_cast<size_t>(candidate->sdp_mline_index());
+  size_t remote_content_size = remote_desc_->description()->contents().size();
+  if (mediacontent_index >= remote_content_size) {
+    LOG(LS_ERROR)
+        << "UseRemoteCandidateInSession: Invalid candidate media index.";
+    return false;
+  }
+
+  cricket::ContentInfo content =
+      remote_desc_->description()->contents()[mediacontent_index];
+  std::vector<cricket::Candidate> candidates;
+  candidates.push_back(candidate->candidate());
+  // Pass the remote candidates to the TransportController.
+  std::string error;
+  if (transport_controller_->AddRemoteCandidates(content.name, candidates,
+                                                 &error)) {
+    // Candidates successfully submitted for checking.
+    if (ice_connection_state_ == PeerConnectionInterface::kIceConnectionNew ||
+        ice_connection_state_ ==
+            PeerConnectionInterface::kIceConnectionDisconnected) {
+      // If state is New, then the session has just gotten its first remote ICE
+      // candidates, so go to Checking.
+      // If state is Disconnected, the session is re-using old candidates or
+      // receiving additional ones, so go to Checking.
+      // If state is Connected, stay Connected.
+      // TODO(bemasc): If state is Connected, and the new candidates are for a
+      // newly added transport, then the state actually _should_ move to
+      // checking.  Add a way to distinguish that case.
+      SetIceConnectionState(PeerConnectionInterface::kIceConnectionChecking);
+    }
+    // TODO(bemasc): If state is Completed, go back to Connected.
+  } else {
+    if (!error.empty()) {
+      LOG(LS_WARNING) << error;
+    }
+  }
+  return true;
+}
+
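+// Destroys any channel whose content is absent or rejected in |desc|.
+// Passing null destroys all existing channels, as done from Close().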
+void WebRtcSession::RemoveUnusedChannels(const SessionDescription* desc) {
+  // Destroy video_channel_ first since it may have a pointer to the
+  // voice_channel_.
+  const cricket::ContentInfo* video_info =
+      cricket::GetFirstVideoContent(desc);
+  if ((!video_info || video_info->rejected) && video_channel_) {
+    SignalVideoChannelDestroyed();
+    channel_manager_->DestroyVideoChannel(video_channel_.release());
+  }
+
+  const cricket::ContentInfo* voice_info =
+      cricket::GetFirstAudioContent(desc);
+  if ((!voice_info || voice_info->rejected) && voice_channel_) {
+    SignalVoiceChannelDestroyed();
+    channel_manager_->DestroyVoiceChannel(voice_channel_.release());
+  }
+
+  const cricket::ContentInfo* data_info =
+      cricket::GetFirstDataContent(desc);
+  if ((!data_info || data_info->rejected) && data_channel_) {
+    SignalDataChannelDestroyed();
+    channel_manager_->DestroyDataChannel(data_channel_.release());
+  }
+}
+
+// TODO(mallinath) - Add a correct error code if the channels are not created
+// because BUNDLE is enabled but rtcp-mux is disabled.
+bool WebRtcSession::CreateChannels(const SessionDescription* desc) {
+  // Creating the media channels and transport proxies.
+  const cricket::ContentInfo* voice = cricket::GetFirstAudioContent(desc);
+  if (voice && !voice->rejected && !voice_channel_) {
+    if (!CreateVoiceChannel(voice)) {
+      LOG(LS_ERROR) << "Failed to create voice channel.";
+      return false;
+    }
+  }
+
+  const cricket::ContentInfo* video = cricket::GetFirstVideoContent(desc);
+  if (video && !video->rejected && !video_channel_) {
+    if (!CreateVideoChannel(video)) {
+      LOG(LS_ERROR) << "Failed to create video channel.";
+      return false;
+    }
+  }
+
+  const cricket::ContentInfo* data = cricket::GetFirstDataContent(desc);
+  if (data_channel_type_ != cricket::DCT_NONE &&
+      data && !data->rejected && !data_channel_) {
+    if (!CreateDataChannel(data)) {
+      LOG(LS_ERROR) << "Failed to create data channel.";
+      return false;
+    }
+  }
+
+  if (rtcp_mux_policy_ == PeerConnectionInterface::kRtcpMuxPolicyRequire) {
+    if (voice_channel()) {
+      voice_channel()->ActivateRtcpMux();
+    }
+    if (video_channel()) {
+      video_channel()->ActivateRtcpMux();
+    }
+    if (data_channel()) {
+      data_channel()->ActivateRtcpMux();
+    }
+  }
+
+  // Enable BUNDLE immediately when kBundlePolicyMaxBundle is in effect.
+  if (bundle_policy_ == PeerConnectionInterface::kBundlePolicyMaxBundle) {
+    const cricket::ContentGroup* bundle_group = desc->GetGroupByName(
+        cricket::GROUP_TYPE_BUNDLE);
+    if (!bundle_group) {
+      LOG(LS_WARNING) << "max-bundle specified without BUNDLE specified";
+      return false;
+    }
+    if (!EnableBundle(*bundle_group)) {
+      LOG(LS_WARNING) << "max-bundle failed to enable bundling.";
+      return false;
+    }
+  }
+
+  return true;
+}
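+
+// Illustrative note: with kBundlePolicyMaxBundle, CreateChannels() above
+// requires the description to carry a BUNDLE group, i.e. an SDP line such as
+//   a=group:BUNDLE audio video data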
+
+bool WebRtcSession::CreateVoiceChannel(const cricket::ContentInfo* content) {
+  voice_channel_.reset(channel_manager_->CreateVoiceChannel(
+      media_controller_, transport_controller_.get(), content->name, true,
+      audio_options_));
+  if (!voice_channel_) {
+    return false;
+  }
+
+  voice_channel_->SignalDtlsSetupFailure.connect(
+      this, &WebRtcSession::OnDtlsSetupFailure);
+
+  SignalVoiceChannelCreated();
+  voice_channel_->transport_channel()->SignalSentPacket.connect(
+      this, &WebRtcSession::OnSentPacket_w);
+  return true;
+}
+
+bool WebRtcSession::CreateVideoChannel(const cricket::ContentInfo* content) {
+  video_channel_.reset(channel_manager_->CreateVideoChannel(
+      media_controller_, transport_controller_.get(), content->name, true,
+      video_options_));
+  if (!video_channel_) {
+    return false;
+  }
+
+  video_channel_->SignalDtlsSetupFailure.connect(
+      this, &WebRtcSession::OnDtlsSetupFailure);
+
+  SignalVideoChannelCreated();
+  video_channel_->transport_channel()->SignalSentPacket.connect(
+      this, &WebRtcSession::OnSentPacket_w);
+  return true;
+}
+
+bool WebRtcSession::CreateDataChannel(const cricket::ContentInfo* content) {
+  bool sctp = (data_channel_type_ == cricket::DCT_SCTP);
+  data_channel_.reset(channel_manager_->CreateDataChannel(
+      transport_controller_.get(), content->name, !sctp, data_channel_type_));
+  if (!data_channel_) {
+    return false;
+  }
+
+  if (sctp) {
+    data_channel_->SignalDataReceived.connect(
+        this, &WebRtcSession::OnDataChannelMessageReceived);
+  }
+
+  data_channel_->SignalDtlsSetupFailure.connect(
+      this, &WebRtcSession::OnDtlsSetupFailure);
+
+  SignalDataChannelCreated();
+  data_channel_->transport_channel()->SignalSentPacket.connect(
+      this, &WebRtcSession::OnSentPacket_w);
+  return true;
+}
+
+void WebRtcSession::OnDtlsSetupFailure(cricket::BaseChannel*, bool rtcp) {
+  SetError(ERROR_TRANSPORT,
+           rtcp ? kDtlsSetupFailureRtcp : kDtlsSetupFailureRtp);
+}
+
+void WebRtcSession::OnDataChannelMessageReceived(
+    cricket::DataChannel* channel,
+    const cricket::ReceiveDataParams& params,
+    const rtc::Buffer& payload) {
+  RTC_DCHECK(data_channel_type_ == cricket::DCT_SCTP);
+  if (params.type == cricket::DMT_CONTROL && IsOpenMessage(payload)) {
+    // Received OPEN message; parse and signal that a new data channel should
+    // be created.
+    std::string label;
+    InternalDataChannelInit config;
+    config.id = params.ssrc;
+    if (!ParseDataChannelOpenMessage(payload, &label, &config)) {
+      LOG(LS_WARNING) << "Failed to parse the OPEN message for sid "
+                      << params.ssrc;
+      return;
+    }
+    config.open_handshake_role = InternalDataChannelInit::kAcker;
+    SignalDataChannelOpenMessage(label, config);
+  }
+  // Otherwise ignore the message.
+}
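+
+// For reference, a sketch of the OPEN message layout as defined by the data
+// channel establishment protocol draft (ParseDataChannelOpenMessage in
+// sctputils.cc is the authoritative parser):
+//   | type (0x03) | channel type | priority | reliability parameter |
+//   | label length | protocol length | label | protocol |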
+
+// Returns false if bundle is enabled and rtcp_mux is disabled.
+bool WebRtcSession::ValidateBundleSettings(const SessionDescription* desc) {
+  bool bundle_enabled = desc->HasGroup(cricket::GROUP_TYPE_BUNDLE);
+  if (!bundle_enabled)
+    return true;
+
+  const cricket::ContentGroup* bundle_group =
+      desc->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
+  ASSERT(bundle_group != NULL);
+
+  const cricket::ContentInfos& contents = desc->contents();
+  for (cricket::ContentInfos::const_iterator citer = contents.begin();
+       citer != contents.end(); ++citer) {
+    const cricket::ContentInfo* content = (&*citer);
+    ASSERT(content != NULL);
+    if (bundle_group->HasContentName(content->name) &&
+        !content->rejected && content->type == cricket::NS_JINGLE_RTP) {
+      if (!HasRtcpMuxEnabled(content))
+        return false;
+    }
+  }
+  // RTCP-MUX is enabled in all the contents.
+  return true;
+}
+
+bool WebRtcSession::HasRtcpMuxEnabled(
+    const cricket::ContentInfo* content) {
+  const cricket::MediaContentDescription* description =
+      static_cast<cricket::MediaContentDescription*>(content->description);
+  return description->rtcp_mux();
+}
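+
+// Illustrative note: rtcp_mux() above corresponds to the presence of an
+// "a=rtcp-mux" attribute in the content's media section of the SDP.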
+
+bool WebRtcSession::ValidateSessionDescription(
+    const SessionDescriptionInterface* sdesc,
+    cricket::ContentSource source, std::string* err_desc) {
+  std::string type;
+  if (error() != ERROR_NONE) {
+    return BadSdp(source, type, GetSessionErrorMsg(), err_desc);
+  }
+
+  if (!sdesc || !sdesc->description()) {
+    return BadSdp(source, type, kInvalidSdp, err_desc);
+  }
+
+  type = sdesc->type();
+  Action action = GetAction(sdesc->type());
+  if (source == cricket::CS_LOCAL) {
+    if (!ExpectSetLocalDescription(action))
+      return BadLocalSdp(type, BadStateErrMsg(state()), err_desc);
+  } else {
+    if (!ExpectSetRemoteDescription(action))
+      return BadRemoteSdp(type, BadStateErrMsg(state()), err_desc);
+  }
+
+  // Verify crypto settings.
+  std::string crypto_error;
+  if ((webrtc_session_desc_factory_->SdesPolicy() == cricket::SEC_REQUIRED ||
+       dtls_enabled_) &&
+      !VerifyCrypto(sdesc->description(), dtls_enabled_, &crypto_error)) {
+    return BadSdp(source, type, crypto_error, err_desc);
+  }
+
+  // Verify ice-ufrag and ice-pwd.
+  if (!VerifyIceUfragPwdPresent(sdesc->description())) {
+    return BadSdp(source, type, kSdpWithoutIceUfragPwd, err_desc);
+  }
+
+  if (!ValidateBundleSettings(sdesc->description())) {
+    return BadSdp(source, type, kBundleWithoutRtcpMux, err_desc);
+  }
+
+  // Verify m-lines in Answer when compared against Offer.
+  if (action == kAnswer) {
+    const cricket::SessionDescription* offer_desc =
+        (source == cricket::CS_LOCAL) ? remote_desc_->description()
+                                      : local_desc_->description();
+    if (!VerifyMediaDescriptions(sdesc->description(), offer_desc)) {
+      return BadAnswerSdp(source, kMlineMismatch, err_desc);
+    }
+  }
+
+  return true;
+}
+
+bool WebRtcSession::ExpectSetLocalDescription(Action action) {
+  return ((action == kOffer && state() == STATE_INIT) ||
+          // update local offer
+          (action == kOffer && state() == STATE_SENTOFFER) ||
+          // update the current ongoing session.
+          (action == kOffer && state() == STATE_INPROGRESS) ||
+          // accept remote offer
+          (action == kAnswer && state() == STATE_RECEIVEDOFFER) ||
+          (action == kAnswer && state() == STATE_SENTPRANSWER) ||
+          (action == kPrAnswer && state() == STATE_RECEIVEDOFFER) ||
+          (action == kPrAnswer && state() == STATE_SENTPRANSWER));
+}
+
+bool WebRtcSession::ExpectSetRemoteDescription(Action action) {
+  return ((action == kOffer && state() == STATE_INIT) ||
+          // update remote offer
+          (action == kOffer && state() == STATE_RECEIVEDOFFER) ||
+          // update the current ongoing session
+          (action == kOffer && state() == STATE_INPROGRESS) ||
+          // accept local offer
+          (action == kAnswer && state() == STATE_SENTOFFER) ||
+          (action == kAnswer && state() == STATE_RECEIVEDPRANSWER) ||
+          (action == kPrAnswer && state() == STATE_SENTOFFER) ||
+          (action == kPrAnswer && state() == STATE_RECEIVEDPRANSWER));
+}
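+
+// Illustrative flow for the common case: the offerer calls
+// SetLocalDescription(kOffer) in STATE_INIT and accepts the answer via
+// SetRemoteDescription(kAnswer) in STATE_SENTOFFER; the answerer mirrors this
+// with SetRemoteDescription(kOffer) followed by SetLocalDescription(kAnswer).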
+
+std::string WebRtcSession::GetSessionErrorMsg() {
+  std::ostringstream desc;
+  desc << kSessionError << GetErrorCodeString(error()) << ". ";
+  desc << kSessionErrorDesc << error_desc() << ".";
+  return desc.str();
+}
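+
+// Example (assuming the usual values of kSessionError and kSessionErrorDesc):
+// GetSessionErrorMsg() yields a string of the form
+// "Session error code: ERROR_CONTENT. Session error description: <desc>.".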
+
+// We need to check the local/remote description for the Transport instead of
+// the session, because a new Transport added during renegotiation may have
+// them unset while the session has them set from the previous negotiation.
+// Not doing so may trigger auto-generation of the transport description and
+// mess up DTLS identity information, ICE credentials, etc.
+bool WebRtcSession::ReadyToUseRemoteCandidate(
+    const IceCandidateInterface* candidate,
+    const SessionDescriptionInterface* remote_desc,
+    bool* valid) {
+  *valid = true;
+
+  const SessionDescriptionInterface* current_remote_desc =
+      remote_desc ? remote_desc : remote_desc_.get();
+
+  if (!current_remote_desc) {
+    return false;
+  }
+
+  size_t mediacontent_index =
+      static_cast<size_t>(candidate->sdp_mline_index());
+  size_t remote_content_size =
+      current_remote_desc->description()->contents().size();
+  if (mediacontent_index >= remote_content_size) {
+    LOG(LS_ERROR)
+        << "ReadyToUseRemoteCandidate: Invalid candidate media index.";
+
+    *valid = false;
+    return false;
+  }
+
+  cricket::ContentInfo content =
+      current_remote_desc->description()->contents()[mediacontent_index];
+  cricket::BaseChannel* channel = GetChannel(content.name);
+  if (!channel) {
+    return false;
+  }
+
+  return transport_controller_->ReadyForRemoteCandidates(
+      channel->transport_name());
+}
+
+void WebRtcSession::OnTransportControllerGatheringState(
+    cricket::IceGatheringState state) {
+  ASSERT(signaling_thread()->IsCurrent());
+  if (state == cricket::kIceGatheringGathering) {
+    if (ice_observer_) {
+      ice_observer_->OnIceGatheringChange(
+          PeerConnectionInterface::kIceGatheringGathering);
+    }
+  } else if (state == cricket::kIceGatheringComplete) {
+    if (ice_observer_) {
+      ice_observer_->OnIceGatheringChange(
+          PeerConnectionInterface::kIceGatheringComplete);
+    }
+  }
+}
+
+void WebRtcSession::ReportTransportStats() {
+  // Use a set so we don't report the same stats twice if two channels share
+  // a transport.
+  std::set<std::string> transport_names;
+  if (voice_channel()) {
+    transport_names.insert(voice_channel()->transport_name());
+  }
+  if (video_channel()) {
+    transport_names.insert(video_channel()->transport_name());
+  }
+  if (data_channel()) {
+    transport_names.insert(data_channel()->transport_name());
+  }
+  for (const auto& name : transport_names) {
+    cricket::TransportStats stats;
+    if (transport_controller_->GetStats(name, &stats)) {
+      ReportBestConnectionState(stats);
+      ReportNegotiatedCiphers(stats);
+    }
+  }
+}
+
+// Walk through the ConnectionInfos to gather best connection usage
+// for IPv4 and IPv6.
+void WebRtcSession::ReportBestConnectionState(
+    const cricket::TransportStats& stats) {
+  RTC_DCHECK(metrics_observer_ != NULL);
+  for (cricket::TransportChannelStatsList::const_iterator it =
+         stats.channel_stats.begin();
+       it != stats.channel_stats.end(); ++it) {
+    for (cricket::ConnectionInfos::const_iterator it_info =
+           it->connection_infos.begin();
+         it_info != it->connection_infos.end(); ++it_info) {
+      if (!it_info->best_connection) {
+        continue;
+      }
+
+      PeerConnectionEnumCounterType type = kPeerConnectionEnumCounterMax;
+      const cricket::Candidate& local = it_info->local_candidate;
+      const cricket::Candidate& remote = it_info->remote_candidate;
+
+      // Increment the counter for IceCandidatePairType.
+      if (local.protocol() == cricket::TCP_PROTOCOL_NAME ||
+          (local.type() == RELAY_PORT_TYPE &&
+           local.relay_protocol() == cricket::TCP_PROTOCOL_NAME)) {
+        type = kEnumCounterIceCandidatePairTypeTcp;
+      } else if (local.protocol() == cricket::UDP_PROTOCOL_NAME) {
+        type = kEnumCounterIceCandidatePairTypeUdp;
+      } else {
+        RTC_CHECK(0);
+      }
+      metrics_observer_->IncrementEnumCounter(
+          type, GetIceCandidatePairCounter(local, remote),
+          kIceCandidatePairMax);
+
+      // Increment the counter for IP type.
+      if (local.address().family() == AF_INET) {
+        metrics_observer_->IncrementEnumCounter(
+            kEnumCounterAddressFamily, kBestConnections_IPv4,
+            kPeerConnectionAddressFamilyCounter_Max);
+
+      } else if (local.address().family() == AF_INET6) {
+        metrics_observer_->IncrementEnumCounter(
+            kEnumCounterAddressFamily, kBestConnections_IPv6,
+            kPeerConnectionAddressFamilyCounter_Max);
+      } else {
+        RTC_CHECK(0);
+      }
+
+      return;
+    }
+  }
+}
+
+void WebRtcSession::ReportNegotiatedCiphers(
+    const cricket::TransportStats& stats) {
+  RTC_DCHECK(metrics_observer_ != NULL);
+  if (!dtls_enabled_ || stats.channel_stats.empty()) {
+    return;
+  }
+
+  int srtp_crypto_suite = stats.channel_stats[0].srtp_crypto_suite;
+  int ssl_cipher_suite = stats.channel_stats[0].ssl_cipher_suite;
+  if (srtp_crypto_suite == rtc::SRTP_INVALID_CRYPTO_SUITE &&
+      ssl_cipher_suite == rtc::TLS_NULL_WITH_NULL_NULL) {
+    return;
+  }
+
+  PeerConnectionEnumCounterType srtp_counter_type;
+  PeerConnectionEnumCounterType ssl_counter_type;
+  if (stats.transport_name == cricket::CN_AUDIO) {
+    srtp_counter_type = kEnumCounterAudioSrtpCipher;
+    ssl_counter_type = kEnumCounterAudioSslCipher;
+  } else if (stats.transport_name == cricket::CN_VIDEO) {
+    srtp_counter_type = kEnumCounterVideoSrtpCipher;
+    ssl_counter_type = kEnumCounterVideoSslCipher;
+  } else if (stats.transport_name == cricket::CN_DATA) {
+    srtp_counter_type = kEnumCounterDataSrtpCipher;
+    ssl_counter_type = kEnumCounterDataSslCipher;
+  } else {
+    RTC_NOTREACHED();
+    return;
+  }
+
+  if (srtp_crypto_suite != rtc::SRTP_INVALID_CRYPTO_SUITE) {
+    metrics_observer_->IncrementSparseEnumCounter(srtp_counter_type,
+                                                  srtp_crypto_suite);
+  }
+  if (ssl_cipher_suite != rtc::TLS_NULL_WITH_NULL_NULL) {
+    metrics_observer_->IncrementSparseEnumCounter(ssl_counter_type,
+                                                  ssl_cipher_suite);
+  }
+}
+
+void WebRtcSession::OnSentPacket_w(cricket::TransportChannel* channel,
+                                   const rtc::SentPacket& sent_packet) {
+  RTC_DCHECK(worker_thread()->IsCurrent());
+  media_controller_->call_w()->OnSentPacket(sent_packet);
+}
+
+}  // namespace webrtc
diff --git a/webrtc/api/webrtcsession.h b/webrtc/api/webrtcsession.h
new file mode 100644
index 0000000..0632fe2
--- /dev/null
+++ b/webrtc/api/webrtcsession.h
@@ -0,0 +1,522 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_WEBRTCSESSION_H_
+#define WEBRTC_API_WEBRTCSESSION_H_
+
+#include <string>
+#include <vector>
+
+#include "talk/session/media/mediasession.h"
+#include "webrtc/api/datachannel.h"
+#include "webrtc/api/dtmfsender.h"
+#include "webrtc/api/mediacontroller.h"
+#include "webrtc/api/mediastreamprovider.h"
+#include "webrtc/api/peerconnectioninterface.h"
+#include "webrtc/api/statstypes.h"
+#include "webrtc/base/sigslot.h"
+#include "webrtc/base/sslidentity.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/media/base/mediachannel.h"
+#include "webrtc/p2p/base/transportcontroller.h"
+
+namespace cricket {
+
+class ChannelManager;
+class DataChannel;
+class StatsReport;
+class VideoCapturer;
+class VideoChannel;
+class VoiceChannel;
+
+}  // namespace cricket
+
+namespace webrtc {
+
+class IceRestartAnswerLatch;
+class JsepIceCandidate;
+class MediaStreamSignaling;
+class WebRtcSessionDescriptionFactory;
+
+extern const char kBundleWithoutRtcpMux[];
+extern const char kCreateChannelFailed[];
+extern const char kInvalidCandidates[];
+extern const char kInvalidSdp[];
+extern const char kMlineMismatch[];
+extern const char kPushDownTDFailed[];
+extern const char kSdpWithoutDtlsFingerprint[];
+extern const char kSdpWithoutSdesCrypto[];
+extern const char kSdpWithoutIceUfragPwd[];
+extern const char kSdpWithoutSdesAndDtlsDisabled[];
+extern const char kSessionError[];
+extern const char kSessionErrorDesc[];
+extern const char kDtlsSetupFailureRtp[];
+extern const char kDtlsSetupFailureRtcp[];
+extern const char kEnableBundleFailed[];
+
+// Maximum number of received video streams that will be processed by webrtc
+// even if they are not signalled beforehand.
+extern const int kMaxUnsignalledRecvStreams;
+
+// ICE state callback interface.
+class IceObserver {
+ public:
+  IceObserver() {}
+  // Called any time the IceConnectionState changes
+  // TODO(honghaiz): Change the name to OnIceConnectionStateChange so as to
+  // conform to the w3c standard.
+  virtual void OnIceConnectionChange(
+      PeerConnectionInterface::IceConnectionState new_state) {}
+  // Called any time the IceGatheringState changes
+  virtual void OnIceGatheringChange(
+      PeerConnectionInterface::IceGatheringState new_state) {}
+  // Called when a new ICE candidate has been found.
+  virtual void OnIceCandidate(const IceCandidateInterface* candidate) = 0;
+
+  // Called whenever the state changes between receiving and not receiving.
+  virtual void OnIceConnectionReceivingChange(bool receiving) {}
+
+ protected:
+  ~IceObserver() {}
+
+ private:
+  RTC_DISALLOW_COPY_AND_ASSIGN(IceObserver);
+};
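+
+// A minimal observer sketch (illustrative only; the class name is
+// hypothetical):
+//
+//   class MyIceObserver : public IceObserver {
+//    public:
+//     void OnIceCandidate(const IceCandidateInterface* candidate) override {
+//       // Serialize the candidate (e.g. with candidate->ToString()) and
+//       // signal it to the remote peer. This is the only pure virtual method.
+//     }
+//   };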
+
+// Statistics for all the transports of the session.
+typedef std::map<std::string, cricket::TransportStats> TransportStatsMap;
+typedef std::map<std::string, std::string> ProxyTransportMap;
+
+// TODO(pthatcher): Think of a better name for this.  We already have
+// a TransportStats in transport.h.  Perhaps TransportsStats?
+struct SessionStats {
+  ProxyTransportMap proxy_to_transport;
+  TransportStatsMap transport_stats;
+};
+
+// A WebRtcSession manages general session state. This includes negotiation
+// of both the application-level and network-level protocols:  the former
+// defines what will be sent and the latter defines how it will be sent.  Each
+// network-level protocol is represented by a Transport object.  Each Transport
+// participates in the network-level negotiation.  The individual streams of
+// packets are represented by TransportChannels.  The application-level protocol
+// is represented by SessionDescription objects.
+class WebRtcSession : public AudioProviderInterface,
+                      public VideoProviderInterface,
+                      public DtmfProviderInterface,
+                      public DataChannelProviderInterface,
+                      public sigslot::has_slots<> {
+ public:
+  enum State {
+    STATE_INIT = 0,
+    STATE_SENTOFFER,         // Sent offer, waiting for answer.
+    STATE_RECEIVEDOFFER,     // Received an offer. Need to send answer.
+    STATE_SENTPRANSWER,      // Sent provisional answer. Need to send answer.
+    STATE_RECEIVEDPRANSWER,  // Received provisional answer, waiting for answer.
+    STATE_INPROGRESS,        // Offer/answer exchange completed.
+    STATE_CLOSED,            // Close() was called.
+  };
+
+  enum Error {
+    ERROR_NONE = 0,       // no error
+    ERROR_CONTENT = 1,    // channel errors in SetLocalContent/SetRemoteContent
+    ERROR_TRANSPORT = 2,  // transport error of some kind
+  };
+
+  WebRtcSession(webrtc::MediaControllerInterface* media_controller,
+                rtc::Thread* signaling_thread,
+                rtc::Thread* worker_thread,
+                cricket::PortAllocator* port_allocator);
+  virtual ~WebRtcSession();
+
+  // These are const to allow them to be called from const methods.
+  rtc::Thread* signaling_thread() const { return signaling_thread_; }
+  rtc::Thread* worker_thread() const { return worker_thread_; }
+  cricket::PortAllocator* port_allocator() const { return port_allocator_; }
+
+  // The ID of this session.
+  const std::string& id() const { return sid_; }
+
+  bool Initialize(
+      const PeerConnectionFactoryInterface::Options& options,
+      const MediaConstraintsInterface* constraints,
+      rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+      const PeerConnectionInterface::RTCConfiguration& rtc_configuration);
+  // Deletes the voice, video and data channel and changes the session state
+  // to STATE_CLOSED.
+  void Close();
+
+  // Returns true if we were the initial offerer.
+  bool initial_offerer() const { return initial_offerer_; }
+
+  // Returns the current state of the session. See the enum above for details.
+  // Each time the state changes, we will fire this signal.
+  State state() const { return state_; }
+  sigslot::signal2<WebRtcSession*, State> SignalState;
+
+  // Returns the last error in the session. See the enum above for details.
+  Error error() const { return error_; }
+  const std::string& error_desc() const { return error_desc_; }
+
+  void RegisterIceObserver(IceObserver* observer) {
+    ice_observer_ = observer;
+  }
+
+  virtual cricket::VoiceChannel* voice_channel() {
+    return voice_channel_.get();
+  }
+  virtual cricket::VideoChannel* video_channel() {
+    return video_channel_.get();
+  }
+  virtual cricket::DataChannel* data_channel() {
+    return data_channel_.get();
+  }
+
+  void SetSdesPolicy(cricket::SecurePolicy secure_policy);
+  cricket::SecurePolicy SdesPolicy() const;
+
+  // Get the current SSL role from the transport.
+  bool GetSslRole(const std::string& transport_name, rtc::SSLRole* role);
+
+  // Get current SSL role for this channel's transport.
+  // Returns false if the channel's transport is null.
+  bool GetSslRole(const cricket::BaseChannel* channel, rtc::SSLRole* role);
+
+  void CreateOffer(
+      CreateSessionDescriptionObserver* observer,
+      const PeerConnectionInterface::RTCOfferAnswerOptions& options,
+      const cricket::MediaSessionOptions& session_options);
+  void CreateAnswer(CreateSessionDescriptionObserver* observer,
+                    const MediaConstraintsInterface* constraints,
+                    const cricket::MediaSessionOptions& session_options);
+  // The ownership of |desc| will be transferred after this call.
+  bool SetLocalDescription(SessionDescriptionInterface* desc,
+                           std::string* err_desc);
+  // The ownership of |desc| will be transferred after this call.
+  bool SetRemoteDescription(SessionDescriptionInterface* desc,
+                            std::string* err_desc);
+  bool ProcessIceMessage(const IceCandidateInterface* ice_candidate);
+
+  bool SetIceTransports(PeerConnectionInterface::IceTransportsType type);
+
+  cricket::IceConfig ParseIceConfig(
+      const PeerConnectionInterface::RTCConfiguration& config) const;
+
+  void SetIceConfig(const cricket::IceConfig& ice_config);
+
+  // Start gathering candidates for any new transports, or transports doing an
+  // ICE restart.
+  void MaybeStartGathering();
+
+  const SessionDescriptionInterface* local_description() const {
+    return local_desc_.get();
+  }
+  const SessionDescriptionInterface* remote_description() const {
+    return remote_desc_.get();
+  }
+
+  // Get the id used as a media stream track's "id" field from ssrc.
+  virtual bool GetLocalTrackIdBySsrc(uint32_t ssrc, std::string* track_id);
+  virtual bool GetRemoteTrackIdBySsrc(uint32_t ssrc, std::string* track_id);
+
+  // AudioMediaProviderInterface implementation.
+  void SetAudioPlayout(uint32_t ssrc, bool enable) override;
+  void SetAudioSend(uint32_t ssrc,
+                    bool enable,
+                    const cricket::AudioOptions& options,
+                    cricket::AudioRenderer* renderer) override;
+  void SetAudioPlayoutVolume(uint32_t ssrc, double volume) override;
+  void SetRawAudioSink(uint32_t ssrc,
+                       rtc::scoped_ptr<AudioSinkInterface> sink) override;
+
+  // Implements VideoMediaProviderInterface.
+  bool SetCaptureDevice(uint32_t ssrc, cricket::VideoCapturer* camera) override;
+  void SetVideoPlayout(
+      uint32_t ssrc,
+      bool enable,
+      rtc::VideoSinkInterface<cricket::VideoFrame>* sink) override;
+  void SetVideoSend(uint32_t ssrc,
+                    bool enable,
+                    const cricket::VideoOptions* options) override;
+
+  // Implements DtmfProviderInterface.
+  virtual bool CanInsertDtmf(const std::string& track_id);
+  virtual bool InsertDtmf(const std::string& track_id,
+                          int code, int duration);
+  virtual sigslot::signal0<>* GetOnDestroyedSignal();
+
+  // Implements DataChannelProviderInterface.
+  bool SendData(const cricket::SendDataParams& params,
+                const rtc::Buffer& payload,
+                cricket::SendDataResult* result) override;
+  bool ConnectDataChannel(DataChannel* webrtc_data_channel) override;
+  void DisconnectDataChannel(DataChannel* webrtc_data_channel) override;
+  void AddSctpDataStream(int sid) override;
+  void RemoveSctpDataStream(int sid) override;
+  bool ReadyToSendData() const override;
+
+  // Returns stats for all channels of all transports.
+  // This avoids exposing the internal structures used to track them.
+  virtual bool GetTransportStats(SessionStats* stats);
+
+  // Get stats for a specific channel
+  bool GetChannelTransportStats(cricket::BaseChannel* ch, SessionStats* stats);
+
+  // virtual so it can be mocked in unit tests
+  virtual bool GetLocalCertificate(
+      const std::string& transport_name,
+      rtc::scoped_refptr<rtc::RTCCertificate>* certificate);
+
+  // Caller owns returned certificate
+  virtual bool GetRemoteSSLCertificate(const std::string& transport_name,
+                                       rtc::SSLCertificate** cert);
+
+  cricket::DataChannelType data_channel_type() const;
+
+  bool IceRestartPending() const;
+
+  void ResetIceRestartLatch();
+
+  // Called when an RTCCertificate is generated or retrieved by
+  // WebRTCSessionDescriptionFactory. Should happen before setLocalDescription.
+  void OnCertificateReady(
+      const rtc::scoped_refptr<rtc::RTCCertificate>& certificate);
+  void OnDtlsSetupFailure(cricket::BaseChannel*, bool rtcp);
+
+  // For unit test.
+  bool waiting_for_certificate_for_testing() const;
+  const rtc::scoped_refptr<rtc::RTCCertificate>& certificate_for_testing();
+
+  void set_metrics_observer(
+      webrtc::MetricsObserverInterface* metrics_observer) {
+    metrics_observer_ = metrics_observer;
+  }
+
+  // Called when voice_channel_, video_channel_ and data_channel_ are created
+  // or destroyed, for example as a result of setting a new description.
+  sigslot::signal0<> SignalVoiceChannelCreated;
+  sigslot::signal0<> SignalVoiceChannelDestroyed;
+  sigslot::signal0<> SignalVideoChannelCreated;
+  sigslot::signal0<> SignalVideoChannelDestroyed;
+  sigslot::signal0<> SignalDataChannelCreated;
+  sigslot::signal0<> SignalDataChannelDestroyed;
+  // Called when the whole session is destroyed.
+  sigslot::signal0<> SignalDestroyed;
+
+  // Called when a valid data channel OPEN message is received.
+  // std::string represents the data channel label.
+  sigslot::signal2<const std::string&, const InternalDataChannelInit&>
+      SignalDataChannelOpenMessage;
+
+ private:
+  // Indicates the type of SessionDescription in a call to SetLocalDescription
+  // and SetRemoteDescription.
+  enum Action {
+    kOffer,
+    kPrAnswer,
+    kAnswer,
+  };
+
+  // Log session state.
+  void LogState(State old_state, State new_state);
+
+  // Updates the state, signaling if necessary.
+  virtual void SetState(State state);
+
+  // Updates the error state, signaling if necessary.
+  // TODO(ronghuawu): remove the SetError method that doesn't take |error_desc|.
+  virtual void SetError(Error error, const std::string& error_desc);
+
+  bool UpdateSessionState(Action action, cricket::ContentSource source,
+                          std::string* err_desc);
+  static Action GetAction(const std::string& type);
+  // Push the media parts of the local or remote session description
+  // down to all of the channels.
+  bool PushdownMediaDescription(cricket::ContentAction action,
+                                cricket::ContentSource source,
+                                std::string* error_desc);
+
+  bool PushdownTransportDescription(cricket::ContentSource source,
+                                    cricket::ContentAction action,
+                                    std::string* error_desc);
+
+  // Helper methods to push local and remote transport descriptions.
+  bool PushdownLocalTransportDescription(
+      const cricket::SessionDescription* sdesc,
+      cricket::ContentAction action,
+      std::string* error_desc);
+  bool PushdownRemoteTransportDescription(
+      const cricket::SessionDescription* sdesc,
+      cricket::ContentAction action,
+      std::string* error_desc);
+
+  // Returns true and the TransportInfo of the given |content_name|
+  // from |description|. Returns false if it's not available.
+  static bool GetTransportDescription(
+      const cricket::SessionDescription* description,
+      const std::string& content_name,
+      cricket::TransportDescription* info);
+
+  cricket::BaseChannel* GetChannel(const std::string& content_name);
+  // Cause all the BaseChannels in the bundle group to have the same
+  // transport channel.
+  bool EnableBundle(const cricket::ContentGroup& bundle);
+
+  // Enables media channels to allow sending of media.
+  void EnableChannels();
+  // Returns the media index for a local ice candidate given the content name.
+  // Returns false if the local session description does not have a media
+  // content called |content_name|.
+  bool GetLocalCandidateMediaIndex(const std::string& content_name,
+                                   int* sdp_mline_index);
+  // Uses all remote candidates in |remote_desc| in this session.
+  bool UseCandidatesInSessionDescription(
+      const SessionDescriptionInterface* remote_desc);
+  // Uses |candidate| in this session.
+  bool UseCandidate(const IceCandidateInterface* candidate);
+  // Deletes the corresponding channel of contents that don't exist in |desc|.
+  // |desc| can be null. This means that all channels are deleted.
+  void RemoveUnusedChannels(const cricket::SessionDescription* desc);
+
+  // Allocates media channels based on the |desc|. If |desc| doesn't have
+  // the BUNDLE option, this method will disable BUNDLE in PortAllocator.
+  // This method will also delete any existing media channels before creating.
+  bool CreateChannels(const cricket::SessionDescription* desc);
+
+  // Helper methods to create media channels.
+  bool CreateVoiceChannel(const cricket::ContentInfo* content);
+  bool CreateVideoChannel(const cricket::ContentInfo* content);
+  bool CreateDataChannel(const cricket::ContentInfo* content);
+
+  // Listens to SCTP CONTROL messages on unused SIDs and process them as OPEN
+  // messages.
+  void OnDataChannelMessageReceived(cricket::DataChannel* channel,
+                                    const cricket::ReceiveDataParams& params,
+                                    const rtc::Buffer& payload);
+
+  std::string BadStateErrMsg(State state);
+  void SetIceConnectionState(PeerConnectionInterface::IceConnectionState state);
+  void SetIceConnectionReceiving(bool receiving);
+
+  bool ValidateBundleSettings(const cricket::SessionDescription* desc);
+  bool HasRtcpMuxEnabled(const cricket::ContentInfo* content);
+  // Below methods are helper methods which verifies SDP.
+  bool ValidateSessionDescription(const SessionDescriptionInterface* sdesc,
+                                  cricket::ContentSource source,
+                                  std::string* err_desc);
+
+  // Check if a call to SetLocalDescription is acceptable with |action|.
+  bool ExpectSetLocalDescription(Action action);
+  // Check if a call to SetRemoteDescription is acceptable with |action|.
+  bool ExpectSetRemoteDescription(Action action);
+  // Verifies a=setup attribute as per RFC 5763.
+  bool ValidateDtlsSetupAttribute(const cricket::SessionDescription* desc,
+                                  Action action);
+
+  // Returns true if we are ready to push down the remote candidate.
+  // |remote_desc| is the new remote description, or NULL if the current remote
+  // description should be used. Output |valid| is true if the candidate media
+  // index is valid.
+  bool ReadyToUseRemoteCandidate(const IceCandidateInterface* candidate,
+                                 const SessionDescriptionInterface* remote_desc,
+                                 bool* valid);
+
+  void OnTransportControllerConnectionState(cricket::IceConnectionState state);
+  void OnTransportControllerReceiving(bool receiving);
+  void OnTransportControllerGatheringState(cricket::IceGatheringState state);
+  void OnTransportControllerCandidatesGathered(
+      const std::string& transport_name,
+      const cricket::Candidates& candidates);
+
+  std::string GetSessionErrorMsg();
+
+  // Invoked when TransportController connection completion is signaled.
+  // Reports stats for all transports in use.
+  void ReportTransportStats();
+
+  // Gather the usage of IPv4/IPv6 as best connection.
+  void ReportBestConnectionState(const cricket::TransportStats& stats);
+
+  void ReportNegotiatedCiphers(const cricket::TransportStats& stats);
+
+  void OnSentPacket_w(cricket::TransportChannel* channel,
+                      const rtc::SentPacket& sent_packet);
+
+  rtc::Thread* const signaling_thread_;
+  rtc::Thread* const worker_thread_;
+  cricket::PortAllocator* const port_allocator_;
+
+  State state_ = STATE_INIT;
+  Error error_ = ERROR_NONE;
+  std::string error_desc_;
+
+  const std::string sid_;
+  bool initial_offerer_ = false;
+
+  rtc::scoped_ptr<cricket::TransportController> transport_controller_;
+  MediaControllerInterface* media_controller_;
+  rtc::scoped_ptr<cricket::VoiceChannel> voice_channel_;
+  rtc::scoped_ptr<cricket::VideoChannel> video_channel_;
+  rtc::scoped_ptr<cricket::DataChannel> data_channel_;
+  cricket::ChannelManager* channel_manager_;
+  IceObserver* ice_observer_;
+  PeerConnectionInterface::IceConnectionState ice_connection_state_;
+  bool ice_connection_receiving_;
+  rtc::scoped_ptr<SessionDescriptionInterface> local_desc_;
+  rtc::scoped_ptr<SessionDescriptionInterface> remote_desc_;
+  // Whether the remote peer is using an older version of the implementation.
+  bool older_version_remote_peer_;
+  bool dtls_enabled_;
+  // Specifies which kind of data channel is allowed. This is controlled
+  // by the chrome command-line flag and constraints:
+  // 1. If chrome command-line switch 'enable-sctp-data-channels' is enabled,
+  // constraint kEnableDtlsSrtp is true, and constraint kEnableRtpDataChannels
+  // is not set or false, SCTP is allowed (DCT_SCTP);
+  // 2. If constraint kEnableRtpDataChannels is true, RTP is allowed (DCT_RTP);
+  // 3. If both 1&2 are false, data channel is not allowed (DCT_NONE).
+  cricket::DataChannelType data_channel_type_;
+  rtc::scoped_ptr<IceRestartAnswerLatch> ice_restart_latch_;
+
+  rtc::scoped_ptr<WebRtcSessionDescriptionFactory>
+      webrtc_session_desc_factory_;
+
+  // Member variables for caching global options.
+  cricket::AudioOptions audio_options_;
+  cricket::VideoOptions video_options_;
+  MetricsObserverInterface* metrics_observer_;
+
+  // Declares the bundle policy for the WebRTCSession.
+  PeerConnectionInterface::BundlePolicy bundle_policy_;
+
+  // Declares the RTCP mux policy for the WebRTCSession.
+  PeerConnectionInterface::RtcpMuxPolicy rtcp_mux_policy_;
+
+  RTC_DISALLOW_COPY_AND_ASSIGN(WebRtcSession);
+};
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_WEBRTCSESSION_H_
diff --git a/webrtc/api/webrtcsession_unittest.cc b/webrtc/api/webrtcsession_unittest.cc
new file mode 100644
index 0000000..250e415
--- /dev/null
+++ b/webrtc/api/webrtcsession_unittest.cc
@@ -0,0 +1,4302 @@
+/*
+ * libjingle
+ * Copyright 2012 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <utility>
+#include <vector>
+
+#include "talk/session/media/channelmanager.h"
+#include "talk/session/media/mediasession.h"
+#include "webrtc/api/audiotrack.h"
+#include "webrtc/api/fakemediacontroller.h"
+#include "webrtc/api/fakemetricsobserver.h"
+#include "webrtc/api/jsepicecandidate.h"
+#include "webrtc/api/jsepsessiondescription.h"
+#include "webrtc/api/peerconnection.h"
+#include "webrtc/api/sctputils.h"
+#include "webrtc/api/streamcollection.h"
+#include "webrtc/api/streamcollection.h"
+#include "webrtc/api/test/fakeconstraints.h"
+#include "webrtc/api/test/fakedtlsidentitystore.h"
+#include "webrtc/api/videotrack.h"
+#include "webrtc/api/webrtcsession.h"
+#include "webrtc/api/webrtcsessiondescriptionfactory.h"
+#include "webrtc/base/fakenetwork.h"
+#include "webrtc/base/firewallsocketserver.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/network.h"
+#include "webrtc/base/physicalsocketserver.h"
+#include "webrtc/base/ssladapter.h"
+#include "webrtc/base/sslidentity.h"
+#include "webrtc/base/sslstreamadapter.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/virtualsocketserver.h"
+#include "webrtc/media/base/fakemediaengine.h"
+#include "webrtc/media/base/fakevideorenderer.h"
+#include "webrtc/media/base/mediachannel.h"
+#include "webrtc/media/webrtc/fakewebrtccall.h"
+#include "webrtc/p2p/base/stunserver.h"
+#include "webrtc/p2p/base/teststunserver.h"
+#include "webrtc/p2p/base/testturnserver.h"
+#include "webrtc/p2p/base/transportchannel.h"
+#include "webrtc/p2p/client/basicportallocator.h"
+
+#define MAYBE_SKIP_TEST(feature)                    \
+  if (!(feature())) {                               \
+    LOG(LS_INFO) << "Feature disabled... skipping"; \
+    return;                                         \
+  }
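+
+// Typical use (illustrative):
+//   MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp)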
+
+using cricket::FakeVoiceMediaChannel;
+using cricket::TransportInfo;
+using rtc::SocketAddress;
+using rtc::scoped_ptr;
+using rtc::Thread;
+using webrtc::CreateSessionDescription;
+using webrtc::CreateSessionDescriptionObserver;
+using webrtc::CreateSessionDescriptionRequest;
+using webrtc::DataChannel;
+using webrtc::DtlsIdentityStoreInterface;
+using webrtc::FakeConstraints;
+using webrtc::FakeMetricsObserver;
+using webrtc::IceCandidateCollection;
+using webrtc::InternalDataChannelInit;
+using webrtc::JsepIceCandidate;
+using webrtc::JsepSessionDescription;
+using webrtc::PeerConnectionFactoryInterface;
+using webrtc::PeerConnectionInterface;
+using webrtc::SessionDescriptionInterface;
+using webrtc::SessionStats;
+using webrtc::StreamCollection;
+using webrtc::WebRtcSession;
+using webrtc::kBundleWithoutRtcpMux;
+using webrtc::kCreateChannelFailed;
+using webrtc::kInvalidSdp;
+using webrtc::kMlineMismatch;
+using webrtc::kPushDownTDFailed;
+using webrtc::kSdpWithoutIceUfragPwd;
+using webrtc::kSdpWithoutDtlsFingerprint;
+using webrtc::kSdpWithoutSdesCrypto;
+using webrtc::kSessionError;
+using webrtc::kSessionErrorDesc;
+using webrtc::kMaxUnsignalledRecvStreams;
+
+typedef PeerConnectionInterface::RTCOfferAnswerOptions RTCOfferAnswerOptions;
+
+static const int kClientAddrPort = 0;
+static const char kClientAddrHost1[] = "11.11.11.11";
+static const char kClientIPv6AddrHost1[] =
+    "2620:0:aaaa:bbbb:cccc:dddd:eeee:ffff";
+static const char kClientAddrHost2[] = "22.22.22.22";
+static const char kStunAddrHost[] = "99.99.99.1";
+static const SocketAddress kTurnUdpIntAddr("99.99.99.4", 3478);
+static const SocketAddress kTurnUdpExtAddr("99.99.99.6", 0);
+static const char kTurnUsername[] = "test";
+static const char kTurnPassword[] = "test";
+
+static const char kSessionVersion[] = "1";
+
+// Media index of candidates belonging to the first media content.
+static const int kMediaContentIndex0 = 0;
+static const char kMediaContentName0[] = "audio";
+
+// Media index of candidates belonging to the second media content.
+static const int kMediaContentIndex1 = 1;
+static const char kMediaContentName1[] = "video";
+
+static const int kIceCandidatesTimeout = 10000;
+
+static const char kFakeDtlsFingerprint[] =
+    "BB:CD:72:F7:2F:D0:BA:43:F3:68:B1:0C:23:72:B6:4A:"
+    "0F:DE:34:06:BC:E0:FE:01:BC:73:C8:6D:F4:65:D5:24";
+
+static const char kTooLongIceUfragPwd[] =
+    "IceUfragIceUfragIceUfragIceUfragIceUfragIceUfragIceUfragIceUfragIceUfrag"
+    "IceUfragIceUfragIceUfragIceUfragIceUfragIceUfragIceUfragIceUfragIceUfrag"
+    "IceUfragIceUfragIceUfragIceUfragIceUfragIceUfragIceUfragIceUfragIceUfrag"
+    "IceUfragIceUfragIceUfragIceUfragIceUfragIceUfragIceUfragIceUfragIceUfrag";
+
+static const char kSdpWithRtx[] =
+    "v=0\r\n"
+    "o=- 4104004319237231850 2 IN IP4 127.0.0.1\r\n"
+    "s=-\r\n"
+    "t=0 0\r\n"
+    "a=msid-semantic: WMS stream1\r\n"
+    "m=video 9 RTP/SAVPF 0 96\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
+    "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+    "a=ice-ufrag:CerjGp19G7wpXwl7\r\n"
+    "a=ice-pwd:cMvOlFvQ6ochez1ZOoC2uBEC\r\n"
+    "a=mid:video\r\n"
+    "a=sendrecv\r\n"
+    "a=rtcp-mux\r\n"
+    "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+    "inline:5/4N5CDvMiyDArHtBByUM71VIkguH17ZNoX60GrA\r\n"
+    "a=rtpmap:0 fake_video_codec/90000\r\n"
+    "a=rtpmap:96 rtx/90000\r\n"
+    "a=fmtp:96 apt=0\r\n";
+
+static const char kStream1[] = "stream1";
+static const char kVideoTrack1[] = "video1";
+static const char kAudioTrack1[] = "audio1";
+
+static const char kStream2[] = "stream2";
+static const char kVideoTrack2[] = "video2";
+static const char kAudioTrack2[] = "audio2";
+
+enum RTCCertificateGenerationMethod { ALREADY_GENERATED, DTLS_IDENTITY_STORE };
+
+class MockIceObserver : public webrtc::IceObserver {
+ public:
+  MockIceObserver()
+      : oncandidatesready_(false),
+        ice_connection_state_(PeerConnectionInterface::kIceConnectionNew),
+        ice_gathering_state_(PeerConnectionInterface::kIceGatheringNew) {
+  }
+
+  void OnIceConnectionChange(
+      PeerConnectionInterface::IceConnectionState new_state) override {
+    ice_connection_state_ = new_state;
+  }
+  void OnIceGatheringChange(
+      PeerConnectionInterface::IceGatheringState new_state) override {
+    // We can never transition back to "new".
+    EXPECT_NE(PeerConnectionInterface::kIceGatheringNew, new_state);
+    ice_gathering_state_ = new_state;
+    oncandidatesready_ =
+        new_state == PeerConnectionInterface::kIceGatheringComplete;
+  }
+
+  // Found a new candidate.
+  void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override {
+    switch (candidate->sdp_mline_index()) {
+      case kMediaContentIndex0:
+        mline_0_candidates_.push_back(candidate->candidate());
+        break;
+      case kMediaContentIndex1:
+        mline_1_candidates_.push_back(candidate->candidate());
+        break;
+      default:
+        ASSERT(false);
+    }
+
+    // The ICE gathering state should always be Gathering when a candidate is
+    // received (or possibly Completed in the case of the final candidate).
+    EXPECT_NE(PeerConnectionInterface::kIceGatheringNew, ice_gathering_state_);
+  }
+
+  bool oncandidatesready_;
+  std::vector<cricket::Candidate> mline_0_candidates_;
+  std::vector<cricket::Candidate> mline_1_candidates_;
+  PeerConnectionInterface::IceConnectionState ice_connection_state_;
+  PeerConnectionInterface::IceGatheringState ice_gathering_state_;
+};
+
+class WebRtcSessionForTest : public webrtc::WebRtcSession {
+ public:
+  WebRtcSessionForTest(webrtc::MediaControllerInterface* media_controller,
+                       rtc::Thread* signaling_thread,
+                       rtc::Thread* worker_thread,
+                       cricket::PortAllocator* port_allocator,
+                       webrtc::IceObserver* ice_observer)
+      : WebRtcSession(media_controller,
+                      signaling_thread,
+                      worker_thread,
+                      port_allocator) {
+    RegisterIceObserver(ice_observer);
+  }
+  virtual ~WebRtcSessionForTest() {}
+
+  // Note that these methods are only safe to use if the signaling thread
+  // is the same as the worker thread.
+  cricket::TransportChannel* voice_rtp_transport_channel() {
+    return rtp_transport_channel(voice_channel());
+  }
+
+  cricket::TransportChannel* voice_rtcp_transport_channel() {
+    return rtcp_transport_channel(voice_channel());
+  }
+
+  cricket::TransportChannel* video_rtp_transport_channel() {
+    return rtp_transport_channel(video_channel());
+  }
+
+  cricket::TransportChannel* video_rtcp_transport_channel() {
+    return rtcp_transport_channel(video_channel());
+  }
+
+  cricket::TransportChannel* data_rtp_transport_channel() {
+    return rtp_transport_channel(data_channel());
+  }
+
+  cricket::TransportChannel* data_rtcp_transport_channel() {
+    return rtcp_transport_channel(data_channel());
+  }
+
+  using webrtc::WebRtcSession::SetAudioPlayout;
+  using webrtc::WebRtcSession::SetAudioSend;
+  using webrtc::WebRtcSession::SetCaptureDevice;
+  using webrtc::WebRtcSession::SetVideoPlayout;
+  using webrtc::WebRtcSession::SetVideoSend;
+
+ private:
+  cricket::TransportChannel* rtp_transport_channel(cricket::BaseChannel* ch) {
+    if (!ch) {
+      return nullptr;
+    }
+    return ch->transport_channel();
+  }
+
+  cricket::TransportChannel* rtcp_transport_channel(cricket::BaseChannel* ch) {
+    if (!ch) {
+      return nullptr;
+    }
+    return ch->rtcp_transport_channel();
+  }
+};
+
+class WebRtcSessionCreateSDPObserverForTest
+    : public rtc::RefCountedObject<CreateSessionDescriptionObserver> {
+ public:
+  enum State {
+    kInit,
+    kFailed,
+    kSucceeded,
+  };
+  WebRtcSessionCreateSDPObserverForTest() : state_(kInit) {}
+
+  // CreateSessionDescriptionObserver implementation.
+  virtual void OnSuccess(SessionDescriptionInterface* desc) {
+    description_.reset(desc);
+    state_ = kSucceeded;
+  }
+  virtual void OnFailure(const std::string& error) {
+    state_ = kFailed;
+  }
+
+  SessionDescriptionInterface* description() { return description_.get(); }
+
+  SessionDescriptionInterface* ReleaseDescription() {
+    return description_.release();
+  }
+
+  State state() const { return state_; }
+
+ protected:
+  ~WebRtcSessionCreateSDPObserverForTest() {}
+
+ private:
+  rtc::scoped_ptr<SessionDescriptionInterface> description_;
+  State state_;
+};
+
+class FakeAudioRenderer : public cricket::AudioRenderer {
+ public:
+  FakeAudioRenderer() : sink_(NULL) {}
+  virtual ~FakeAudioRenderer() {
+    if (sink_)
+      sink_->OnClose();
+  }
+
+  void SetSink(Sink* sink) override { sink_ = sink; }
+
+  cricket::AudioRenderer::Sink* sink() const { return sink_; }
+
+ private:
+  cricket::AudioRenderer::Sink* sink_;
+};
+
+class WebRtcSessionTest
+    : public testing::TestWithParam<RTCCertificateGenerationMethod>,
+      public sigslot::has_slots<> {
+ protected:
+  // TODO: Investigate why ChannelManager crashes if it's created
+  // after stun_server.
+  WebRtcSessionTest()
+      : media_engine_(new cricket::FakeMediaEngine()),
+        data_engine_(new cricket::FakeDataEngine()),
+        channel_manager_(
+            new cricket::ChannelManager(media_engine_,
+                                        data_engine_,
+                                        new cricket::CaptureManager(),
+                                        rtc::Thread::Current())),
+        fake_call_(webrtc::Call::Config()),
+        media_controller_(
+            webrtc::MediaControllerInterface::Create(rtc::Thread::Current(),
+                                                     channel_manager_.get())),
+        tdesc_factory_(new cricket::TransportDescriptionFactory()),
+        desc_factory_(
+            new cricket::MediaSessionDescriptionFactory(channel_manager_.get(),
+                                                        tdesc_factory_.get())),
+        pss_(new rtc::PhysicalSocketServer),
+        vss_(new rtc::VirtualSocketServer(pss_.get())),
+        fss_(new rtc::FirewallSocketServer(vss_.get())),
+        ss_scope_(fss_.get()),
+        stun_socket_addr_(
+            rtc::SocketAddress(kStunAddrHost, cricket::STUN_SERVER_PORT)),
+        stun_server_(cricket::TestStunServer::Create(Thread::Current(),
+                                                     stun_socket_addr_)),
+        turn_server_(Thread::Current(), kTurnUdpIntAddr, kTurnUdpExtAddr),
+        metrics_observer_(new rtc::RefCountedObject<FakeMetricsObserver>()) {
+    cricket::ServerAddresses stun_servers;
+    stun_servers.insert(stun_socket_addr_);
+    allocator_.reset(new cricket::BasicPortAllocator(
+        &network_manager_,
+        stun_servers,
+        SocketAddress(), SocketAddress(), SocketAddress()));
+    allocator_->set_flags(cricket::PORTALLOCATOR_DISABLE_TCP |
+                          cricket::PORTALLOCATOR_DISABLE_RELAY);
+    EXPECT_TRUE(channel_manager_->Init());
+    desc_factory_->set_add_legacy_streams(false);
+    allocator_->set_step_delay(cricket::kMinimumStepDelay);
+  }
+
+  void AddInterface(const SocketAddress& addr) {
+    network_manager_.AddInterface(addr);
+  }
+
+  // If |dtls_identity_store| != null or |rtc_configuration| contains
+  // |certificates| then DTLS will be enabled unless explicitly disabled by
+  // |rtc_configuration| options. When DTLS is enabled a certificate will be
+  // used if provided, otherwise one will be generated using the
+  // |dtls_identity_store|.
+  void Init(
+      rtc::scoped_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store,
+      const PeerConnectionInterface::RTCConfiguration& rtc_configuration) {
+    ASSERT_TRUE(session_.get() == NULL);
+    session_.reset(new WebRtcSessionForTest(
+        media_controller_.get(), rtc::Thread::Current(), rtc::Thread::Current(),
+        allocator_.get(), &observer_));
+    session_->SignalDataChannelOpenMessage.connect(
+        this, &WebRtcSessionTest::OnDataChannelOpenMessage);
+    session_->GetOnDestroyedSignal()->connect(
+        this, &WebRtcSessionTest::OnSessionDestroyed);
+
+    EXPECT_EQ(PeerConnectionInterface::kIceConnectionNew,
+        observer_.ice_connection_state_);
+    EXPECT_EQ(PeerConnectionInterface::kIceGatheringNew,
+        observer_.ice_gathering_state_);
+
+    EXPECT_TRUE(session_->Initialize(options_, constraints_.get(),
+                                     std::move(dtls_identity_store),
+                                     rtc_configuration));
+    session_->set_metrics_observer(metrics_observer_);
+  }
+
+  void OnDataChannelOpenMessage(const std::string& label,
+                                const InternalDataChannelInit& config) {
+    last_data_channel_label_ = label;
+    last_data_channel_config_ = config;
+  }
+
+  void OnSessionDestroyed() { session_destroyed_ = true; }
+
+  void Init() {
+    PeerConnectionInterface::RTCConfiguration configuration;
+    Init(nullptr, configuration);
+  }
+
+  void InitWithIceTransport(
+      PeerConnectionInterface::IceTransportsType ice_transport_type) {
+    PeerConnectionInterface::RTCConfiguration configuration;
+    configuration.type = ice_transport_type;
+    Init(nullptr, configuration);
+  }
+
+  void InitWithBundlePolicy(
+      PeerConnectionInterface::BundlePolicy bundle_policy) {
+    PeerConnectionInterface::RTCConfiguration configuration;
+    configuration.bundle_policy = bundle_policy;
+    Init(nullptr, configuration);
+  }
+
+  void InitWithRtcpMuxPolicy(
+      PeerConnectionInterface::RtcpMuxPolicy rtcp_mux_policy) {
+    PeerConnectionInterface::RTCConfiguration configuration;
+    configuration.rtcp_mux_policy = rtcp_mux_policy;
+    Init(nullptr, configuration);
+  }
+
+  // Successfully init with DTLS; with a certificate generated and supplied or
+  // with a store that generates it for us.
+  void InitWithDtls(RTCCertificateGenerationMethod cert_gen_method) {
+    rtc::scoped_ptr<FakeDtlsIdentityStore> dtls_identity_store;
+    PeerConnectionInterface::RTCConfiguration configuration;
+    if (cert_gen_method == ALREADY_GENERATED) {
+      configuration.certificates.push_back(
+          FakeDtlsIdentityStore::GenerateCertificate());
+    } else if (cert_gen_method == DTLS_IDENTITY_STORE) {
+      dtls_identity_store.reset(new FakeDtlsIdentityStore());
+      dtls_identity_store->set_should_fail(false);
+    } else {
+      RTC_CHECK(false);
+    }
+    Init(std::move(dtls_identity_store), configuration);
+  }
+
+  // Init with DTLS with a store that will fail to generate a certificate.
+  void InitWithDtlsIdentityGenFail() {
+    rtc::scoped_ptr<FakeDtlsIdentityStore> dtls_identity_store(
+        new FakeDtlsIdentityStore());
+    dtls_identity_store->set_should_fail(true);
+    PeerConnectionInterface::RTCConfiguration configuration;
+    Init(std::move(dtls_identity_store), configuration);
+  }
+
+  void InitWithDtmfCodec() {
+    // Add kTelephoneEventCodec for dtmf test.
+    const cricket::AudioCodec kTelephoneEventCodec(
+        106, "telephone-event", 8000, 0, 1, 0);
+    std::vector<cricket::AudioCodec> codecs;
+    codecs.push_back(kTelephoneEventCodec);
+    media_engine_->SetAudioCodecs(codecs);
+    desc_factory_->set_audio_codecs(codecs);
+    Init();
+  }
+
+  void SendAudioVideoStream1() {
+    send_stream_1_ = true;
+    send_stream_2_ = false;
+    send_audio_ = true;
+    send_video_ = true;
+  }
+
+  void SendAudioVideoStream2() {
+    send_stream_1_ = false;
+    send_stream_2_ = true;
+    send_audio_ = true;
+    send_video_ = true;
+  }
+
+  void SendAudioVideoStream1And2() {
+    send_stream_1_ = true;
+    send_stream_2_ = true;
+    send_audio_ = true;
+    send_video_ = true;
+  }
+
+  void SendNothing() {
+    send_stream_1_ = false;
+    send_stream_2_ = false;
+    send_audio_ = false;
+    send_video_ = false;
+  }
+
+  void SendAudioOnlyStream2() {
+    send_stream_1_ = false;
+    send_stream_2_ = true;
+    send_audio_ = true;
+    send_video_ = false;
+  }
+
+  void SendVideoOnlyStream2() {
+    send_stream_1_ = false;
+    send_stream_2_ = true;
+    send_audio_ = false;
+    send_video_ = true;
+  }
+
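+  // Translates the Send*Stream()/SendNothing() flags above into send streams
+  // on |session_options|.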
+  void AddStreamsToOptions(cricket::MediaSessionOptions* session_options) {
+    if (send_stream_1_ && send_audio_) {
+      session_options->AddSendStream(cricket::MEDIA_TYPE_AUDIO, kAudioTrack1,
+                                     kStream1);
+    }
+    if (send_stream_1_ && send_video_) {
+      session_options->AddSendStream(cricket::MEDIA_TYPE_VIDEO, kVideoTrack1,
+                                     kStream1);
+    }
+    if (send_stream_2_ && send_audio_) {
+      session_options->AddSendStream(cricket::MEDIA_TYPE_AUDIO, kAudioTrack2,
+                                     kStream2);
+    }
+    if (send_stream_2_ && send_video_) {
+      session_options->AddSendStream(cricket::MEDIA_TYPE_VIDEO, kVideoTrack2,
+                                     kStream2);
+    }
+    if (data_channel_ && session_->data_channel_type() == cricket::DCT_RTP) {
+      session_options->AddSendStream(cricket::MEDIA_TYPE_DATA,
+                                     data_channel_->label(),
+                                     data_channel_->label());
+    }
+  }
+
+  void GetOptionsForOffer(
+      const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options,
+      cricket::MediaSessionOptions* session_options) {
+    ASSERT_TRUE(ConvertRtcOptionsForOffer(rtc_options, session_options));
+
+    AddStreamsToOptions(session_options);
+    if (rtc_options.offer_to_receive_audio ==
+        RTCOfferAnswerOptions::kUndefined) {
+      session_options->recv_audio =
+          session_options->HasSendMediaStream(cricket::MEDIA_TYPE_AUDIO);
+    }
+    if (rtc_options.offer_to_receive_video ==
+        RTCOfferAnswerOptions::kUndefined) {
+      session_options->recv_video =
+          session_options->HasSendMediaStream(cricket::MEDIA_TYPE_VIDEO);
+    }
+    session_options->bundle_enabled =
+        session_options->bundle_enabled &&
+        (session_options->has_audio() || session_options->has_video() ||
+         session_options->has_data());
+
+    if (session_->data_channel_type() == cricket::DCT_SCTP && data_channel_) {
+      session_options->data_channel_type = cricket::DCT_SCTP;
+    }
+  }
+
+  void GetOptionsForAnswer(const webrtc::MediaConstraintsInterface* constraints,
+                           cricket::MediaSessionOptions* session_options) {
+    session_options->recv_audio = false;
+    session_options->recv_video = false;
+    ASSERT_TRUE(ParseConstraintsForAnswer(constraints, session_options));
+
+    AddStreamsToOptions(session_options);
+    session_options->bundle_enabled =
+        session_options->bundle_enabled &&
+        (session_options->has_audio() || session_options->has_video() ||
+         session_options->has_data());
+
+    if (session_->data_channel_type() == cricket::DCT_SCTP) {
+      session_options->data_channel_type = cricket::DCT_SCTP;
+    }
+  }
+
+  // Creates a local offer and applies it. Starts ICE.
+  // Call SendAudioVideoStreamX() before this function
+  // to decide which streams to create.
+  void InitiateCall() {
+    SessionDescriptionInterface* offer = CreateOffer();
+    SetLocalDescriptionWithoutError(offer);
+    EXPECT_TRUE_WAIT(PeerConnectionInterface::kIceGatheringNew !=
+        observer_.ice_gathering_state_,
+        kIceCandidatesTimeout);
+  }
+
+  SessionDescriptionInterface* CreateOffer() {
+    PeerConnectionInterface::RTCOfferAnswerOptions options;
+    options.offer_to_receive_audio =
+        RTCOfferAnswerOptions::kOfferToReceiveMediaTrue;
+
+    return CreateOffer(options);
+  }
+
+  SessionDescriptionInterface* CreateOffer(
+      const PeerConnectionInterface::RTCOfferAnswerOptions& options) {
+    rtc::scoped_refptr<WebRtcSessionCreateSDPObserverForTest>
+        observer = new WebRtcSessionCreateSDPObserverForTest();
+    cricket::MediaSessionOptions session_options;
+    GetOptionsForOffer(options, &session_options);
+    session_->CreateOffer(observer, options, session_options);
+    EXPECT_TRUE_WAIT(
+        observer->state() != WebRtcSessionCreateSDPObserverForTest::kInit,
+        2000);
+    return observer->ReleaseDescription();
+  }
+
+  SessionDescriptionInterface* CreateAnswer(
+      const webrtc::MediaConstraintsInterface* constraints) {
+    rtc::scoped_refptr<WebRtcSessionCreateSDPObserverForTest> observer =
+        new WebRtcSessionCreateSDPObserverForTest();
+    cricket::MediaSessionOptions session_options;
+    GetOptionsForAnswer(constraints, &session_options);
+    session_->CreateAnswer(observer, constraints, session_options);
+    EXPECT_TRUE_WAIT(
+        observer->state() != WebRtcSessionCreateSDPObserverForTest::kInit,
+        2000);
+    return observer->ReleaseDescription();
+  }
+
+  bool ChannelsExist() const {
+    return (session_->voice_channel() != NULL &&
+            session_->video_channel() != NULL);
+  }
+
+  void VerifyCryptoParams(const cricket::SessionDescription* sdp) {
+    ASSERT_TRUE(session_.get() != NULL);
+    const cricket::ContentInfo* content = cricket::GetFirstAudioContent(sdp);
+    ASSERT_TRUE(content != NULL);
+    const cricket::AudioContentDescription* audio_content =
+        static_cast<const cricket::AudioContentDescription*>(
+            content->description);
+    ASSERT_TRUE(audio_content != NULL);
+    ASSERT_EQ(1U, audio_content->cryptos().size());
+    ASSERT_EQ(47U, audio_content->cryptos()[0].key_params.size());
+    ASSERT_EQ("AES_CM_128_HMAC_SHA1_80",
+              audio_content->cryptos()[0].cipher_suite);
+    EXPECT_EQ(std::string(cricket::kMediaProtocolSavpf),
+              audio_content->protocol());
+
+    content = cricket::GetFirstVideoContent(sdp);
+    ASSERT_TRUE(content != NULL);
+    const cricket::VideoContentDescription* video_content =
+        static_cast<const cricket::VideoContentDescription*>(
+            content->description);
+    ASSERT_TRUE(video_content != NULL);
+    ASSERT_EQ(1U, video_content->cryptos().size());
+    ASSERT_EQ("AES_CM_128_HMAC_SHA1_80",
+              video_content->cryptos()[0].cipher_suite);
+    ASSERT_EQ(47U, video_content->cryptos()[0].key_params.size());
+    EXPECT_EQ(std::string(cricket::kMediaProtocolSavpf),
+              video_content->protocol());
+  }
+
+  void VerifyNoCryptoParams(const cricket::SessionDescription* sdp, bool dtls) {
+    const cricket::ContentInfo* content = cricket::GetFirstAudioContent(sdp);
+    ASSERT_TRUE(content != NULL);
+    const cricket::AudioContentDescription* audio_content =
+        static_cast<const cricket::AudioContentDescription*>(
+            content->description);
+    ASSERT_TRUE(audio_content != NULL);
+    ASSERT_EQ(0U, audio_content->cryptos().size());
+
+    content = cricket::GetFirstVideoContent(sdp);
+    ASSERT_TRUE(content != NULL);
+    const cricket::VideoContentDescription* video_content =
+        static_cast<const cricket::VideoContentDescription*>(
+            content->description);
+    ASSERT_TRUE(video_content != NULL);
+    ASSERT_EQ(0U, video_content->cryptos().size());
+
+    if (dtls) {
+      EXPECT_EQ(std::string(cricket::kMediaProtocolDtlsSavpf),
+                audio_content->protocol());
+      EXPECT_EQ(std::string(cricket::kMediaProtocolDtlsSavpf),
+                video_content->protocol());
+    } else {
+      EXPECT_EQ(std::string(cricket::kMediaProtocolAvpf),
+                audio_content->protocol());
+      EXPECT_EQ(std::string(cricket::kMediaProtocolAvpf),
+                video_content->protocol());
+    }
+  }
+
+  // Set the internal fake description factories to do DTLS-SRTP.
+  void SetFactoryDtlsSrtp() {
+    desc_factory_->set_secure(cricket::SEC_DISABLED);
+    std::string identity_name = "WebRTC" +
+        rtc::ToString(rtc::CreateRandomId());
+    // Confirmed to work with KT_RSA and KT_ECDSA.
+    tdesc_factory_->set_certificate(
+        rtc::RTCCertificate::Create(rtc::scoped_ptr<rtc::SSLIdentity>(
+            rtc::SSLIdentity::Generate(identity_name, rtc::KT_DEFAULT))));
+    tdesc_factory_->set_secure(cricket::SEC_REQUIRED);
+  }
+
+  void VerifyFingerprintStatus(const cricket::SessionDescription* sdp,
+                               bool expected) {
+    const TransportInfo* audio = sdp->GetTransportInfoByName("audio");
+    ASSERT_TRUE(audio != NULL);
+    ASSERT_EQ(expected, audio->description.identity_fingerprint.get() != NULL);
+    const TransportInfo* video = sdp->GetTransportInfoByName("video");
+    ASSERT_TRUE(video != NULL);
+    ASSERT_EQ(expected, video->description.identity_fingerprint.get() != NULL);
+  }
+
+  void VerifyAnswerFromNonCryptoOffer() {
+    // Create an SDP without Crypto.
+    cricket::MediaSessionOptions options;
+    options.recv_video = true;
+    JsepSessionDescription* offer(
+        CreateRemoteOffer(options, cricket::SEC_DISABLED));
+    ASSERT_TRUE(offer != NULL);
+    VerifyNoCryptoParams(offer->description(), false);
+    SetRemoteDescriptionOfferExpectError(kSdpWithoutSdesCrypto,
+                                         offer);
+    const webrtc::SessionDescriptionInterface* answer = CreateAnswer(NULL);
+    // The answer should be NULL since the offer has no crypto params.
+    ASSERT_TRUE(answer == NULL);
+  }
+
+  void VerifyAnswerFromCryptoOffer() {
+    cricket::MediaSessionOptions options;
+    options.recv_video = true;
+    options.bundle_enabled = true;
+    scoped_ptr<JsepSessionDescription> offer(
+        CreateRemoteOffer(options, cricket::SEC_REQUIRED));
+    ASSERT_TRUE(offer.get() != NULL);
+    VerifyCryptoParams(offer->description());
+    SetRemoteDescriptionWithoutError(offer.release());
+    scoped_ptr<SessionDescriptionInterface> answer(CreateAnswer(NULL));
+    ASSERT_TRUE(answer.get() != NULL);
+    VerifyCryptoParams(answer->description());
+  }
+
+  void CompareIceUfragAndPassword(const cricket::SessionDescription* desc1,
+                                  const cricket::SessionDescription* desc2,
+                                  bool expect_equal) {
+    if (desc1->contents().size() != desc2->contents().size()) {
+      EXPECT_FALSE(expect_equal);
+      return;
+    }
+
+    const cricket::ContentInfos& contents = desc1->contents();
+    cricket::ContentInfos::const_iterator it = contents.begin();
+
+    for (; it != contents.end(); ++it) {
+      const cricket::TransportDescription* transport_desc1 =
+          desc1->GetTransportDescriptionByName(it->name);
+      const cricket::TransportDescription* transport_desc2 =
+          desc2->GetTransportDescriptionByName(it->name);
+      if (!transport_desc1 || !transport_desc2) {
+        EXPECT_FALSE(expect_equal);
+        return;
+      }
+      if (transport_desc1->ice_pwd != transport_desc2->ice_pwd ||
+          transport_desc1->ice_ufrag != transport_desc2->ice_ufrag) {
+        EXPECT_FALSE(expect_equal);
+        return;
+      }
+    }
+    EXPECT_TRUE(expect_equal);
+  }
+
+  void RemoveIceUfragPwdLines(const SessionDescriptionInterface* current_desc,
+                              std::string *sdp) {
+    const cricket::SessionDescription* desc = current_desc->description();
+    EXPECT_TRUE(current_desc->ToString(sdp));
+
+    const cricket::ContentInfos& contents = desc->contents();
+    cricket::ContentInfos::const_iterator it = contents.begin();
+    // Replace ufrag and pwd lines with empty strings.
+    for (; it != contents.end(); ++it) {
+      const cricket::TransportDescription* transport_desc =
+          desc->GetTransportDescriptionByName(it->name);
+      std::string ufrag_line = "a=ice-ufrag:" + transport_desc->ice_ufrag
+          + "\r\n";
+      std::string pwd_line = "a=ice-pwd:" + transport_desc->ice_pwd
+          + "\r\n";
+      rtc::replace_substrs(ufrag_line.c_str(), ufrag_line.length(),
+                                 "", 0,
+                                 sdp);
+      rtc::replace_substrs(pwd_line.c_str(), pwd_line.length(),
+                                 "", 0,
+                                 sdp);
+    }
+  }
+
+  void ModifyIceUfragPwdLines(const SessionDescriptionInterface* current_desc,
+                              const std::string& modified_ice_ufrag,
+                              const std::string& modified_ice_pwd,
+                              std::string* sdp) {
+    const cricket::SessionDescription* desc = current_desc->description();
+    EXPECT_TRUE(current_desc->ToString(sdp));
+
+    const cricket::ContentInfos& contents = desc->contents();
+    cricket::ContentInfos::const_iterator it = contents.begin();
+    // Replace ufrag and pwd lines with |modified_ice_ufrag| and
+    // |modified_ice_pwd| strings.
+    for (; it != contents.end(); ++it) {
+      const cricket::TransportDescription* transport_desc =
+          desc->GetTransportDescriptionByName(it->name);
+      std::string ufrag_line = "a=ice-ufrag:" + transport_desc->ice_ufrag
+          + "\r\n";
+      std::string pwd_line = "a=ice-pwd:" + transport_desc->ice_pwd
+          + "\r\n";
+      std::string mod_ufrag = "a=ice-ufrag:" + modified_ice_ufrag + "\r\n";
+      std::string mod_pwd = "a=ice-pwd:" + modified_ice_pwd + "\r\n";
+      rtc::replace_substrs(ufrag_line.c_str(), ufrag_line.length(),
+                                 mod_ufrag.c_str(), mod_ufrag.length(),
+                                 sdp);
+      rtc::replace_substrs(pwd_line.c_str(), pwd_line.length(),
+                                 mod_pwd.c_str(), mod_pwd.length(),
+                                 sdp);
+    }
+  }
+
+  // Creates a remote offer and applies it as the remote description, then
+  // creates a local answer and applies it as the local description.
+  // Call SendAudioVideoStreamX() before this function
+  // to decide which local and remote streams to create.
+  void CreateAndSetRemoteOfferAndLocalAnswer() {
+    SessionDescriptionInterface* offer = CreateRemoteOffer();
+    SetRemoteDescriptionWithoutError(offer);
+    SessionDescriptionInterface* answer = CreateAnswer(NULL);
+    SetLocalDescriptionWithoutError(answer);
+  }
+  void SetLocalDescriptionWithoutError(SessionDescriptionInterface* desc) {
+    EXPECT_TRUE(session_->SetLocalDescription(desc, NULL));
+    session_->MaybeStartGathering();
+  }
+  void SetLocalDescriptionExpectState(SessionDescriptionInterface* desc,
+                                      WebRtcSession::State expected_state) {
+    SetLocalDescriptionWithoutError(desc);
+    EXPECT_EQ(expected_state, session_->state());
+  }
+  void SetLocalDescriptionExpectError(const std::string& action,
+                                      const std::string& expected_error,
+                                      SessionDescriptionInterface* desc) {
+    std::string error;
+    EXPECT_FALSE(session_->SetLocalDescription(desc, &error));
+    std::string sdp_type = "local ";
+    sdp_type.append(action);
+    EXPECT_NE(std::string::npos, error.find(sdp_type));
+    EXPECT_NE(std::string::npos, error.find(expected_error));
+  }
+  void SetLocalDescriptionOfferExpectError(const std::string& expected_error,
+                                           SessionDescriptionInterface* desc) {
+    SetLocalDescriptionExpectError(SessionDescriptionInterface::kOffer,
+                                   expected_error, desc);
+  }
+  void SetLocalDescriptionAnswerExpectError(const std::string& expected_error,
+                                            SessionDescriptionInterface* desc) {
+    SetLocalDescriptionExpectError(SessionDescriptionInterface::kAnswer,
+                                   expected_error, desc);
+  }
+  void SetRemoteDescriptionWithoutError(SessionDescriptionInterface* desc) {
+    EXPECT_TRUE(session_->SetRemoteDescription(desc, NULL));
+  }
+  void SetRemoteDescriptionExpectState(SessionDescriptionInterface* desc,
+                                       WebRtcSession::State expected_state) {
+    SetRemoteDescriptionWithoutError(desc);
+    EXPECT_EQ(expected_state, session_->state());
+  }
+  void SetRemoteDescriptionExpectError(const std::string& action,
+                                       const std::string& expected_error,
+                                       SessionDescriptionInterface* desc) {
+    std::string error;
+    EXPECT_FALSE(session_->SetRemoteDescription(desc, &error));
+    std::string sdp_type = "remote ";
+    sdp_type.append(action);
+    EXPECT_NE(std::string::npos, error.find(sdp_type));
+    EXPECT_NE(std::string::npos, error.find(expected_error));
+  }
+  void SetRemoteDescriptionOfferExpectError(
+      const std::string& expected_error, SessionDescriptionInterface* desc) {
+    SetRemoteDescriptionExpectError(SessionDescriptionInterface::kOffer,
+                                    expected_error, desc);
+  }
+  void SetRemoteDescriptionPranswerExpectError(
+      const std::string& expected_error, SessionDescriptionInterface* desc) {
+    SetRemoteDescriptionExpectError(SessionDescriptionInterface::kPrAnswer,
+                                    expected_error, desc);
+  }
+  void SetRemoteDescriptionAnswerExpectError(
+      const std::string& expected_error, SessionDescriptionInterface* desc) {
+    SetRemoteDescriptionExpectError(SessionDescriptionInterface::kAnswer,
+                                    expected_error, desc);
+  }
+
+  void CreateCryptoOfferAndNonCryptoAnswer(SessionDescriptionInterface** offer,
+      SessionDescriptionInterface** nocrypto_answer) {
+    // Create an offer with crypto and an answer without crypto.
+    cricket::MediaSessionOptions options;
+    options.recv_video = true;
+    options.bundle_enabled = true;
+    *offer = CreateRemoteOffer(options, cricket::SEC_ENABLED);
+    ASSERT_TRUE(*offer != NULL);
+    VerifyCryptoParams((*offer)->description());
+
+    *nocrypto_answer = CreateRemoteAnswer(*offer, options,
+                                          cricket::SEC_DISABLED);
+    EXPECT_TRUE(*nocrypto_answer != NULL);
+  }
+
+  void CreateDtlsOfferAndNonDtlsAnswer(SessionDescriptionInterface** offer,
+      SessionDescriptionInterface** nodtls_answer) {
+    cricket::MediaSessionOptions options;
+    options.recv_video = true;
+    options.bundle_enabled = true;
+
+    rtc::scoped_ptr<SessionDescriptionInterface> temp_offer(
+        CreateRemoteOffer(options, cricket::SEC_ENABLED));
+
+    *nodtls_answer =
+        CreateRemoteAnswer(temp_offer.get(), options, cricket::SEC_ENABLED);
+    EXPECT_TRUE(*nodtls_answer != NULL);
+    VerifyFingerprintStatus((*nodtls_answer)->description(), false);
+    VerifyCryptoParams((*nodtls_answer)->description());
+
+    SetFactoryDtlsSrtp();
+    *offer = CreateRemoteOffer(options, cricket::SEC_ENABLED);
+    ASSERT_TRUE(*offer != NULL);
+    VerifyFingerprintStatus((*offer)->description(), true);
+    VerifyCryptoParams((*offer)->description());
+  }
+
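+  // Creates a remote offer with the given |session_version|. If
+  // |current_desc| is non-null, its session id and description are reused so
+  // that the offer looks like an update of the existing session.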
+  JsepSessionDescription* CreateRemoteOfferWithVersion(
+        cricket::MediaSessionOptions options,
+        cricket::SecurePolicy secure_policy,
+        const std::string& session_version,
+        const SessionDescriptionInterface* current_desc) {
+    std::string session_id = rtc::ToString(rtc::CreateRandomId64());
+    const cricket::SessionDescription* cricket_desc = NULL;
+    if (current_desc) {
+      cricket_desc = current_desc->description();
+      session_id = current_desc->session_id();
+    }
+
+    desc_factory_->set_secure(secure_policy);
+    JsepSessionDescription* offer(
+        new JsepSessionDescription(JsepSessionDescription::kOffer));
+    if (!offer->Initialize(desc_factory_->CreateOffer(options, cricket_desc),
+                           session_id, session_version)) {
+      delete offer;
+      offer = NULL;
+    }
+    return offer;
+  }
+  JsepSessionDescription* CreateRemoteOffer(
+      cricket::MediaSessionOptions options) {
+    return CreateRemoteOfferWithVersion(options, cricket::SEC_ENABLED,
+                                        kSessionVersion, NULL);
+  }
+  JsepSessionDescription* CreateRemoteOffer(
+      cricket::MediaSessionOptions options, cricket::SecurePolicy sdes_policy) {
+    return CreateRemoteOfferWithVersion(
+        options, sdes_policy, kSessionVersion, NULL);
+  }
+  JsepSessionDescription* CreateRemoteOffer(
+      cricket::MediaSessionOptions options,
+      const SessionDescriptionInterface* current_desc) {
+    return CreateRemoteOfferWithVersion(options, cricket::SEC_ENABLED,
+                                        kSessionVersion, current_desc);
+  }
+
+  JsepSessionDescription* CreateRemoteOfferWithSctpPort(
+      const char* sctp_stream_name, int new_port,
+      cricket::MediaSessionOptions options) {
+    options.data_channel_type = cricket::DCT_SCTP;
+    options.AddSendStream(cricket::MEDIA_TYPE_DATA, "datachannel",
+                          sctp_stream_name);
+    return ChangeSDPSctpPort(new_port, CreateRemoteOffer(options));
+  }
+
+  // Takes ownership of offer_basis (and deletes it).
+  JsepSessionDescription* ChangeSDPSctpPort(
+      int new_port, webrtc::SessionDescriptionInterface *offer_basis) {
+    // Stringify the input SDP, swap the 5000 for 'new_port' and create a new
+    // SessionDescription from the mutated string.
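+    // Note: rtc::replace_substrs rewrites every occurrence of the substring
+    // "5000" in the SDP string; this assumes the test SDP uses "5000" only
+    // for the SCTP port.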
+    const char* default_port_str = "5000";
+    char new_port_str[16];
+    rtc::sprintfn(new_port_str, sizeof(new_port_str), "%d", new_port);
+    std::string offer_str;
+    offer_basis->ToString(&offer_str);
+    rtc::replace_substrs(default_port_str, strlen(default_port_str),
+                               new_port_str, strlen(new_port_str),
+                               &offer_str);
+    JsepSessionDescription* offer = new JsepSessionDescription(
+        offer_basis->type());
+    delete offer_basis;
+    offer->Initialize(offer_str, NULL);
+    return offer;
+  }
+
+  // Create a remote offer. Call SendAudioVideoStreamX()
+  // before this function to decide which streams to create.
+  JsepSessionDescription* CreateRemoteOffer() {
+    cricket::MediaSessionOptions options;
+    GetOptionsForAnswer(NULL, &options);
+    return CreateRemoteOffer(options, session_->remote_description());
+  }
+
+  JsepSessionDescription* CreateRemoteAnswer(
+      const SessionDescriptionInterface* offer,
+      cricket::MediaSessionOptions options,
+      cricket::SecurePolicy policy) {
+    desc_factory_->set_secure(policy);
+    const std::string session_id =
+        rtc::ToString(rtc::CreateRandomId64());
+    JsepSessionDescription* answer(
+        new JsepSessionDescription(JsepSessionDescription::kAnswer));
+    if (!answer->Initialize(desc_factory_->CreateAnswer(offer->description(),
+                                                        options, NULL),
+                            session_id, kSessionVersion)) {
+      delete answer;
+      answer = NULL;
+    }
+    return answer;
+  }
+
+  JsepSessionDescription* CreateRemoteAnswer(
+      const SessionDescriptionInterface* offer,
+      cricket::MediaSessionOptions options) {
+    return CreateRemoteAnswer(offer, options, cricket::SEC_REQUIRED);
+  }
+
+  // Creates an answer session description.
+  // Call SendAudioVideoStreamX() before this function
+  // to decide which streams to create.
+  JsepSessionDescription* CreateRemoteAnswer(
+      const SessionDescriptionInterface* offer) {
+    cricket::MediaSessionOptions options;
+    GetOptionsForAnswer(NULL, &options);
+    return CreateRemoteAnswer(offer, options, cricket::SEC_REQUIRED);
+  }
+
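+  // Runs a full offer/answer exchange and verifies how many candidates are
+  // gathered per m-line for the given bundle/rtcp-mux combination.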
+  void TestSessionCandidatesWithBundleRtcpMux(bool bundle, bool rtcp_mux) {
+    AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
+    Init();
+    SendAudioVideoStream1();
+
+    PeerConnectionInterface::RTCOfferAnswerOptions options;
+    options.use_rtp_mux = bundle;
+
+    SessionDescriptionInterface* offer = CreateOffer(options);
+    // SetLocalDescription and SetRemoteDescription take ownership of the
+    // offer and answer.
+    SetLocalDescriptionWithoutError(offer);
+
+    rtc::scoped_ptr<SessionDescriptionInterface> answer(
+        CreateRemoteAnswer(session_->local_description()));
+    std::string sdp;
+    EXPECT_TRUE(answer->ToString(&sdp));
+
+    size_t expected_candidate_num = 2;
+    if (!rtcp_mux) {
+      // If rtcp-mux is disabled we should expect 4 candidates - host and
+      // srflx for both rtp and rtcp.
+      expected_candidate_num = 4;
+      // Disable rtcp-mux in the answer.
+      const std::string kRtcpMux = "a=rtcp-mux";
+      const std::string kXRtcpMux = "a=xrtcp-mux";
+      rtc::replace_substrs(kRtcpMux.c_str(), kRtcpMux.length(),
+                                 kXRtcpMux.c_str(), kXRtcpMux.length(),
+                                 &sdp);
+    }
+
+    SessionDescriptionInterface* new_answer = CreateSessionDescription(
+        JsepSessionDescription::kAnswer, sdp, NULL);
+
+    // SetRemoteDescription to enable rtcp mux.
+    SetRemoteDescriptionWithoutError(new_answer);
+    EXPECT_TRUE_WAIT(observer_.oncandidatesready_, kIceCandidatesTimeout);
+    EXPECT_EQ(expected_candidate_num, observer_.mline_0_candidates_.size());
+    if (bundle) {
+      EXPECT_EQ(0u, observer_.mline_1_candidates_.size());
+    } else {
+      EXPECT_EQ(expected_candidate_num, observer_.mline_1_candidates_.size());
+    }
+  }
+  // Tests that we can only send DTMF when the dtmf codec is supported.
+  void TestCanInsertDtmf(bool can) {
+    if (can) {
+      InitWithDtmfCodec();
+    } else {
+      Init();
+    }
+    SendAudioVideoStream1();
+    CreateAndSetRemoteOfferAndLocalAnswer();
+    EXPECT_FALSE(session_->CanInsertDtmf(""));
+    EXPECT_EQ(can, session_->CanInsertDtmf(kAudioTrack1));
+  }
+
+  // Helper classes to configure a loopback network and to verify that the
+  // best connection uses the right IP protocol in TestLoopbackCall().
+  // LoopbackNetworkManager applies firewall rules to block all ping traffic
+  // once ICE has completed, and removes them again so that ICE can
+  // reconnect. LoopbackNetworkConfiguration verifies that the best
+  // connection uses the right IP protocol after the initial ICE convergence.
+
+  class LoopbackNetworkConfiguration {
+   public:
+    LoopbackNetworkConfiguration()
+        : test_ipv6_network_(false),
+          test_extra_ipv4_network_(false),
+          best_connection_after_initial_ice_converged_(1, 0) {}
+
+    // Used to track the expected best connection count in each IP protocol.
+    struct ExpectedBestConnection {
+      ExpectedBestConnection(int ipv4_count, int ipv6_count)
+          : ipv4_count_(ipv4_count),
+            ipv6_count_(ipv6_count) {}
+
+      int ipv4_count_;
+      int ipv6_count_;
+    };
+
+    bool test_ipv6_network_;
+    bool test_extra_ipv4_network_;
+    ExpectedBestConnection best_connection_after_initial_ice_converged_;
+
+    void VerifyBestConnectionAfterIceConverge(
+        const rtc::scoped_refptr<FakeMetricsObserver> metrics_observer) const {
+      Verify(metrics_observer, best_connection_after_initial_ice_converged_);
+    }
+
+   private:
+    void Verify(const rtc::scoped_refptr<FakeMetricsObserver> metrics_observer,
+                const ExpectedBestConnection& expected) const {
+      EXPECT_EQ(
+          metrics_observer->GetEnumCounter(webrtc::kEnumCounterAddressFamily,
+                                           webrtc::kBestConnections_IPv4),
+          expected.ipv4_count_);
+      EXPECT_EQ(
+          metrics_observer->GetEnumCounter(webrtc::kEnumCounterAddressFamily,
+                                           webrtc::kBestConnections_IPv6),
+          expected.ipv6_count_);
+      // This is used in the loopback call, so there is only a single
+      // host-to-host candidate pair.
+      EXPECT_EQ(metrics_observer->GetEnumCounter(
+                    webrtc::kEnumCounterIceCandidatePairTypeUdp,
+                    webrtc::kIceCandidatePairHostHost),
+                0);
+      EXPECT_EQ(metrics_observer->GetEnumCounter(
+                    webrtc::kEnumCounterIceCandidatePairTypeUdp,
+                    webrtc::kIceCandidatePairHostPublicHostPublic),
+                1);
+    }
+  };
+
+  class LoopbackNetworkManager {
+   public:
+    LoopbackNetworkManager(WebRtcSessionTest* session,
+                           const LoopbackNetworkConfiguration& config)
+        : config_(config) {
+      session->AddInterface(
+          rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
+      if (config_.test_extra_ipv4_network_) {
+        session->AddInterface(
+            rtc::SocketAddress(kClientAddrHost2, kClientAddrPort));
+      }
+      if (config_.test_ipv6_network_) {
+        session->AddInterface(
+            rtc::SocketAddress(kClientIPv6AddrHost1, kClientAddrPort));
+      }
+    }
+
+    void ApplyFirewallRules(rtc::FirewallSocketServer* fss) {
+      fss->AddRule(false, rtc::FP_ANY, rtc::FD_ANY,
+                   rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
+      if (config_.test_extra_ipv4_network_) {
+        fss->AddRule(false, rtc::FP_ANY, rtc::FD_ANY,
+                     rtc::SocketAddress(kClientAddrHost2, kClientAddrPort));
+      }
+      if (config_.test_ipv6_network_) {
+        fss->AddRule(false, rtc::FP_ANY, rtc::FD_ANY,
+                     rtc::SocketAddress(kClientIPv6AddrHost1, kClientAddrPort));
+      }
+    }
+
+    void ClearRules(rtc::FirewallSocketServer* fss) { fss->ClearRules(); }
+
+   private:
+    LoopbackNetworkConfiguration config_;
+  };
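+
+  // An illustrative sketch of how these helpers combine (the exact expected
+  // counts depend on which interfaces a test configures):
+  //   LoopbackNetworkConfiguration config;
+  //   config.test_ipv6_network_ = true;
+  //   config.best_connection_after_initial_ice_converged_ =
+  //       LoopbackNetworkConfiguration::ExpectedBestConnection(0, 1);
+  //   TestLoopbackCall(config);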
+
+  // The method sets up a call from the session to itself, in a loopback
+  // arrangement.  It also uses a firewall rule to create a temporary
+  // disconnection, and then a permanent disconnection.
+  // This code is placed in a method so that it can be invoked
+  // by multiple tests with different allocators (e.g. with and without BUNDLE).
+  // While running the call, this method also checks if the session goes through
+  // the correct sequence of ICE states when a connection is established,
+  // broken, and re-established.
+  // The Connection state should go:
+  // New -> Checking -> (Connected) -> Completed -> Disconnected -> Completed
+  //     -> Failed.
+  // The Gathering state should go: New -> Gathering -> Completed.
+
+  void SetupLoopbackCall() {
+    Init();
+    SendAudioVideoStream1();
+    SessionDescriptionInterface* offer = CreateOffer();
+
+    EXPECT_EQ(PeerConnectionInterface::kIceGatheringNew,
+              observer_.ice_gathering_state_);
+    SetLocalDescriptionWithoutError(offer);
+    EXPECT_EQ(PeerConnectionInterface::kIceConnectionNew,
+              observer_.ice_connection_state_);
+    EXPECT_EQ_WAIT(PeerConnectionInterface::kIceGatheringGathering,
+                   observer_.ice_gathering_state_, kIceCandidatesTimeout);
+    EXPECT_TRUE_WAIT(observer_.oncandidatesready_, kIceCandidatesTimeout);
+    EXPECT_EQ_WAIT(PeerConnectionInterface::kIceGatheringComplete,
+                   observer_.ice_gathering_state_, kIceCandidatesTimeout);
+
+    std::string sdp;
+    offer->ToString(&sdp);
+    SessionDescriptionInterface* desc = webrtc::CreateSessionDescription(
+        JsepSessionDescription::kAnswer, sdp, nullptr);
+    ASSERT_TRUE(desc != NULL);
+    SetRemoteDescriptionWithoutError(desc);
+
+    EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionChecking,
+                   observer_.ice_connection_state_, kIceCandidatesTimeout);
+
+    // The ice connection state is "Connected" too briefly to catch in a test.
+    EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionCompleted,
+                   observer_.ice_connection_state_, kIceCandidatesTimeout);
+  }
+
+  void TestLoopbackCall(const LoopbackNetworkConfiguration& config) {
+    LoopbackNetworkManager loopback_network_manager(this, config);
+    SetupLoopbackCall();
+    config.VerifyBestConnectionAfterIceConverge(metrics_observer_);
+    // Add a firewall rule to block ping requests, which should cause the
+    // transport channel to fail.
+
+    loopback_network_manager.ApplyFirewallRules(fss_.get());
+
+    LOG(LS_INFO) << "Firewall Rules applied";
+    EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionDisconnected,
+                   observer_.ice_connection_state_,
+                   kIceCandidatesTimeout);
+
+    metrics_observer_->Reset();
+
+    // Clear the rules; the session should move back to the completed state.
+    loopback_network_manager.ClearRules(fss_.get());
+
+    LOG(LS_INFO) << "Firewall Rules cleared";
+    EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionCompleted,
+                   observer_.ice_connection_state_,
+                   kIceCandidatesTimeout);
+
+    // Now we block ping requests again and wait for the ICE connection to
+    // transition through Disconnected towards Failed. This will take at
+    // least 30 seconds because it must wait for the ports to time out.
+    int port_timeout = 30000;
+
+    loopback_network_manager.ApplyFirewallRules(fss_.get());
+    LOG(LS_INFO) << "Firewall Rules applied again";
+    EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionDisconnected,
+                   observer_.ice_connection_state_,
+                   kIceCandidatesTimeout + port_timeout);
+  }
+
+  void TestLoopbackCall() {
+    LoopbackNetworkConfiguration config;
+    TestLoopbackCall(config);
+  }
+
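+  // Verifies that the packet id set in rtc::PacketOptions is propagated
+  // through the loopback call to the fake call's last sent packet.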
+  void TestPacketOptions() {
+    media_controller_.reset(
+        new cricket::FakeMediaController(channel_manager_.get(), &fake_call_));
+    LoopbackNetworkConfiguration config;
+    LoopbackNetworkManager loopback_network_manager(this, config);
+
+    SetupLoopbackCall();
+
+    uint8_t test_packet[15] = {0};
+    rtc::PacketOptions options;
+    options.packet_id = 10;
+    media_engine_->GetVideoChannel(0)
+        ->SendRtp(test_packet, sizeof(test_packet), options);
+
+    const int kPacketTimeout = 2000;
+    EXPECT_EQ_WAIT(fake_call_.last_sent_packet().packet_id, 10, kPacketTimeout);
+    EXPECT_GT(fake_call_.last_sent_packet().send_time_ms, -1);
+  }
+
+  // Adds CN codecs to FakeMediaEngine and MediaDescriptionFactory.
+  void AddCNCodecs() {
+    const cricket::AudioCodec kCNCodec1(102, "CN", 8000, 0, 1, 0);
+    const cricket::AudioCodec kCNCodec2(103, "CN", 16000, 0, 1, 0);
+
+    // Add the CN codecs.
+    std::vector<cricket::AudioCodec> codecs = media_engine_->audio_codecs();
+    codecs.push_back(kCNCodec1);
+    codecs.push_back(kCNCodec2);
+    media_engine_->SetAudioCodecs(codecs);
+    desc_factory_->set_audio_codecs(codecs);
+  }
+
+  bool VerifyNoCNCodecs(const cricket::ContentInfo* content) {
+    const cricket::ContentDescription* description = content->description;
+    ASSERT(description != NULL);
+    const cricket::AudioContentDescription* audio_content_desc =
+        static_cast<const cricket::AudioContentDescription*>(description);
+    ASSERT(audio_content_desc != NULL);
+    for (size_t i = 0; i < audio_content_desc->codecs().size(); ++i) {
+      if (audio_content_desc->codecs()[i].name == "CN")
+        return false;
+    }
+    return true;
+  }
+
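+  // Creates a data channel of whatever type the session is configured for
+  // (reliable only when SCTP is in use).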
+  void CreateDataChannel() {
+    webrtc::InternalDataChannelInit dci;
+    dci.reliable = session_->data_channel_type() == cricket::DCT_SCTP;
+    data_channel_ = DataChannel::Create(
+        session_.get(), session_->data_channel_type(), "datachannel", dci);
+  }
+
+  void SetLocalDescriptionWithDataChannel() {
+    CreateDataChannel();
+    SessionDescriptionInterface* offer = CreateOffer();
+    SetLocalDescriptionWithoutError(offer);
+  }
+
+  void VerifyMultipleAsyncCreateDescription(
+      RTCCertificateGenerationMethod cert_gen_method,
+      CreateSessionDescriptionRequest::Type type) {
+    InitWithDtls(cert_gen_method);
+    VerifyMultipleAsyncCreateDescriptionAfterInit(true, type);
+  }
+
+  void VerifyMultipleAsyncCreateDescriptionIdentityGenFailure(
+      CreateSessionDescriptionRequest::Type type) {
+    InitWithDtlsIdentityGenFail();
+    VerifyMultipleAsyncCreateDescriptionAfterInit(false, type);
+  }
+
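+  // Issues several CreateOffer/CreateAnswer requests back to back and
+  // verifies that all of them complete asynchronously with the expected
+  // result.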
+  void VerifyMultipleAsyncCreateDescriptionAfterInit(
+      bool success, CreateSessionDescriptionRequest::Type type) {
+    RTC_CHECK(session_);
+    SetFactoryDtlsSrtp();
+    if (type == CreateSessionDescriptionRequest::kAnswer) {
+      cricket::MediaSessionOptions options;
+      scoped_ptr<JsepSessionDescription> offer(
+            CreateRemoteOffer(options, cricket::SEC_DISABLED));
+      ASSERT_TRUE(offer.get() != NULL);
+      SetRemoteDescriptionWithoutError(offer.release());
+    }
+
+    PeerConnectionInterface::RTCOfferAnswerOptions options;
+    cricket::MediaSessionOptions session_options;
+    const int kNumber = 3;
+    rtc::scoped_refptr<WebRtcSessionCreateSDPObserverForTest>
+        observers[kNumber];
+    for (int i = 0; i < kNumber; ++i) {
+      observers[i] = new WebRtcSessionCreateSDPObserverForTest();
+      if (type == CreateSessionDescriptionRequest::kOffer) {
+        session_->CreateOffer(observers[i], options, session_options);
+      } else {
+        session_->CreateAnswer(observers[i], nullptr, session_options);
+      }
+    }
+
+    WebRtcSessionCreateSDPObserverForTest::State expected_state =
+        success ? WebRtcSessionCreateSDPObserverForTest::kSucceeded :
+                  WebRtcSessionCreateSDPObserverForTest::kFailed;
+
+    for (int i = 0; i < kNumber; ++i) {
+      EXPECT_EQ_WAIT(expected_state, observers[i]->state(), 1000);
+      if (success) {
+        EXPECT_TRUE(observers[i]->description() != NULL);
+      } else {
+        EXPECT_TRUE(observers[i]->description() == NULL);
+      }
+    }
+  }
+
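+  // Adds a TURN server to the allocator and re-enables relay candidate
+  // gathering (TCP candidates stay disabled).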
+  void ConfigureAllocatorWithTurn() {
+    cricket::RelayServerConfig turn_server(cricket::RELAY_TURN);
+    cricket::RelayCredentials credentials(kTurnUsername, kTurnPassword);
+    turn_server.credentials = credentials;
+    turn_server.ports.push_back(
+        cricket::ProtocolAddress(kTurnUdpIntAddr, cricket::PROTO_UDP, false));
+    allocator_->AddTurnServer(turn_server);
+    allocator_->set_step_delay(cricket::kMinimumStepDelay);
+    allocator_->set_flags(cricket::PORTALLOCATOR_DISABLE_TCP);
+  }
+
+  cricket::FakeMediaEngine* media_engine_;
+  cricket::FakeDataEngine* data_engine_;
+  rtc::scoped_ptr<cricket::ChannelManager> channel_manager_;
+  cricket::FakeCall fake_call_;
+  rtc::scoped_ptr<webrtc::MediaControllerInterface> media_controller_;
+  rtc::scoped_ptr<cricket::TransportDescriptionFactory> tdesc_factory_;
+  rtc::scoped_ptr<cricket::MediaSessionDescriptionFactory> desc_factory_;
+  rtc::scoped_ptr<rtc::PhysicalSocketServer> pss_;
+  rtc::scoped_ptr<rtc::VirtualSocketServer> vss_;
+  rtc::scoped_ptr<rtc::FirewallSocketServer> fss_;
+  rtc::SocketServerScope ss_scope_;
+  rtc::SocketAddress stun_socket_addr_;
+  rtc::scoped_ptr<cricket::TestStunServer> stun_server_;
+  cricket::TestTurnServer turn_server_;
+  rtc::FakeNetworkManager network_manager_;
+  rtc::scoped_ptr<cricket::BasicPortAllocator> allocator_;
+  PeerConnectionFactoryInterface::Options options_;
+  rtc::scoped_ptr<FakeConstraints> constraints_;
+  rtc::scoped_ptr<WebRtcSessionForTest> session_;
+  MockIceObserver observer_;
+  cricket::FakeVideoMediaChannel* video_channel_;
+  cricket::FakeVoiceMediaChannel* voice_channel_;
+  rtc::scoped_refptr<FakeMetricsObserver> metrics_observer_;
+  // The following flags affect options created for CreateOffer/CreateAnswer.
+  bool send_stream_1_ = false;
+  bool send_stream_2_ = false;
+  bool send_audio_ = false;
+  bool send_video_ = false;
+  rtc::scoped_refptr<DataChannel> data_channel_;
+  // Last values received from data channel creation signal.
+  std::string last_data_channel_label_;
+  InternalDataChannelInit last_data_channel_config_;
+  bool session_destroyed_ = false;
+};
+
+TEST_P(WebRtcSessionTest, TestInitializeWithDtls) {
+  InitWithDtls(GetParam());
+  // SDES is disabled when DTLS is on.
+  EXPECT_EQ(cricket::SEC_DISABLED, session_->SdesPolicy());
+}
+
+TEST_F(WebRtcSessionTest, TestInitializeWithoutDtls) {
+  Init();
+  // SDES is required if DTLS is off.
+  EXPECT_EQ(cricket::SEC_REQUIRED, session_->SdesPolicy());
+}
+
+TEST_F(WebRtcSessionTest, TestSessionCandidates) {
+  TestSessionCandidatesWithBundleRtcpMux(false, false);
+}
+
+// The test cases below (TestSessionCandidatesWith*) verify the candidates
+// gathered with rtcp-mux and/or bundle enabled.
+TEST_F(WebRtcSessionTest, TestSessionCandidatesWithRtcpMux) {
+  TestSessionCandidatesWithBundleRtcpMux(false, true);
+}
+
+TEST_F(WebRtcSessionTest, TestSessionCandidatesWithBundleRtcpMux) {
+  TestSessionCandidatesWithBundleRtcpMux(true, true);
+}
+
+TEST_F(WebRtcSessionTest, TestMultihomeCandidates) {
+  AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
+  AddInterface(rtc::SocketAddress(kClientAddrHost2, kClientAddrPort));
+  Init();
+  SendAudioVideoStream1();
+  InitiateCall();
+  EXPECT_TRUE_WAIT(observer_.oncandidatesready_, kIceCandidatesTimeout);
+  EXPECT_EQ(8u, observer_.mline_0_candidates_.size());
+  EXPECT_EQ(8u, observer_.mline_1_candidates_.size());
+}
+
+// Crashes on Win only. See webrtc:5411.
+#if defined(WEBRTC_WIN)
+#define MAYBE_TestStunError DISABLED_TestStunError
+#else
+#define MAYBE_TestStunError TestStunError
+#endif
+TEST_F(WebRtcSessionTest, MAYBE_TestStunError) {
+  AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
+  AddInterface(rtc::SocketAddress(kClientAddrHost2, kClientAddrPort));
+  fss_->AddRule(false,
+                rtc::FP_UDP,
+                rtc::FD_ANY,
+                rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
+  Init();
+  SendAudioVideoStream1();
+  InitiateCall();
+  // Since kClientAddrHost1 is blocked, we don't expect STUN candidates for
+  // it.
+  EXPECT_TRUE_WAIT(observer_.oncandidatesready_, kIceCandidatesTimeout);
+  EXPECT_EQ(6u, observer_.mline_0_candidates_.size());
+  EXPECT_EQ(6u, observer_.mline_1_candidates_.size());
+}
+
+// Test that the session delivers no candidates when the constraint is set to
+// "none".
+TEST_F(WebRtcSessionTest, TestIceTransportsNone) {
+  AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
+  InitWithIceTransport(PeerConnectionInterface::kNone);
+  SendAudioVideoStream1();
+  InitiateCall();
+  EXPECT_TRUE_WAIT(observer_.oncandidatesready_, kIceCandidatesTimeout);
+  EXPECT_EQ(0u, observer_.mline_0_candidates_.size());
+  EXPECT_EQ(0u, observer_.mline_1_candidates_.size());
+}
+
+// Test that the session delivers only relay candidates when the constraint
+// is set to "relay".
+TEST_F(WebRtcSessionTest, TestIceTransportsRelay) {
+  AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
+  ConfigureAllocatorWithTurn();
+  InitWithIceTransport(PeerConnectionInterface::kRelay);
+  SendAudioVideoStream1();
+  InitiateCall();
+  EXPECT_TRUE_WAIT(observer_.oncandidatesready_, kIceCandidatesTimeout);
+  EXPECT_EQ(2u, observer_.mline_0_candidates_.size());
+  EXPECT_EQ(2u, observer_.mline_1_candidates_.size());
+  for (size_t i = 0; i < observer_.mline_0_candidates_.size(); ++i) {
+    EXPECT_EQ(cricket::RELAY_PORT_TYPE,
+              observer_.mline_0_candidates_[i].type());
+  }
+  for (size_t i = 0; i < observer_.mline_1_candidates_.size(); ++i) {
+    EXPECT_EQ(cricket::RELAY_PORT_TYPE,
+              observer_.mline_1_candidates_[i].type());
+  }
+}
+
+// Test that the session delivers all candidates when the constraint is set
+// to "all".
+TEST_F(WebRtcSessionTest, TestIceTransportsAll) {
+  AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
+  InitWithIceTransport(PeerConnectionInterface::kAll);
+  SendAudioVideoStream1();
+  InitiateCall();
+  EXPECT_TRUE_WAIT(observer_.oncandidatesready_, kIceCandidatesTimeout);
+  // Host + STUN. By default the allocator does not gather relay candidates.
+  EXPECT_EQ(4u, observer_.mline_0_candidates_.size());
+  EXPECT_EQ(4u, observer_.mline_1_candidates_.size());
+}
+
+TEST_F(WebRtcSessionTest, SetSdpFailedOnInvalidSdp) {
+  Init();
+  SessionDescriptionInterface* offer = NULL;
+  // Since |offer| is NULL, there's no way to tell if it's an offer or answer.
+  std::string unknown_action;
+  SetLocalDescriptionExpectError(unknown_action, kInvalidSdp, offer);
+  SetRemoteDescriptionExpectError(unknown_action, kInvalidSdp, offer);
+}
+
+// Test creating offers and receiving answers, and make sure the media engine
+// creates the expected send and receive streams.
+TEST_F(WebRtcSessionTest, TestCreateSdesOfferReceiveSdesAnswer) {
+  Init();
+  SendAudioVideoStream1();
+  SessionDescriptionInterface* offer = CreateOffer();
+  const std::string session_id_orig = offer->session_id();
+  const std::string session_version_orig = offer->session_version();
+  SetLocalDescriptionWithoutError(offer);
+
+  SendAudioVideoStream2();
+  SessionDescriptionInterface* answer =
+      CreateRemoteAnswer(session_->local_description());
+  SetRemoteDescriptionWithoutError(answer);
+
+  video_channel_ = media_engine_->GetVideoChannel(0);
+  voice_channel_ = media_engine_->GetVoiceChannel(0);
+
+  ASSERT_EQ(1u, video_channel_->recv_streams().size());
+  EXPECT_TRUE(kVideoTrack2 == video_channel_->recv_streams()[0].id);
+
+  ASSERT_EQ(1u, voice_channel_->recv_streams().size());
+  EXPECT_TRUE(kAudioTrack2 == voice_channel_->recv_streams()[0].id);
+
+  ASSERT_EQ(1u, video_channel_->send_streams().size());
+  EXPECT_TRUE(kVideoTrack1 == video_channel_->send_streams()[0].id);
+  ASSERT_EQ(1u, voice_channel_->send_streams().size());
+  EXPECT_TRUE(kAudioTrack1 == voice_channel_->send_streams()[0].id);
+
+  // Create new offer without send streams.
+  SendNothing();
+  offer = CreateOffer();
+
+  // Verify the session id is the same and the session version is
+  // increased.
+  EXPECT_EQ(session_id_orig, offer->session_id());
+  EXPECT_LT(rtc::FromString<uint64_t>(session_version_orig),
+            rtc::FromString<uint64_t>(offer->session_version()));
+
+  SetLocalDescriptionWithoutError(offer);
+  EXPECT_EQ(0u, video_channel_->send_streams().size());
+  EXPECT_EQ(0u, voice_channel_->send_streams().size());
+
+  SendAudioVideoStream2();
+  answer = CreateRemoteAnswer(session_->local_description());
+  SetRemoteDescriptionWithoutError(answer);
+
+  // Make sure the receive streams have not changed.
+  ASSERT_EQ(1u, video_channel_->recv_streams().size());
+  EXPECT_TRUE(kVideoTrack2 == video_channel_->recv_streams()[0].id);
+  ASSERT_EQ(1u, voice_channel_->recv_streams().size());
+  EXPECT_TRUE(kAudioTrack2 == voice_channel_->recv_streams()[0].id);
+}
+
+// Test receiving offers and creating answers, and make sure the media engine
+// creates the expected send and receive streams.
+TEST_F(WebRtcSessionTest, TestReceiveSdesOfferCreateSdesAnswer) {
+  Init();
+  SendAudioVideoStream2();
+  SessionDescriptionInterface* offer = CreateOffer();
+  VerifyCryptoParams(offer->description());
+  SetRemoteDescriptionWithoutError(offer);
+
+  SendAudioVideoStream1();
+  SessionDescriptionInterface* answer = CreateAnswer(NULL);
+  VerifyCryptoParams(answer->description());
+  SetLocalDescriptionWithoutError(answer);
+
+  const std::string session_id_orig = answer->session_id();
+  const std::string session_version_orig = answer->session_version();
+
+  video_channel_ = media_engine_->GetVideoChannel(0);
+  voice_channel_ = media_engine_->GetVoiceChannel(0);
+
+  ASSERT_EQ(1u, video_channel_->recv_streams().size());
+  EXPECT_TRUE(kVideoTrack2 == video_channel_->recv_streams()[0].id);
+
+  ASSERT_EQ(1u, voice_channel_->recv_streams().size());
+  EXPECT_TRUE(kAudioTrack2 == voice_channel_->recv_streams()[0].id);
+
+  ASSERT_EQ(1u, video_channel_->send_streams().size());
+  EXPECT_TRUE(kVideoTrack1 == video_channel_->send_streams()[0].id);
+  ASSERT_EQ(1u, voice_channel_->send_streams().size());
+  EXPECT_TRUE(kAudioTrack1 == voice_channel_->send_streams()[0].id);
+
+  SendAudioVideoStream1And2();
+  offer = CreateOffer();
+  SetRemoteDescriptionWithoutError(offer);
+
+  // Answer by turning off all send streams.
+  SendNothing();
+  answer = CreateAnswer(NULL);
+
+  // Verify the session id is the same and the session version is
+  // increased.
+  EXPECT_EQ(session_id_orig, answer->session_id());
+  EXPECT_LT(rtc::FromString<uint64_t>(session_version_orig),
+            rtc::FromString<uint64_t>(answer->session_version()));
+  SetLocalDescriptionWithoutError(answer);
+
+  ASSERT_EQ(2u, video_channel_->recv_streams().size());
+  EXPECT_TRUE(kVideoTrack1 == video_channel_->recv_streams()[0].id);
+  EXPECT_TRUE(kVideoTrack2 == video_channel_->recv_streams()[1].id);
+  ASSERT_EQ(2u, voice_channel_->recv_streams().size());
+  EXPECT_TRUE(kAudioTrack1 == voice_channel_->recv_streams()[0].id);
+  EXPECT_TRUE(kAudioTrack2 == voice_channel_->recv_streams()[1].id);
+
+  // Make sure we have no send streams.
+  EXPECT_EQ(0u, video_channel_->send_streams().size());
+  EXPECT_EQ(0u, voice_channel_->send_streams().size());
+}
+
+TEST_F(WebRtcSessionTest, SetLocalSdpFailedOnCreateChannel) {
+  Init();
+  media_engine_->set_fail_create_channel(true);
+
+  SessionDescriptionInterface* offer = CreateOffer();
+  ASSERT_TRUE(offer != NULL);
+  // SetRemoteDescription and SetLocalDescription will take ownership of the
+  // offer.
+  SetRemoteDescriptionOfferExpectError(kCreateChannelFailed, offer);
+  offer = CreateOffer();
+  ASSERT_TRUE(offer != NULL);
+  SetLocalDescriptionOfferExpectError(kCreateChannelFailed, offer);
+}
+
+//
+// Tests for creating/setting SDP under different SDES/DTLS policies:
+//
+// --DTLS off and SDES on
+// TestCreateSdesOfferReceiveSdesAnswer/TestReceiveSdesOfferCreateSdesAnswer:
+//     set local/remote offer/answer with crypto --> success
+// TestSetNonSdesOfferWhenSdesOn: set local/remote offer without crypto --->
+//     failure
+// TestSetLocalNonSdesAnswerWhenSdesOn: set local answer without crypto -->
+//     failure
+// TestSetRemoteNonSdesAnswerWhenSdesOn: set remote answer without crypto -->
+//     failure
+//
+// --DTLS on and SDES off
+// TestCreateDtlsOfferReceiveDtlsAnswer/TestReceiveDtlsOfferCreateDtlsAnswer:
+//     set local/remote offer/answer with DTLS fingerprint --> success
+// TestReceiveNonDtlsOfferWhenDtlsOn: set local/remote offer without DTLS
+//     fingerprint --> failure
+// TestSetLocalNonDtlsAnswerWhenDtlsOn: set local answer without fingerprint
+//     --> failure
+// TestSetRemoteNonDtlsAnswerWhenDtlsOn: set remote answer without fingerprint
+//     --> failure
+//
+// --Encryption disabled: DTLS off and SDES off
+// TestCreateOfferReceiveAnswerWithoutEncryption: set local offer and remote
+//     answer without SDES or DTLS --> success
+// TestCreateAnswerReceiveOfferWithoutEncryption: set remote offer and local
+//     answer without SDES or DTLS --> success
+//
+
+// Test that we return a failure when applying a remote/local offer that doesn't
+// have cryptos enabled when DTLS is off.
+TEST_F(WebRtcSessionTest, TestSetNonSdesOfferWhenSdesOn) {
+  Init();
+  cricket::MediaSessionOptions options;
+  options.recv_video = true;
+  JsepSessionDescription* offer = CreateRemoteOffer(
+      options, cricket::SEC_DISABLED);
+  ASSERT_TRUE(offer != NULL);
+  VerifyNoCryptoParams(offer->description(), false);
+  // SetRemoteDescription and SetLocalDescription will take ownership of the
+  // offer.
+  SetRemoteDescriptionOfferExpectError(kSdpWithoutSdesCrypto, offer);
+  offer = CreateRemoteOffer(options, cricket::SEC_DISABLED);
+  ASSERT_TRUE(offer != NULL);
+  SetLocalDescriptionOfferExpectError(kSdpWithoutSdesCrypto, offer);
+}
+
+// Test that we return a failure when applying a local answer that doesn't have
+// cryptos enabled when DTLS is off.
+TEST_F(WebRtcSessionTest, TestSetLocalNonSdesAnswerWhenSdesOn) {
+  Init();
+  SessionDescriptionInterface* offer = NULL;
+  SessionDescriptionInterface* answer = NULL;
+  CreateCryptoOfferAndNonCryptoAnswer(&offer, &answer);
+  // SetRemoteDescription and SetLocalDescription will take ownership of the
+  // offer and answer.
+  SetRemoteDescriptionWithoutError(offer);
+  SetLocalDescriptionAnswerExpectError(kSdpWithoutSdesCrypto, answer);
+}
+
+// Test that we return a failure when applying a remote answer that doesn't
+// have crypto enabled when DTLS is off.
+TEST_F(WebRtcSessionTest, TestSetRemoteNonSdesAnswerWhenSdesOn) {
+  Init();
+  SessionDescriptionInterface* offer = NULL;
+  SessionDescriptionInterface* answer = NULL;
+  CreateCryptoOfferAndNonCryptoAnswer(&offer, &answer);
+  // SetRemoteDescription and SetLocalDescription will take ownership of the
+  // offer and answer.
+  SetLocalDescriptionWithoutError(offer);
+  SetRemoteDescriptionAnswerExpectError(kSdpWithoutSdesCrypto, answer);
+}
+
+// Test that we accept an offer with a DTLS fingerprint when DTLS is on
+// and that we return an answer with a DTLS fingerprint.
+TEST_P(WebRtcSessionTest, TestReceiveDtlsOfferCreateDtlsAnswer) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  SendAudioVideoStream1();
+  InitWithDtls(GetParam());
+  SetFactoryDtlsSrtp();
+  cricket::MediaSessionOptions options;
+  options.recv_video = true;
+  JsepSessionDescription* offer =
+      CreateRemoteOffer(options, cricket::SEC_DISABLED);
+  ASSERT_TRUE(offer != NULL);
+  VerifyFingerprintStatus(offer->description(), true);
+  VerifyNoCryptoParams(offer->description(), true);
+
+  // SetRemoteDescription will take ownership of the offer.
+  SetRemoteDescriptionWithoutError(offer);
+
+  // Verify that we get a crypto fingerprint in the answer.
+  SessionDescriptionInterface* answer = CreateAnswer(NULL);
+  ASSERT_TRUE(answer != NULL);
+  VerifyFingerprintStatus(answer->description(), true);
+  // Check that we don't have an a=crypto line in the answer.
+  VerifyNoCryptoParams(answer->description(), true);
+
+  // Now set the local description, which should work, even without a=crypto.
+  SetLocalDescriptionWithoutError(answer);
+}
+
+// Test that we set a local offer with a DTLS fingerprint when DTLS is on
+// and then we accept a remote answer with a DTLS fingerprint successfully.
+TEST_P(WebRtcSessionTest, TestCreateDtlsOfferReceiveDtlsAnswer) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  SendAudioVideoStream1();
+  InitWithDtls(GetParam());
+  SetFactoryDtlsSrtp();
+
+  // Verify that we get a crypto fingerprint in the offer.
+  SessionDescriptionInterface* offer = CreateOffer();
+  ASSERT_TRUE(offer != NULL);
+  VerifyFingerprintStatus(offer->description(), true);
+  // Check that we don't have an a=crypto line in the offer.
+  VerifyNoCryptoParams(offer->description(), true);
+
+  // Now set the local description, which should work, even without a=crypto.
+  SetLocalDescriptionWithoutError(offer);
+
+  cricket::MediaSessionOptions options;
+  options.recv_video = true;
+  JsepSessionDescription* answer =
+      CreateRemoteAnswer(offer, options, cricket::SEC_DISABLED);
+  ASSERT_TRUE(answer != NULL);
+  VerifyFingerprintStatus(answer->description(), true);
+  VerifyNoCryptoParams(answer->description(), true);
+
+  // SetRemoteDescription will take ownership of the answer.
+  SetRemoteDescriptionWithoutError(answer);
+}
+
+// Test that if we support DTLS and the other side didn't offer a fingerprint,
+// we will fail to set the remote description.
+TEST_P(WebRtcSessionTest, TestReceiveNonDtlsOfferWhenDtlsOn) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  InitWithDtls(GetParam());
+  cricket::MediaSessionOptions options;
+  options.recv_video = true;
+  options.bundle_enabled = true;
+  JsepSessionDescription* offer = CreateRemoteOffer(
+      options, cricket::SEC_REQUIRED);
+  ASSERT_TRUE(offer != NULL);
+  VerifyFingerprintStatus(offer->description(), false);
+  VerifyCryptoParams(offer->description());
+
+  // SetRemoteDescription will take the ownership of the offer.
+  SetRemoteDescriptionOfferExpectError(
+      kSdpWithoutDtlsFingerprint, offer);
+
+  offer = CreateRemoteOffer(options, cricket::SEC_REQUIRED);
+  // SetLocalDescription will take the ownership of the offer.
+  SetLocalDescriptionOfferExpectError(
+      kSdpWithoutDtlsFingerprint, offer);
+}
+
+// Test that we return a failure when applying a local answer that doesn't have
+// a DTLS fingerprint when DTLS is required.
+TEST_P(WebRtcSessionTest, TestSetLocalNonDtlsAnswerWhenDtlsOn) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  InitWithDtls(GetParam());
+  SessionDescriptionInterface* offer = NULL;
+  SessionDescriptionInterface* answer = NULL;
+  CreateDtlsOfferAndNonDtlsAnswer(&offer, &answer);
+
+  // SetRemoteDescription and SetLocalDescription will take the ownership of
+  // the offer and answer.
+  SetRemoteDescriptionWithoutError(offer);
+  SetLocalDescriptionAnswerExpectError(
+      kSdpWithoutDtlsFingerprint, answer);
+}
+
+// Test that we return a failure when applying a remote answer that doesn't have
+// a DTLS fingerprint when DTLS is required.
+TEST_P(WebRtcSessionTest, TestSetRemoteNonDtlsAnswerWhenDtlsOn) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  InitWithDtls(GetParam());
+  SessionDescriptionInterface* offer = CreateOffer();
+  cricket::MediaSessionOptions options;
+  options.recv_video = true;
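+  // The remote offer below is only needed so that CreateRemoteAnswer can
+  // build a matching SDES answer; it is never applied to the session.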
+  rtc::scoped_ptr<SessionDescriptionInterface> temp_offer(
+      CreateRemoteOffer(options, cricket::SEC_ENABLED));
+  JsepSessionDescription* answer =
+      CreateRemoteAnswer(temp_offer.get(), options, cricket::SEC_ENABLED);
+
+  // SetRemoteDescription and SetLocalDescription will take the ownership of
+  // the offer and answer.
+  SetLocalDescriptionWithoutError(offer);
+  SetRemoteDescriptionAnswerExpectError(
+      kSdpWithoutDtlsFingerprint, answer);
+}
+
+// Test that we create a local offer without SDES or DTLS and accept a remote
+// answer without SDES or DTLS when encryption is disabled.
+TEST_P(WebRtcSessionTest, TestCreateOfferReceiveAnswerWithoutEncryption) {
+  SendAudioVideoStream1();
+  options_.disable_encryption = true;
+  InitWithDtls(GetParam());
+
+  // Verify that the offer has no DTLS fingerprint.
+  SessionDescriptionInterface* offer = CreateOffer();
+  ASSERT_TRUE(offer != NULL);
+  VerifyFingerprintStatus(offer->description(), false);
+  // Check that we don't have an a=crypto line in the offer.
+  VerifyNoCryptoParams(offer->description(), false);
+
+  // Now set the local description, which should work, even without a=crypto.
+  SetLocalDescriptionWithoutError(offer);
+
+  cricket::MediaSessionOptions options;
+  options.recv_video = true;
+  JsepSessionDescription* answer =
+      CreateRemoteAnswer(offer, options, cricket::SEC_DISABLED);
+  ASSERT_TRUE(answer != NULL);
+  VerifyFingerprintStatus(answer->description(), false);
+  VerifyNoCryptoParams(answer->description(), false);
+
+  // SetRemoteDescription will take the ownership of the answer.
+  SetRemoteDescriptionWithoutError(answer);
+}
+
+// Test that we create a local answer without SDES or DTLS and accept a remote
+// offer without SDES or DTLS when encryption is disabled.
+TEST_P(WebRtcSessionTest, TestCreateAnswerReceiveOfferWithoutEncryption) {
+  options_.disable_encryption = true;
+  InitWithDtls(GetParam());
+
+  cricket::MediaSessionOptions options;
+  options.recv_video = true;
+  JsepSessionDescription* offer =
+      CreateRemoteOffer(options, cricket::SEC_DISABLED);
+  ASSERT_TRUE(offer != NULL);
+  VerifyFingerprintStatus(offer->description(), false);
+  VerifyNoCryptoParams(offer->description(), false);
+
+  // SetRemoteDescription will take the ownership of the offer.
+  SetRemoteDescriptionWithoutError(offer);
+
+  // Verify that the answer has no DTLS fingerprint.
+  SessionDescriptionInterface* answer = CreateAnswer(NULL);
+  ASSERT_TRUE(answer != NULL);
+  VerifyFingerprintStatus(answer->description(), false);
+  // Check that we don't have an a=crypto line in the answer.
+  VerifyNoCryptoParams(answer->description(), false);
+
+  // Now set the local description, which should work, even without a=crypto.
+  SetLocalDescriptionWithoutError(answer);
+}
+
+// Test that we can create and set an answer correctly when different
+// SSL roles have been negotiated for different transports.
+// See: https://bugs.chromium.org/p/webrtc/issues/detail?id=4525
+TEST_P(WebRtcSessionTest, TestCreateAnswerWithDifferentSslRoles) {
+  SendAudioVideoStream1();
+  InitWithDtls(GetParam());
+  SetFactoryDtlsSrtp();
+
+  SessionDescriptionInterface* offer = CreateOffer();
+  SetLocalDescriptionWithoutError(offer);
+
+  cricket::MediaSessionOptions options;
+  options.recv_video = true;
+
+  // First, negotiate different SSL roles.
+  SessionDescriptionInterface* answer =
+      CreateRemoteAnswer(offer, options, cricket::SEC_DISABLED);
+  TransportInfo* audio_transport_info =
+      answer->description()->GetTransportInfoByName("audio");
+  audio_transport_info->description.connection_role =
+      cricket::CONNECTIONROLE_ACTIVE;
+  TransportInfo* video_transport_info =
+      answer->description()->GetTransportInfoByName("video");
+  video_transport_info->description.connection_role =
+      cricket::CONNECTIONROLE_PASSIVE;
+  SetRemoteDescriptionWithoutError(answer);
+
+  // Now create an offer in the reverse direction, and ensure the initial
+  // offerer responds with an answer with correct SSL roles.
+  offer = CreateRemoteOfferWithVersion(options, cricket::SEC_DISABLED,
+                                       kSessionVersion,
+                                       session_->remote_description());
+  SetRemoteDescriptionWithoutError(offer);
+
+  answer = CreateAnswer(nullptr);
+  audio_transport_info = answer->description()->GetTransportInfoByName("audio");
+  EXPECT_EQ(cricket::CONNECTIONROLE_PASSIVE,
+            audio_transport_info->description.connection_role);
+  video_transport_info = answer->description()->GetTransportInfoByName("video");
+  EXPECT_EQ(cricket::CONNECTIONROLE_ACTIVE,
+            video_transport_info->description.connection_role);
+  SetLocalDescriptionWithoutError(answer);
+
+  // Lastly, start BUNDLE-ing on "audio", expecting that the "passive" role of
+  // audio is transferred over to video in the answer that completes the BUNDLE
+  // negotiation.
+  options.bundle_enabled = true;
+  offer = CreateRemoteOfferWithVersion(options, cricket::SEC_DISABLED,
+                                       kSessionVersion,
+                                       session_->remote_description());
+  SetRemoteDescriptionWithoutError(offer);
+  answer = CreateAnswer(nullptr);
+  audio_transport_info = answer->description()->GetTransportInfoByName("audio");
+  EXPECT_EQ(cricket::CONNECTIONROLE_PASSIVE,
+            audio_transport_info->description.connection_role);
+  video_transport_info = answer->description()->GetTransportInfoByName("video");
+  EXPECT_EQ(cricket::CONNECTIONROLE_PASSIVE,
+            video_transport_info->description.connection_role);
+  SetLocalDescriptionWithoutError(answer);
+}
+
+TEST_F(WebRtcSessionTest, TestSetLocalOfferTwice) {
+  Init();
+  SendNothing();
+  // SetLocalDescription takes ownership of the offer.
+  SessionDescriptionInterface* offer = CreateOffer();
+  SetLocalDescriptionWithoutError(offer);
+
+  // SetLocalDescription takes ownership of the offer.
+  SessionDescriptionInterface* offer2 = CreateOffer();
+  SetLocalDescriptionWithoutError(offer2);
+}
+
+TEST_F(WebRtcSessionTest, TestSetRemoteOfferTwice) {
+  Init();
+  SendNothing();
+  // SetRemoteDescription takes ownership of the offer.
+  SessionDescriptionInterface* offer = CreateOffer();
+  SetRemoteDescriptionWithoutError(offer);
+
+  SessionDescriptionInterface* offer2 = CreateOffer();
+  SetRemoteDescriptionWithoutError(offer2);
+}
+
+TEST_F(WebRtcSessionTest, TestSetLocalAndRemoteOffer) {
+  Init();
+  SendNothing();
+  SessionDescriptionInterface* offer = CreateOffer();
+  SetLocalDescriptionWithoutError(offer);
+  offer = CreateOffer();
+  SetRemoteDescriptionOfferExpectError("Called in wrong state: STATE_SENTOFFER",
+                                       offer);
+}
+
+TEST_F(WebRtcSessionTest, TestSetRemoteAndLocalOffer) {
+  Init();
+  SendNothing();
+  SessionDescriptionInterface* offer = CreateOffer();
+  SetRemoteDescriptionWithoutError(offer);
+  offer = CreateOffer();
+  SetLocalDescriptionOfferExpectError(
+      "Called in wrong state: STATE_RECEIVEDOFFER", offer);
+}
+
+TEST_F(WebRtcSessionTest, TestSetLocalPrAnswer) {
+  Init();
+  SendNothing();
+  SessionDescriptionInterface* offer = CreateRemoteOffer();
+  SetRemoteDescriptionExpectState(offer, WebRtcSession::STATE_RECEIVEDOFFER);
+
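+  // Downcast the answer so its type can be changed to a provisional answer.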
+  JsepSessionDescription* pranswer = static_cast<JsepSessionDescription*>(
+      CreateAnswer(NULL));
+  pranswer->set_type(SessionDescriptionInterface::kPrAnswer);
+  SetLocalDescriptionExpectState(pranswer, WebRtcSession::STATE_SENTPRANSWER);
+
+  SendAudioVideoStream1();
+  JsepSessionDescription* pranswer2 = static_cast<JsepSessionDescription*>(
+      CreateAnswer(NULL));
+  pranswer2->set_type(SessionDescriptionInterface::kPrAnswer);
+
+  SetLocalDescriptionExpectState(pranswer2, WebRtcSession::STATE_SENTPRANSWER);
+
+  SendAudioVideoStream2();
+  SessionDescriptionInterface* answer = CreateAnswer(NULL);
+  SetLocalDescriptionExpectState(answer, WebRtcSession::STATE_INPROGRESS);
+}
+
+TEST_F(WebRtcSessionTest, TestSetRemotePrAnswer) {
+  Init();
+  SendNothing();
+  SessionDescriptionInterface* offer = CreateOffer();
+  SetLocalDescriptionExpectState(offer, WebRtcSession::STATE_SENTOFFER);
+
+  JsepSessionDescription* pranswer =
+      CreateRemoteAnswer(session_->local_description());
+  pranswer->set_type(SessionDescriptionInterface::kPrAnswer);
+
+  SetRemoteDescriptionExpectState(pranswer,
+                                  WebRtcSession::STATE_RECEIVEDPRANSWER);
+
+  SendAudioVideoStream1();
+  JsepSessionDescription* pranswer2 =
+      CreateRemoteAnswer(session_->local_description());
+  pranswer2->set_type(SessionDescriptionInterface::kPrAnswer);
+
+  SetRemoteDescriptionExpectState(pranswer2,
+                                  WebRtcSession::STATE_RECEIVEDPRANSWER);
+
+  SendAudioVideoStream2();
+  SessionDescriptionInterface* answer =
+      CreateRemoteAnswer(session_->local_description());
+  SetRemoteDescriptionExpectState(answer, WebRtcSession::STATE_INPROGRESS);
+}
+
+TEST_F(WebRtcSessionTest, TestSetLocalAnswerWithoutOffer) {
+  Init();
+  SendNothing();
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+
+  SessionDescriptionInterface* answer =
+      CreateRemoteAnswer(offer.get());
+  SetLocalDescriptionAnswerExpectError("Called in wrong state: STATE_INIT",
+                                       answer);
+}
+
+TEST_F(WebRtcSessionTest, TestSetRemoteAnswerWithoutOffer) {
+  Init();
+  SendNothing();
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+
+  SessionDescriptionInterface* answer =
+      CreateRemoteAnswer(offer.get());
+  SetRemoteDescriptionAnswerExpectError(
+      "Called in wrong state: STATE_INIT", answer);
+}
+
+TEST_F(WebRtcSessionTest, TestAddRemoteCandidate) {
+  Init();
+  SendAudioVideoStream1();
+
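+  // Component 1 is the RTP component; component 2, used later, is RTCP.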
+  cricket::Candidate candidate;
+  candidate.set_component(1);
+  JsepIceCandidate ice_candidate1(kMediaContentName0, 0, candidate);
+
+  // Fail since we have not set a remote description.
+  EXPECT_FALSE(session_->ProcessIceMessage(&ice_candidate1));
+
+  SessionDescriptionInterface* offer = CreateOffer();
+  SetLocalDescriptionWithoutError(offer);
+
+  // Fail since we have not set a remote description.
+  EXPECT_FALSE(session_->ProcessIceMessage(&ice_candidate1));
+
+  SessionDescriptionInterface* answer = CreateRemoteAnswer(
+      session_->local_description());
+  SetRemoteDescriptionWithoutError(answer);
+
+  EXPECT_TRUE(session_->ProcessIceMessage(&ice_candidate1));
+  candidate.set_component(2);
+  JsepIceCandidate ice_candidate2(kMediaContentName0, 0, candidate);
+  EXPECT_TRUE(session_->ProcessIceMessage(&ice_candidate2));
+
+  // Verify that the candidates are copied properly from the internal vector.
+  const SessionDescriptionInterface* remote_desc =
+      session_->remote_description();
+  ASSERT_TRUE(remote_desc != NULL);
+  ASSERT_EQ(2u, remote_desc->number_of_mediasections());
+  const IceCandidateCollection* candidates =
+      remote_desc->candidates(kMediaContentIndex0);
+  ASSERT_EQ(2u, candidates->count());
+  EXPECT_EQ(kMediaContentIndex0, candidates->at(0)->sdp_mline_index());
+  EXPECT_EQ(kMediaContentName0, candidates->at(0)->sdp_mid());
+  EXPECT_EQ(1, candidates->at(0)->candidate().component());
+  EXPECT_EQ(2, candidates->at(1)->candidate().component());
+
+  // |ice_candidate3| is identical to |ice_candidate2|.  It can be added
+  // successfully, but the total count of candidates will not increase.
+  candidate.set_component(2);
+  JsepIceCandidate ice_candidate3(kMediaContentName0, 0, candidate);
+  EXPECT_TRUE(session_->ProcessIceMessage(&ice_candidate3));
+  ASSERT_EQ(2u, candidates->count());
+
+  JsepIceCandidate bad_ice_candidate("bad content name", 99, candidate);
+  EXPECT_FALSE(session_->ProcessIceMessage(&bad_ice_candidate));
+}
+
+// Test that a remote candidate is added to the remote session description and
+// that it is retained if the remote session description is changed.
+TEST_F(WebRtcSessionTest, TestRemoteCandidatesAddedToSessionDescription) {
+  Init();
+  cricket::Candidate candidate1;
+  candidate1.set_component(1);
+  JsepIceCandidate ice_candidate1(kMediaContentName0, kMediaContentIndex0,
+                                  candidate1);
+  SendAudioVideoStream1();
+  CreateAndSetRemoteOfferAndLocalAnswer();
+
+  EXPECT_TRUE(session_->ProcessIceMessage(&ice_candidate1));
+  const SessionDescriptionInterface* remote_desc =
+      session_->remote_description();
+  ASSERT_TRUE(remote_desc != NULL);
+  ASSERT_EQ(2u, remote_desc->number_of_mediasections());
+  const IceCandidateCollection* candidates =
+      remote_desc->candidates(kMediaContentIndex0);
+  ASSERT_EQ(1u, candidates->count());
+  EXPECT_EQ(kMediaContentIndex0, candidates->at(0)->sdp_mline_index());
+
+  // Update the remote session description with a new description and a
+  // candidate, and check that the new remote session description contains
+  // both candidates.
+  SessionDescriptionInterface* offer = CreateRemoteOffer();
+  cricket::Candidate candidate2;
+  JsepIceCandidate ice_candidate2(kMediaContentName0, kMediaContentIndex0,
+                                  candidate2);
+  EXPECT_TRUE(offer->AddCandidate(&ice_candidate2));
+  SetRemoteDescriptionWithoutError(offer);
+
+  remote_desc = session_->remote_description();
+  ASSERT_TRUE(remote_desc != NULL);
+  ASSERT_EQ(2u, remote_desc->number_of_mediasections());
+  candidates = remote_desc->candidates(kMediaContentIndex0);
+  ASSERT_EQ(2u, candidates->count());
+  EXPECT_EQ(kMediaContentIndex0, candidates->at(0)->sdp_mline_index());
+  // The username and password have been updated with those from the
+  // TransportInfo of the SessionDescription, so they won't equal the
+  // original ones. Copy them before comparing.
+  candidate2.set_username(candidates->at(0)->candidate().username());
+  candidate2.set_password(candidates->at(0)->candidate().password());
+  EXPECT_TRUE(candidate2.IsEquivalent(candidates->at(0)->candidate()));
+  EXPECT_EQ(kMediaContentIndex0, candidates->at(1)->sdp_mline_index());
+  // Likewise, copy the username and password before comparing.
+  candidate1.set_username(candidates->at(1)->candidate().username());
+  candidate1.set_password(candidates->at(1)->candidate().password());
+  EXPECT_TRUE(candidate1.IsEquivalent(candidates->at(1)->candidate()));
+
+  // Test that the candidate is ignored if the same candidate is added again.
+  EXPECT_TRUE(session_->ProcessIceMessage(&ice_candidate2));
+}
+
+// Test that local candidates are added to the local session description and
+// that they are retained if the local session description is changed.
+TEST_F(WebRtcSessionTest, TestLocalCandidatesAddedToSessionDescription) {
+  AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
+  Init();
+  SendAudioVideoStream1();
+  CreateAndSetRemoteOfferAndLocalAnswer();
+
+  const SessionDescriptionInterface* local_desc = session_->local_description();
+  const IceCandidateCollection* candidates =
+      local_desc->candidates(kMediaContentIndex0);
+  ASSERT_TRUE(candidates != NULL);
+  EXPECT_EQ(0u, candidates->count());
+
+  EXPECT_TRUE_WAIT(observer_.oncandidatesready_, kIceCandidatesTimeout);
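+  // Once candidate gathering completes, the local description is updated
+  // with the gathered candidates.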
+
+  local_desc = session_->local_description();
+  candidates = local_desc->candidates(kMediaContentIndex0);
+  ASSERT_TRUE(candidates != NULL);
+  EXPECT_LT(0u, candidates->count());
+  candidates = local_desc->candidates(1);
+  ASSERT_TRUE(candidates != NULL);
+  EXPECT_EQ(0u, candidates->count());
+
+  // Update the session descriptions.
+  SendAudioVideoStream1();
+  CreateAndSetRemoteOfferAndLocalAnswer();
+
+  local_desc = session_->local_description();
+  candidates = local_desc->candidates(kMediaContentIndex0);
+  ASSERT_TRUE(candidates != NULL);
+  EXPECT_LT(0u, candidates->count());
+  candidates = local_desc->candidates(1);
+  ASSERT_TRUE(candidates != NULL);
+  EXPECT_EQ(0u, candidates->count());
+}
+
+// Test that we can set a remote session description with remote candidates.
+TEST_F(WebRtcSessionTest, TestSetRemoteSessionDescriptionWithCandidates) {
+  Init();
+
+  cricket::Candidate candidate1;
+  candidate1.set_component(1);
+  JsepIceCandidate ice_candidate(kMediaContentName0, kMediaContentIndex0,
+                                 candidate1);
+  SendAudioVideoStream1();
+  SessionDescriptionInterface* offer = CreateOffer();
+
+  EXPECT_TRUE(offer->AddCandidate(&ice_candidate));
+  SetRemoteDescriptionWithoutError(offer);
+
+  const SessionDescriptionInterface* remote_desc =
+      session_->remote_description();
+  ASSERT_TRUE(remote_desc != NULL);
+  ASSERT_EQ(2u, remote_desc->number_of_mediasections());
+  const IceCandidateCollection* candidates =
+      remote_desc->candidates(kMediaContentIndex0);
+  ASSERT_EQ(1u, candidates->count());
+  EXPECT_EQ(kMediaContentIndex0, candidates->at(0)->sdp_mline_index());
+
+  SessionDescriptionInterface* answer = CreateAnswer(NULL);
+  SetLocalDescriptionWithoutError(answer);
+}
+
+// Test that offers and answers contain ICE candidates when ICE candidates
+// have been gathered.
+TEST_F(WebRtcSessionTest, TestSetLocalAndRemoteDescriptionWithCandidates) {
+  AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
+  Init();
+  SendAudioVideoStream1();
+  // ICE is started, but candidates are not provided until SetLocalDescription
+  // is called.
+  EXPECT_EQ(0u, observer_.mline_0_candidates_.size());
+  EXPECT_EQ(0u, observer_.mline_1_candidates_.size());
+  CreateAndSetRemoteOfferAndLocalAnswer();
+  // Wait until at least one local candidate has been collected.
+  EXPECT_TRUE_WAIT(0u < observer_.mline_0_candidates_.size(),
+                   kIceCandidatesTimeout);
+
+  rtc::scoped_ptr<SessionDescriptionInterface> local_offer(CreateOffer());
+
+  ASSERT_TRUE(local_offer->candidates(kMediaContentIndex0) != NULL);
+  EXPECT_LT(0u, local_offer->candidates(kMediaContentIndex0)->count());
+
+  SessionDescriptionInterface* remote_offer = CreateRemoteOffer();
+  SetRemoteDescriptionWithoutError(remote_offer);
+  SessionDescriptionInterface* answer = CreateAnswer(NULL);
+  ASSERT_TRUE(answer->candidates(kMediaContentIndex0) != NULL);
+  EXPECT_LT(0u, answer->candidates(kMediaContentIndex0)->count());
+  SetLocalDescriptionWithoutError(answer);
+}
+
+// Verifies that transport and media channels are created with the content
+// names present in the SessionDescription.
+TEST_F(WebRtcSessionTest, TestChannelCreationsWithContentNames) {
+  Init();
+  SendAudioVideoStream1();
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+
+  // CreateOffer creates a session description with the content names "audio"
+  // and "video". The goal is to modify these content names and verify the
+  // transport channels in the WebRtcSession, as channels are created with the
+  // content names present in the SDP.
+  std::string sdp;
+  EXPECT_TRUE(offer->ToString(&sdp));
+  const std::string kAudioMid = "a=mid:audio";
+  const std::string kAudioMidReplaceStr = "a=mid:audio_content_name";
+  const std::string kVideoMid = "a=mid:video";
+  const std::string kVideoMidReplaceStr = "a=mid:video_content_name";
+
+  // Replacing |audio| with |audio_content_name|.
+  rtc::replace_substrs(kAudioMid.c_str(), kAudioMid.length(),
+                       kAudioMidReplaceStr.c_str(),
+                       kAudioMidReplaceStr.length(), &sdp);
+  // Replacing |video| with |video_content_name|.
+  rtc::replace_substrs(kVideoMid.c_str(), kVideoMid.length(),
+                       kVideoMidReplaceStr.c_str(),
+                       kVideoMidReplaceStr.length(), &sdp);
+
+  SessionDescriptionInterface* modified_offer =
+      CreateSessionDescription(JsepSessionDescription::kOffer, sdp, NULL);
+
+  SetRemoteDescriptionWithoutError(modified_offer);
+
+  SessionDescriptionInterface* answer = CreateAnswer(NULL);
+  SetLocalDescriptionWithoutError(answer);
+
+  cricket::TransportChannel* voice_transport_channel =
+      session_->voice_rtp_transport_channel();
+  EXPECT_TRUE(voice_transport_channel != NULL);
+  EXPECT_EQ(voice_transport_channel->transport_name(), "audio_content_name");
+  cricket::TransportChannel* video_transport_channel =
+      session_->video_rtp_transport_channel();
+  EXPECT_TRUE(video_transport_channel != NULL);
+  EXPECT_EQ(video_transport_channel->transport_name(), "video_content_name");
+  EXPECT_TRUE((video_channel_ = media_engine_->GetVideoChannel(0)) != NULL);
+  EXPECT_TRUE((voice_channel_ = media_engine_->GetVoiceChannel(0)) != NULL);
+}
+
+// Test that an offer contains the correct media content descriptions when no
+// constraints or send streams have been set.
+TEST_F(WebRtcSessionTest, CreateOfferWithoutConstraintsOrStreams) {
+  Init();
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+
+  ASSERT_TRUE(offer != NULL);
+  const cricket::ContentInfo* content =
+      cricket::GetFirstAudioContent(offer->description());
+  EXPECT_TRUE(content != NULL);
+  content = cricket::GetFirstVideoContent(offer->description());
+  EXPECT_TRUE(content == NULL);
+}
+
+// Test that an offer contains the correct media content descriptions based on
+// the send streams when no constraints have been set.
+TEST_F(WebRtcSessionTest, CreateOfferWithoutConstraints) {
+  Init();
+  // Test Audio only offer.
+  SendAudioOnlyStream2();
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+
+  const cricket::ContentInfo* content =
+      cricket::GetFirstAudioContent(offer->description());
+  EXPECT_TRUE(content != NULL);
+  content = cricket::GetFirstVideoContent(offer->description());
+  EXPECT_TRUE(content == NULL);
+
+  // Test Audio / Video offer.
+  SendAudioVideoStream1();
+  offer.reset(CreateOffer());
+  content = cricket::GetFirstAudioContent(offer->description());
+  EXPECT_TRUE(content != NULL);
+  content = cricket::GetFirstVideoContent(offer->description());
+  EXPECT_TRUE(content != NULL);
+}
+
+// Test that an offer contains no media content descriptions if
+// kOfferToReceiveVideo and kOfferToReceiveAudio constraints are set to false.
+TEST_F(WebRtcSessionTest, CreateOfferWithConstraintsWithoutStreams) {
+  Init();
+  PeerConnectionInterface::RTCOfferAnswerOptions options;
+  options.offer_to_receive_audio = 0;
+  options.offer_to_receive_video = 0;
+
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(
+      CreateOffer(options));
+
+  ASSERT_TRUE(offer != NULL);
+  const cricket::ContentInfo* content =
+      cricket::GetFirstAudioContent(offer->description());
+  EXPECT_TRUE(content == NULL);
+  content = cricket::GetFirstVideoContent(offer->description());
+  EXPECT_TRUE(content == NULL);
+}
+
+// Test that an offer contains only an audio media content description if the
+// kOfferToReceiveAudio constraint is set to true.
+TEST_F(WebRtcSessionTest, CreateAudioOnlyOfferWithConstraints) {
+  Init();
+  PeerConnectionInterface::RTCOfferAnswerOptions options;
+  options.offer_to_receive_audio =
+      RTCOfferAnswerOptions::kOfferToReceiveMediaTrue;
+
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(
+      CreateOffer(options));
+
+  const cricket::ContentInfo* content =
+      cricket::GetFirstAudioContent(offer->description());
+  EXPECT_TRUE(content != NULL);
+  content = cricket::GetFirstVideoContent(offer->description());
+  EXPECT_TRUE(content == NULL);
+}
+
+// Test that an offer contains audio and video media content descriptions if
+// kOfferToReceiveAudio and kOfferToReceiveVideo constraints are set to true.
+TEST_F(WebRtcSessionTest, CreateOfferWithConstraints) {
+  Init();
+  // Test Audio / Video offer.
+  PeerConnectionInterface::RTCOfferAnswerOptions options;
+  options.offer_to_receive_audio =
+      RTCOfferAnswerOptions::kOfferToReceiveMediaTrue;
+  options.offer_to_receive_video =
+      RTCOfferAnswerOptions::kOfferToReceiveMediaTrue;
+
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(
+      CreateOffer(options));
+
+  const cricket::ContentInfo* content =
+      cricket::GetFirstAudioContent(offer->description());
+  EXPECT_TRUE(content != NULL);
+
+  content = cricket::GetFirstVideoContent(offer->description());
+  EXPECT_TRUE(content != NULL);
+
+  // Set the constraints to false and verify that the audio/video contents
+  // are removed.
+  options.offer_to_receive_audio = 0;
+  options.offer_to_receive_video = 0;
+  offer.reset(CreateOffer(options));
+
+  content = cricket::GetFirstAudioContent(offer->description());
+  EXPECT_TRUE(content == NULL);
+  content = cricket::GetFirstVideoContent(offer->description());
+  EXPECT_TRUE(content == NULL);
+}
+
+// Test that an answer cannot be created if the last remote description is not
+// an offer.
+TEST_F(WebRtcSessionTest, CreateAnswerWithoutAnOffer) {
+  Init();
+  SessionDescriptionInterface* offer = CreateOffer();
+  SetLocalDescriptionWithoutError(offer);
+  SessionDescriptionInterface* answer = CreateRemoteAnswer(offer);
+  SetRemoteDescriptionWithoutError(answer);
+  EXPECT_TRUE(CreateAnswer(NULL) == NULL);
+}
+
+// Test that an answer contains the correct media content descriptions when no
+// constraints have been set.
+TEST_F(WebRtcSessionTest, CreateAnswerWithoutConstraintsOrStreams) {
+  Init();
+  // Create a remote offer with audio and video content.
+  rtc::scoped_ptr<JsepSessionDescription> offer(CreateRemoteOffer());
+  SetRemoteDescriptionWithoutError(offer.release());
+  rtc::scoped_ptr<SessionDescriptionInterface> answer(
+      CreateAnswer(NULL));
+  const cricket::ContentInfo* content =
+      cricket::GetFirstAudioContent(answer->description());
+  ASSERT_TRUE(content != NULL);
+  EXPECT_FALSE(content->rejected);
+
+  content = cricket::GetFirstVideoContent(answer->description());
+  ASSERT_TRUE(content != NULL);
+  EXPECT_FALSE(content->rejected);
+}
+
+// Test that an answer contains the correct media content descriptions when no
+// constraints have been set and the offer only contains audio.
+TEST_F(WebRtcSessionTest, CreateAudioAnswerWithoutConstraintsOrStreams) {
+  Init();
+  // Create a remote offer with audio only.
+  cricket::MediaSessionOptions options;
+
+  rtc::scoped_ptr<JsepSessionDescription> offer(
+      CreateRemoteOffer(options));
+  ASSERT_TRUE(cricket::GetFirstVideoContent(offer->description()) == NULL);
+  ASSERT_TRUE(cricket::GetFirstAudioContent(offer->description()) != NULL);
+
+  SetRemoteDescriptionWithoutError(offer.release());
+  rtc::scoped_ptr<SessionDescriptionInterface> answer(
+      CreateAnswer(NULL));
+  const cricket::ContentInfo* content =
+      cricket::GetFirstAudioContent(answer->description());
+  ASSERT_TRUE(content != NULL);
+  EXPECT_FALSE(content->rejected);
+
+  EXPECT_TRUE(cricket::GetFirstVideoContent(answer->description()) == NULL);
+}
+
+// Test that an answer contains the correct media content descriptions when no
+// constraints have been set and streams are sent.
+TEST_F(WebRtcSessionTest, CreateAnswerWithoutConstraints) {
+  Init();
+  // Create a remote offer with audio and video content.
+  rtc::scoped_ptr<JsepSessionDescription> offer(CreateRemoteOffer());
+  SetRemoteDescriptionWithoutError(offer.release());
+  // Test with a stream with tracks.
+  SendAudioVideoStream1();
+  rtc::scoped_ptr<SessionDescriptionInterface> answer(
+      CreateAnswer(NULL));
+  const cricket::ContentInfo* content =
+      cricket::GetFirstAudioContent(answer->description());
+  ASSERT_TRUE(content != NULL);
+  EXPECT_FALSE(content->rejected);
+
+  content = cricket::GetFirstVideoContent(answer->description());
+  ASSERT_TRUE(content != NULL);
+  EXPECT_FALSE(content->rejected);
+}
+
+// Test that an answer contains the correct media content descriptions when
+// constraints have been set but no stream is sent.
+TEST_F(WebRtcSessionTest, CreateAnswerWithConstraintsWithoutStreams) {
+  Init();
+  // Create a remote offer with audio and video content.
+  rtc::scoped_ptr<JsepSessionDescription> offer(CreateRemoteOffer());
+  SetRemoteDescriptionWithoutError(offer.release());
+
+  webrtc::FakeConstraints constraints_no_receive;
+  constraints_no_receive.SetMandatoryReceiveAudio(false);
+  constraints_no_receive.SetMandatoryReceiveVideo(false);
+
+  rtc::scoped_ptr<SessionDescriptionInterface> answer(
+      CreateAnswer(&constraints_no_receive));
+  const cricket::ContentInfo* content =
+      cricket::GetFirstAudioContent(answer->description());
+  ASSERT_TRUE(content != NULL);
+  EXPECT_TRUE(content->rejected);
+
+  content = cricket::GetFirstVideoContent(answer->description());
+  ASSERT_TRUE(content != NULL);
+  EXPECT_TRUE(content->rejected);
+}
+
+// Test that an answer contains the correct media content descriptions when
+// constraints have been set and streams are sent.
+TEST_F(WebRtcSessionTest, CreateAnswerWithConstraints) {
+  Init();
+  // Create a remote offer with audio and video content.
+  rtc::scoped_ptr<JsepSessionDescription> offer(CreateRemoteOffer());
+  SetRemoteDescriptionWithoutError(offer.release());
+
+  webrtc::FakeConstraints constraints_no_receive;
+  constraints_no_receive.SetMandatoryReceiveAudio(false);
+  constraints_no_receive.SetMandatoryReceiveVideo(false);
+
+  // Test with a stream with tracks.
+  SendAudioVideoStream1();
+  rtc::scoped_ptr<SessionDescriptionInterface> answer(
+      CreateAnswer(&constraints_no_receive));
+
+  // TODO(perkj): Should the direction be set to SEND_ONLY?
+  const cricket::ContentInfo* content =
+      cricket::GetFirstAudioContent(answer->description());
+  ASSERT_TRUE(content != NULL);
+  EXPECT_FALSE(content->rejected);
+
+  // TODO(perkj): Should the direction be set to SEND_ONLY?
+  content = cricket::GetFirstVideoContent(answer->description());
+  ASSERT_TRUE(content != NULL);
+  EXPECT_FALSE(content->rejected);
+}
+
+TEST_F(WebRtcSessionTest, CreateOfferWithoutCNCodecs) {
+  AddCNCodecs();
+  Init();
+  PeerConnectionInterface::RTCOfferAnswerOptions options;
+  options.offer_to_receive_audio =
+      RTCOfferAnswerOptions::kOfferToReceiveMediaTrue;
+  options.voice_activity_detection = false;
+
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(
+      CreateOffer(options));
+
+  const cricket::ContentInfo* content =
+      cricket::GetFirstAudioContent(offer->description());
+  EXPECT_TRUE(content != NULL);
+  EXPECT_TRUE(VerifyNoCNCodecs(content));
+}
+
+TEST_F(WebRtcSessionTest, CreateAnswerWithoutCNCodecs) {
+  AddCNCodecs();
+  Init();
+  // Create a remote offer with audio and video content.
+  rtc::scoped_ptr<JsepSessionDescription> offer(CreateRemoteOffer());
+  SetRemoteDescriptionWithoutError(offer.release());
+
+  webrtc::FakeConstraints constraints;
+  constraints.SetOptionalVAD(false);
+  rtc::scoped_ptr<SessionDescriptionInterface> answer(
+      CreateAnswer(&constraints));
+  const cricket::ContentInfo* content =
+      cricket::GetFirstAudioContent(answer->description());
+  ASSERT_TRUE(content != NULL);
+  EXPECT_TRUE(VerifyNoCNCodecs(content));
+}
+
+// This test verifies call setup when the remote peer answers with audio only
+// and later updates the session with video.
+TEST_F(WebRtcSessionTest, TestAVOfferWithAudioOnlyAnswer) {
+  Init();
+  EXPECT_TRUE(media_engine_->GetVideoChannel(0) == NULL);
+  EXPECT_TRUE(media_engine_->GetVoiceChannel(0) == NULL);
+
+  SendAudioVideoStream1();
+  SessionDescriptionInterface* offer = CreateOffer();
+
+  cricket::MediaSessionOptions options;
+  SessionDescriptionInterface* answer = CreateRemoteAnswer(offer, options);
+
+  // SetLocalDescription and SetRemoteDescription take ownership of the offer
+  // and answer.
+  SetLocalDescriptionWithoutError(offer);
+  SetRemoteDescriptionWithoutError(answer);
+
+  video_channel_ = media_engine_->GetVideoChannel(0);
+  voice_channel_ = media_engine_->GetVoiceChannel(0);
+
+  ASSERT_TRUE(video_channel_ == NULL);
+
+  ASSERT_EQ(0u, voice_channel_->recv_streams().size());
+  ASSERT_EQ(1u, voice_channel_->send_streams().size());
+  EXPECT_EQ(kAudioTrack1, voice_channel_->send_streams()[0].id);
+
+  // Let the remote end update the session descriptions, with Audio and Video.
+  SendAudioVideoStream2();
+  CreateAndSetRemoteOfferAndLocalAnswer();
+
+  video_channel_ = media_engine_->GetVideoChannel(0);
+  voice_channel_ = media_engine_->GetVoiceChannel(0);
+
+  ASSERT_TRUE(video_channel_ != NULL);
+  ASSERT_TRUE(voice_channel_ != NULL);
+
+  ASSERT_EQ(1u, video_channel_->recv_streams().size());
+  ASSERT_EQ(1u, video_channel_->send_streams().size());
+  EXPECT_EQ(kVideoTrack2, video_channel_->recv_streams()[0].id);
+  EXPECT_EQ(kVideoTrack2, video_channel_->send_streams()[0].id);
+  ASSERT_EQ(1u, voice_channel_->recv_streams().size());
+  ASSERT_EQ(1u, voice_channel_->send_streams().size());
+  EXPECT_EQ(kAudioTrack2, voice_channel_->recv_streams()[0].id);
+  EXPECT_EQ(kAudioTrack2, voice_channel_->send_streams()[0].id);
+
+  // Change session back to audio only.
+  SendAudioOnlyStream2();
+  CreateAndSetRemoteOfferAndLocalAnswer();
+
+  EXPECT_EQ(0u, video_channel_->recv_streams().size());
+  ASSERT_EQ(1u, voice_channel_->recv_streams().size());
+  EXPECT_EQ(kAudioTrack2, voice_channel_->recv_streams()[0].id);
+  ASSERT_EQ(1u, voice_channel_->send_streams().size());
+  EXPECT_EQ(kAudioTrack2, voice_channel_->send_streams()[0].id);
+}
+
+// This test verifies call setup when the remote peer answers with video only
+// and later updates the session with audio.
+TEST_F(WebRtcSessionTest, TestAVOfferWithVideoOnlyAnswer) {
+  Init();
+  EXPECT_TRUE(media_engine_->GetVideoChannel(0) == NULL);
+  EXPECT_TRUE(media_engine_->GetVoiceChannel(0) == NULL);
+  SendAudioVideoStream1();
+  SessionDescriptionInterface* offer = CreateOffer();
+
+  cricket::MediaSessionOptions options;
+  options.recv_audio = false;
+  options.recv_video = true;
+  SessionDescriptionInterface* answer = CreateRemoteAnswer(
+      offer, options, cricket::SEC_ENABLED);
+
+  // SetLocalDescription and SetRemoteDescription take ownership of the offer
+  // and answer.
+  SetLocalDescriptionWithoutError(offer);
+  SetRemoteDescriptionWithoutError(answer);
+
+  video_channel_ = media_engine_->GetVideoChannel(0);
+  voice_channel_ = media_engine_->GetVoiceChannel(0);
+
+  ASSERT_TRUE(voice_channel_ == NULL);
+  ASSERT_TRUE(video_channel_ != NULL);
+
+  EXPECT_EQ(0u, video_channel_->recv_streams().size());
+  ASSERT_EQ(1u, video_channel_->send_streams().size());
+  EXPECT_EQ(kVideoTrack1, video_channel_->send_streams()[0].id);
+
+  // Update the session descriptions, with Audio and Video.
+  SendAudioVideoStream2();
+  CreateAndSetRemoteOfferAndLocalAnswer();
+
+  voice_channel_ = media_engine_->GetVoiceChannel(0);
+  ASSERT_TRUE(voice_channel_ != NULL);
+
+  ASSERT_EQ(1u, voice_channel_->recv_streams().size());
+  ASSERT_EQ(1u, voice_channel_->send_streams().size());
+  EXPECT_EQ(kAudioTrack2, voice_channel_->recv_streams()[0].id);
+  EXPECT_EQ(kAudioTrack2, voice_channel_->send_streams()[0].id);
+
+  // Change session back to video only.
+  SendVideoOnlyStream2();
+  CreateAndSetRemoteOfferAndLocalAnswer();
+
+  video_channel_ = media_engine_->GetVideoChannel(0);
+  voice_channel_ = media_engine_->GetVoiceChannel(0);
+
+  ASSERT_EQ(1u, video_channel_->recv_streams().size());
+  EXPECT_EQ(kVideoTrack2, video_channel_->recv_streams()[0].id);
+  ASSERT_EQ(1u, video_channel_->send_streams().size());
+  EXPECT_EQ(kVideoTrack2, video_channel_->send_streams()[0].id);
+}
+
+TEST_F(WebRtcSessionTest, VerifyCryptoParamsInSDP) {
+  Init();
+  SendAudioVideoStream1();
+  scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+  VerifyCryptoParams(offer->description());
+  SetRemoteDescriptionWithoutError(offer.release());
+  scoped_ptr<SessionDescriptionInterface> answer(CreateAnswer(NULL));
+  VerifyCryptoParams(answer->description());
+}
+
+TEST_F(WebRtcSessionTest, VerifyNoCryptoParamsInSDP) {
+  options_.disable_encryption = true;
+  Init();
+  SendAudioVideoStream1();
+  scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+  VerifyNoCryptoParams(offer->description(), false);
+}
+
+TEST_F(WebRtcSessionTest, VerifyAnswerFromNonCryptoOffer) {
+  Init();
+  VerifyAnswerFromNonCryptoOffer();
+}
+
+TEST_F(WebRtcSessionTest, VerifyAnswerFromCryptoOffer) {
+  Init();
+  VerifyAnswerFromCryptoOffer();
+}
+
+// This test verifies that setLocalDescription fails if
+// no a=ice-ufrag and a=ice-pwd lines are present in the SDP.
+TEST_F(WebRtcSessionTest, TestSetLocalDescriptionWithoutIce) {
+  Init();
+  SendAudioVideoStream1();
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+
+  std::string sdp;
+  RemoveIceUfragPwdLines(offer.get(), &sdp);
+  SessionDescriptionInterface* modified_offer =
+      CreateSessionDescription(JsepSessionDescription::kOffer, sdp, NULL);
+  SetLocalDescriptionOfferExpectError(kSdpWithoutIceUfragPwd, modified_offer);
+}
+
+// This test verifies that setRemoteDescription fails if
+// no a=ice-ufrag and a=ice-pwd lines are present in the SDP.
+TEST_F(WebRtcSessionTest, TestSetRemoteDescriptionWithoutIce) {
+  Init();
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateRemoteOffer());
+  std::string sdp;
+  RemoveIceUfragPwdLines(offer.get(), &sdp);
+  SessionDescriptionInterface* modified_offer =
+      CreateSessionDescription(JsepSessionDescription::kOffer, sdp, NULL);
+  SetRemoteDescriptionOfferExpectError(kSdpWithoutIceUfragPwd, modified_offer);
+}
+
+// This test verifies that setLocalDescription fails if the local offer has
+// ICE ufrag and pwd strings that are too short.
+TEST_F(WebRtcSessionTest, TestSetLocalDescriptionInvalidIceCredentials) {
+  Init();
+  SendAudioVideoStream1();
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+
+  std::string sdp;
+  // Modify the ICE ufrag and pwd in the local offer with strings shorter than
+  // the recommended values of 4 and 22 bytes respectively.
+  ModifyIceUfragPwdLines(offer.get(), "ice", "icepwd", &sdp);
+  SessionDescriptionInterface* modified_offer =
+      CreateSessionDescription(JsepSessionDescription::kOffer, sdp, NULL);
+  std::string error;
+  EXPECT_FALSE(session_->SetLocalDescription(modified_offer, &error));
+
+  // Test with strings longer than 256 characters.
+  sdp.clear();
+  ModifyIceUfragPwdLines(offer.get(), kTooLongIceUfragPwd, kTooLongIceUfragPwd,
+                         &sdp);
+  modified_offer = CreateSessionDescription(JsepSessionDescription::kOffer, sdp,
+                                            NULL);
+  EXPECT_FALSE(session_->SetLocalDescription(modified_offer, &error));
+}
+
+// This test verifies that setRemoteDescription fails if the remote offer has
+// ICE ufrag and pwd strings that are too short.
+TEST_F(WebRtcSessionTest, TestSetRemoteDescriptionInvalidIceCredentials) {
+  Init();
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateRemoteOffer());
+  std::string sdp;
+  // Modify the ICE ufrag and pwd in the remote offer with strings shorter
+  // than the recommended values of 4 and 22 bytes respectively.
+  ModifyIceUfragPwdLines(offer.get(), "ice", "icepwd", &sdp);
+  SessionDescriptionInterface* modified_offer =
+      CreateSessionDescription(JsepSessionDescription::kOffer, sdp, NULL);
+  std::string error;
+  EXPECT_FALSE(session_->SetRemoteDescription(modified_offer, &error));
+
+  sdp.clear();
+  ModifyIceUfragPwdLines(offer.get(), kTooLongIceUfragPwd, kTooLongIceUfragPwd,
+                         &sdp);
+  modified_offer = CreateSessionDescription(JsepSessionDescription::kOffer, sdp,
+                                            NULL);
+  EXPECT_FALSE(session_->SetRemoteDescription(modified_offer, &error));
+}
+
+// Test that if the remote offer indicates the peer requested ICE restart (via
+// a new ufrag or pwd), the old ICE candidates are not copied, and vice versa.
+TEST_F(WebRtcSessionTest, TestSetRemoteOfferWithIceRestart) {
+  Init();
+  scoped_ptr<SessionDescriptionInterface> offer(CreateRemoteOffer());
+
+  // Create the first offer.
+  std::string sdp;
+  ModifyIceUfragPwdLines(offer.get(), "0123456789012345",
+                         "abcdefghijklmnopqrstuvwx", &sdp);
+  SessionDescriptionInterface* offer1 =
+      CreateSessionDescription(JsepSessionDescription::kOffer, sdp, NULL);
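+  // A relay candidate with empty username/password; the ICE credentials come
+  // from the TransportInfo of the session description.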
+  cricket::Candidate candidate1(1, "udp", rtc::SocketAddress("1.1.1.1", 5000),
+                                0, "", "", "relay", 0, "");
+  JsepIceCandidate ice_candidate1(kMediaContentName0, kMediaContentIndex0,
+                                  candidate1);
+  EXPECT_TRUE(offer1->AddCandidate(&ice_candidate1));
+  SetRemoteDescriptionWithoutError(offer1);
+  EXPECT_EQ(1, session_->remote_description()->candidates(0)->count());
+
+  // The second offer has the same ufrag and pwd but different address.
+  sdp.clear();
+  ModifyIceUfragPwdLines(offer.get(), "0123456789012345",
+                         "abcdefghijklmnopqrstuvwx", &sdp);
+  SessionDescriptionInterface* offer2 =
+      CreateSessionDescription(JsepSessionDescription::kOffer, sdp, NULL);
+  candidate1.set_address(rtc::SocketAddress("1.1.1.1", 6000));
+  JsepIceCandidate ice_candidate2(kMediaContentName0, kMediaContentIndex0,
+                                  candidate1);
+  EXPECT_TRUE(offer2->AddCandidate(&ice_candidate2));
+  SetRemoteDescriptionWithoutError(offer2);
+  EXPECT_EQ(2, session_->remote_description()->candidates(0)->count());
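+  // Same ufrag/pwd means no ICE restart, so the earlier candidate is kept.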
+
+  // The third offer has a different ufrag and different address.
+  sdp.clear();
+  ModifyIceUfragPwdLines(offer.get(), "0123456789012333",
+                         "abcdefghijklmnopqrstuvwx", &sdp);
+  SessionDescriptionInterface* offer3 =
+      CreateSessionDescription(JsepSessionDescription::kOffer, sdp, NULL);
+  candidate1.set_address(rtc::SocketAddress("1.1.1.1", 7000));
+  JsepIceCandidate ice_candidate3(kMediaContentName0, kMediaContentIndex0,
+                                  candidate1);
+  EXPECT_TRUE(offer3->AddCandidate(&ice_candidate3));
+  SetRemoteDescriptionWithoutError(offer3);
+  EXPECT_EQ(1, session_->remote_description()->candidates(0)->count());
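+  // The new ufrag signals an ICE restart, so the earlier candidates are
+  // discarded.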
+
+  // The fourth offer has no candidate but a different ufrag/pwd.
+  sdp.clear();
+  ModifyIceUfragPwdLines(offer.get(), "0123456789012444",
+                         "abcdefghijklmnopqrstuvyz", &sdp);
+  SessionDescriptionInterface* offer4 =
+      CreateSessionDescription(JsepSessionDescription::kOffer, sdp, NULL);
+  SetRemoteDescriptionWithoutError(offer4);
+  EXPECT_EQ(0, session_->remote_description()->candidates(0)->count());
+}
+
+// Test that if the remote answer indicates the peer requested ICE restart (via
+// a new ufrag or pwd), the old ICE candidates are not copied, and vice versa.
+TEST_F(WebRtcSessionTest, TestSetRemoteAnswerWithIceRestart) {
+  Init();
+  SessionDescriptionInterface* offer = CreateOffer();
+  SetLocalDescriptionWithoutError(offer);
+  scoped_ptr<SessionDescriptionInterface> answer(CreateRemoteAnswer(offer));
+
+  // Create the first answer.
+  std::string sdp;
+  ModifyIceUfragPwdLines(answer.get(), "0123456789012345",
+                         "abcdefghijklmnopqrstuvwx", &sdp);
+  SessionDescriptionInterface* answer1 =
+      CreateSessionDescription(JsepSessionDescription::kPrAnswer, sdp, NULL);
+  cricket::Candidate candidate1(1, "udp", rtc::SocketAddress("1.1.1.1", 5000),
+                                0, "", "", "relay", 0, "");
+  JsepIceCandidate ice_candidate1(kMediaContentName0, kMediaContentIndex0,
+                                  candidate1);
+  EXPECT_TRUE(answer1->AddCandidate(&ice_candidate1));
+  SetRemoteDescriptionWithoutError(answer1);
+  EXPECT_EQ(1, session_->remote_description()->candidates(0)->count());
+
+  // The second answer has the same ufrag and pwd but different address.
+  sdp.clear();
+  ModifyIceUfragPwdLines(answer.get(), "0123456789012345",
+                         "abcdefghijklmnopqrstuvwx", &sdp);
+  SessionDescriptionInterface* answer2 =
+      CreateSessionDescription(JsepSessionDescription::kPrAnswer, sdp, NULL);
+  candidate1.set_address(rtc::SocketAddress("1.1.1.1", 6000));
+  JsepIceCandidate ice_candidate2(kMediaContentName0, kMediaContentIndex0,
+                                  candidate1);
+  EXPECT_TRUE(answer2->AddCandidate(&ice_candidate2));
+  SetRemoteDescriptionWithoutError(answer2);
+  EXPECT_EQ(2, session_->remote_description()->candidates(0)->count());
+
+  // The third answer has a different ufrag and different address.
+  sdp.clear();
+  ModifyIceUfragPwdLines(answer.get(), "0123456789012333",
+                         "abcdefghijklmnopqrstuvwx", &sdp);
+  SessionDescriptionInterface* answer3 =
+      CreateSessionDescription(JsepSessionDescription::kPrAnswer, sdp, NULL);
+  candidate1.set_address(rtc::SocketAddress("1.1.1.1", 7000));
+  JsepIceCandidate ice_candidate3(kMediaContentName0, kMediaContentIndex0,
+                                  candidate1);
+  EXPECT_TRUE(answer3->AddCandidate(&ice_candidate3));
+  SetRemoteDescriptionWithoutError(answer3);
+  EXPECT_EQ(1, session_->remote_description()->candidates(0)->count());
+
+  // The fourth answer has no candidate but a different ufrag/pwd.
+  sdp.clear();
+  ModifyIceUfragPwdLines(answer.get(), "0123456789012444",
+                         "abcdefghijklmnopqrstuvyz", &sdp);
+  SessionDescriptionInterface* answer4 =
+      CreateSessionDescription(JsepSessionDescription::kPrAnswer, sdp, NULL);
+  SetRemoteDescriptionWithoutError(answer4);
+  EXPECT_EQ(0, session_->remote_description()->candidates(0)->count());
+}
+
+// Test that candidates sent to the "video" transport do not get pushed down to
+// the "audio" transport channel when bundling.
+TEST_F(WebRtcSessionTest, TestIgnoreCandidatesForUnusedTransportWhenBundling) {
+  AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
+
+  InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyBalanced);
+  SendAudioVideoStream1();
+
+  PeerConnectionInterface::RTCOfferAnswerOptions options;
+  options.use_rtp_mux = true;
+
+  SessionDescriptionInterface* offer = CreateRemoteOffer();
+  SetRemoteDescriptionWithoutError(offer);
+
+  SessionDescriptionInterface* answer = CreateAnswer(NULL);
+  SetLocalDescriptionWithoutError(answer);
+
+  EXPECT_EQ(session_->voice_rtp_transport_channel(),
+            session_->video_rtp_transport_channel());
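+  // With BUNDLE negotiated, audio and video share one transport channel.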
+
+  cricket::BaseChannel* voice_channel = session_->voice_channel();
+  ASSERT(voice_channel != NULL);
+
+  // Checks whether one of the transport channels has a connection using the
+  // given remote port.
+  auto connection_with_remote_port = [this, voice_channel](int port) {
+    SessionStats stats;
+    session_->GetChannelTransportStats(voice_channel, &stats);
+    for (auto& kv : stats.transport_stats) {
+      for (auto& chan_stat : kv.second.channel_stats) {
+        for (auto& conn_info : chan_stat.connection_infos) {
+          if (conn_info.remote_candidate.address().port() == port) {
+            return true;
+          }
+        }
+      }
+    }
+    return false;
+  };
+
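+  // Initially there are no connections on any of the candidate ports.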
+  EXPECT_FALSE(connection_with_remote_port(5000));
+  EXPECT_FALSE(connection_with_remote_port(5001));
+  EXPECT_FALSE(connection_with_remote_port(6000));
+
+  // The way the *_WAIT checks work is that they only wait if the condition
+  // fails, which does not help in the case where state is not changing. This
+  // is problematic in this test since we want to verify that adding a video
+  // candidate does _not_ change state. So we interleave candidates and assume
+  // that messages are executed in the order they were posted.
+
+  // First audio candidate.
+  cricket::Candidate candidate0;
+  candidate0.set_address(rtc::SocketAddress("1.1.1.1", 5000));
+  candidate0.set_component(1);
+  candidate0.set_protocol("udp");
+  JsepIceCandidate ice_candidate0(kMediaContentName0, kMediaContentIndex0,
+                                  candidate0);
+  EXPECT_TRUE(session_->ProcessIceMessage(&ice_candidate0));
+
+  // Video candidate.
+  cricket::Candidate candidate1;
+  candidate1.set_address(rtc::SocketAddress("1.1.1.1", 6000));
+  candidate1.set_component(1);
+  candidate1.set_protocol("udp");
+  JsepIceCandidate ice_candidate1(kMediaContentName1, kMediaContentIndex1,
+                                  candidate1);
+  EXPECT_TRUE(session_->ProcessIceMessage(&ice_candidate1));
+
+  // Second audio candidate.
+  cricket::Candidate candidate2;
+  candidate2.set_address(rtc::SocketAddress("1.1.1.1", 5001));
+  candidate2.set_component(1);
+  candidate2.set_protocol("udp");
+  JsepIceCandidate ice_candidate2(kMediaContentName0, kMediaContentIndex0,
+                                  candidate2);
+  EXPECT_TRUE(session_->ProcessIceMessage(&ice_candidate2));
+
+  EXPECT_TRUE_WAIT(connection_with_remote_port(5000), 1000);
+  EXPECT_TRUE_WAIT(connection_with_remote_port(5001), 1000);
+
+  // No need here for a _WAIT check since we are checking that state hasn't
+  // changed: if this is false we would be waiting for nothing, and if it is
+  // true then no messages will be processed anyway.
+  EXPECT_FALSE(connection_with_remote_port(6000));
+}
+
+// kBundlePolicyBalanced BUNDLE policy and answer contains BUNDLE.
+TEST_F(WebRtcSessionTest, TestBalancedBundleInAnswer) {
+  InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyBalanced);
+  SendAudioVideoStream1();
+
+  PeerConnectionInterface::RTCOfferAnswerOptions options;
+  options.use_rtp_mux = true;
+
+  SessionDescriptionInterface* offer = CreateOffer(options);
+  SetLocalDescriptionWithoutError(offer);
+
+  EXPECT_NE(session_->voice_rtp_transport_channel(),
+            session_->video_rtp_transport_channel());
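+  // With the balanced policy, the channels stay separate until BUNDLE is
+  // negotiated in the answer.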
+
+  SendAudioVideoStream2();
+  SessionDescriptionInterface* answer =
+      CreateRemoteAnswer(session_->local_description());
+  SetRemoteDescriptionWithoutError(answer);
+
+  EXPECT_EQ(session_->voice_rtp_transport_channel(),
+            session_->video_rtp_transport_channel());
+}
+
+// kBundlePolicyBalanced BUNDLE policy but no BUNDLE in the answer.
+TEST_F(WebRtcSessionTest, TestBalancedNoBundleInAnswer) {
+  InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyBalanced);
+  SendAudioVideoStream1();
+
+  PeerConnectionInterface::RTCOfferAnswerOptions options;
+  options.use_rtp_mux = true;
+
+  SessionDescriptionInterface* offer = CreateOffer(options);
+  SetLocalDescriptionWithoutError(offer);
+
+  EXPECT_NE(session_->voice_rtp_transport_channel(),
+            session_->video_rtp_transport_channel());
+
+  SendAudioVideoStream2();
+
+  // Remove BUNDLE from the answer.
+  rtc::scoped_ptr<SessionDescriptionInterface> answer(
+      CreateRemoteAnswer(session_->local_description()));
+  cricket::SessionDescription* answer_copy = answer->description()->Copy();
+  answer_copy->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
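+  // Rebuild a JsepSessionDescription around the modified copy; Initialize
+  // takes ownership of |answer_copy| ("1"/"1" are the session id and
+  // version).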
+  JsepSessionDescription* modified_answer =
+      new JsepSessionDescription(JsepSessionDescription::kAnswer);
+  modified_answer->Initialize(answer_copy, "1", "1");
+  SetRemoteDescriptionWithoutError(modified_answer);
+
+  EXPECT_NE(session_->voice_rtp_transport_channel(),
+            session_->video_rtp_transport_channel());
+}
+
+// kBundlePolicyMaxBundle policy with BUNDLE in the answer.
+TEST_F(WebRtcSessionTest, TestMaxBundleBundleInAnswer) {
+  InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyMaxBundle);
+  SendAudioVideoStream1();
+
+  PeerConnectionInterface::RTCOfferAnswerOptions options;
+  options.use_rtp_mux = true;
+
+  SessionDescriptionInterface* offer = CreateOffer(options);
+  SetLocalDescriptionWithoutError(offer);
+
+  EXPECT_EQ(session_->voice_rtp_transport_channel(),
+            session_->video_rtp_transport_channel());
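+  // With kBundlePolicyMaxBundle, the channels are bundled as soon as the
+  // local offer is applied.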
+
+  SendAudioVideoStream2();
+  SessionDescriptionInterface* answer =
+      CreateRemoteAnswer(session_->local_description());
+  SetRemoteDescriptionWithoutError(answer);
+
+  EXPECT_EQ(session_->voice_rtp_transport_channel(),
+            session_->video_rtp_transport_channel());
+}
+
+// kBundlePolicyMaxBundle policy with BUNDLE in the answer, but no
+// audio content in the answer.
+TEST_F(WebRtcSessionTest, TestMaxBundleRejectAudio) {
+  InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyMaxBundle);
+  SendAudioVideoStream1();
+
+  PeerConnectionInterface::RTCOfferAnswerOptions options;
+  options.use_rtp_mux = true;
+
+  SessionDescriptionInterface* offer = CreateOffer(options);
+  SetLocalDescriptionWithoutError(offer);
+
+  EXPECT_EQ(session_->voice_rtp_transport_channel(),
+            session_->video_rtp_transport_channel());
+
+  SendAudioVideoStream2();
+  cricket::MediaSessionOptions recv_options;
+  recv_options.recv_audio = false;
+  recv_options.recv_video = true;
+  SessionDescriptionInterface* answer =
+      CreateRemoteAnswer(session_->local_description(), recv_options);
+  SetRemoteDescriptionWithoutError(answer);
+
+  EXPECT_TRUE(nullptr == session_->voice_channel());
+  EXPECT_TRUE(nullptr != session_->video_rtp_transport_channel());
+
+  session_->Close();
+  EXPECT_TRUE(nullptr == session_->voice_rtp_transport_channel());
+  EXPECT_TRUE(nullptr == session_->voice_rtcp_transport_channel());
+  EXPECT_TRUE(nullptr == session_->video_rtp_transport_channel());
+  EXPECT_TRUE(nullptr == session_->video_rtcp_transport_channel());
+}
+
+// kBundlePolicyMaxBundle policy but no BUNDLE in the answer.
+TEST_F(WebRtcSessionTest, TestMaxBundleNoBundleInAnswer) {
+  InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyMaxBundle);
+  SendAudioVideoStream1();
+
+  PeerConnectionInterface::RTCOfferAnswerOptions options;
+  options.use_rtp_mux = true;
+
+  SessionDescriptionInterface* offer = CreateOffer(options);
+  SetLocalDescriptionWithoutError(offer);
+
+  EXPECT_EQ(session_->voice_rtp_transport_channel(),
+            session_->video_rtp_transport_channel());
+
+  SendAudioVideoStream2();
+
+  // Remove BUNDLE from the answer.
+  rtc::scoped_ptr<SessionDescriptionInterface> answer(
+      CreateRemoteAnswer(session_->local_description()));
+  cricket::SessionDescription* answer_copy = answer->description()->Copy();
+  answer_copy->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+  JsepSessionDescription* modified_answer =
+      new JsepSessionDescription(JsepSessionDescription::kAnswer);
+  modified_answer->Initialize(answer_copy, "1", "1");
+  SetRemoteDescriptionWithoutError(modified_answer);
+
+  EXPECT_EQ(session_->voice_rtp_transport_channel(),
+            session_->video_rtp_transport_channel());
+}
+
+// kBundlePolicyMaxBundle policy with BUNDLE in the remote offer.
+TEST_F(WebRtcSessionTest, TestMaxBundleBundleInRemoteOffer) {
+  InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyMaxBundle);
+  SendAudioVideoStream1();
+
+  SessionDescriptionInterface* offer = CreateRemoteOffer();
+  SetRemoteDescriptionWithoutError(offer);
+
+  EXPECT_EQ(session_->voice_rtp_transport_channel(),
+            session_->video_rtp_transport_channel());
+
+  SendAudioVideoStream2();
+  SessionDescriptionInterface* answer = CreateAnswer(nullptr);
+  SetLocalDescriptionWithoutError(answer);
+
+  EXPECT_EQ(session_->voice_rtp_transport_channel(),
+            session_->video_rtp_transport_channel());
+}
+
+// kBundlePolicyMaxBundle policy but no BUNDLE in the remote offer.
+TEST_F(WebRtcSessionTest, TestMaxBundleNoBundleInRemoteOffer) {
+  InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyMaxBundle);
+  SendAudioVideoStream1();
+
+  // Remove BUNDLE from the offer.
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateRemoteOffer());
+  cricket::SessionDescription* offer_copy = offer->description()->Copy();
+  offer_copy->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+  JsepSessionDescription* modified_offer =
+      new JsepSessionDescription(JsepSessionDescription::kOffer);
+  modified_offer->Initialize(offer_copy, "1", "1");
+
+  // Expect an error when applying the remote description.
+  SetRemoteDescriptionExpectError(JsepSessionDescription::kOffer,
+                                  kCreateChannelFailed, modified_offer);
+}
+
+// kBundlePolicyMaxCompat bundle policy and answer contains BUNDLE.
+TEST_F(WebRtcSessionTest, TestMaxCompatBundleInAnswer) {
+  InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyMaxCompat);
+  SendAudioVideoStream1();
+
+  PeerConnectionInterface::RTCOfferAnswerOptions options;
+  options.use_rtp_mux = true;
+
+  SessionDescriptionInterface* offer = CreateOffer(options);
+  SetLocalDescriptionWithoutError(offer);
+
+  EXPECT_NE(session_->voice_rtp_transport_channel(),
+            session_->video_rtp_transport_channel());
+
+  SendAudioVideoStream2();
+  SessionDescriptionInterface* answer =
+      CreateRemoteAnswer(session_->local_description());
+  SetRemoteDescriptionWithoutError(answer);
+
+  // This should lead to an audio-only call but isn't implemented
+  // correctly yet.
+  EXPECT_EQ(session_->voice_rtp_transport_channel(),
+            session_->video_rtp_transport_channel());
+}
+
+// kBundlePolicyMaxCompat BUNDLE policy but no BUNDLE in the answer.
+TEST_F(WebRtcSessionTest, TestMaxCompatNoBundleInAnswer) {
+  InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyMaxCompat);
+  SendAudioVideoStream1();
+  PeerConnectionInterface::RTCOfferAnswerOptions options;
+  options.use_rtp_mux = true;
+
+  SessionDescriptionInterface* offer = CreateOffer(options);
+  SetLocalDescriptionWithoutError(offer);
+
+  EXPECT_NE(session_->voice_rtp_transport_channel(),
+            session_->video_rtp_transport_channel());
+
+  SendAudioVideoStream2();
+
+  // Remove BUNDLE from the answer.
+  rtc::scoped_ptr<SessionDescriptionInterface> answer(
+      CreateRemoteAnswer(session_->local_description()));
+  cricket::SessionDescription* answer_copy = answer->description()->Copy();
+  answer_copy->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+  JsepSessionDescription* modified_answer =
+      new JsepSessionDescription(JsepSessionDescription::kAnswer);
+  modified_answer->Initialize(answer_copy, "1", "1");
+  SetRemoteDescriptionWithoutError(modified_answer);
+
+  EXPECT_NE(session_->voice_rtp_transport_channel(),
+            session_->video_rtp_transport_channel());
+}
+
+// kBundlePolicyMaxBundle policy when SetRemoteDescription is called first.
+TEST_F(WebRtcSessionTest, TestMaxBundleWithSetRemoteDescriptionFirst) {
+  InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyMaxBundle);
+  SendAudioVideoStream1();
+
+  PeerConnectionInterface::RTCOfferAnswerOptions options;
+  options.use_rtp_mux = true;
+
+  SessionDescriptionInterface* offer = CreateOffer(options);
+  SetRemoteDescriptionWithoutError(offer);
+
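+  // With max-bundle, the transports are bundled as soon as the remote offer
+  // is applied, before any local answer exists.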
+  EXPECT_EQ(session_->voice_rtp_transport_channel(),
+            session_->video_rtp_transport_channel());
+}
+
+TEST_F(WebRtcSessionTest, TestRequireRtcpMux) {
+  InitWithRtcpMuxPolicy(PeerConnectionInterface::kRtcpMuxPolicyRequire);
+  SendAudioVideoStream1();
+
+  PeerConnectionInterface::RTCOfferAnswerOptions options;
+  SessionDescriptionInterface* offer = CreateOffer(options);
+  SetLocalDescriptionWithoutError(offer);
+
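+  // With rtcp-mux required, RTCP transport channels are never created.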
+  EXPECT_TRUE(session_->voice_rtcp_transport_channel() == NULL);
+  EXPECT_TRUE(session_->video_rtcp_transport_channel() == NULL);
+
+  SendAudioVideoStream2();
+  SessionDescriptionInterface* answer =
+      CreateRemoteAnswer(session_->local_description());
+  SetRemoteDescriptionWithoutError(answer);
+
+  EXPECT_TRUE(session_->voice_rtcp_transport_channel() == NULL);
+  EXPECT_TRUE(session_->video_rtcp_transport_channel() == NULL);
+}
+
+TEST_F(WebRtcSessionTest, TestNegotiateRtcpMux) {
+  InitWithRtcpMuxPolicy(PeerConnectionInterface::kRtcpMuxPolicyNegotiate);
+  SendAudioVideoStream1();
+
+  PeerConnectionInterface::RTCOfferAnswerOptions options;
+  SessionDescriptionInterface* offer = CreateOffer(options);
+  SetLocalDescriptionWithoutError(offer);
+
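+  // With the negotiate policy, the RTCP channels exist until rtcp-mux has
+  // been negotiated in the answer.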
+  EXPECT_TRUE(session_->voice_rtcp_transport_channel() != NULL);
+  EXPECT_TRUE(session_->video_rtcp_transport_channel() != NULL);
+
+  SendAudioVideoStream2();
+  SessionDescriptionInterface* answer =
+      CreateRemoteAnswer(session_->local_description());
+  SetRemoteDescriptionWithoutError(answer);
+
+  EXPECT_TRUE(session_->voice_rtcp_transport_channel() == NULL);
+  EXPECT_TRUE(session_->video_rtcp_transport_channel() == NULL);
+}
+
+// This test verifies that SetLocalDescription and SetRemoteDescription fail
+// if BUNDLE is enabled but rtcp-mux is disabled in the m-lines.
+TEST_F(WebRtcSessionTest, TestDisabledRtcpMuxWithBundleEnabled) {
+  Init();
+  SendAudioVideoStream1();
+
+  PeerConnectionInterface::RTCOfferAnswerOptions options;
+  options.use_rtp_mux = true;
+
+  SessionDescriptionInterface* offer = CreateOffer(options);
+  std::string offer_str;
+  offer->ToString(&offer_str);
+  // Disable rtcp-mux
+  const std::string rtcp_mux = "rtcp-mux";
+  const std::string xrtcp_mux = "xrtcp-mux";
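+  // Renaming the attribute makes the parser treat rtcp-mux as absent.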
+  rtc::replace_substrs(rtcp_mux.c_str(), rtcp_mux.length(),
+                             xrtcp_mux.c_str(), xrtcp_mux.length(),
+                             &offer_str);
+  JsepSessionDescription* local_offer =
+      new JsepSessionDescription(JsepSessionDescription::kOffer);
+  EXPECT_TRUE(local_offer->Initialize(offer_str, NULL));
+  SetLocalDescriptionOfferExpectError(kBundleWithoutRtcpMux, local_offer);
+  JsepSessionDescription* remote_offer =
+      new JsepSessionDescription(JsepSessionDescription::kOffer);
+  EXPECT_TRUE(remote_offer->Initialize(offer_str, NULL));
+  SetRemoteDescriptionOfferExpectError(kBundleWithoutRtcpMux, remote_offer);
+  // Trying unmodified SDP.
+  SetLocalDescriptionWithoutError(offer);
+}
+
+TEST_F(WebRtcSessionTest, SetAudioPlayout) {
+  Init();
+  SendAudioVideoStream1();
+  CreateAndSetRemoteOfferAndLocalAnswer();
+  cricket::FakeVoiceMediaChannel* channel = media_engine_->GetVoiceChannel(0);
+  ASSERT_TRUE(channel != NULL);
+  ASSERT_EQ(1u, channel->recv_streams().size());
+  uint32_t receive_ssrc = channel->recv_streams()[0].first_ssrc();
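+  // Toggling playout is observable as the output volume flipping between
+  // 1 (enabled) and 0 (disabled).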
+  double volume;
+  EXPECT_TRUE(channel->GetOutputVolume(receive_ssrc, &volume));
+  EXPECT_EQ(1, volume);
+  session_->SetAudioPlayout(receive_ssrc, false);
+  EXPECT_TRUE(channel->GetOutputVolume(receive_ssrc, &volume));
+  EXPECT_EQ(0, volume);
+  session_->SetAudioPlayout(receive_ssrc, true);
+  EXPECT_TRUE(channel->GetOutputVolume(receive_ssrc, &volume));
+  EXPECT_EQ(1, volume);
+}
+
+TEST_F(WebRtcSessionTest, SetAudioSend) {
+  Init();
+  SendAudioVideoStream1();
+  CreateAndSetRemoteOfferAndLocalAnswer();
+  cricket::FakeVoiceMediaChannel* channel = media_engine_->GetVoiceChannel(0);
+  ASSERT_TRUE(channel != NULL);
+  ASSERT_EQ(1u, channel->send_streams().size());
+  uint32_t send_ssrc = channel->send_streams()[0].first_ssrc();
+  EXPECT_FALSE(channel->IsStreamMuted(send_ssrc));
+
+  cricket::AudioOptions options;
+  options.echo_cancellation = rtc::Optional<bool>(true);
+
+  rtc::scoped_ptr<FakeAudioRenderer> renderer(new FakeAudioRenderer());
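+  // Disabling the send stream mutes it; the audio options are not applied
+  // while the stream is muted.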
+  session_->SetAudioSend(send_ssrc, false, options, renderer.get());
+  EXPECT_TRUE(channel->IsStreamMuted(send_ssrc));
+  EXPECT_EQ(rtc::Optional<bool>(), channel->options().echo_cancellation);
+  EXPECT_TRUE(renderer->sink() != NULL);
+
+  // This will trigger SetSink(NULL) to the |renderer|.
+  session_->SetAudioSend(send_ssrc, true, options, NULL);
+  EXPECT_FALSE(channel->IsStreamMuted(send_ssrc));
+  EXPECT_EQ(rtc::Optional<bool>(true), channel->options().echo_cancellation);
+  EXPECT_TRUE(renderer->sink() == NULL);
+}
+
+TEST_F(WebRtcSessionTest, AudioRendererForLocalStream) {
+  Init();
+  SendAudioVideoStream1();
+  CreateAndSetRemoteOfferAndLocalAnswer();
+  cricket::FakeVoiceMediaChannel* channel = media_engine_->GetVoiceChannel(0);
+  ASSERT_TRUE(channel != NULL);
+  ASSERT_EQ(1u, channel->send_streams().size());
+  uint32_t send_ssrc = channel->send_streams()[0].first_ssrc();
+
+  rtc::scoped_ptr<FakeAudioRenderer> renderer(new FakeAudioRenderer());
+  cricket::AudioOptions options;
+  session_->SetAudioSend(send_ssrc, true, options, renderer.get());
+  EXPECT_TRUE(renderer->sink() != NULL);
+
+  // Delete the |renderer| and it will trigger OnClose() to the sink, and this
+  // will invalidate the |renderer_| pointer in the sink and prevent getting a
+  // SetSink(NULL) callback afterwards.
+  renderer.reset();
+
+  // This will trigger SetSink(NULL) if no OnClose() callback.
+  session_->SetAudioSend(send_ssrc, true, options, NULL);
+}
+
+TEST_F(WebRtcSessionTest, SetVideoPlayout) {
+  Init();
+  SendAudioVideoStream1();
+  CreateAndSetRemoteOfferAndLocalAnswer();
+  cricket::FakeVideoMediaChannel* channel = media_engine_->GetVideoChannel(0);
+  ASSERT_TRUE(channel != NULL);
+  ASSERT_LT(0u, channel->sinks().size());
+  EXPECT_TRUE(channel->sinks().begin()->second == NULL);
+  ASSERT_EQ(1u, channel->recv_streams().size());
+  uint32_t receive_ssrc = channel->recv_streams()[0].first_ssrc();
+  cricket::FakeVideoRenderer renderer;
+  session_->SetVideoPlayout(receive_ssrc, true, &renderer);
+  EXPECT_TRUE(channel->sinks().begin()->second == &renderer);
+  session_->SetVideoPlayout(receive_ssrc, false, &renderer);
+  EXPECT_TRUE(channel->sinks().begin()->second == NULL);
+}
+
+TEST_F(WebRtcSessionTest, SetVideoSend) {
+  Init();
+  SendAudioVideoStream1();
+  CreateAndSetRemoteOfferAndLocalAnswer();
+  cricket::FakeVideoMediaChannel* channel = media_engine_->GetVideoChannel(0);
+  ASSERT_TRUE(channel != NULL);
+  ASSERT_EQ(1u, channel->send_streams().size());
+  uint32_t send_ssrc = channel->send_streams()[0].first_ssrc();
+  EXPECT_FALSE(channel->IsStreamMuted(send_ssrc));
+  cricket::VideoOptions* options = NULL;
+  session_->SetVideoSend(send_ssrc, false, options);
+  EXPECT_TRUE(channel->IsStreamMuted(send_ssrc));
+  session_->SetVideoSend(send_ssrc, true, options);
+  EXPECT_FALSE(channel->IsStreamMuted(send_ssrc));
+}
+
+TEST_F(WebRtcSessionTest, CanNotInsertDtmf) {
+  TestCanInsertDtmf(false);
+}
+
+TEST_F(WebRtcSessionTest, CanInsertDtmf) {
+  TestCanInsertDtmf(true);
+}
+
+TEST_F(WebRtcSessionTest, InsertDtmf) {
+  // Setup
+  Init();
+  SendAudioVideoStream1();
+  CreateAndSetRemoteOfferAndLocalAnswer();
+  FakeVoiceMediaChannel* channel = media_engine_->GetVoiceChannel(0);
+  EXPECT_EQ(0U, channel->dtmf_info_queue().size());
+
+  // Insert DTMF
+  const int expected_duration = 90;
+  session_->InsertDtmf(kAudioTrack1, 0, expected_duration);
+  session_->InsertDtmf(kAudioTrack1, 1, expected_duration);
+  session_->InsertDtmf(kAudioTrack1, 2, expected_duration);
+
+  // Verify
+  ASSERT_EQ(3U, channel->dtmf_info_queue().size());
+  const uint32_t send_ssrc = channel->send_streams()[0].first_ssrc();
+  EXPECT_TRUE(CompareDtmfInfo(channel->dtmf_info_queue()[0], send_ssrc, 0,
+                              expected_duration));
+  EXPECT_TRUE(CompareDtmfInfo(channel->dtmf_info_queue()[1], send_ssrc, 1,
+                              expected_duration));
+  EXPECT_TRUE(CompareDtmfInfo(channel->dtmf_info_queue()[2], send_ssrc, 2,
+                              expected_duration));
+}
+
+// This test verifies the |initial_offerer| flag when the session initiates
+// the call.
+TEST_F(WebRtcSessionTest, TestInitiatorFlagAsOriginator) {
+  Init();
+  EXPECT_FALSE(session_->initial_offerer());
+  SessionDescriptionInterface* offer = CreateOffer();
+  SessionDescriptionInterface* answer = CreateRemoteAnswer(offer);
+  SetLocalDescriptionWithoutError(offer);
+  EXPECT_TRUE(session_->initial_offerer());
+  SetRemoteDescriptionWithoutError(answer);
+  EXPECT_TRUE(session_->initial_offerer());
+}
+
+// This test verifies the |initial_offerer| flag when the session receives
+// the call.
+TEST_F(WebRtcSessionTest, TestInitiatorFlagAsReceiver) {
+  Init();
+  EXPECT_FALSE(session_->initial_offerer());
+  SessionDescriptionInterface* offer = CreateRemoteOffer();
+  SetRemoteDescriptionWithoutError(offer);
+  SessionDescriptionInterface* answer = CreateAnswer(NULL);
+
+  EXPECT_FALSE(session_->initial_offerer());
+  SetLocalDescriptionWithoutError(answer);
+  EXPECT_FALSE(session_->initial_offerer());
+}
+
+// Verifying that the local offer and remote answer have matching m-lines as
+// per RFC 3264.
+TEST_F(WebRtcSessionTest, TestIncorrectMLinesInRemoteAnswer) {
+  Init();
+  SendAudioVideoStream1();
+  SessionDescriptionInterface* offer = CreateOffer();
+  SetLocalDescriptionWithoutError(offer);
+  rtc::scoped_ptr<SessionDescriptionInterface> answer(
+      CreateRemoteAnswer(session_->local_description()));
+
+  cricket::SessionDescription* answer_copy = answer->description()->Copy();
+  answer_copy->RemoveContentByName("video");
+  JsepSessionDescription* modified_answer =
+      new JsepSessionDescription(JsepSessionDescription::kAnswer);
+
+  EXPECT_TRUE(modified_answer->Initialize(answer_copy,
+                                          answer->session_id(),
+                                          answer->session_version()));
+  SetRemoteDescriptionAnswerExpectError(kMlineMismatch, modified_answer);
+
+  // Different content names.
+  std::string sdp;
+  EXPECT_TRUE(answer->ToString(&sdp));
+  const std::string kAudioMid = "a=mid:audio";
+  const std::string kAudioMidReplaceStr = "a=mid:audio_content_name";
+  rtc::replace_substrs(kAudioMid.c_str(), kAudioMid.length(),
+                             kAudioMidReplaceStr.c_str(),
+                             kAudioMidReplaceStr.length(),
+                             &sdp);
+  SessionDescriptionInterface* modified_answer1 =
+      CreateSessionDescription(JsepSessionDescription::kAnswer, sdp, NULL);
+  SetRemoteDescriptionAnswerExpectError(kMlineMismatch, modified_answer1);
+
+  // Different media types.
+  EXPECT_TRUE(answer->ToString(&sdp));
+  const std::string kAudioMline = "m=audio";
+  const std::string kAudioMlineReplaceStr = "m=video";
+  rtc::replace_substrs(kAudioMline.c_str(), kAudioMline.length(),
+                             kAudioMlineReplaceStr.c_str(),
+                             kAudioMlineReplaceStr.length(),
+                             &sdp);
+  SessionDescriptionInterface* modified_answer2 =
+      CreateSessionDescription(JsepSessionDescription::kAnswer, sdp, NULL);
+  SetRemoteDescriptionAnswerExpectError(kMlineMismatch, modified_answer2);
+
+  SetRemoteDescriptionWithoutError(answer.release());
+}
+
+// Verifying that the remote offer and local answer have matching m-lines as
+// per RFC 3264.
+TEST_F(WebRtcSessionTest, TestIncorrectMLinesInLocalAnswer) {
+  Init();
+  SendAudioVideoStream1();
+  SessionDescriptionInterface* offer = CreateRemoteOffer();
+  SetRemoteDescriptionWithoutError(offer);
+  SessionDescriptionInterface* answer = CreateAnswer(NULL);
+
+  cricket::SessionDescription* answer_copy = answer->description()->Copy();
+  answer_copy->RemoveContentByName("video");
+  JsepSessionDescription* modified_answer =
+      new JsepSessionDescription(JsepSessionDescription::kAnswer);
+
+  EXPECT_TRUE(modified_answer->Initialize(answer_copy,
+                                          answer->session_id(),
+                                          answer->session_version()));
+  SetLocalDescriptionAnswerExpectError(kMlineMismatch, modified_answer);
+  SetLocalDescriptionWithoutError(answer);
+}
+
+// This test verifies that WebRtcSession does not start candidate allocation
+// before SetLocalDescription is called.
+TEST_F(WebRtcSessionTest, TestIceStartAfterSetLocalDescriptionOnly) {
+  Init();
+  SendAudioVideoStream1();
+  SessionDescriptionInterface* offer = CreateRemoteOffer();
+  cricket::Candidate candidate;
+  candidate.set_component(1);
+  JsepIceCandidate ice_candidate(kMediaContentName0, kMediaContentIndex0,
+                                 candidate);
+  EXPECT_TRUE(offer->AddCandidate(&ice_candidate));
+  cricket::Candidate candidate1;
+  candidate1.set_component(1);
+  JsepIceCandidate ice_candidate1(kMediaContentName1, kMediaContentIndex1,
+                                  candidate1);
+  EXPECT_TRUE(offer->AddCandidate(&ice_candidate1));
+  SetRemoteDescriptionWithoutError(offer);
+  ASSERT_TRUE(session_->voice_rtp_transport_channel() != NULL);
+  ASSERT_TRUE(session_->video_rtp_transport_channel() != NULL);
+
+  // Pump for 1 second and verify that no candidates are generated.
+  rtc::Thread::Current()->ProcessMessages(1000);
+  EXPECT_TRUE(observer_.mline_0_candidates_.empty());
+  EXPECT_TRUE(observer_.mline_1_candidates_.empty());
+
+  SessionDescriptionInterface* answer = CreateAnswer(NULL);
+  SetLocalDescriptionWithoutError(answer);
+  EXPECT_TRUE_WAIT(observer_.oncandidatesready_, kIceCandidatesTimeout);
+}
+
+// This test verifies that the crypto parameter is updated in the local
+// session description as per the security policy set in
+// MediaSessionDescriptionFactory.
+TEST_F(WebRtcSessionTest, TestCryptoAfterSetLocalDescription) {
+  Init();
+  SendAudioVideoStream1();
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+
+  // Making sure SetLocalDescription correctly sets the crypto value in the
+  // SessionDescription object after de-serialization of the SDP string. The
+  // value will be set as per MediaSessionDescriptionFactory.
+  std::string offer_str;
+  offer->ToString(&offer_str);
+  SessionDescriptionInterface* jsep_offer_str =
+      CreateSessionDescription(JsepSessionDescription::kOffer, offer_str, NULL);
+  SetLocalDescriptionWithoutError(jsep_offer_str);
+  EXPECT_TRUE(session_->voice_channel()->secure_required());
+  EXPECT_TRUE(session_->video_channel()->secure_required());
+}
+
+// This test verifies the crypto parameter when security is disabled.
+TEST_F(WebRtcSessionTest, TestCryptoAfterSetLocalDescriptionWithDisabled) {
+  options_.disable_encryption = true;
+  Init();
+  SendAudioVideoStream1();
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+
+  // Making sure SetLocalDescription correctly sets the crypto value in the
+  // SessionDescription object after de-serialization of the SDP string. The
+  // value will be set as per MediaSessionDescriptionFactory.
+  std::string offer_str;
+  offer->ToString(&offer_str);
+  SessionDescriptionInterface* jsep_offer_str =
+      CreateSessionDescription(JsepSessionDescription::kOffer, offer_str, NULL);
+  SetLocalDescriptionWithoutError(jsep_offer_str);
+  EXPECT_FALSE(session_->voice_channel()->secure_required());
+  EXPECT_FALSE(session_->video_channel()->secure_required());
+}
+
+// This test verifies that an answer contains a new ufrag and password if an
+// offer with a new ufrag and password is received.
+TEST_F(WebRtcSessionTest, TestCreateAnswerWithNewUfragAndPassword) {
+  Init();
+  cricket::MediaSessionOptions options;
+  options.recv_video = true;
+  rtc::scoped_ptr<JsepSessionDescription> offer(
+      CreateRemoteOffer(options));
+  SetRemoteDescriptionWithoutError(offer.release());
+
+  SendAudioVideoStream1();
+  rtc::scoped_ptr<SessionDescriptionInterface> answer(
+      CreateAnswer(NULL));
+  SetLocalDescriptionWithoutError(answer.release());
+
+  // Receive an offer with new ufrag and password.
+  options.audio_transport_options.ice_restart = true;
+  options.video_transport_options.ice_restart = true;
+  options.data_transport_options.ice_restart = true;
+  rtc::scoped_ptr<JsepSessionDescription> updated_offer1(
+      CreateRemoteOffer(options, session_->remote_description()));
+  SetRemoteDescriptionWithoutError(updated_offer1.release());
+
+  rtc::scoped_ptr<SessionDescriptionInterface> updated_answer1(
+      CreateAnswer(NULL));
+
+  CompareIceUfragAndPassword(updated_answer1->description(),
+                             session_->local_description()->description(),
+                             false);
+
+  SetLocalDescriptionWithoutError(updated_answer1.release());
+}
+
+// This test verifies that an answer contains the old ufrag and password if an
+// offer with the old ufrag and password is received.
+TEST_F(WebRtcSessionTest, TestCreateAnswerWithOldUfragAndPassword) {
+  Init();
+  cricket::MediaSessionOptions options;
+  options.recv_video = true;
+  rtc::scoped_ptr<JsepSessionDescription> offer(
+      CreateRemoteOffer(options));
+  SetRemoteDescriptionWithoutError(offer.release());
+
+  SendAudioVideoStream1();
+  rtc::scoped_ptr<SessionDescriptionInterface> answer(
+      CreateAnswer(NULL));
+  SetLocalDescriptionWithoutError(answer.release());
+
+  // Receive an offer without changed ufrag or password.
+  options.audio_transport_options.ice_restart = false;
+  options.video_transport_options.ice_restart = false;
+  options.data_transport_options.ice_restart = false;
+  rtc::scoped_ptr<JsepSessionDescription> updated_offer2(
+      CreateRemoteOffer(options, session_->remote_description()));
+  SetRemoteDescriptionWithoutError(updated_offer2.release());
+
+  rtc::scoped_ptr<SessionDescriptionInterface> updated_answer2(
+      CreateAnswer(NULL));
+
+  CompareIceUfragAndPassword(updated_answer2->description(),
+                             session_->local_description()->description(),
+                             true);
+
+  SetLocalDescriptionWithoutError(updated_answer2.release());
+}
+
+TEST_F(WebRtcSessionTest, TestSessionContentError) {
+  Init();
+  SendAudioVideoStream1();
+  SessionDescriptionInterface* offer = CreateOffer();
+  const std::string session_id_orig = offer->session_id();
+  const std::string session_version_orig = offer->session_version();
+  SetLocalDescriptionWithoutError(offer);
+
+  video_channel_ = media_engine_->GetVideoChannel(0);
+  video_channel_->set_fail_set_send_codecs(true);
+
+  SessionDescriptionInterface* answer =
+      CreateRemoteAnswer(session_->local_description());
+  SetRemoteDescriptionAnswerExpectError("ERROR_CONTENT", answer);
+
+  // Test that after a content error, setting any description will
+  // result in an error.
+  video_channel_->set_fail_set_send_codecs(false);
+  answer = CreateRemoteAnswer(session_->local_description());
+  SetRemoteDescriptionExpectError("", "ERROR_CONTENT", answer);
+  offer = CreateRemoteOffer();
+  SetLocalDescriptionExpectError("", "ERROR_CONTENT", offer);
+}
+
+// Runs the loopback call test with BUNDLE and STUN disabled.
+TEST_F(WebRtcSessionTest, TestIceStatesBasic) {
+  // Let's try with only UDP ports.
+  allocator_->set_flags(cricket::PORTALLOCATOR_DISABLE_TCP |
+                        cricket::PORTALLOCATOR_DISABLE_STUN |
+                        cricket::PORTALLOCATOR_DISABLE_RELAY);
+  TestLoopbackCall();
+}
+
+TEST_F(WebRtcSessionTest, TestIceStatesBasicIPv6) {
+  allocator_->set_flags(cricket::PORTALLOCATOR_DISABLE_TCP |
+                        cricket::PORTALLOCATOR_DISABLE_STUN |
+                        cricket::PORTALLOCATOR_ENABLE_IPV6 |
+                        cricket::PORTALLOCATOR_DISABLE_RELAY);
+
+  // The best connection is IPv6 since it has a higher network preference.
+  LoopbackNetworkConfiguration config;
+  config.test_ipv6_network_ = true;
+  config.best_connection_after_initial_ice_converged_ =
+      LoopbackNetworkConfiguration::ExpectedBestConnection(0, 1);
+
+  TestLoopbackCall(config);
+}
+
+// Runs the loopback call test with BUNDLE and STUN enabled.
+TEST_F(WebRtcSessionTest, TestIceStatesBundle) {
+  allocator_->set_flags(cricket::PORTALLOCATOR_DISABLE_TCP |
+                        cricket::PORTALLOCATOR_DISABLE_RELAY);
+  TestLoopbackCall();
+}
+
+TEST_F(WebRtcSessionTest, TestRtpDataChannel) {
+  constraints_.reset(new FakeConstraints());
+  constraints_->AddOptional(
+      webrtc::MediaConstraintsInterface::kEnableRtpDataChannels, true);
+  Init();
+
+  SetLocalDescriptionWithDataChannel();
+  EXPECT_EQ(cricket::DCT_RTP, data_engine_->last_channel_type());
+}
+
+TEST_P(WebRtcSessionTest, TestRtpDataChannelConstraintTakesPrecedence) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+
+  constraints_.reset(new FakeConstraints());
+  constraints_->AddOptional(
+      webrtc::MediaConstraintsInterface::kEnableRtpDataChannels, true);
+  options_.disable_sctp_data_channels = false;
+
+  InitWithDtls(GetParam());
+
+  SetLocalDescriptionWithDataChannel();
+  EXPECT_EQ(cricket::DCT_RTP, data_engine_->last_channel_type());
+}
+
+TEST_P(WebRtcSessionTest, TestCreateOfferWithSctpEnabledWithoutStreams) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+
+  InitWithDtls(GetParam());
+
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+  EXPECT_TRUE(offer->description()->GetContentByName("data") == NULL);
+  EXPECT_TRUE(offer->description()->GetTransportInfoByName("data") == NULL);
+}
+
+TEST_P(WebRtcSessionTest, TestCreateAnswerWithSctpInOfferAndNoStreams) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  SetFactoryDtlsSrtp();
+  InitWithDtls(GetParam());
+
+  // Create remote offer with SCTP.
+  cricket::MediaSessionOptions options;
+  options.data_channel_type = cricket::DCT_SCTP;
+  JsepSessionDescription* offer =
+      CreateRemoteOffer(options, cricket::SEC_DISABLED);
+  SetRemoteDescriptionWithoutError(offer);
+
+  // Verifies the answer contains SCTP.
+  rtc::scoped_ptr<SessionDescriptionInterface> answer(CreateAnswer(NULL));
+  EXPECT_TRUE(answer != NULL);
+  EXPECT_TRUE(answer->description()->GetContentByName("data") != NULL);
+  EXPECT_TRUE(answer->description()->GetTransportInfoByName("data") != NULL);
+}
+
+TEST_P(WebRtcSessionTest, TestSctpDataChannelWithoutDtls) {
+  constraints_.reset(new FakeConstraints());
+  constraints_->AddOptional(
+      webrtc::MediaConstraintsInterface::kEnableDtlsSrtp, false);
+  InitWithDtls(GetParam());
+
+  SetLocalDescriptionWithDataChannel();
+  EXPECT_EQ(cricket::DCT_NONE, data_engine_->last_channel_type());
+}
+
+TEST_P(WebRtcSessionTest, TestSctpDataChannelWithDtls) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+
+  InitWithDtls(GetParam());
+
+  SetLocalDescriptionWithDataChannel();
+  EXPECT_EQ(cricket::DCT_SCTP, data_engine_->last_channel_type());
+}
+
+TEST_P(WebRtcSessionTest, TestDisableSctpDataChannels) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  options_.disable_sctp_data_channels = true;
+  InitWithDtls(GetParam());
+
+  SetLocalDescriptionWithDataChannel();
+  EXPECT_EQ(cricket::DCT_NONE, data_engine_->last_channel_type());
+}
+
+TEST_P(WebRtcSessionTest, TestSctpDataChannelSendPortParsing) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  const int new_send_port = 9998;
+  const int new_recv_port = 7775;
+
+  InitWithDtls(GetParam());
+  SetFactoryDtlsSrtp();
+
+  // By default, don't actually add the codecs to desc_factory_; they don't
+  // actually get serialized for SCTP in BuildMediaDescription().  Instead,
+  // let the session description get parsed.  That'll get the proper codecs
+  // into the stream.
+  cricket::MediaSessionOptions options;
+  JsepSessionDescription* offer = CreateRemoteOfferWithSctpPort(
+      "stream1", new_send_port, options);
+
+  // SetRemoteDescription will take ownership of the offer.
+  SetRemoteDescriptionWithoutError(offer);
+
+  SessionDescriptionInterface* answer = ChangeSDPSctpPort(
+      new_recv_port, CreateAnswer(NULL));
+  ASSERT_TRUE(answer != NULL);
+
+  // Now set the local description, which'll take ownership of the answer.
+  SetLocalDescriptionWithoutError(answer);
+
+  // TEST PLAN: Set the port number to something new, set it in the SDP,
+  // and pass it all the way down.
+  EXPECT_EQ(cricket::DCT_SCTP, data_engine_->last_channel_type());
+  CreateDataChannel();
+
+  cricket::FakeDataMediaChannel* ch = data_engine_->GetChannel(0);
+  int portnum = -1;
+  ASSERT_TRUE(ch != NULL);
+  ASSERT_EQ(1UL, ch->send_codecs().size());
+  EXPECT_EQ(cricket::kGoogleSctpDataCodecId, ch->send_codecs()[0].id);
+  EXPECT_EQ(0, strcmp(cricket::kGoogleSctpDataCodecName,
+                      ch->send_codecs()[0].name.c_str()));
+  EXPECT_TRUE(ch->send_codecs()[0].GetParam(cricket::kCodecParamPort,
+                                            &portnum));
+  EXPECT_EQ(new_send_port, portnum);
+
+  ASSERT_EQ(1UL, ch->recv_codecs().size());
+  EXPECT_EQ(cricket::kGoogleSctpDataCodecId, ch->recv_codecs()[0].id);
+  EXPECT_EQ(0, strcmp(cricket::kGoogleSctpDataCodecName,
+                      ch->recv_codecs()[0].name.c_str()));
+  EXPECT_TRUE(ch->recv_codecs()[0].GetParam(cricket::kCodecParamPort,
+                                            &portnum));
+  EXPECT_EQ(new_recv_port, portnum);
+}
+
+// Verifies that when a session's DataChannel receives an OPEN message,
+// WebRtcSession signals the DataChannel creation request with the expected
+// config.
+TEST_P(WebRtcSessionTest, TestSctpDataChannelOpenMessage) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+
+  InitWithDtls(GetParam());
+
+  SetLocalDescriptionWithDataChannel();
+  EXPECT_EQ(cricket::DCT_SCTP, data_engine_->last_channel_type());
+
+  webrtc::DataChannelInit config;
+  config.id = 1;
+  rtc::Buffer payload;
+  webrtc::WriteDataChannelOpenMessage("a", config, &payload);
+  cricket::ReceiveDataParams params;
+  params.ssrc = config.id;
+  params.type = cricket::DMT_CONTROL;
+
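+  // Feed the OPEN message into the session as if it arrived from the wire.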
+  cricket::DataChannel* data_channel = session_->data_channel();
+  data_channel->SignalDataReceived(data_channel, params, payload);
+
+  EXPECT_EQ("a", last_data_channel_label_);
+  EXPECT_EQ(config.id, last_data_channel_config_.id);
+  EXPECT_FALSE(last_data_channel_config_.negotiated);
+  EXPECT_EQ(webrtc::InternalDataChannelInit::kAcker,
+            last_data_channel_config_.open_handshake_role);
+}
+
+TEST_P(WebRtcSessionTest, TestUsesProvidedCertificate) {
+  rtc::scoped_refptr<rtc::RTCCertificate> certificate =
+      FakeDtlsIdentityStore::GenerateCertificate();
+
+  PeerConnectionInterface::RTCConfiguration configuration;
+  configuration.certificates.push_back(certificate);
+  Init(nullptr, configuration);
+  EXPECT_TRUE_WAIT(!session_->waiting_for_certificate_for_testing(), 1000);
+
+  EXPECT_EQ(session_->certificate_for_testing(), certificate);
+}
+
+// Verifies that CreateOffer succeeds when CreateOffer is called before async
+// identity generation is finished (even if a certificate is provided, this is
+// an async op).
+TEST_P(WebRtcSessionTest, TestCreateOfferBeforeIdentityRequestReturnSuccess) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  InitWithDtls(GetParam());
+
+  EXPECT_TRUE(session_->waiting_for_certificate_for_testing());
+  SendAudioVideoStream1();
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+
+  EXPECT_TRUE(offer != NULL);
+  VerifyNoCryptoParams(offer->description(), true);
+  VerifyFingerprintStatus(offer->description(), true);
+}
+
+// Verifies that CreateAnswer succeeds when CreateAnswer is called before
+// async identity generation is finished (even if a certificate is provided,
+// this is an async op).
+TEST_P(WebRtcSessionTest, TestCreateAnswerBeforeIdentityRequestReturnSuccess) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  InitWithDtls(GetParam());
+  SetFactoryDtlsSrtp();
+
+  cricket::MediaSessionOptions options;
+  options.recv_video = true;
+  rtc::scoped_ptr<JsepSessionDescription> offer(
+      CreateRemoteOffer(options, cricket::SEC_DISABLED));
+  ASSERT_TRUE(offer.get() != NULL);
+  SetRemoteDescriptionWithoutError(offer.release());
+
+  rtc::scoped_ptr<SessionDescriptionInterface> answer(CreateAnswer(NULL));
+  EXPECT_TRUE(answer != NULL);
+  VerifyNoCryptoParams(answer->description(), true);
+  VerifyFingerprintStatus(answer->description(), true);
+}
+
+// Verifies that CreateOffer succeeds when CreateOffer is called after async
+// identity generation is finished (even if a certificate is provided, this is
+// an async op).
+TEST_P(WebRtcSessionTest, TestCreateOfferAfterIdentityRequestReturnSuccess) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  InitWithDtls(GetParam());
+
+  EXPECT_TRUE_WAIT(!session_->waiting_for_certificate_for_testing(), 1000);
+
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+  EXPECT_TRUE(offer != NULL);
+}
+
+// Verifies that CreateOffer fails when CreateOffer is called after async
+// identity generation fails.
+TEST_F(WebRtcSessionTest, TestCreateOfferAfterIdentityRequestReturnFailure) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  InitWithDtlsIdentityGenFail();
+
+  EXPECT_TRUE_WAIT(!session_->waiting_for_certificate_for_testing(), 1000);
+
+  rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+  EXPECT_TRUE(offer == NULL);
+}
+
+// Verifies that CreateOffer succeeds when multiple CreateOffer calls are made
+// before async identity generation is finished.
+TEST_P(WebRtcSessionTest,
+       TestMultipleCreateOfferBeforeIdentityRequestReturnSuccess) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  VerifyMultipleAsyncCreateDescription(GetParam(),
+                                       CreateSessionDescriptionRequest::kOffer);
+}
+
+// Verifies that CreateOffer fails when multiple CreateOffer calls are made
+// before async identity generation fails.
+TEST_F(WebRtcSessionTest,
+       TestMultipleCreateOfferBeforeIdentityRequestReturnFailure) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  VerifyMultipleAsyncCreateDescriptionIdentityGenFailure(
+      CreateSessionDescriptionRequest::kOffer);
+}
+
+// Verifies that CreateAnswer succeeds when multiple CreateAnswer calls are
+// made before async identity generation is finished.
+TEST_P(WebRtcSessionTest,
+       TestMultipleCreateAnswerBeforeIdentityRequestReturnSuccess) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  VerifyMultipleAsyncCreateDescription(
+      GetParam(), CreateSessionDescriptionRequest::kAnswer);
+}
+
+// Verifies that CreateAnswer fails when multiple CreateAnswer calls are made
+// before async identity generation fails.
+TEST_F(WebRtcSessionTest,
+       TestMultipleCreateAnswerBeforeIdentityRequestReturnFailure) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  VerifyMultipleAsyncCreateDescriptionIdentityGenFailure(
+      CreateSessionDescriptionRequest::kAnswer);
+}
+
+// Verifies that SetRemoteDescription fails when DTLS is disabled and the
+// remote offer has no SDES crypto but only a DTLS fingerprint.
+TEST_F(WebRtcSessionTest, TestSetRemoteOfferFailIfDtlsDisabledAndNoCrypto) {
+  // Init without DTLS.
+  Init();
+  // Create a remote offer with secured transport disabled.
+  cricket::MediaSessionOptions options;
+  JsepSessionDescription* offer(CreateRemoteOffer(
+      options, cricket::SEC_DISABLED));
+  // Adds a DTLS fingerprint to the remote offer.
+  cricket::SessionDescription* sdp = offer->description();
+  TransportInfo* audio = sdp->GetTransportInfoByName("audio");
+  ASSERT_TRUE(audio != NULL);
+  ASSERT_TRUE(audio->description.identity_fingerprint.get() == NULL);
+  audio->description.identity_fingerprint.reset(
+      rtc::SSLFingerprint::CreateFromRfc4572(
+          rtc::DIGEST_SHA_256, kFakeDtlsFingerprint));
+  SetRemoteDescriptionOfferExpectError(kSdpWithoutSdesCrypto,
+                                       offer);
+}
+
+// This test verifies that DSCP is properly applied on the media channels.
+TEST_F(WebRtcSessionTest, TestDscpConstraint) {
+  constraints_.reset(new FakeConstraints());
+  constraints_->AddOptional(
+      webrtc::MediaConstraintsInterface::kEnableDscp, true);
+  Init();
+  SendAudioVideoStream1();
+  SessionDescriptionInterface* offer = CreateOffer();
+
+  SetLocalDescriptionWithoutError(offer);
+
+  video_channel_ = media_engine_->GetVideoChannel(0);
+  voice_channel_ = media_engine_->GetVoiceChannel(0);
+
+  ASSERT_TRUE(video_channel_ != NULL);
+  ASSERT_TRUE(voice_channel_ != NULL);
+  const cricket::AudioOptions& audio_options = voice_channel_->options();
+  const cricket::VideoOptions& video_options = video_channel_->options();
+  EXPECT_EQ(rtc::Optional<bool>(true), audio_options.dscp);
+  EXPECT_EQ(rtc::Optional<bool>(true), video_options.dscp);
+}
+
+TEST_F(WebRtcSessionTest, TestSuspendBelowMinBitrateConstraint) {
+  constraints_.reset(new FakeConstraints());
+  constraints_->AddOptional(
+      webrtc::MediaConstraintsInterface::kEnableVideoSuspendBelowMinBitrate,
+      true);
+  Init();
+  SendAudioVideoStream1();
+  SessionDescriptionInterface* offer = CreateOffer();
+
+  SetLocalDescriptionWithoutError(offer);
+
+  video_channel_ = media_engine_->GetVideoChannel(0);
+
+  ASSERT_TRUE(video_channel_ != NULL);
+  const cricket::VideoOptions& video_options = video_channel_->options();
+  EXPECT_EQ(rtc::Optional<bool>(true), video_options.suspend_below_min_bitrate);
+}
+
+TEST_F(WebRtcSessionTest, TestCombinedAudioVideoBweConstraint) {
+  constraints_.reset(new FakeConstraints());
+  constraints_->AddOptional(
+      webrtc::MediaConstraintsInterface::kCombinedAudioVideoBwe,
+      true);
+  Init();
+  SendAudioVideoStream1();
+  SessionDescriptionInterface* offer = CreateOffer();
+
+  SetLocalDescriptionWithoutError(offer);
+
+  voice_channel_ = media_engine_->GetVoiceChannel(0);
+
+  ASSERT_TRUE(voice_channel_ != NULL);
+  const cricket::AudioOptions& audio_options = voice_channel_->options();
+  EXPECT_EQ(rtc::Optional<bool>(true), audio_options.combined_audio_video_bwe);
+}
+
+// Tests that we can renegotiate new media content with ICE candidates in the
+// new remote SDP.
+TEST_P(WebRtcSessionTest, TestRenegotiateNewMediaWithCandidatesInSdp) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  InitWithDtls(GetParam());
+  SetFactoryDtlsSrtp();
+
+  SendAudioOnlyStream2();
+  SessionDescriptionInterface* offer = CreateOffer();
+  SetLocalDescriptionWithoutError(offer);
+
+  SessionDescriptionInterface* answer = CreateRemoteAnswer(offer);
+  SetRemoteDescriptionWithoutError(answer);
+
+  cricket::MediaSessionOptions options;
+  options.recv_video = true;
+  offer = CreateRemoteOffer(options, cricket::SEC_DISABLED);
+
+  cricket::Candidate candidate1;
+  candidate1.set_address(rtc::SocketAddress("1.1.1.1", 5000));
+  candidate1.set_component(1);
+  JsepIceCandidate ice_candidate(kMediaContentName1, kMediaContentIndex1,
+                                 candidate1);
+  EXPECT_TRUE(offer->AddCandidate(&ice_candidate));
+  SetRemoteDescriptionWithoutError(offer);
+
+  answer = CreateAnswer(NULL);
+  SetLocalDescriptionWithoutError(answer);
+}
+
+// Tests that we can renegotiate new media content with ICE candidates separated
+// from the remote SDP.
+TEST_P(WebRtcSessionTest, TestRenegotiateNewMediaWithCandidatesSeparated) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+  InitWithDtls(GetParam());
+  SetFactoryDtlsSrtp();
+
+  SendAudioOnlyStream2();
+  SessionDescriptionInterface* offer = CreateOffer();
+  SetLocalDescriptionWithoutError(offer);
+
+  SessionDescriptionInterface* answer = CreateRemoteAnswer(offer);
+  SetRemoteDescriptionWithoutError(answer);
+
+  cricket::MediaSessionOptions options;
+  options.recv_video = true;
+  offer = CreateRemoteOffer(options, cricket::SEC_DISABLED);
+  SetRemoteDescriptionWithoutError(offer);
+
+  cricket::Candidate candidate1;
+  candidate1.set_address(rtc::SocketAddress("1.1.1.1", 5000));
+  candidate1.set_component(1);
+  JsepIceCandidate ice_candidate(kMediaContentName1, kMediaContentIndex1,
+                                 candidate1);
+  EXPECT_TRUE(session_->ProcessIceMessage(&ice_candidate));
+
+  answer = CreateAnswer(NULL);
+  SetLocalDescriptionWithoutError(answer);
+}
+
+// Flaky on Win and Mac only. See webrtc:4943
+#if defined(WEBRTC_WIN) || defined(WEBRTC_MAC)
+#define MAYBE_TestRtxRemovedByCreateAnswer DISABLED_TestRtxRemovedByCreateAnswer
+#else
+#define MAYBE_TestRtxRemovedByCreateAnswer TestRtxRemovedByCreateAnswer
+#endif
+// Tests that the RTX codec is removed from the answer when it isn't
+// supported by the local side.
+TEST_F(WebRtcSessionTest, MAYBE_TestRtxRemovedByCreateAnswer) {
+  Init();
+  SendAudioVideoStream1();
+  std::string offer_sdp(kSdpWithRtx);
+
+  SessionDescriptionInterface* offer =
+      CreateSessionDescription(JsepSessionDescription::kOffer, offer_sdp, NULL);
+  EXPECT_TRUE(offer->ToString(&offer_sdp));
+
+  // Offer SDP contains the RTX codec.
+  EXPECT_TRUE(offer_sdp.find("rtx") != std::string::npos);
+  SetRemoteDescriptionWithoutError(offer);
+
+  SessionDescriptionInterface* answer = CreateAnswer(NULL);
+  std::string answer_sdp;
+  answer->ToString(&answer_sdp);
+  // Answer SDP removes the unsupported RTX codec.
+  EXPECT_TRUE(answer_sdp.find("rtx") == std::string::npos);
+  SetLocalDescriptionWithoutError(answer);
+}
+
+// This verifies that, after bundling, the voice channel carries the socket
+// options that were set on both the video and voice channels.
+TEST_F(WebRtcSessionTest, TestSetSocketOptionBeforeBundle) {
+  InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyBalanced);
+  SendAudioVideoStream1();
+
+  PeerConnectionInterface::RTCOfferAnswerOptions options;
+  options.use_rtp_mux = true;
+
+  SessionDescriptionInterface* offer = CreateOffer(options);
+  SetLocalDescriptionWithoutError(offer);
+
+  session_->video_channel()->SetOption(cricket::BaseChannel::ST_RTP,
+                                       rtc::Socket::Option::OPT_SNDBUF, 4000);
+
+  session_->voice_channel()->SetOption(cricket::BaseChannel::ST_RTP,
+                                       rtc::Socket::Option::OPT_RCVBUF, 8000);
+
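+  // Before bundling, each socket option is visible only on the transport
+  // channel it was set on.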
+  int option_val;
+  EXPECT_TRUE(session_->video_rtp_transport_channel()->GetOption(
+      rtc::Socket::Option::OPT_SNDBUF, &option_val));
+  EXPECT_EQ(4000, option_val);
+  EXPECT_FALSE(session_->voice_rtp_transport_channel()->GetOption(
+      rtc::Socket::Option::OPT_SNDBUF, &option_val));
+
+  EXPECT_TRUE(session_->voice_rtp_transport_channel()->GetOption(
+      rtc::Socket::Option::OPT_RCVBUF, &option_val));
+  EXPECT_EQ(8000, option_val);
+  EXPECT_FALSE(session_->video_rtp_transport_channel()->GetOption(
+      rtc::Socket::Option::OPT_RCVBUF, &option_val));
+
+  EXPECT_NE(session_->voice_rtp_transport_channel(),
+            session_->video_rtp_transport_channel());
+
+  SendAudioVideoStream2();
+  SessionDescriptionInterface* answer =
+      CreateRemoteAnswer(session_->local_description());
+  SetRemoteDescriptionWithoutError(answer);
+
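+  // After bundling, the single remaining transport carries both options.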
+  EXPECT_TRUE(session_->voice_rtp_transport_channel()->GetOption(
+      rtc::Socket::Option::OPT_SNDBUF, &option_val));
+  EXPECT_EQ(4000, option_val);
+
+  EXPECT_TRUE(session_->voice_rtp_transport_channel()->GetOption(
+      rtc::Socket::Option::OPT_RCVBUF, &option_val));
+  EXPECT_EQ(8000, option_val);
+}
+
+// Tests creating a session, requesting multiple offers, and then destroying
+// the session, making sure we got success/failure callbacks for all of the
+// requests.
+// Background: crbug.com/507307
+TEST_F(WebRtcSessionTest, CreateOffersAndShutdown) {
+  Init();
+
+  rtc::scoped_refptr<WebRtcSessionCreateSDPObserverForTest> observers[100];
+  PeerConnectionInterface::RTCOfferAnswerOptions options;
+  options.offer_to_receive_audio =
+      RTCOfferAnswerOptions::kOfferToReceiveMediaTrue;
+  cricket::MediaSessionOptions session_options;
+  session_options.recv_audio = true;
+
+  for (auto& o : observers) {
+    o = new WebRtcSessionCreateSDPObserverForTest();
+    session_->CreateOffer(o, options, session_options);
+  }
+
+  session_.reset();
+
+  for (auto& o : observers) {
+    // We expect to have received a notification by now, even if the session
+    // was terminated. The offer creation may or may not have succeeded, but
+    // we must have received a notification, so the only invalid state is
+    // kInit.
+    EXPECT_NE(WebRtcSessionCreateSDPObserverForTest::kInit, o->state());
+  }
+}
+
+TEST_F(WebRtcSessionTest, TestPacketOptionsAndOnPacketSent) {
+  TestPacketOptions();
+}
+
+// Make sure the signal from "GetOnDestroyedSignal()" fires when the session
+// is destroyed.
+TEST_F(WebRtcSessionTest, TestOnDestroyedSignal) {
+  Init();
+  session_.reset();
+  EXPECT_TRUE(session_destroyed_);
+}
+
+// TODO(bemasc): Add a TestIceStatesBundle with BUNDLE enabled.  That test
+// currently fails because upon disconnection and reconnection OnIceComplete is
+// called more than once without returning to IceGatheringGathering.
+
+INSTANTIATE_TEST_CASE_P(WebRtcSessionTests,
+                        WebRtcSessionTest,
+                        testing::Values(ALREADY_GENERATED,
+                                        DTLS_IDENTITY_STORE));
diff --git a/webrtc/api/webrtcsessiondescriptionfactory.cc b/webrtc/api/webrtcsessiondescriptionfactory.cc
new file mode 100644
index 0000000..4421465
--- /dev/null
+++ b/webrtc/api/webrtcsessiondescriptionfactory.cc
@@ -0,0 +1,527 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "webrtc/api/webrtcsessiondescriptionfactory.h"
+
+#include <utility>
+
+#include "webrtc/api/dtlsidentitystore.h"
+#include "webrtc/api/jsep.h"
+#include "webrtc/api/jsepsessiondescription.h"
+#include "webrtc/api/mediaconstraintsinterface.h"
+#include "webrtc/api/webrtcsession.h"
+#include "webrtc/base/sslidentity.h"
+
+using cricket::MediaSessionOptions;
+
+namespace webrtc {
+namespace {
+static const char kFailedDueToIdentityFailed[] =
+    " failed because DTLS identity request failed";
+static const char kFailedDueToSessionShutdown[] =
+    " failed because the session was shut down";
+
+static const uint64_t kInitSessionVersion = 2;
+
+static bool CompareStream(const MediaSessionOptions::Stream& stream1,
+                          const MediaSessionOptions::Stream& stream2) {
+  return stream1.id < stream2.id;
+}
+
+static bool SameId(const MediaSessionOptions::Stream& stream1,
+                   const MediaSessionOptions::Stream& stream2) {
+  return stream1.id == stream2.id;
+}
+
+// Checks that each Stream within |streams| has a unique id.
+static bool ValidStreams(const MediaSessionOptions::Streams& streams) {
+  MediaSessionOptions::Streams sorted_streams = streams;
+  std::sort(sorted_streams.begin(), sorted_streams.end(), CompareStream);
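+  // Sorting places duplicate ids next to each other so that
+  // std::adjacent_find can detect them.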
+  MediaSessionOptions::Streams::iterator it =
+      std::adjacent_find(sorted_streams.begin(), sorted_streams.end(),
+                         SameId);
+  return it == sorted_streams.end();
+}
+
+enum {
+  MSG_CREATE_SESSIONDESCRIPTION_SUCCESS,
+  MSG_CREATE_SESSIONDESCRIPTION_FAILED,
+  MSG_USE_CONSTRUCTOR_CERTIFICATE
+};
+
+struct CreateSessionDescriptionMsg : public rtc::MessageData {
+  explicit CreateSessionDescriptionMsg(
+      webrtc::CreateSessionDescriptionObserver* observer)
+      : observer(observer) {
+  }
+
+  rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserver> observer;
+  std::string error;
+  rtc::scoped_ptr<webrtc::SessionDescriptionInterface> description;
+};
+}  // namespace
+
+void WebRtcIdentityRequestObserver::OnFailure(int error) {
+  SignalRequestFailed(error);
+}
+
+void WebRtcIdentityRequestObserver::OnSuccess(
+    const std::string& der_cert, const std::string& der_private_key) {
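+  // The identity arrives as DER; convert both blobs to PEM, since
+  // SSLIdentity::FromPEMStrings expects PEM input.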
+  std::string pem_cert = rtc::SSLIdentity::DerToPem(
+      rtc::kPemTypeCertificate,
+      reinterpret_cast<const unsigned char*>(der_cert.data()),
+      der_cert.length());
+  std::string pem_key = rtc::SSLIdentity::DerToPem(
+      rtc::kPemTypeRsaPrivateKey,
+      reinterpret_cast<const unsigned char*>(der_private_key.data()),
+      der_private_key.length());
+  rtc::scoped_ptr<rtc::SSLIdentity> identity(
+      rtc::SSLIdentity::FromPEMStrings(pem_key, pem_cert));
+  SignalCertificateReady(rtc::RTCCertificate::Create(std::move(identity)));
+}
+
+void WebRtcIdentityRequestObserver::OnSuccess(
+    rtc::scoped_ptr<rtc::SSLIdentity> identity) {
+  SignalCertificateReady(rtc::RTCCertificate::Create(std::move(identity)));
+}
+
+// static
+void WebRtcSessionDescriptionFactory::CopyCandidatesFromSessionDescription(
+    const SessionDescriptionInterface* source_desc,
+    SessionDescriptionInterface* dest_desc) {
+  if (!source_desc)
+    return;
+  for (size_t m = 0; m < source_desc->number_of_mediasections() &&
+                     m < dest_desc->number_of_mediasections(); ++m) {
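+    // Merge candidates per m-section, skipping any that the destination
+    // already has.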
+    const IceCandidateCollection* source_candidates =
+        source_desc->candidates(m);
+    const IceCandidateCollection* dest_candidates = dest_desc->candidates(m);
+    for (size_t n = 0; n < source_candidates->count(); ++n) {
+      const IceCandidateInterface* new_candidate = source_candidates->at(n);
+      if (!dest_candidates->HasCandidate(new_candidate))
+        dest_desc->AddCandidate(source_candidates->at(n));
+    }
+  }
+}
+
+// Private constructor called by other constructors.
+WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory(
+    rtc::Thread* signaling_thread,
+    cricket::ChannelManager* channel_manager,
+    rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+    const rtc::scoped_refptr<WebRtcIdentityRequestObserver>&
+        identity_request_observer,
+    WebRtcSession* session,
+    const std::string& session_id,
+    bool dtls_enabled)
+    : signaling_thread_(signaling_thread),
+      session_desc_factory_(channel_manager, &transport_desc_factory_),
+      // RFC 4566 suggests a Network Time Protocol (NTP) format timestamp as
+      // the session id and session version. To simplify, it should be fine
+      // to just use a random number as the session id and start the version
+      // from |kInitSessionVersion|.
+      session_version_(kInitSessionVersion),
+      dtls_identity_store_(std::move(dtls_identity_store)),
+      identity_request_observer_(identity_request_observer),
+      session_(session),
+      session_id_(session_id),
+      certificate_request_state_(CERTIFICATE_NOT_NEEDED) {
+  session_desc_factory_.set_add_legacy_streams(false);
+  // SRTP-SDES is disabled if DTLS is on.
+  SetSdesPolicy(dtls_enabled ? cricket::SEC_DISABLED : cricket::SEC_REQUIRED);
+}
+
+WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory(
+    rtc::Thread* signaling_thread,
+    cricket::ChannelManager* channel_manager,
+    WebRtcSession* session,
+    const std::string& session_id)
+    : WebRtcSessionDescriptionFactory(signaling_thread,
+                                      channel_manager,
+                                      nullptr,
+                                      nullptr,
+                                      session,
+                                      session_id,
+                                      false) {
+  LOG(LS_VERBOSE) << "DTLS-SRTP disabled.";
+}
+
+WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory(
+    rtc::Thread* signaling_thread,
+    cricket::ChannelManager* channel_manager,
+    rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+    WebRtcSession* session,
+    const std::string& session_id)
+    : WebRtcSessionDescriptionFactory(
+          signaling_thread,
+          channel_manager,
+          std::move(dtls_identity_store),
+          new rtc::RefCountedObject<WebRtcIdentityRequestObserver>(),
+          session,
+          session_id,
+          true) {
+  RTC_DCHECK(dtls_identity_store_);
+
+  certificate_request_state_ = CERTIFICATE_WAITING;
+
+  identity_request_observer_->SignalRequestFailed.connect(
+      this, &WebRtcSessionDescriptionFactory::OnIdentityRequestFailed);
+  identity_request_observer_->SignalCertificateReady.connect(
+      this, &WebRtcSessionDescriptionFactory::SetCertificate);
+
+  rtc::KeyType key_type = rtc::KT_DEFAULT;
+  LOG(LS_VERBOSE) << "DTLS-SRTP enabled; sending DTLS identity request (key "
+                  << "type: " << key_type << ").";
+
+  // Request identity. This happens asynchronously, so the caller will have a
+  // chance to connect to SignalCertificateReady.
+  dtls_identity_store_->RequestIdentity(key_type, identity_request_observer_);
+}
+
+WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory(
+    rtc::Thread* signaling_thread,
+    cricket::ChannelManager* channel_manager,
+    const rtc::scoped_refptr<rtc::RTCCertificate>& certificate,
+    WebRtcSession* session,
+    const std::string& session_id)
+    : WebRtcSessionDescriptionFactory(signaling_thread,
+                                      channel_manager,
+                                      nullptr,
+                                      nullptr,
+                                      session,
+                                      session_id,
+                                      true) {
+  RTC_DCHECK(certificate);
+
+  certificate_request_state_ = CERTIFICATE_WAITING;
+
+  LOG(LS_VERBOSE) << "DTLS-SRTP enabled; has certificate parameter.";
+  // We already have a certificate, but we wait to call SetCertificate; if we
+  // did it in the constructor, the caller would not yet have had a chance to
+  // connect to SignalCertificateReady.
+  signaling_thread_->Post(
+      this, MSG_USE_CONSTRUCTOR_CERTIFICATE,
+      new rtc::ScopedRefMessageData<rtc::RTCCertificate>(certificate));
+}
+
+WebRtcSessionDescriptionFactory::~WebRtcSessionDescriptionFactory() {
+  ASSERT(signaling_thread_->IsCurrent());
+
+  // Fail any requests that were made before identity generation completed.
+  FailPendingRequests(kFailedDueToSessionShutdown);
+
+  // Process all pending notifications in the message queue. If we don't do
+  // this, requests will linger and never learn whether they succeeded or
+  // failed.
+  rtc::MessageList list;
+  signaling_thread_->Clear(this, rtc::MQID_ANY, &list);
+  for (auto& msg : list) {
+    if (msg.message_id != MSG_USE_CONSTRUCTOR_CERTIFICATE) {
+      OnMessage(&msg);
+    } else {
+      // Skip MSG_USE_CONSTRUCTOR_CERTIFICATE because we don't want to trigger
+      // SetIdentity-related callbacks in the destructor. This can be a problem
+      // when WebRtcSession listens to the callback but it was the WebRtcSession
+      // destructor that caused WebRtcSessionDescriptionFactory's destruction.
+      // The callback is then ignored, leaking memory allocated by OnMessage for
+      // MSG_USE_CONSTRUCTOR_CERTIFICATE.
+      delete msg.pdata;
+    }
+  }
+}
+
+void WebRtcSessionDescriptionFactory::CreateOffer(
+    CreateSessionDescriptionObserver* observer,
+    const PeerConnectionInterface::RTCOfferAnswerOptions& options,
+    const cricket::MediaSessionOptions& session_options) {
+  std::string error = "CreateOffer";
+  if (certificate_request_state_ == CERTIFICATE_FAILED) {
+    error += kFailedDueToIdentityFailed;
+    LOG(LS_ERROR) << error;
+    PostCreateSessionDescriptionFailed(observer, error);
+    return;
+  }
+
+  if (!ValidStreams(session_options.streams)) {
+    error += " called with invalid media streams.";
+    LOG(LS_ERROR) << error;
+    PostCreateSessionDescriptionFailed(observer, error);
+    return;
+  }
+
+  CreateSessionDescriptionRequest request(
+      CreateSessionDescriptionRequest::kOffer, observer, session_options);
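+  // If the DTLS certificate is still being generated, queue the request;
+  // otherwise create the offer immediately.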
+  if (certificate_request_state_ == CERTIFICATE_WAITING) {
+    create_session_description_requests_.push(request);
+  } else {
+    ASSERT(certificate_request_state_ == CERTIFICATE_SUCCEEDED ||
+           certificate_request_state_ == CERTIFICATE_NOT_NEEDED);
+    InternalCreateOffer(request);
+  }
+}
+
+void WebRtcSessionDescriptionFactory::CreateAnswer(
+    CreateSessionDescriptionObserver* observer,
+    const MediaConstraintsInterface* constraints,
+    const cricket::MediaSessionOptions& session_options) {
+  std::string error = "CreateAnswer";
+  if (certificate_request_state_ == CERTIFICATE_FAILED) {
+    error += kFailedDueToIdentityFailed;
+    LOG(LS_ERROR) << error;
+    PostCreateSessionDescriptionFailed(observer, error);
+    return;
+  }
+  if (!session_->remote_description()) {
+    error += " can't be called before SetRemoteDescription.";
+    LOG(LS_ERROR) << error;
+    PostCreateSessionDescriptionFailed(observer, error);
+    return;
+  }
+  if (session_->remote_description()->type() !=
+      JsepSessionDescription::kOffer) {
+    error += " failed because remote_description is not an offer.";
+    LOG(LS_ERROR) << error;
+    PostCreateSessionDescriptionFailed(observer, error);
+    return;
+  }
+
+  if (!ValidStreams(session_options.streams)) {
+    error += " called with invalid media streams.";
+    LOG(LS_ERROR) << error;
+    PostCreateSessionDescriptionFailed(observer, error);
+    return;
+  }
+
+  CreateSessionDescriptionRequest request(
+      CreateSessionDescriptionRequest::kAnswer, observer, session_options);
+  if (certificate_request_state_ == CERTIFICATE_WAITING) {
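+    // As in CreateOffer, queue the request until the certificate is ready.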
+    create_session_description_requests_.push(request);
+  } else {
+    ASSERT(certificate_request_state_ == CERTIFICATE_SUCCEEDED ||
+           certificate_request_state_ == CERTIFICATE_NOT_NEEDED);
+    InternalCreateAnswer(request);
+  }
+}
+
+void WebRtcSessionDescriptionFactory::SetSdesPolicy(
+    cricket::SecurePolicy secure_policy) {
+  session_desc_factory_.set_secure(secure_policy);
+}
+
+cricket::SecurePolicy WebRtcSessionDescriptionFactory::SdesPolicy() const {
+  return session_desc_factory_.secure();
+}
+
+void WebRtcSessionDescriptionFactory::OnMessage(rtc::Message* msg) {
+  switch (msg->message_id) {
+    case MSG_CREATE_SESSIONDESCRIPTION_SUCCESS: {
+      CreateSessionDescriptionMsg* param =
+          static_cast<CreateSessionDescriptionMsg*>(msg->pdata);
+      param->observer->OnSuccess(param->description.release());
+      delete param;
+      break;
+    }
+    case MSG_CREATE_SESSIONDESCRIPTION_FAILED: {
+      CreateSessionDescriptionMsg* param =
+          static_cast<CreateSessionDescriptionMsg*>(msg->pdata);
+      param->observer->OnFailure(param->error);
+      delete param;
+      break;
+    }
+    case MSG_USE_CONSTRUCTOR_CERTIFICATE: {
+      rtc::ScopedRefMessageData<rtc::RTCCertificate>* param =
+          static_cast<rtc::ScopedRefMessageData<rtc::RTCCertificate>*>(
+              msg->pdata);
+      LOG(LS_INFO) << "Using certificate supplied to the constructor.";
+      SetCertificate(param->data());
+      delete param;
+      break;
+    }
+    default:
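+      // No other message types should be posted to this handler.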
+      ASSERT(false);
+      break;
+  }
+}
+
+void WebRtcSessionDescriptionFactory::InternalCreateOffer(
+    CreateSessionDescriptionRequest request) {
+  cricket::SessionDescription* desc(session_desc_factory_.CreateOffer(
+      request.options, session_->local_description()
+                           ? session_->local_description()->description()
+                           : nullptr));
+  // RFC 3264
+  // When issuing an offer that modifies the session,
+  // the "o=" line of the new SDP MUST be identical to that in the
+  // previous SDP, except that the version in the origin field MUST
+  // increment by one from the previous SDP.
+
+  // Just increase the version number by one each time a new offer is
+  // created, regardless of whether it is identical to the previous one.
+  // |session_version_| is a uint64_t, so wrap-around should not happen.
+  ASSERT(session_version_ + 1 > session_version_);
+  JsepSessionDescription* offer(new JsepSessionDescription(
+      JsepSessionDescription::kOffer));
+  if (!offer->Initialize(desc, session_id_,
+                         rtc::ToString(session_version_++))) {
+    delete offer;
+    PostCreateSessionDescriptionFailed(request.observer,
+                                       "Failed to initialize the offer.");
+    return;
+  }
+  if (session_->local_description() &&
+      !request.options.audio_transport_options.ice_restart &&
+      !request.options.video_transport_options.ice_restart &&
+      !request.options.data_transport_options.ice_restart) {
+    // Include all local ICE candidates in the SessionDescription unless
+    // an ICE restart has been requested.
+    CopyCandidatesFromSessionDescription(session_->local_description(), offer);
+  }
+  PostCreateSessionDescriptionSucceeded(request.observer, offer);
+}
+
+void WebRtcSessionDescriptionFactory::InternalCreateAnswer(
+    CreateSessionDescriptionRequest request) {
+  // According to http://tools.ietf.org/html/rfc5245#section-9.2.1.1, an
+  // answer should also contain a new ICE ufrag and password if the offer was
+  // received with a new ufrag and password.
+  request.options.audio_transport_options.ice_restart =
+      session_->IceRestartPending();
+  request.options.video_transport_options.ice_restart =
+      session_->IceRestartPending();
+  request.options.data_transport_options.ice_restart =
+      session_->IceRestartPending();
+  // Pass the current SSL role to the transport description factory if there
+  // is already an ongoing session.
+  rtc::SSLRole ssl_role;
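+  // If a channel is already established as the DTLS server (SSL_SERVER),
+  // prefer to keep the passive role for that content in the answer.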
+  if (session_->GetSslRole(session_->voice_channel(), &ssl_role)) {
+    request.options.audio_transport_options.prefer_passive_role =
+        (rtc::SSL_SERVER == ssl_role);
+  }
+  if (session_->GetSslRole(session_->video_channel(), &ssl_role)) {
+    request.options.video_transport_options.prefer_passive_role =
+        (rtc::SSL_SERVER == ssl_role);
+  }
+  if (session_->GetSslRole(session_->data_channel(), &ssl_role)) {
+    request.options.data_transport_options.prefer_passive_role =
+        (rtc::SSL_SERVER == ssl_role);
+  }
+
+  cricket::SessionDescription* desc(session_desc_factory_.CreateAnswer(
+      session_->remote_description()
+          ? session_->remote_description()->description()
+          : nullptr,
+      request.options, session_->local_description()
+                           ? session_->local_description()->description()
+                           : nullptr));
+  // RFC 3264
+  // If the answer is different from the offer in any way (different IP
+  // addresses, ports, etc.), the origin line MUST be different in the answer.
+  // In that case, the version number in the "o=" line of the answer is
+  // unrelated to the version number in the o line of the offer.
+  // Get a new version number by increasing |session_version_|.
+  // |session_version_| is a uint64_t, so wrap-around should not happen.
+  ASSERT(session_version_ + 1 > session_version_);
+  JsepSessionDescription* answer(new JsepSessionDescription(
+      JsepSessionDescription::kAnswer));
+  if (!answer->Initialize(desc, session_id_,
+                          rtc::ToString(session_version_++))) {
+    delete answer;
+    PostCreateSessionDescriptionFailed(request.observer,
+                                       "Failed to initialize the answer.");
+    return;
+  }
+  if (session_->local_description() &&
+      !request.options.audio_transport_options.ice_restart &&
+      !request.options.video_transport_options.ice_restart &&
+      !request.options.data_transport_options.ice_restart) {
+    // Include all local ICE candidates in the SessionDescription unless
+    // the remote peer has requested an ICE restart.
+    CopyCandidatesFromSessionDescription(session_->local_description(), answer);
+  }
+  session_->ResetIceRestartLatch();
+  PostCreateSessionDescriptionSucceeded(request.observer, answer);
+}
+
+void WebRtcSessionDescriptionFactory::FailPendingRequests(
+    const std::string& reason) {
+  ASSERT(signaling_thread_->IsCurrent());
+  while (!create_session_description_requests_.empty()) {
+    const CreateSessionDescriptionRequest& request =
+        create_session_description_requests_.front();
+    PostCreateSessionDescriptionFailed(request.observer,
+        ((request.type == CreateSessionDescriptionRequest::kOffer) ?
+            "CreateOffer" : "CreateAnswer") + reason);
+    create_session_description_requests_.pop();
+  }
+}
+
+void WebRtcSessionDescriptionFactory::PostCreateSessionDescriptionFailed(
+    CreateSessionDescriptionObserver* observer, const std::string& error) {
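+  // Post the failure to the signaling thread so the observer is always
+  // notified asynchronously, even when this is called synchronously.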
+  CreateSessionDescriptionMsg* msg = new CreateSessionDescriptionMsg(observer);
+  msg->error = error;
+  signaling_thread_->Post(this, MSG_CREATE_SESSIONDESCRIPTION_FAILED, msg);
+  LOG(LS_ERROR) << "Create SDP failed: " << error;
+}
+
+void WebRtcSessionDescriptionFactory::PostCreateSessionDescriptionSucceeded(
+    CreateSessionDescriptionObserver* observer,
+    SessionDescriptionInterface* description) {
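+  // Post the result so the observer's OnSuccess always runs asynchronously
+  // on the signaling thread.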
+  CreateSessionDescriptionMsg* msg = new CreateSessionDescriptionMsg(observer);
+  msg->description.reset(description);
+  signaling_thread_->Post(this, MSG_CREATE_SESSIONDESCRIPTION_SUCCESS, msg);
+}
+
+void WebRtcSessionDescriptionFactory::OnIdentityRequestFailed(int error) {
+  ASSERT(signaling_thread_->IsCurrent());
+
+  LOG(LS_ERROR) << "Async identity request failed: error = " << error;
+  certificate_request_state_ = CERTIFICATE_FAILED;
+
+  FailPendingRequests(kFailedDueToIdentityFailed);
+}
+
+void WebRtcSessionDescriptionFactory::SetCertificate(
+    const rtc::scoped_refptr<rtc::RTCCertificate>& certificate) {
+  RTC_DCHECK(certificate);
+  LOG(LS_VERBOSE) << "Setting new certificate";
+
+  certificate_request_state_ = CERTIFICATE_SUCCEEDED;
+  SignalCertificateReady(certificate);
+
+  transport_desc_factory_.set_certificate(certificate);
+  transport_desc_factory_.set_secure(cricket::SEC_ENABLED);
+
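+  // Service the offer/answer requests that were queued while the certificate
+  // was pending.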
+  while (!create_session_description_requests_.empty()) {
+    if (create_session_description_requests_.front().type ==
+        CreateSessionDescriptionRequest::kOffer) {
+      InternalCreateOffer(create_session_description_requests_.front());
+    } else {
+      InternalCreateAnswer(create_session_description_requests_.front());
+    }
+    create_session_description_requests_.pop();
+  }
+}
+}  // namespace webrtc
diff --git a/webrtc/api/webrtcsessiondescriptionfactory.h b/webrtc/api/webrtcsessiondescriptionfactory.h
new file mode 100644
index 0000000..7d2cdee
--- /dev/null
+++ b/webrtc/api/webrtcsessiondescriptionfactory.h
@@ -0,0 +1,193 @@
+/*
+ * libjingle
+ * Copyright 2013 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_API_WEBRTCSESSIONDESCRIPTIONFACTORY_H_
+#define WEBRTC_API_WEBRTCSESSIONDESCRIPTIONFACTORY_H_
+
+#include "talk/session/media/mediasession.h"
+#include "webrtc/api/dtlsidentitystore.h"
+#include "webrtc/api/peerconnectioninterface.h"
+#include "webrtc/base/messagehandler.h"
+#include "webrtc/base/rtccertificate.h"
+#include "webrtc/p2p/base/transportdescriptionfactory.h"
+
+namespace cricket {
+class ChannelManager;
+class TransportDescriptionFactory;
+}  // namespace cricket
+
+namespace webrtc {
+class CreateSessionDescriptionObserver;
+class MediaConstraintsInterface;
+class SessionDescriptionInterface;
+class WebRtcSession;
+
+// DTLS identity request callback class.
+class WebRtcIdentityRequestObserver : public DtlsIdentityRequestObserver,
+                                      public sigslot::has_slots<> {
+ public:
+  // DtlsIdentityRequestObserver overrides.
+  void OnFailure(int error) override;
+  void OnSuccess(const std::string& der_cert,
+                 const std::string& der_private_key) override;
+  void OnSuccess(rtc::scoped_ptr<rtc::SSLIdentity> identity) override;
+
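+  // Fired from the OnFailure/OnSuccess callbacks above.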
+  sigslot::signal1<int> SignalRequestFailed;
+  sigslot::signal1<const rtc::scoped_refptr<rtc::RTCCertificate>&>
+      SignalCertificateReady;
+};
+
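+// A pending CreateOffer/CreateAnswer request, queued when the DTLS
+// certificate is not yet available.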
+struct CreateSessionDescriptionRequest {
+  enum Type {
+    kOffer,
+    kAnswer,
+  };
+
+  CreateSessionDescriptionRequest(
+      Type type,
+      CreateSessionDescriptionObserver* observer,
+      const cricket::MediaSessionOptions& options)
+      : type(type),
+        observer(observer),
+        options(options) {}
+
+  Type type;
+  rtc::scoped_refptr<CreateSessionDescriptionObserver> observer;
+  cricket::MediaSessionOptions options;
+};
+
+// This class creates offer/answer session descriptions for WebRtcSession,
+// taking asynchronous DTLS identity generation into account.
+// It queues CreateOffer/CreateAnswer requests until the DTLS identity
+// request has completed, i.e. until OnIdentityRequestFailed or
+// SetCertificate is called.
+class WebRtcSessionDescriptionFactory : public rtc::MessageHandler,
+                                        public sigslot::has_slots<> {
+ public:
+  // Construct with DTLS disabled.
+  WebRtcSessionDescriptionFactory(rtc::Thread* signaling_thread,
+                                  cricket::ChannelManager* channel_manager,
+                                  WebRtcSession* session,
+                                  const std::string& session_id);
+
+  // Construct with DTLS enabled using the specified |dtls_identity_store| to
+  // generate a certificate.
+  WebRtcSessionDescriptionFactory(
+      rtc::Thread* signaling_thread,
+      cricket::ChannelManager* channel_manager,
+      rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+      WebRtcSession* session,
+      const std::string& session_id);
+
+  // Construct with DTLS enabled using the specified (already generated)
+  // |certificate|.
+  WebRtcSessionDescriptionFactory(
+      rtc::Thread* signaling_thread,
+      cricket::ChannelManager* channel_manager,
+      const rtc::scoped_refptr<rtc::RTCCertificate>& certificate,
+      WebRtcSession* session,
+      const std::string& session_id);
+  virtual ~WebRtcSessionDescriptionFactory();
+
+  static void CopyCandidatesFromSessionDescription(
+      const SessionDescriptionInterface* source_desc,
+      SessionDescriptionInterface* dest_desc);
+
+  void CreateOffer(
+      CreateSessionDescriptionObserver* observer,
+      const PeerConnectionInterface::RTCOfferAnswerOptions& options,
+      const cricket::MediaSessionOptions& session_options);
+  void CreateAnswer(CreateSessionDescriptionObserver* observer,
+                    const MediaConstraintsInterface* constraints,
+                    const cricket::MediaSessionOptions& session_options);
+
+  void SetSdesPolicy(cricket::SecurePolicy secure_policy);
+  cricket::SecurePolicy SdesPolicy() const;
+
+  sigslot::signal1<const rtc::scoped_refptr<rtc::RTCCertificate>&>
+      SignalCertificateReady;
+
+  // For testing.
+  bool waiting_for_certificate_for_testing() const {
+    return certificate_request_state_ == CERTIFICATE_WAITING;
+  }
+
+ private:
+  enum CertificateRequestState {
+    CERTIFICATE_NOT_NEEDED,
+    CERTIFICATE_WAITING,
+    CERTIFICATE_SUCCEEDED,
+    CERTIFICATE_FAILED,
+  };
+
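+  // Shared constructor that the public constructors above delegate to.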
+  WebRtcSessionDescriptionFactory(
+      rtc::Thread* signaling_thread,
+      cricket::ChannelManager* channel_manager,
+      rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+      const rtc::scoped_refptr<WebRtcIdentityRequestObserver>&
+          identity_request_observer,
+      WebRtcSession* session,
+      const std::string& session_id,
+      bool dtls_enabled);
+
+  // MessageHandler implementation.
+  virtual void OnMessage(rtc::Message* msg);
+
+  void InternalCreateOffer(CreateSessionDescriptionRequest request);
+  void InternalCreateAnswer(CreateSessionDescriptionRequest request);
+  // Posts failure notifications for all pending session description requests.
+  void FailPendingRequests(const std::string& reason);
+  void PostCreateSessionDescriptionFailed(
+      CreateSessionDescriptionObserver* observer,
+      const std::string& error);
+  void PostCreateSessionDescriptionSucceeded(
+      CreateSessionDescriptionObserver* observer,
+      SessionDescriptionInterface* description);
+
+  void OnIdentityRequestFailed(int error);
+  void SetCertificate(
+      const rtc::scoped_refptr<rtc::RTCCertificate>& certificate);
+
+  std::queue<CreateSessionDescriptionRequest>
+      create_session_description_requests_;
+  rtc::Thread* const signaling_thread_;
+  cricket::TransportDescriptionFactory transport_desc_factory_;
+  cricket::MediaSessionDescriptionFactory session_desc_factory_;
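+  // Monotonically increasing version number for the SDP "o=" line, shared
+  // by offers and answers.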
+  uint64_t session_version_;
+  const rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store_;
+  const rtc::scoped_refptr<WebRtcIdentityRequestObserver>
+      identity_request_observer_;
+  // TODO(jiayl): remove the dependency on session once bug 2264 is fixed.
+  WebRtcSession* const session_;
+  const std::string session_id_;
+  CertificateRequestState certificate_request_state_;
+
+  RTC_DISALLOW_COPY_AND_ASSIGN(WebRtcSessionDescriptionFactory);
+};
+}  // namespace webrtc
+
+#endif  // WEBRTC_API_WEBRTCSESSIONDESCRIPTIONFACTORY_H_
diff --git a/webrtc/build/android/test_runner.py b/webrtc/build/android/test_runner.py
index 3772005..fd8ca2f 100755
--- a/webrtc/build/android/test_runner.py
+++ b/webrtc/build/android/test_runner.py
@@ -39,7 +39,7 @@
     'common_video_unittests':
         'webrtc/common_video/common_video_unittests.isolate',
     'peerconnection_unittests':
-        'talk/peerconnection_unittests.isolate',
+        'webrtc/api/peerconnection_unittests.isolate',
     'modules_tests': 'webrtc/modules/modules_tests.isolate',
     'modules_unittests': 'webrtc/modules/modules_unittests.isolate',
     'rtc_unittests': 'webrtc/rtc_unittests.isolate',
diff --git a/webrtc/build/apk_tests.gyp b/webrtc/build/apk_tests.gyp
index 45cb7b6..f7e9a90 100644
--- a/webrtc/build/apk_tests.gyp
+++ b/webrtc/build/apk_tests.gyp
@@ -68,8 +68,8 @@
         'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)peerconnection_unittests<(SHARED_LIB_SUFFIX)',
       },
       'dependencies': [
-        '<(DEPTH)/talk/libjingle_tests.gyp:peerconnection_unittests',
-        '<(DEPTH)/talk/libjingle.gyp:libjingle_peerconnection_java',
+        '<(webrtc_root)/api/api_tests.gyp:peerconnection_unittests',
+        '<(webrtc_root)/api/api.gyp:libjingle_peerconnection_java',
       ],
       'includes': [
         '../../build/apk_test.gypi',
diff --git a/webrtc/build/common.gypi b/webrtc/build/common.gypi
index 2d81271..6d79aa3 100644
--- a/webrtc/build/common.gypi
+++ b/webrtc/build/common.gypi
@@ -134,6 +134,9 @@
     # Determines whether NEON code will be built.
     'build_with_neon%': 0,
 
+    # Disable this to skip building source requiring GTK.
+    'use_gtk%': 1,
+
     # Enable this to use HW H.264 encoder/decoder on iOS/Mac PeerConnections.
     # Enabling this may break interop with Android clients that support H264.
     'use_objc_h264%': 0,
@@ -193,6 +196,9 @@
         'include_tests%': 1,
         'restrict_webrtc_logging%': 0,
       }],
+      ['OS=="android" or OS=="linux"', {
+        'java_home%': '<!(python -c "import os; dir=os.getenv(\'JAVA_HOME\', \'/usr/lib/jvm/java-7-openjdk-amd64\'); assert os.path.exists(os.path.join(dir, \'include/jni.h\')), \'Point \\$JAVA_HOME or the java_home gyp variable to a directory containing include/jni.h!\'; print dir")',
+      }],
       ['OS=="ios"', {
         'build_libjpeg%': 0,
       }],
diff --git a/webrtc/examples/peerconnection/client/conductor.cc b/webrtc/examples/peerconnection/client/conductor.cc
index 3a40fd9..2facf94 100644
--- a/webrtc/examples/peerconnection/client/conductor.cc
+++ b/webrtc/examples/peerconnection/client/conductor.cc
@@ -13,8 +13,8 @@
 #include <utility>
 #include <vector>
 
-#include "talk/app/webrtc/test/fakeconstraints.h"
-#include "talk/app/webrtc/videosourceinterface.h"
+#include "webrtc/api/videosourceinterface.h"
+#include "webrtc/api/test/fakeconstraints.h"
 #include "webrtc/base/common.h"
 #include "webrtc/base/json.h"
 #include "webrtc/base/logging.h"
diff --git a/webrtc/examples/peerconnection/client/conductor.h b/webrtc/examples/peerconnection/client/conductor.h
index 21d838a..db2f77b 100644
--- a/webrtc/examples/peerconnection/client/conductor.h
+++ b/webrtc/examples/peerconnection/client/conductor.h
@@ -17,8 +17,8 @@
 #include <set>
 #include <string>
 
-#include "talk/app/webrtc/mediastreaminterface.h"
-#include "talk/app/webrtc/peerconnectioninterface.h"
+#include "webrtc/api/mediastreaminterface.h"
+#include "webrtc/api/peerconnectioninterface.h"
 #include "webrtc/examples/peerconnection/client/main_wnd.h"
 #include "webrtc/examples/peerconnection/client/peer_connection_client.h"
 #include "webrtc/base/scoped_ptr.h"
diff --git a/webrtc/examples/peerconnection/client/main_wnd.h b/webrtc/examples/peerconnection/client/main_wnd.h
index 6d39b38..5cf38df 100644
--- a/webrtc/examples/peerconnection/client/main_wnd.h
+++ b/webrtc/examples/peerconnection/client/main_wnd.h
@@ -15,7 +15,7 @@
 #include <map>
 #include <string>
 
-#include "talk/app/webrtc/mediastreaminterface.h"
+#include "webrtc/api/mediastreaminterface.h"
 #include "webrtc/base/win32.h"
 #include "webrtc/examples/peerconnection/client/peer_connection_client.h"
 #include "webrtc/media/base/mediachannel.h"
diff --git a/webrtc/webrtc.gyp b/webrtc/webrtc.gyp
index 974ad92..be91108 100644
--- a/webrtc/webrtc.gyp
+++ b/webrtc/webrtc.gyp
@@ -6,7 +6,35 @@
 # in the file PATENTS.  All contributing project authors may
 # be found in the AUTHORS file in the root of the source tree.
 {
+  'variables': {
+    'webrtc_all_dependencies': [
+      'base/base.gyp:*',
+      'sound/sound.gyp:*',
+      'common.gyp:*',
+      'common_audio/common_audio.gyp:*',
+      'common_video/common_video.gyp:*',
+      'media/media.gyp:*',
+      'modules/modules.gyp:*',
+      'p2p/p2p.gyp:*',
+      'system_wrappers/system_wrappers.gyp:*',
+      'tools/tools.gyp:*',
+      'voice_engine/voice_engine.gyp:*',
+      '<(webrtc_vp8_dir)/vp8.gyp:*',
+      '<(webrtc_vp9_dir)/vp9.gyp:*',
+    ],
+  },
   'conditions': [
+    ['build_with_chromium==0', {
+      # TODO(kjellander): Move this to webrtc_all_dependencies once all of talk/
+      # has been moved to webrtc/. It can't be processed by Chromium since the
+      # reference to build/java.gypi is using an absolute path (and includes
+      # entries cannot contain variables).
+      'variables': {
+        'webrtc_all_dependencies': [
+          'api/api.gyp:*',
+        ],
+      },
+    }],
     ['include_tests==1', {
       'includes': [
         'libjingle/xmllite/xmllite_tests.gypi',
@@ -54,23 +82,6 @@
     'call/webrtc_call.gypi',
     'video/webrtc_video.gypi',
   ],
-  'variables': {
-    'webrtc_all_dependencies': [
-      'base/base.gyp:*',
-      'sound/sound.gyp:*',
-      'common.gyp:*',
-      'common_audio/common_audio.gyp:*',
-      'common_video/common_video.gyp:*',
-      'media/media.gyp:*',
-      'modules/modules.gyp:*',
-      'p2p/p2p.gyp:*',
-      'system_wrappers/system_wrappers.gyp:*',
-      'tools/tools.gyp:*',
-      'voice_engine/voice_engine.gyp:*',
-      '<(webrtc_vp8_dir)/vp8.gyp:*',
-      '<(webrtc_vp9_dir)/vp9.gyp:*',
-    ],
-  },
   'targets': [
     {
       'target_name': 'webrtc_all',
@@ -82,6 +93,7 @@
       'conditions': [
         ['include_tests==1', {
           'dependencies': [
+            'api/api_tests.gyp:*',
             'common_video/common_video_unittests.gyp:*',
             'rtc_unittests',
             'system_wrappers/system_wrappers_tests.gyp:*',
@@ -91,14 +103,6 @@
             'webrtc_tests',
           ],
         }],
-        ['OS=="ios"', {
-          'dependencies': [
-            # TODO(tkchin): Move this target to webrtc_all_dependencies once it
-            # has more than iOS specific targets.
-            # TODO(tkchin): Figure out where to add this in BUILD.gn.
-            'api/api.gyp:*',
-          ],
-        }],
       ],
     },
     {
diff --git a/webrtc/webrtc_examples.gyp b/webrtc/webrtc_examples.gyp
index fd39b03..51cd792 100755
--- a/webrtc/webrtc_examples.gyp
+++ b/webrtc/webrtc_examples.gyp
@@ -78,7 +78,7 @@
             'examples/peerconnection/client/peer_connection_client.h',
           ],
           'dependencies': [
-            '../talk/libjingle.gyp:libjingle_peerconnection',
+            'api/api.gyp:libjingle_peerconnection',
             '<(webrtc_root)/system_wrappers/system_wrappers.gyp:field_trial_default',
           ],
           'conditions': [
@@ -364,7 +364,7 @@
           'target_name': 'AppRTCDemo',
           'type': 'none',
           'dependencies': [
-            '../talk/libjingle.gyp:libjingle_peerconnection_java',
+            'api/api.gyp:libjingle_peerconnection_java',
           ],
           'variables': {
             'apk_name': 'AppRTCDemo',