
<Switch librtc to the local library; add a ringtone alert for calls>

weizhengliang, 4 years ago
Parent commit: a1ecea83b8
100 changed files with 15365 additions and 0 deletions
  1. + 1 - 0  libwebrtc/.gitignore
  2. + 48 - 0  libwebrtc/build.gradle
  3. BIN  libwebrtc/libs/arm64-v8a/libjingle_peerconnection_so.so
  4. BIN  libwebrtc/libs/armeabi-v7a/libjingle_peerconnection_so.so
  5. + 21 - 0  libwebrtc/proguard-rules.pro
  6. + 2 - 0  libwebrtc/src/main/AndroidManifest.xml
  7. + 51 - 0  libwebrtc/src/main/java/modules/audio_device/android/java/src/org/webrtc/voiceengine/BuildInfo.java
  8. + 324 - 0  libwebrtc/src/main/java/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java
  9. + 378 - 0  libwebrtc/src/main/java/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java
  10. + 409 - 0  libwebrtc/src/main/java/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java
  11. + 524 - 0  libwebrtc/src/main/java/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
  12. + 388 - 0  libwebrtc/src/main/java/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java
  13. + 46 - 0  libwebrtc/src/main/java/rtc_base/java/src/org/webrtc/ContextUtils.java
  14. + 22 - 0  libwebrtc/src/main/java/rtc_base/java/src/org/webrtc/Loggable.java
  15. + 199 - 0  libwebrtc/src/main/java/rtc_base/java/src/org/webrtc/Logging.java
  16. + 2 - 0  libwebrtc/src/main/java/rtc_base/java/src/org/webrtc/OWNERS
  17. + 45 - 0  libwebrtc/src/main/java/rtc_base/java/src/org/webrtc/Size.java
  18. + 214 - 0  libwebrtc/src/main/java/rtc_base/java/src/org/webrtc/ThreadUtils.java
  19. + 21 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/AudioDecoderFactoryFactory.java
  20. + 21 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/AudioEncoderFactoryFactory.java
  21. + 20 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/AudioProcessingFactory.java
  22. + 26 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/AudioSource.java
  23. + 32 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/AudioTrack.java
  24. + 23 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/BuiltinAudioDecoderFactoryFactory.java
  25. + 23 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/BuiltinAudioEncoderFactoryFactory.java
  26. + 41 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/CallSessionFileRotatingLogSink.java
  27. + 35 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/Camera1Capturer.java
  28. + 186 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/Camera1Enumerator.java
  29. + 37 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/Camera2Capturer.java
  30. + 245 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/Camera2Enumerator.java
  31. + 206 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/CameraEnumerationAndroid.java
  32. + 25 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/CameraEnumerator.java
  33. + 167 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/CameraVideoCapturer.java
  34. + 27 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/CapturerObserver.java
  35. + 145 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/CryptoOptions.java
  36. + 195 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/DataChannel.java
  37. + 68 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/DefaultVideoDecoderFactory.java
  38. + 66 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/DefaultVideoEncoderFactory.java
  39. + 96 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/DtmfSender.java
  40. + 202 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/EglBase.java
  41. + 20 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/EglBase10.java
  42. + 20 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/EglBase14.java
  43. + 753 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/EglRenderer.java
  44. + 139 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/EncodedImage.java
  45. + 22 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/FecControllerFactoryFactoryInterface.java
  46. + 201 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/FileVideoCapturer.java
  47. + 26 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/FrameDecryptor.java
  48. + 26 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/FrameEncryptor.java
  49. + 31 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/GlRectDrawer.java
  50. + 129 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/GlShader.java
  51. + 122 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/GlTextureFrameBuffer.java
  52. + 58 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/GlUtil.java
  53. + 72 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java
  54. + 295 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java
  55. + 52 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/IceCandidate.java
  56. + 200 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/JavaI420Buffer.java
  57. + 20 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/LibvpxVp8Decoder.java
  58. + 25 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/LibvpxVp8Encoder.java
  59. + 22 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/LibvpxVp9Decoder.java
  60. + 27 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/LibvpxVp9Encoder.java
  61. + 1021 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/MediaCodecVideoDecoder.java
  62. + 1124 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java
  63. + 98 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/MediaConstraints.java
  64. + 58 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/MediaSource.java
  65. + 159 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/MediaStream.java
  66. + 129 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/MediaStreamTrack.java
  67. + 22 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/MediaTransportFactoryFactory.java
  68. + 81 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/Metrics.java
  69. + 24 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/NativeLibraryLoader.java
  70. + 20 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/NativePeerConnectionFactory.java
  71. + 314 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/NetworkMonitor.java
  72. + 870 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/NetworkMonitorAutoDetect.java
  73. + 3 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/OWNERS
  74. + 1244 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/PeerConnection.java
  75. + 65 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/PeerConnectionDependencies.java
  76. + 598 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/PeerConnectionFactory.java
  77. + 49 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/PlatformSoftwareVideoDecoderFactory.java
  78. + 73 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/Predicate.java
  79. + 113 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/RTCStats.java
  80. + 17 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/RTCStatsCollectorCallback.java
  81. + 62 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/RTCStatsReport.java
  82. + 28 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/RefCounted.java
  83. + 249 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/RendererCommon.java
  84. + 75 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/RtcCertificatePem.java
  85. + 273 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/RtpParameters.java
  86. + 97 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/RtpReceiver.java
  87. + 129 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/RtpSender.java
  88. + 243 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/RtpTransceiver.java
  89. + 27 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/SSLCertificateVerifier.java
  90. + 211 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java
  91. + 26 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/SdpObserver.java
  92. + 55 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/SessionDescription.java
  93. + 53 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/SoftwareVideoDecoderFactory.java
  94. + 47 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/SoftwareVideoEncoderFactory.java
  95. + 17 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/StatsObserver.java
  96. + 63 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/StatsReport.java
  97. + 160 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/SurfaceEglRenderer.java
  98. + 327 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
  99. + 300 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/SurfaceViewRenderer.java
  100. + 0 - 0  libwebrtc/src/main/java/sdk/android/api/org/webrtc/TextureBufferImpl.java

+ 1 - 0
libwebrtc/.gitignore

@@ -0,0 +1 @@
+/build

+ 48 - 0
libwebrtc/build.gradle

@@ -0,0 +1,48 @@
+apply plugin: 'com.android.library'
+
+android {
+    compileSdkVersion target_sdk_version
+    buildToolsVersion build_tools_version
+
+    defaultConfig {
+        minSdkVersion min_sdk_version
+        targetSdkVersion target_sdk_version
+        versionCode app_version_code
+        versionName app_version
+
+        ndk {
+            abiFilters 'armeabi-v7a','arm64-v8a'
+        }
+    }
+
+    buildTypes {
+        release {
+            minifyEnabled false
+            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
+        }
+    }
+
+    sourceSets.main {
+        jniLibs.srcDirs = ['libs']
+        java.srcDirs = [
+                "src/main/java/sdk/android/api",
+                "src/main/java/sdk/android/src/java",
+                "src/main/java/rtc_base/java/src",
+                "src/main/java/modules/audio_device/android/java/src",
+        ]
+    }
+
+    compileOptions {
+        sourceCompatibility JavaVersion.VERSION_1_8
+        targetCompatibility JavaVersion.VERSION_1_8
+    }
+
+    lintOptions {
+        checkReleaseBuilds false
+        abortOnError false
+    }
+}
+
+dependencies {
+    implementation fileTree(dir: 'libs', include: ['*.jar'])
+}
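
The jniLibs.srcDirs = ['libs'] line above is what packages the prebuilt libjingle_peerconnection_so.so binaries added below, and the four java.srcDirs entries graft the upstream WebRTC source layout into one Gradle library module. A minimal consumer-side sketch follows, assuming the app declares implementation project(':libwebrtc') and lists ':libwebrtc' in settings.gradle; the helper class name is illustrative, not part of this commit. The native library must load before any org.webrtc class is touched:

// Hypothetical app-side helper, not part of this commit.
public final class WebRtcNativeLoader {
  private static volatile boolean loaded;

  private WebRtcNativeLoader() {}

  public static void ensureLoaded() {
    if (!loaded) {
      synchronized (WebRtcNativeLoader.class) {
        if (!loaded) {
          // Resolves libs/<abi>/libjingle_peerconnection_so.so packaged above;
          // System.loadLibrary() adds the "lib" prefix and ".so" suffix itself.
          System.loadLibrary("jingle_peerconnection_so");
          loaded = true;
        }
      }
    }
  }
}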

BIN
libwebrtc/libs/arm64-v8a/libjingle_peerconnection_so.so


BIN
libwebrtc/libs/armeabi-v7a/libjingle_peerconnection_so.so


+ 21 - 0
libwebrtc/proguard-rules.pro

@@ -0,0 +1,21 @@
+# Add project specific ProGuard rules here.
+# You can control the set of applied configuration files using the
+# proguardFiles setting in build.gradle.
+#
+# For more details, see
+#   http://developer.android.com/guide/developing/tools/proguard.html
+
+# If your project uses WebView with JS, uncomment the following
+# and specify the fully qualified class name to the JavaScript interface
+# class:
+#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
+#   public *;
+#}
+
+# Uncomment this to preserve the line number information for
+# debugging stack traces.
+#-keepattributes SourceFile,LineNumberTable
+
+# If you keep the line number information, uncomment this to
+# hide the original source file name.
+#-renamesourcefileattribute SourceFile
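
Every rule in this template is commented out, so the file keeps nothing on its own; that is harmless here because build.gradle sets minifyEnabled false. If a consumer does enable shrinking, WebRTC's Java classes are called back from native code over JNI and would be stripped or renamed without keep rules. A commonly used safeguard, shown as an assumption rather than part of this commit:

# Hypothetical addition for consumers that enable shrinking:
# keep the JNI-facing API surface unobfuscated.
-keep class org.webrtc.** { *; }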

+ 2 - 0
libwebrtc/src/main/AndroidManifest.xml

@@ -0,0 +1,2 @@
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="org.webrtc" />

+ 51 - 0
libwebrtc/src/main/java/modules/audio_device/android/java/src/org/webrtc/voiceengine/BuildInfo.java

@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.voiceengine;
+
+import android.os.Build;
+
+public final class BuildInfo {
+  public static String getDevice() {
+    return Build.DEVICE;
+  }
+
+  public static String getDeviceModel() {
+    return Build.MODEL;
+  }
+
+  public static String getProduct() {
+    return Build.PRODUCT;
+  }
+
+  public static String getBrand() {
+    return Build.BRAND;
+  }
+
+  public static String getDeviceManufacturer() {
+    return Build.MANUFACTURER;
+  }
+
+  public static String getAndroidBuildId() {
+    return Build.ID;
+  }
+
+  public static String getBuildType() {
+    return Build.TYPE;
+  }
+
+  public static String getBuildRelease() {
+    return Build.VERSION.RELEASE;
+  }
+
+  public static int getSdkVersion() {
+    return Build.VERSION.SDK_INT;
+  }
+}

+ 324 - 0
libwebrtc/src/main/java/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java

@@ -0,0 +1,324 @@
+/*
+ *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.voiceengine;
+
+import android.media.audiofx.AcousticEchoCanceler;
+import android.media.audiofx.AudioEffect;
+import android.media.audiofx.AudioEffect.Descriptor;
+import android.media.audiofx.NoiseSuppressor;
+import android.os.Build;
+
+import org.webrtc.Logging;
+
+import java.util.List;
+import java.util.UUID;
+
+// This class wraps control of two different platform effects. Supported
+// effects are: AcousticEchoCanceler (AEC) and NoiseSuppressor (NS).
+// Calling enable() will activate all effects that are supported by the
+// device if the corresponding |shouldEnableXXX| member is set.
+public class WebRtcAudioEffects {
+  private static final boolean DEBUG = false;
+
+  private static final String TAG = "WebRtcAudioEffects";
+
+  // UUIDs for Software Audio Effects that we want to avoid using.
+  // The implementor field will be set to "The Android Open Source Project".
+  private static final UUID AOSP_ACOUSTIC_ECHO_CANCELER =
+      UUID.fromString("bb392ec0-8d4d-11e0-a896-0002a5d5c51b");
+  private static final UUID AOSP_NOISE_SUPPRESSOR =
+      UUID.fromString("c06c8400-8e06-11e0-9cb6-0002a5d5c51b");
+
+  // Contains the available effect descriptors returned from the
+  // AudioEffect.getEffects() call. This result is cached to avoid doing the
+  // slow OS call multiple times.
+  private static Descriptor[] cachedEffects;
+
+  // Contains the audio effect objects. Created in enable() and destroyed
+  // in release().
+  private AcousticEchoCanceler aec;
+  private NoiseSuppressor ns;
+
+  // Affects the final state given to the setEnabled() method on each effect.
+  // The default state is set to "disabled" but each effect can also be enabled
+  // by calling setAEC() and setNS().
+  // To enable an effect, both the shouldEnableXXX member and the static
+  // canUseXXX() must be true.
+  private boolean shouldEnableAec;
+  private boolean shouldEnableNs;
+
+  // Checks if the device implements Acoustic Echo Cancellation (AEC).
+  // Returns true if the device implements AEC, false otherwise.
+  public static boolean isAcousticEchoCancelerSupported() {
+    // Note: we're using isAcousticEchoCancelerEffectAvailable() instead of
+    // AcousticEchoCanceler.isAvailable() to avoid the expensive getEffects()
+    // OS API call.
+    return isAcousticEchoCancelerEffectAvailable();
+  }
+
+  // Checks if the device implements Noise Suppression (NS).
+  // Returns true if the device implements NS, false otherwise.
+  public static boolean isNoiseSuppressorSupported() {
+    // Note: we're using isNoiseSuppressorEffectAvailable() instead of
+    // NoiseSuppressor.isAvailable() to avoid the expensive getEffects()
+    // OS API call.
+    return isNoiseSuppressorEffectAvailable();
+  }
+
+  // Returns true if the device is blacklisted for HW AEC usage.
+  public static boolean isAcousticEchoCancelerBlacklisted() {
+    List<String> blackListedModels = WebRtcAudioUtils.getBlackListedModelsForAecUsage();
+    boolean isBlacklisted = blackListedModels.contains(Build.MODEL);
+    if (isBlacklisted) {
+      Logging.w(TAG, Build.MODEL + " is blacklisted for HW AEC usage!");
+    }
+    return isBlacklisted;
+  }
+
+  // Returns true if the device is blacklisted for HW NS usage.
+  public static boolean isNoiseSuppressorBlacklisted() {
+    List<String> blackListedModels = WebRtcAudioUtils.getBlackListedModelsForNsUsage();
+    boolean isBlacklisted = blackListedModels.contains(Build.MODEL);
+    if (isBlacklisted) {
+      Logging.w(TAG, Build.MODEL + " is blacklisted for HW NS usage!");
+    }
+    return isBlacklisted;
+  }
+
+  // Returns true if the platform AEC should be excluded based on its UUID.
+  // AudioEffect.queryEffects() can throw IllegalStateException.
+  private static boolean isAcousticEchoCancelerExcludedByUUID() {
+    if (Build.VERSION.SDK_INT < 18)
+      return false;
+    for (Descriptor d : getAvailableEffects()) {
+      if (d.type.equals(AudioEffect.EFFECT_TYPE_AEC)
+          && d.uuid.equals(AOSP_ACOUSTIC_ECHO_CANCELER)) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  // Returns true if the platform NS should be excluded based on its UUID.
+  // AudioEffect.queryEffects() can throw IllegalStateException.
+  private static boolean isNoiseSuppressorExcludedByUUID() {
+    if (Build.VERSION.SDK_INT < 18)
+      return false;
+    for (Descriptor d : getAvailableEffects()) {
+      if (d.type.equals(AudioEffect.EFFECT_TYPE_NS) && d.uuid.equals(AOSP_NOISE_SUPPRESSOR)) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  // Returns true if the device supports Acoustic Echo Cancellation (AEC).
+  private static boolean isAcousticEchoCancelerEffectAvailable() {
+    if (Build.VERSION.SDK_INT < 18)
+      return false;
+    return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_AEC);
+  }
+
+  // Returns true if the device supports Noise Suppression (NS).
+  private static boolean isNoiseSuppressorEffectAvailable() {
+    if (Build.VERSION.SDK_INT < 18)
+      return false;
+    return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_NS);
+  }
+
+  // Returns true if all conditions for supporting the HW AEC are fulfilled.
+  // It will not be possible to enable the HW AEC if this method returns false.
+  public static boolean canUseAcousticEchoCanceler() {
+    boolean canUseAcousticEchoCanceler = isAcousticEchoCancelerSupported()
+        && !WebRtcAudioUtils.useWebRtcBasedAcousticEchoCanceler()
+        && !isAcousticEchoCancelerBlacklisted() && !isAcousticEchoCancelerExcludedByUUID();
+    Logging.d(TAG, "canUseAcousticEchoCanceler: " + canUseAcousticEchoCanceler);
+    return canUseAcousticEchoCanceler;
+  }
+
+  // Returns true if all conditions for supporting the HW NS are fulfilled.
+  // It will not be possible to enable the HW NS if this method returns false.
+  public static boolean canUseNoiseSuppressor() {
+    boolean canUseNoiseSuppressor = isNoiseSuppressorSupported()
+        && !WebRtcAudioUtils.useWebRtcBasedNoiseSuppressor() && !isNoiseSuppressorBlacklisted()
+        && !isNoiseSuppressorExcludedByUUID();
+    Logging.d(TAG, "canUseNoiseSuppressor: " + canUseNoiseSuppressor);
+    return canUseNoiseSuppressor;
+  }
+
+  public static WebRtcAudioEffects create() {
+    return new WebRtcAudioEffects();
+  }
+
+  private WebRtcAudioEffects() {
+    Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
+  }
+
+  // Call this method to enable or disable the platform AEC. It modifies
+  // |shouldEnableAec| which is used in enable() where the actual state
+  // of the AEC effect is modified. Returns true if HW AEC is supported and
+  // false otherwise.
+  public boolean setAEC(boolean enable) {
+    Logging.d(TAG, "setAEC(" + enable + ")");
+    if (!canUseAcousticEchoCanceler()) {
+      Logging.w(TAG, "Platform AEC is not supported");
+      shouldEnableAec = false;
+      return false;
+    }
+    if (aec != null && (enable != shouldEnableAec)) {
+      Logging.e(TAG, "Platform AEC state can't be modified while recording");
+      return false;
+    }
+    shouldEnableAec = enable;
+    return true;
+  }
+
+  // Call this method to enable or disable the platform NS. It modifies
+  // |shouldEnableNs| which is used in enable() where the actual state
+  // of the NS effect is modified. Returns true if HW NS is supported and
+  // false otherwise.
+  public boolean setNS(boolean enable) {
+    Logging.d(TAG, "setNS(" + enable + ")");
+    if (!canUseNoiseSuppressor()) {
+      Logging.w(TAG, "Platform NS is not supported");
+      shouldEnableNs = false;
+      return false;
+    }
+    if (ns != null && (enable != shouldEnableNs)) {
+      Logging.e(TAG, "Platform NS state can't be modified while recording");
+      return false;
+    }
+    shouldEnableNs = enable;
+    return true;
+  }
+
+  public void enable(int audioSession) {
+    Logging.d(TAG, "enable(audioSession=" + audioSession + ")");
+    assertTrue(aec == null);
+    assertTrue(ns == null);
+
+    if (DEBUG) {
+      // Add logging of supported effects but filter out "VoIP effects", i.e.,
+      // AEC, AGC and NS. Avoid calling AudioEffect.queryEffects() unless the
+      // DEBUG flag is set since we have seen crashes in this API.
+      for (Descriptor d : AudioEffect.queryEffects()) {
+        if (effectTypeIsVoIP(d.type)) {
+          Logging.d(TAG, "name: " + d.name + ", "
+                  + "mode: " + d.connectMode + ", "
+                  + "implementor: " + d.implementor + ", "
+                  + "UUID: " + d.uuid);
+        }
+      }
+    }
+
+    if (isAcousticEchoCancelerSupported()) {
+      // Create an AcousticEchoCanceler and attach it to the AudioRecord on
+      // the specified audio session.
+      aec = AcousticEchoCanceler.create(audioSession);
+      if (aec != null) {
+        boolean enabled = aec.getEnabled();
+        boolean enable = shouldEnableAec && canUseAcousticEchoCanceler();
+        if (aec.setEnabled(enable) != AudioEffect.SUCCESS) {
+          Logging.e(TAG, "Failed to set the AcousticEchoCanceler state");
+        }
+        Logging.d(TAG, "AcousticEchoCanceler: was " + (enabled ? "enabled" : "disabled")
+                + ", enable: " + enable + ", is now: "
+                + (aec.getEnabled() ? "enabled" : "disabled"));
+      } else {
+        Logging.e(TAG, "Failed to create the AcousticEchoCanceler instance");
+      }
+    }
+
+    if (isNoiseSuppressorSupported()) {
+      // Create a NoiseSuppressor and attach it to the AudioRecord on the
+      // specified audio session.
+      ns = NoiseSuppressor.create(audioSession);
+      if (ns != null) {
+        boolean enabled = ns.getEnabled();
+        boolean enable = shouldEnableNs && canUseNoiseSuppressor();
+        if (ns.setEnabled(enable) != AudioEffect.SUCCESS) {
+          Logging.e(TAG, "Failed to set the NoiseSuppressor state");
+        }
+        Logging.d(TAG, "NoiseSuppressor: was " + (enabled ? "enabled" : "disabled") + ", enable: "
+                + enable + ", is now: " + (ns.getEnabled() ? "enabled" : "disabled"));
+      } else {
+        Logging.e(TAG, "Failed to create the NoiseSuppressor instance");
+      }
+    }
+  }
+
+  // Releases all native audio effect resources. It is a good practice to
+  // release the effect engine when not in use as control can be returned
+  // to other applications or the native resources released.
+  public void release() {
+    Logging.d(TAG, "release");
+    if (aec != null) {
+      aec.release();
+      aec = null;
+    }
+    if (ns != null) {
+      ns.release();
+      ns = null;
+    }
+  }
+
+  // Returns true for effect types in |type| that are of "VoIP" types:
+  // Acoustic Echo Canceler (AEC) or Automatic Gain Control (AGC) or
+  // Noise Suppressor (NS). Note that an extra check for support is needed
+  // in each comparison since some devices include effects in the
+  // AudioEffect.Descriptor array that are not actually available on the device.
+  // As an example: Samsung Galaxy S6 includes an AGC in the descriptor but
+  // AutomaticGainControl.isAvailable() returns false.
+  private boolean effectTypeIsVoIP(UUID type) {
+    if (Build.VERSION.SDK_INT < 18)
+      return false;
+
+    return (AudioEffect.EFFECT_TYPE_AEC.equals(type) && isAcousticEchoCancelerSupported())
+        || (AudioEffect.EFFECT_TYPE_NS.equals(type) && isNoiseSuppressorSupported());
+  }
+
+  // Helper method which throws an exception when an assertion has failed.
+  private static void assertTrue(boolean condition) {
+    if (!condition) {
+      throw new AssertionError("Expected condition to be true");
+    }
+  }
+
+  // Returns the cached copy of the audio effects array, if available, or
+  // queries the operating system for the list of effects.
+  private static Descriptor[] getAvailableEffects() {
+    if (cachedEffects != null) {
+      return cachedEffects;
+    }
+    // The caching is best effort only - if this method is called from several
+    // threads in parallel, they may end up doing the underlying OS call
+    // multiple times. It's normally only called on one thread so there's no
+    // real need to optimize for the multiple threads case.
+    cachedEffects = AudioEffect.queryEffects();
+    return cachedEffects;
+  }
+
+  // Returns true if an effect of the specified type is available. Functionally
+  // equivalent to (NoiseSuppressor|AutomaticGainControl|...).isAvailable(), but
+  // faster as it avoids the expensive OS call to enumerate effects.
+  private static boolean isEffectTypeAvailable(UUID effectType) {
+    Descriptor[] effects = getAvailableEffects();
+    if (effects == null) {
+      return false;
+    }
+    for (Descriptor d : effects) {
+      if (d.type.equals(effectType)) {
+        return true;
+      }
+    }
+    return false;
+  }
+}
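
A short usage sketch of the wrapper above, mirroring how WebRtcAudioRecord uses it later in this commit (initRecording() calls effects.enable(audioRecord.getAudioSessionId())): declare the desired state with setAEC()/setNS() before recording starts, attach with enable(), and free with release() when recording stops. The surrounding class and the AudioRecord parameter are illustrative assumptions.

import android.media.AudioRecord;
import org.webrtc.voiceengine.WebRtcAudioEffects;

// Hypothetical sketch, not part of this commit.
final class EffectsSketch {
  static WebRtcAudioEffects attach(AudioRecord audioRecord) {
    WebRtcAudioEffects effects = WebRtcAudioEffects.create();
    effects.setAEC(true); // returns false, and stays off, if the HW AEC is unusable
    effects.setNS(true);  // likewise for the HW noise suppressor
    effects.enable(audioRecord.getAudioSessionId());
    return effects;       // caller must invoke release() once recording ends
  }
}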

+ 378 - 0
libwebrtc/src/main/java/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java

@@ -0,0 +1,378 @@
+/*
+ *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.voiceengine;
+
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioRecord;
+import android.media.AudioTrack;
+import android.os.Build;
+
+import org.webrtc.ContextUtils;
+import org.webrtc.Logging;
+
+import java.util.Timer;
+import java.util.TimerTask;
+
+// WebRtcAudioManager handles tasks that use android.media.AudioManager.
+// At construction, storeAudioParameters() is called and it retrieves
+// fundamental audio parameters like native sample rate and number of channels.
+// The result is then provided to the caller by nativeCacheAudioParameters().
+// It is also possible to call init() to set up the audio environment for best
+// possible "VoIP performance". All settings done in init() are reverted by
+// dispose(). This class can also be used without calling init() if the user
+// prefers to set up the audio environment separately. However, it is
+// recommended to always use AudioManager.MODE_IN_COMMUNICATION.
+public class WebRtcAudioManager {
+  private static final boolean DEBUG = false;
+
+  private static final String TAG = "WebRtcAudioManager";
+
+  // TODO(bugs.webrtc.org/8914): disabled by default until AAudio support has
+  // been completed. Goal is to always return false on Android O MR1 and higher.
+  private static final boolean blacklistDeviceForAAudioUsage = true;
+
+  // Use mono as default for both audio directions.
+  private static boolean useStereoOutput;
+  private static boolean useStereoInput;
+
+  private static boolean blacklistDeviceForOpenSLESUsage;
+  private static boolean blacklistDeviceForOpenSLESUsageIsOverridden;
+
+  // Call this method to override the default list of blacklisted devices
+  // specified in WebRtcAudioUtils.BLACKLISTED_OPEN_SL_ES_MODELS.
+  // Allows an app to take control over which devices to exclude from using
+  // the OpenSL ES audio output path.
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized void setBlacklistDeviceForOpenSLESUsage(boolean enable) {
+    blacklistDeviceForOpenSLESUsageIsOverridden = true;
+    blacklistDeviceForOpenSLESUsage = enable;
+  }
+
+  // Call these methods to override the default mono audio modes for the specified direction(s)
+  // (input and/or output).
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized void setStereoOutput(boolean enable) {
+    Logging.w(TAG, "Overriding default output behavior: setStereoOutput(" + enable + ')');
+    useStereoOutput = enable;
+  }
+
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized void setStereoInput(boolean enable) {
+    Logging.w(TAG, "Overriding default input behavior: setStereoInput(" + enable + ')');
+    useStereoInput = enable;
+  }
+
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized boolean getStereoOutput() {
+    return useStereoOutput;
+  }
+
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized boolean getStereoInput() {
+    return useStereoInput;
+  }
+
+  // Default audio data format is PCM 16 bit per sample.
+  // Guaranteed to be supported by all devices.
+  private static final int BITS_PER_SAMPLE = 16;
+
+  private static final int DEFAULT_FRAME_PER_BUFFER = 256;
+
+  // Private utility class that periodically checks and logs the volume level
+  // of the audio stream that is currently controlled by the volume control.
+  // A timer triggers logs once every 30 seconds and the timer's associated
+  // thread is named "WebRtcVolumeLevelLoggerThread".
+  private static class VolumeLogger {
+    private static final String THREAD_NAME = "WebRtcVolumeLevelLoggerThread";
+    private static final int TIMER_PERIOD_IN_SECONDS = 30;
+
+    private final AudioManager audioManager;
+    private Timer timer;
+
+    public VolumeLogger(AudioManager audioManager) {
+      this.audioManager = audioManager;
+    }
+
+    public void start() {
+      timer = new Timer(THREAD_NAME);
+      timer.schedule(new LogVolumeTask(audioManager.getStreamMaxVolume(AudioManager.STREAM_RING),
+                         audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL)),
+          0, TIMER_PERIOD_IN_SECONDS * 1000);
+    }
+
+    private class LogVolumeTask extends TimerTask {
+      private final int maxRingVolume;
+      private final int maxVoiceCallVolume;
+
+      LogVolumeTask(int maxRingVolume, int maxVoiceCallVolume) {
+        this.maxRingVolume = maxRingVolume;
+        this.maxVoiceCallVolume = maxVoiceCallVolume;
+      }
+
+      @Override
+      public void run() {
+        final int mode = audioManager.getMode();
+        if (mode == AudioManager.MODE_RINGTONE) {
+          Logging.d(TAG, "STREAM_RING stream volume: "
+                  + audioManager.getStreamVolume(AudioManager.STREAM_RING) + " (max="
+                  + maxRingVolume + ")");
+        } else if (mode == AudioManager.MODE_IN_COMMUNICATION) {
+          Logging.d(TAG, "VOICE_CALL stream volume: "
+                  + audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL) + " (max="
+                  + maxVoiceCallVolume + ")");
+        }
+      }
+    }
+
+    private void stop() {
+      if (timer != null) {
+        timer.cancel();
+        timer = null;
+      }
+    }
+  }
+
+  private final long nativeAudioManager;
+  private final AudioManager audioManager;
+
+  private boolean initialized;
+  private int nativeSampleRate;
+  private int nativeChannels;
+
+  private boolean hardwareAEC;
+  private boolean hardwareAGC;
+  private boolean hardwareNS;
+  private boolean lowLatencyOutput;
+  private boolean lowLatencyInput;
+  private boolean proAudio;
+  private boolean aAudio;
+  private int sampleRate;
+  private int outputChannels;
+  private int inputChannels;
+  private int outputBufferSize;
+  private int inputBufferSize;
+
+  private final VolumeLogger volumeLogger;
+
+  WebRtcAudioManager(long nativeAudioManager) {
+    Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
+    this.nativeAudioManager = nativeAudioManager;
+    audioManager =
+        (AudioManager) ContextUtils.getApplicationContext().getSystemService(Context.AUDIO_SERVICE);
+    if (DEBUG) {
+      WebRtcAudioUtils.logDeviceInfo(TAG);
+    }
+    volumeLogger = new VolumeLogger(audioManager);
+    storeAudioParameters();
+    nativeCacheAudioParameters(sampleRate, outputChannels, inputChannels, hardwareAEC, hardwareAGC,
+        hardwareNS, lowLatencyOutput, lowLatencyInput, proAudio, aAudio, outputBufferSize,
+        inputBufferSize, nativeAudioManager);
+    WebRtcAudioUtils.logAudioState(TAG);
+  }
+
+  private boolean init() {
+    Logging.d(TAG, "init" + WebRtcAudioUtils.getThreadInfo());
+    if (initialized) {
+      return true;
+    }
+    Logging.d(TAG, "audio mode is: "
+        + WebRtcAudioUtils.modeToString(audioManager.getMode()));
+    initialized = true;
+    volumeLogger.start();
+    return true;
+  }
+
+  private void dispose() {
+    Logging.d(TAG, "dispose" + WebRtcAudioUtils.getThreadInfo());
+    if (!initialized) {
+      return;
+    }
+    volumeLogger.stop();
+  }
+
+  private boolean isCommunicationModeEnabled() {
+    return (audioManager.getMode() == AudioManager.MODE_IN_COMMUNICATION);
+  }
+
+  private boolean isDeviceBlacklistedForOpenSLESUsage() {
+    boolean blacklisted = blacklistDeviceForOpenSLESUsageIsOverridden
+        ? blacklistDeviceForOpenSLESUsage
+        : WebRtcAudioUtils.deviceIsBlacklistedForOpenSLESUsage();
+    if (blacklisted) {
+      Logging.d(TAG, Build.MODEL + " is blacklisted for OpenSL ES usage!");
+    }
+    return blacklisted;
+  }
+
+  private void storeAudioParameters() {
+    outputChannels = getStereoOutput() ? 2 : 1;
+    inputChannels = getStereoInput() ? 2 : 1;
+    sampleRate = getNativeOutputSampleRate();
+    hardwareAEC = isAcousticEchoCancelerSupported();
+    // TODO(henrika): use of hardware AGC is no longer supported. Currently
+    // hardcoded to false. To be removed.
+    hardwareAGC = false;
+    hardwareNS = isNoiseSuppressorSupported();
+    lowLatencyOutput = isLowLatencyOutputSupported();
+    lowLatencyInput = isLowLatencyInputSupported();
+    proAudio = isProAudioSupported();
+    aAudio = isAAudioSupported();
+    outputBufferSize = lowLatencyOutput ? getLowLatencyOutputFramesPerBuffer()
+                                        : getMinOutputFrameSize(sampleRate, outputChannels);
+    inputBufferSize = lowLatencyInput ? getLowLatencyInputFramesPerBuffer()
+                                      : getMinInputFrameSize(sampleRate, inputChannels);
+  }
+
+  // Gets the current earpiece state.
+  private boolean hasEarpiece() {
+    return ContextUtils.getApplicationContext().getPackageManager().hasSystemFeature(
+        PackageManager.FEATURE_TELEPHONY);
+  }
+
+  // Returns true if low-latency audio output is supported.
+  private boolean isLowLatencyOutputSupported() {
+    return ContextUtils.getApplicationContext().getPackageManager().hasSystemFeature(
+        PackageManager.FEATURE_AUDIO_LOW_LATENCY);
+  }
+
+  // Returns true if low-latency audio input is supported.
+  // TODO(henrika): remove the hardcoded false return value when OpenSL ES
+  // input performance has been evaluated and tested more.
+  public boolean isLowLatencyInputSupported() {
+    // TODO(henrika): investigate if some sort of device list is needed here
+    // as well. The NDK doc states that: "As of API level 21, lower latency
+    // audio input is supported on select devices. To take advantage of this
+    // feature, first confirm that lower latency output is available".
+    return Build.VERSION.SDK_INT >= 21 && isLowLatencyOutputSupported();
+  }
+
+  // Returns true if the device has professional audio level of functionality
+  // and therefore supports the lowest possible round-trip latency.
+  private boolean isProAudioSupported() {
+    return Build.VERSION.SDK_INT >= 23
+        && ContextUtils.getApplicationContext().getPackageManager().hasSystemFeature(
+               PackageManager.FEATURE_AUDIO_PRO);
+  }
+
+  // AAudio is supported on Android Oreo MR1 (API 27) and higher.
+  // TODO(bugs.webrtc.org/8914): currently disabled by default.
+  private boolean isAAudioSupported() {
+    if (blacklistDeviceForAAudioUsage) {
+      Logging.w(TAG, "AAudio support is currently disabled on all devices!");
+    }
+    return !blacklistDeviceForAAudioUsage && Build.VERSION.SDK_INT >= 27;
+  }
+
+  // Returns the native output sample rate for this device's output stream.
+  private int getNativeOutputSampleRate() {
+    // Override this if we're running on an old emulator image which only
+    // supports 8 kHz and doesn't support PROPERTY_OUTPUT_SAMPLE_RATE.
+    if (WebRtcAudioUtils.runningOnEmulator()) {
+      Logging.d(TAG, "Running emulator, overriding sample rate to 8 kHz.");
+      return 8000;
+    }
+    // Default can be overridden by WebRtcAudioUtils.setDefaultSampleRateHz().
+    // If so, use that value and return here.
+    if (WebRtcAudioUtils.isDefaultSampleRateOverridden()) {
+      Logging.d(TAG, "Default sample rate is overridden to "
+              + WebRtcAudioUtils.getDefaultSampleRateHz() + " Hz");
+      return WebRtcAudioUtils.getDefaultSampleRateHz();
+    }
+    // No overrides available. Deliver best possible estimate based on default
+    // Android AudioManager APIs.
+    final int sampleRateHz = getSampleRateForApiLevel();
+    Logging.d(TAG, "Sample rate is set to " + sampleRateHz + " Hz");
+    return sampleRateHz;
+  }
+
+  private int getSampleRateForApiLevel() {
+    if (Build.VERSION.SDK_INT < 17) {
+      return WebRtcAudioUtils.getDefaultSampleRateHz();
+    }
+    String sampleRateString = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
+    return (sampleRateString == null) ? WebRtcAudioUtils.getDefaultSampleRateHz()
+                                      : Integer.parseInt(sampleRateString);
+  }
+
+  // Returns the native output buffer size for low-latency output streams.
+  private int getLowLatencyOutputFramesPerBuffer() {
+    assertTrue(isLowLatencyOutputSupported());
+    if (Build.VERSION.SDK_INT < 17) {
+      return DEFAULT_FRAME_PER_BUFFER;
+    }
+    String framesPerBuffer =
+        audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
+    return framesPerBuffer == null ? DEFAULT_FRAME_PER_BUFFER : Integer.parseInt(framesPerBuffer);
+  }
+
+  // Returns true if the device supports an audio effect (AEC or NS).
+  // Four conditions must be fulfilled for these functions to return true:
+  // 1) the platform must support the built-in (HW) effect,
+  // 2) explicit use (override) of a WebRTC based version must not be set,
+  // 3) the device must not be blacklisted for use of the effect, and
+  // 4) the UUID of the effect must be approved (some UUIDs can be excluded).
+  private static boolean isAcousticEchoCancelerSupported() {
+    return WebRtcAudioEffects.canUseAcousticEchoCanceler();
+  }
+  private static boolean isNoiseSuppressorSupported() {
+    return WebRtcAudioEffects.canUseNoiseSuppressor();
+  }
+
+  // Returns the minimum output buffer size for Java based audio (AudioTrack).
+  // This size can also be used for OpenSL ES implementations on devices that
+  // lack support for low-latency output.
+  private static int getMinOutputFrameSize(int sampleRateInHz, int numChannels) {
+    final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8);
+    final int channelConfig =
+        (numChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO);
+    return AudioTrack.getMinBufferSize(
+               sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT)
+        / bytesPerFrame;
+  }
+
+  // Returns the native input buffer size for input streams.
+  private int getLowLatencyInputFramesPerBuffer() {
+    assertTrue(isLowLatencyInputSupported());
+    return getLowLatencyOutputFramesPerBuffer();
+  }
+
+  // Returns the minimum input buffer size for Java based audio (AudioRecord).
+  // This size can also be used for OpenSL ES implementations on devices that
+  // lack support for low-latency input.
+  private static int getMinInputFrameSize(int sampleRateInHz, int numChannels) {
+    final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8);
+    final int channelConfig =
+        (numChannels == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO);
+    return AudioRecord.getMinBufferSize(
+               sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT)
+        / bytesPerFrame;
+  }
+
+  // Helper method which throws an exception when an assertion has failed.
+  private static void assertTrue(boolean condition) {
+    if (!condition) {
+      throw new AssertionError("Expected condition to be true");
+    }
+  }
+
+  private native void nativeCacheAudioParameters(int sampleRate, int outputChannels,
+      int inputChannels, boolean hardwareAEC, boolean hardwareAGC, boolean hardwareNS,
+      boolean lowLatencyOutput, boolean lowLatencyInput, boolean proAudio, boolean aAudio,
+      int outputBufferSize, int inputBufferSize, long nativeAudioManager);
+}
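
Two details above are worth making concrete: the class comment recommends running calls in AudioManager.MODE_IN_COMMUNICATION, and getMinOutputFrameSize() converts AudioTrack.getMinBufferSize() from bytes to frames with bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8). A hedged caller-side sketch for a mono 48 kHz PCM16 stream; the class name, method name, and sample rate are illustrative assumptions:

import android.content.Context;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;

// Hypothetical sketch, not part of this commit.
final class AudioSetupSketch {
  static int prepareForCall(Context context) {
    AudioManager am = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
    am.setMode(AudioManager.MODE_IN_COMMUNICATION); // the mode recommended above
    int sampleRateInHz = 48000;        // illustrative native rate
    int bytesPerFrame = 1 * (16 / 8);  // mono, PCM16 => 2 bytes per frame
    int minBytes = AudioTrack.getMinBufferSize(
        sampleRateInHz, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    return minBytes / bytesPerFrame;   // same bytes-to-frames math as getMinOutputFrameSize()
  }
}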

+ 409 - 0
libwebrtc/src/main/java/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java

@@ -0,0 +1,409 @@
+/*
+ *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.voiceengine;
+
+import android.media.AudioFormat;
+import android.media.AudioRecord;
+import android.media.MediaRecorder.AudioSource;
+import android.os.Build;
+import android.os.Process;
+
+import java.lang.System;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.concurrent.TimeUnit;
+import org.webrtc.Logging;
+import org.webrtc.ThreadUtils;
+
+public class WebRtcAudioRecord {
+  private static final boolean DEBUG = false;
+
+  private static final String TAG = "WebRtcAudioRecord";
+
+  // Default audio data format is PCM 16 bit per sample.
+  // Guaranteed to be supported by all devices.
+  private static final int BITS_PER_SAMPLE = 16;
+
+  // Requested size of each recorded buffer provided to the client.
+  private static final int CALLBACK_BUFFER_SIZE_MS = 10;
+
+  // Average number of callbacks per second.
+  private static final int BUFFERS_PER_SECOND = 1000 / CALLBACK_BUFFER_SIZE_MS;
+
+  // We ask for a native buffer size of BUFFER_SIZE_FACTOR * (minimum required
+  // buffer size). The extra space is allocated to guard against glitches under
+  // high load.
+  private static final int BUFFER_SIZE_FACTOR = 2;
+
+  // The AudioRecordJavaThread is allowed to wait for a successful call to join(),
+  // but the wait times out after this amount of time.
+  private static final long AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS = 2000;
+
+  private static final int DEFAULT_AUDIO_SOURCE = getDefaultAudioSource();
+  private static int audioSource = DEFAULT_AUDIO_SOURCE;
+
+  private final long nativeAudioRecord;
+
+  private WebRtcAudioEffects effects;
+
+  private ByteBuffer byteBuffer;
+
+  private AudioRecord audioRecord;
+  private AudioRecordThread audioThread;
+
+  private static volatile boolean microphoneMute;
+  private byte[] emptyBytes;
+
+  // Audio recording error handler functions.
+  public enum AudioRecordStartErrorCode {
+    AUDIO_RECORD_START_EXCEPTION,
+    AUDIO_RECORD_START_STATE_MISMATCH,
+  }
+
+  public static interface WebRtcAudioRecordErrorCallback {
+    void onWebRtcAudioRecordInitError(String errorMessage);
+    void onWebRtcAudioRecordStartError(AudioRecordStartErrorCode errorCode, String errorMessage);
+    void onWebRtcAudioRecordError(String errorMessage);
+  }
+
+  private static WebRtcAudioRecordErrorCallback errorCallback;
+
+  public static void setErrorCallback(WebRtcAudioRecordErrorCallback errorCallback) {
+    Logging.d(TAG, "Set error callback");
+    WebRtcAudioRecord.errorCallback = errorCallback;
+  }
+
+  /**
+   * Contains audio sample information. Object is passed using {@link
+   * WebRtcAudioRecord.WebRtcAudioRecordSamplesReadyCallback}
+   */
+  public static class AudioSamples {
+    /** See {@link AudioRecord#getAudioFormat()} */
+    private final int audioFormat;
+    /** See {@link AudioRecord#getChannelCount()} */
+    private final int channelCount;
+    /** See {@link AudioRecord#getSampleRate()} */
+    private final int sampleRate;
+
+    private final byte[] data;
+
+    private AudioSamples(AudioRecord audioRecord, byte[] data) {
+      this.audioFormat = audioRecord.getAudioFormat();
+      this.channelCount = audioRecord.getChannelCount();
+      this.sampleRate = audioRecord.getSampleRate();
+      this.data = data;
+    }
+
+    public int getAudioFormat() {
+      return audioFormat;
+    }
+
+    public int getChannelCount() {
+      return channelCount;
+    }
+
+    public int getSampleRate() {
+      return sampleRate;
+    }
+
+    public byte[] getData() {
+      return data;
+    }
+  }
+
+  /** Called when new audio samples are ready. This should only be set for debug purposes */
+  public static interface WebRtcAudioRecordSamplesReadyCallback {
+    void onWebRtcAudioRecordSamplesReady(AudioSamples samples);
+  }
+
+  private static WebRtcAudioRecordSamplesReadyCallback audioSamplesReadyCallback;
+
+  public static void setOnAudioSamplesReady(WebRtcAudioRecordSamplesReadyCallback callback) {
+    audioSamplesReadyCallback = callback;
+  }
+
+  /**
+   * Audio thread which keeps calling AudioRecord.read() waiting for audio
+   * to be recorded. Feeds recorded data to the native counterpart as a
+   * periodic sequence of callbacks using DataIsRecorded().
+   * This thread uses a Process.THREAD_PRIORITY_URGENT_AUDIO priority.
+   */
+  private class AudioRecordThread extends Thread {
+    private volatile boolean keepAlive = true;
+
+    public AudioRecordThread(String name) {
+      super(name);
+    }
+
+    // TODO(titovartem) make correct fix during webrtc:9175
+    @SuppressWarnings("ByteBufferBackingArray")
+    @Override
+    public void run() {
+      Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);
+      Logging.d(TAG, "AudioRecordThread" + WebRtcAudioUtils.getThreadInfo());
+      assertTrue(audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING);
+
+      long lastTime = System.nanoTime();
+      while (keepAlive) {
+        int bytesRead = audioRecord.read(byteBuffer, byteBuffer.capacity());
+        if (bytesRead == byteBuffer.capacity()) {
+          if (microphoneMute) {
+            byteBuffer.clear();
+            byteBuffer.put(emptyBytes);
+          }
+          // It's possible we've been shut down during the read, and stopRecording() tried and
+          // failed to join this thread. To be a bit safer, try to avoid calling any native methods
+          // in case they've been unregistered after stopRecording() returned.
+          if (keepAlive) {
+            nativeDataIsRecorded(bytesRead, nativeAudioRecord);
+          }
+          if (audioSamplesReadyCallback != null) {
+            // Copy the entire byte buffer array.  Assume that the start of the byteBuffer is
+            // at index 0.
+            byte[] data = Arrays.copyOf(byteBuffer.array(), byteBuffer.capacity());
+            audioSamplesReadyCallback.onWebRtcAudioRecordSamplesReady(
+                new AudioSamples(audioRecord, data));
+          }
+        } else {
+          String errorMessage = "AudioRecord.read failed: " + bytesRead;
+          Logging.e(TAG, errorMessage);
+          if (bytesRead == AudioRecord.ERROR_INVALID_OPERATION) {
+            keepAlive = false;
+            reportWebRtcAudioRecordError(errorMessage);
+          }
+        }
+        if (DEBUG) {
+          long nowTime = System.nanoTime();
+          long durationInMs = TimeUnit.NANOSECONDS.toMillis((nowTime - lastTime));
+          lastTime = nowTime;
+          Logging.d(TAG, "bytesRead[" + durationInMs + "] " + bytesRead);
+        }
+      }
+
+      try {
+        if (audioRecord != null) {
+          audioRecord.stop();
+        }
+      } catch (IllegalStateException e) {
+        Logging.e(TAG, "AudioRecord.stop failed: " + e.getMessage());
+      }
+    }
+
+    // Stops the inner thread loop and also calls AudioRecord.stop().
+    // Does not block the calling thread.
+    public void stopThread() {
+      Logging.d(TAG, "stopThread");
+      keepAlive = false;
+    }
+  }
+
+  WebRtcAudioRecord(long nativeAudioRecord) {
+    Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
+    this.nativeAudioRecord = nativeAudioRecord;
+    if (DEBUG) {
+      WebRtcAudioUtils.logDeviceInfo(TAG);
+    }
+    effects = WebRtcAudioEffects.create();
+  }
+
+  private boolean enableBuiltInAEC(boolean enable) {
+    Logging.d(TAG, "enableBuiltInAEC(" + enable + ')');
+    if (effects == null) {
+      Logging.e(TAG, "Built-in AEC is not supported on this platform");
+      return false;
+    }
+    return effects.setAEC(enable);
+  }
+
+  private boolean enableBuiltInNS(boolean enable) {
+    Logging.d(TAG, "enableBuiltInNS(" + enable + ')');
+    if (effects == null) {
+      Logging.e(TAG, "Built-in NS is not supported on this platform");
+      return false;
+    }
+    return effects.setNS(enable);
+  }
+
+  private int initRecording(int sampleRate, int channels) {
+    Logging.d(TAG, "initRecording(sampleRate=" + sampleRate + ", channels=" + channels + ")");
+    if (audioRecord != null) {
+      reportWebRtcAudioRecordInitError("InitRecording called twice without StopRecording.");
+      return -1;
+    }
+    final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8);
+    final int framesPerBuffer = sampleRate / BUFFERS_PER_SECOND;
+    byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * framesPerBuffer);
+    Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity());
+    emptyBytes = new byte[byteBuffer.capacity()];
+    // Rather than passing the ByteBuffer with every callback (requiring
+    // the potentially expensive GetDirectBufferAddress) we simply have the
+    // native class cache the address to the memory once.
+    nativeCacheDirectBufferAddress(byteBuffer, nativeAudioRecord);
+
+    // Get the minimum buffer size required for the successful creation of
+    // an AudioRecord object, in byte units.
+    // Note that this size doesn't guarantee a smooth recording under load.
+    final int channelConfig = channelCountToConfiguration(channels);
+    int minBufferSize =
+        AudioRecord.getMinBufferSize(sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
+    if (minBufferSize == AudioRecord.ERROR || minBufferSize == AudioRecord.ERROR_BAD_VALUE) {
+      reportWebRtcAudioRecordInitError("AudioRecord.getMinBufferSize failed: " + minBufferSize);
+      return -1;
+    }
+    Logging.d(TAG, "AudioRecord.getMinBufferSize: " + minBufferSize);
+
+    // Use a larger buffer size than the minimum required when creating the
+    // AudioRecord instance to ensure smooth recording under load. It has been
+    // verified that it does not increase the actual recording latency.
+    int bufferSizeInBytes = Math.max(BUFFER_SIZE_FACTOR * minBufferSize, byteBuffer.capacity());
+    Logging.d(TAG, "bufferSizeInBytes: " + bufferSizeInBytes);
+    try {
+      audioRecord = new AudioRecord(audioSource, sampleRate, channelConfig,
+          AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes);
+    } catch (IllegalArgumentException e) {
+      reportWebRtcAudioRecordInitError("AudioRecord ctor error: " + e.getMessage());
+      releaseAudioResources();
+      return -1;
+    }
+    if (audioRecord == null || audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
+      reportWebRtcAudioRecordInitError("Failed to create a new AudioRecord instance");
+      releaseAudioResources();
+      return -1;
+    }
+    if (effects != null) {
+      effects.enable(audioRecord.getAudioSessionId());
+    }
+    logMainParameters();
+    logMainParametersExtended();
+    return framesPerBuffer;
+  }
+
+  private boolean startRecording() {
+    Logging.d(TAG, "startRecording");
+    assertTrue(audioRecord != null);
+    assertTrue(audioThread == null);
+    try {
+      audioRecord.startRecording();
+    } catch (IllegalStateException e) {
+      reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_EXCEPTION,
+          "AudioRecord.startRecording failed: " + e.getMessage());
+      return false;
+    }
+    if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
+      reportWebRtcAudioRecordStartError(
+          AudioRecordStartErrorCode.AUDIO_RECORD_START_STATE_MISMATCH,
+          "AudioRecord.startRecording failed - incorrect state :"
+          + audioRecord.getRecordingState());
+      return false;
+    }
+    audioThread = new AudioRecordThread("AudioRecordJavaThread");
+    audioThread.start();
+    return true;
+  }
+
+  private boolean stopRecording() {
+    Logging.d(TAG, "stopRecording");
+    assertTrue(audioThread != null);
+    audioThread.stopThread();
+    if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) {
+      Logging.e(TAG, "Join of AudioRecordJavaThread timed out");
+      WebRtcAudioUtils.logAudioState(TAG);
+    }
+    audioThread = null;
+    if (effects != null) {
+      effects.release();
+    }
+    releaseAudioResources();
+    return true;
+  }
+
+  private void logMainParameters() {
+    Logging.d(TAG, "AudioRecord: "
+            + "session ID: " + audioRecord.getAudioSessionId() + ", "
+            + "channels: " + audioRecord.getChannelCount() + ", "
+            + "sample rate: " + audioRecord.getSampleRate());
+  }
+
+  private void logMainParametersExtended() {
+    if (Build.VERSION.SDK_INT >= 23) {
+      Logging.d(TAG, "AudioRecord: "
+              // The frame count of the native AudioRecord buffer.
+              + "buffer size in frames: " + audioRecord.getBufferSizeInFrames());
+    }
+  }
+
+  // Helper method which throws an exception when an assertion has failed.
+  private static void assertTrue(boolean condition) {
+    if (!condition) {
+      throw new AssertionError("Expected condition to be true");
+    }
+  }
+
+  private int channelCountToConfiguration(int channels) {
+    return (channels == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO);
+  }
+
+  private native void nativeCacheDirectBufferAddress(ByteBuffer byteBuffer, long nativeAudioRecord);
+
+  private native void nativeDataIsRecorded(int bytes, long nativeAudioRecord);
+
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized void setAudioSource(int source) {
+    Logging.w(TAG, "Audio source is changed from: " + audioSource
+            + " to " + source);
+    audioSource = source;
+  }
+
+  private static int getDefaultAudioSource() {
+    return AudioSource.VOICE_COMMUNICATION;
+  }
+
+  // Sets all recorded samples to zero if |mute| is true, i.e., ensures that
+  // the microphone is muted.
+  public static void setMicrophoneMute(boolean mute) {
+    Logging.w(TAG, "setMicrophoneMute(" + mute + ")");
+    microphoneMute = mute;
+  }
+
+  // Releases the native AudioRecord resources.
+  private void releaseAudioResources() {
+    Logging.d(TAG, "releaseAudioResources");
+    if (audioRecord != null) {
+      audioRecord.release();
+      audioRecord = null;
+    }
+  }
+
+  private void reportWebRtcAudioRecordInitError(String errorMessage) {
+    Logging.e(TAG, "Init recording error: " + errorMessage);
+    WebRtcAudioUtils.logAudioState(TAG);
+    if (errorCallback != null) {
+      errorCallback.onWebRtcAudioRecordInitError(errorMessage);
+    }
+  }
+
+  private void reportWebRtcAudioRecordStartError(
+      AudioRecordStartErrorCode errorCode, String errorMessage) {
+    Logging.e(TAG, "Start recording error: " + errorCode + ". " + errorMessage);
+    WebRtcAudioUtils.logAudioState(TAG);
+    if (errorCallback != null) {
+      errorCallback.onWebRtcAudioRecordStartError(errorCode, errorMessage);
+    }
+  }
+
+  private void reportWebRtcAudioRecordError(String errorMessage) {
+    Logging.e(TAG, "Run-time recording error: " + errorMessage);
+    WebRtcAudioUtils.logAudioState(TAG);
+    if (errorCallback != null) {
+      errorCallback.onWebRtcAudioRecordError(errorMessage);
+    }
+  }
+}
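
Illustrative usage (not part of this commit): a minimal sketch of how a client
could drive the public static hooks defined above. The wrapper class
RecordConfigExample is hypothetical; only setAudioSource() and
setMicrophoneMute() come from the file itself.

import android.media.MediaRecorder.AudioSource;
import org.webrtc.voiceengine.WebRtcAudioRecord;

public final class RecordConfigExample {
  public static void configureForCall() {
    // Keep the default VOICE_COMMUNICATION source unless a device quirk
    // requires a different input; the setter logs the change as a warning.
    WebRtcAudioRecord.setAudioSource(AudioSource.VOICE_COMMUNICATION);
  }

  public static void setMute(boolean muted) {
    // Zeros out all recorded samples while the AudioRecord keeps running.
    WebRtcAudioRecord.setMicrophoneMute(muted);
  }
}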

+ 524 - 0
libwebrtc/src/main/java/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java

@@ -0,0 +1,524 @@
+/*
+ *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.voiceengine;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.media.AudioAttributes;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioTrack;
+import android.os.Build;
+import android.os.Process;
+
+import org.webrtc.ContextUtils;
+import org.webrtc.Logging;
+import org.webrtc.ThreadUtils;
+
+import java.nio.ByteBuffer;
+
+public class WebRtcAudioTrack {
+  private static final boolean DEBUG = false;
+
+  private static final String TAG = "WebRtcAudioTrack";
+
+  // Default audio data format is PCM 16 bit per sample.
+  // Guaranteed to be supported by all devices.
+  private static final int BITS_PER_SAMPLE = 16;
+
+  // Requested size of each played-out buffer provided to the client.
+  private static final int CALLBACK_BUFFER_SIZE_MS = 10;
+
+  // Average number of callbacks per second.
+  private static final int BUFFERS_PER_SECOND = 1000 / CALLBACK_BUFFER_SIZE_MS;
+
+  // The AudioTrackThread is allowed to wait for a successful call to join(),
+  // but the wait times out after this amount of time.
+  private static final long AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS = 2000;
+
+  // By default, WebRTC creates audio tracks with a usage attribute
+  // corresponding to voice communications, such as telephony or VoIP.
+  private static final int DEFAULT_USAGE = getDefaultUsageAttribute();
+  private static int usageAttribute = DEFAULT_USAGE;
+
+  // This method overrides the default usage attribute and allows the user
+  // to set it to something other than AudioAttributes.USAGE_VOICE_COMMUNICATION.
+  // NOTE: calling this method will most likely break existing VoIP tuning.
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized void setAudioTrackUsageAttribute(int usage) {
+    Logging.w(TAG, "Default usage attribute is changed from: "
+        + DEFAULT_USAGE + " to " + usage);
+    usageAttribute = usage;
+  }
+
+  private static int getDefaultUsageAttribute() {
+    if (Build.VERSION.SDK_INT >= 21) {
+      return AudioAttributes.USAGE_VOICE_COMMUNICATION;
+    } else {
+      // Not used on SDKs lower than 21.
+      return 0;
+    }
+  }
+
+  private final long nativeAudioTrack;
+  private final AudioManager audioManager;
+  private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
+
+  private ByteBuffer byteBuffer;
+
+  private AudioTrack audioTrack;
+  private AudioTrackThread audioThread;
+
+  // Samples to be played are replaced by zeros if |speakerMute| is set to true.
+  // Can be used to ensure that the speaker is fully muted.
+  private static volatile boolean speakerMute;
+  private byte[] emptyBytes;
+
+  // Audio playout/track error handler functions.
+  public enum AudioTrackStartErrorCode {
+    AUDIO_TRACK_START_EXCEPTION,
+    AUDIO_TRACK_START_STATE_MISMATCH,
+  }
+
+  @Deprecated
+  public static interface WebRtcAudioTrackErrorCallback {
+    void onWebRtcAudioTrackInitError(String errorMessage);
+    void onWebRtcAudioTrackStartError(String errorMessage);
+    void onWebRtcAudioTrackError(String errorMessage);
+  }
+
+  // TODO(henrika): upgrade all clients to use this new interface instead.
+  public static interface ErrorCallback {
+    void onWebRtcAudioTrackInitError(String errorMessage);
+    void onWebRtcAudioTrackStartError(AudioTrackStartErrorCode errorCode, String errorMessage);
+    void onWebRtcAudioTrackError(String errorMessage);
+  }
+
+  private static WebRtcAudioTrackErrorCallback errorCallbackOld;
+  private static ErrorCallback errorCallback;
+
+  @Deprecated
+  public static void setErrorCallback(WebRtcAudioTrackErrorCallback errorCallback) {
+    Logging.d(TAG, "Set error callback (deprecated");
+    WebRtcAudioTrack.errorCallbackOld = errorCallback;
+  }
+
+  public static void setErrorCallback(ErrorCallback errorCallback) {
+    Logging.d(TAG, "Set extended error callback");
+    WebRtcAudioTrack.errorCallback = errorCallback;
+  }
+
+  /**
+   * Audio thread which keeps calling AudioTrack.write() to stream audio.
+   * Data is periodically acquired from the native WebRTC layer using the
+   * nativeGetPlayoutData callback function.
+   * This thread uses a Process.THREAD_PRIORITY_URGENT_AUDIO priority.
+   */
+  private class AudioTrackThread extends Thread {
+    private volatile boolean keepAlive = true;
+
+    public AudioTrackThread(String name) {
+      super(name);
+    }
+
+    @Override
+    public void run() {
+      Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);
+      Logging.d(TAG, "AudioTrackThread" + WebRtcAudioUtils.getThreadInfo());
+      assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING);
+
+      // Fixed size in bytes of each 10ms block of audio data that we ask for
+      // using callbacks to the native WebRTC client.
+      final int sizeInBytes = byteBuffer.capacity();
+
+      while (keepAlive) {
+        // Get 10ms of PCM data from the native WebRTC client. Audio data is
+        // written into the common ByteBuffer using the address that was
+        // cached at construction.
+        nativeGetPlayoutData(sizeInBytes, nativeAudioTrack);
+        // Write data until all data has been written to the audio sink.
+        // Upon return, the buffer position will have been advanced to reflect
+        // the amount of data that was successfully written to the AudioTrack.
+        assertTrue(sizeInBytes <= byteBuffer.remaining());
+        if (speakerMute) {
+          byteBuffer.clear();
+          byteBuffer.put(emptyBytes);
+          byteBuffer.position(0);
+        }
+        int bytesWritten = writeBytes(audioTrack, byteBuffer, sizeInBytes);
+        if (bytesWritten != sizeInBytes) {
+          Logging.e(TAG, "AudioTrack.write played invalid number of bytes: " + bytesWritten);
+          // If a write() returns a negative value, an error has occurred.
+          // Stop playing and report an error in this case.
+          if (bytesWritten < 0) {
+            keepAlive = false;
+            reportWebRtcAudioTrackError("AudioTrack.write failed: " + bytesWritten);
+          }
+        }
+        // The byte buffer must be rewound since byteBuffer.position() is
+        // increased at each call to AudioTrack.write(). If we don't do this,
+        // the next call to AudioTrack.write() will fail.
+        byteBuffer.rewind();
+
+        // TODO(henrika): it is possible to create a delay estimate here by
+        // counting number of written frames and subtracting the result from
+        // audioTrack.getPlaybackHeadPosition().
+      }
+
+      // Stops playing the audio data. Since the instance was created in
+      // MODE_STREAM mode, audio will stop playing after the last buffer that
+      // was written has been played.
+      if (audioTrack != null) {
+        Logging.d(TAG, "Calling AudioTrack.stop...");
+        try {
+          audioTrack.stop();
+          Logging.d(TAG, "AudioTrack.stop is done.");
+        } catch (IllegalStateException e) {
+          Logging.e(TAG, "AudioTrack.stop failed: " + e.getMessage());
+        }
+      }
+    }
+
+    private int writeBytes(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
+      if (Build.VERSION.SDK_INT >= 21) {
+        return audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
+      } else {
+        return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes);
+      }
+    }
+
+    // Stops the inner thread loop which results in calling AudioTrack.stop().
+    // Does not block the calling thread.
+    public void stopThread() {
+      Logging.d(TAG, "stopThread");
+      keepAlive = false;
+    }
+  }
+
+  WebRtcAudioTrack(long nativeAudioTrack) {
+    threadChecker.checkIsOnValidThread();
+    Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
+    this.nativeAudioTrack = nativeAudioTrack;
+    audioManager =
+        (AudioManager) ContextUtils.getApplicationContext().getSystemService(Context.AUDIO_SERVICE);
+    if (DEBUG) {
+      WebRtcAudioUtils.logDeviceInfo(TAG);
+    }
+  }
+
+  private boolean initPlayout(int sampleRate, int channels) {
+    threadChecker.checkIsOnValidThread();
+    Logging.d(TAG, "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels + ")");
+    final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8);
+    byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND));
+    Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity());
+    emptyBytes = new byte[byteBuffer.capacity()];
+    // Rather than passing the ByteBuffer with every callback (requiring
+    // the potentially expensive GetDirectBufferAddress) we simply have the
+    // native class cache the address to the memory once.
+    nativeCacheDirectBufferAddress(byteBuffer, nativeAudioTrack);
+
+    // Get the minimum buffer size required for successful creation of an
+    // AudioTrack object in MODE_STREAM mode.
+    // Note that this size doesn't guarantee a smooth playback under load.
+    // TODO(henrika): should we extend the buffer size to avoid glitches?
+    final int channelConfig = channelCountToConfiguration(channels);
+    final int minBufferSizeInBytes =
+        AudioTrack.getMinBufferSize(sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
+    Logging.d(TAG, "AudioTrack.getMinBufferSize: " + minBufferSizeInBytes);
+    // For the streaming mode, data must be written to the audio sink in
+    // chunks of size (given by byteBuffer.capacity()) less than or equal
+    // to the total buffer size |minBufferSizeInBytes|. But, we have seen
+    // reports of "getMinBufferSize(): error querying hardware". Hence, it
+    // can happen that |minBufferSizeInBytes| contains an invalid value.
+    if (minBufferSizeInBytes < byteBuffer.capacity()) {
+      reportWebRtcAudioTrackInitError("AudioTrack.getMinBufferSize returns an invalid value.");
+      return false;
+    }
+
+    // Ensure that the previous audio session was stopped correctly before
+    // trying to create a new AudioTrack.
+    if (audioTrack != null) {
+      reportWebRtcAudioTrackInitError("Conflict with existing AudioTrack.");
+      return false;
+    }
+    try {
+      // Create an AudioTrack object and initialize its associated audio buffer.
+      // The size of this buffer determines how long an AudioTrack can play
+      // before running out of data.
+      if (Build.VERSION.SDK_INT >= 21) {
+        // If we are on API level 21 or higher, it is possible to use a special AudioTrack
+        // constructor that uses AudioAttributes and AudioFormat as input. It allows us to
+        // supersede the notion of stream types for defining the behavior of audio playback,
+        // and to allow certain platforms or routing policies to use this information for more
+        // refined volume or routing decisions.
+        audioTrack = createAudioTrackOnLollipopOrHigher(
+            sampleRate, channelConfig, minBufferSizeInBytes);
+      } else {
+        // Use default constructor for API levels below 21.
+        audioTrack =
+            createAudioTrackOnLowerThanLollipop(sampleRate, channelConfig, minBufferSizeInBytes);
+      }
+    } catch (IllegalArgumentException e) {
+      reportWebRtcAudioTrackInitError(e.getMessage());
+      releaseAudioResources();
+      return false;
+    }
+
+    // It can happen that an AudioTrack is created but it was not successfully
+    // initialized upon creation. Seems to be the case e.g. when the maximum
+    // number of globally available audio tracks is exceeded.
+    if (audioTrack == null || audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
+      reportWebRtcAudioTrackInitError("Initialization of audio track failed.");
+      releaseAudioResources();
+      return false;
+    }
+    logMainParameters();
+    logMainParametersExtended();
+    return true;
+  }
+
+  private boolean startPlayout() {
+    threadChecker.checkIsOnValidThread();
+    Logging.d(TAG, "startPlayout");
+    assertTrue(audioTrack != null);
+    assertTrue(audioThread == null);
+
+    // Starts playing an audio track.
+    try {
+      audioTrack.play();
+    } catch (IllegalStateException e) {
+      reportWebRtcAudioTrackStartError(AudioTrackStartErrorCode.AUDIO_TRACK_START_EXCEPTION,
+          "AudioTrack.play failed: " + e.getMessage());
+      releaseAudioResources();
+      return false;
+    }
+    if (audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
+      reportWebRtcAudioTrackStartError(
+          AudioTrackStartErrorCode.AUDIO_TRACK_START_STATE_MISMATCH,
+          "AudioTrack.play failed - incorrect state :"
+          + audioTrack.getPlayState());
+      releaseAudioResources();
+      return false;
+    }
+
+    // Create and start new high-priority thread which calls AudioTrack.write()
+    // and where we also call the native nativeGetPlayoutData() callback to
+    // request decoded audio from WebRTC.
+    audioThread = new AudioTrackThread("AudioTrackJavaThread");
+    audioThread.start();
+    return true;
+  }
+
+  private boolean stopPlayout() {
+    threadChecker.checkIsOnValidThread();
+    Logging.d(TAG, "stopPlayout");
+    assertTrue(audioThread != null);
+    logUnderrunCount();
+    audioThread.stopThread();
+
+    Logging.d(TAG, "Stopping the AudioTrackThread...");
+    audioThread.interrupt();
+    if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS)) {
+      Logging.e(TAG, "Join of AudioTrackThread timed out.");
+      WebRtcAudioUtils.logAudioState(TAG);
+    }
+    Logging.d(TAG, "AudioTrackThread has now been stopped.");
+    audioThread = null;
+    releaseAudioResources();
+    return true;
+  }
+
+  // Get max possible volume index for a phone call audio stream.
+  private int getStreamMaxVolume() {
+    threadChecker.checkIsOnValidThread();
+    Logging.d(TAG, "getStreamMaxVolume");
+    assertTrue(audioManager != null);
+    return audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL);
+  }
+
+  // Set current volume level for a phone call audio stream.
+  private boolean setStreamVolume(int volume) {
+    threadChecker.checkIsOnValidThread();
+    Logging.d(TAG, "setStreamVolume(" + volume + ")");
+    assertTrue(audioManager != null);
+    if (isVolumeFixed()) {
+      Logging.e(TAG, "The device implements a fixed volume policy.");
+      return false;
+    }
+    audioManager.setStreamVolume(AudioManager.STREAM_VOICE_CALL, volume, 0);
+    return true;
+  }
+
+  private boolean isVolumeFixed() {
+    if (Build.VERSION.SDK_INT < 21)
+      return false;
+    return audioManager.isVolumeFixed();
+  }
+
+  /** Get current volume level for a phone call audio stream. */
+  private int getStreamVolume() {
+    threadChecker.checkIsOnValidThread();
+    Logging.d(TAG, "getStreamVolume");
+    assertTrue(audioManager != null);
+    return audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL);
+  }
+
+  private void logMainParameters() {
+    Logging.d(TAG, "AudioTrack: "
+            + "session ID: " + audioTrack.getAudioSessionId() + ", "
+            + "channels: " + audioTrack.getChannelCount() + ", "
+            + "sample rate: " + audioTrack.getSampleRate() + ", "
+            // Gain (>=1.0) expressed as linear multiplier on sample values.
+            + "max gain: " + AudioTrack.getMaxVolume());
+  }
+
+  // Creates an AudioTrack instance using AudioAttributes and AudioFormat as input.
+  // It allows certain platforms or routing policies to use this information for more
+  // refined volume or routing decisions.
+  @TargetApi(21)
+  private static AudioTrack createAudioTrackOnLollipopOrHigher(
+      int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
+    Logging.d(TAG, "createAudioTrackOnLollipopOrHigher");
+    // TODO(henrika): use setPerformanceMode(int) with PERFORMANCE_MODE_LOW_LATENCY to control
+    // performance when Android O is supported. Add some logging in the mean time.
+    final int nativeOutputSampleRate =
+        AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_VOICE_CALL);
+    Logging.d(TAG, "nativeOutputSampleRate: " + nativeOutputSampleRate);
+    if (sampleRateInHz != nativeOutputSampleRate) {
+      Logging.w(TAG, "Unable to use fast mode since requested sample rate is not native");
+    }
+    if (usageAttribute != DEFAULT_USAGE) {
+      Logging.w(TAG, "A non default usage attribute is used: " + usageAttribute);
+    }
+    // Create an audio track where the audio usage is for VoIP and the content type is speech.
+    return new AudioTrack(
+        new AudioAttributes.Builder()
+            .setUsage(usageAttribute)
+            .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
+        .build(),
+        new AudioFormat.Builder()
+          .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
+          .setSampleRate(sampleRateInHz)
+          .setChannelMask(channelConfig)
+          .build(),
+        bufferSizeInBytes,
+        AudioTrack.MODE_STREAM,
+        AudioManager.AUDIO_SESSION_ID_GENERATE);
+  }
+
+  @SuppressWarnings("deprecation") // Deprecated in API level 25.
+  private static AudioTrack createAudioTrackOnLowerThanLollipop(
+      int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
+    return new AudioTrack(AudioManager.STREAM_VOICE_CALL, sampleRateInHz, channelConfig,
+        AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes, AudioTrack.MODE_STREAM);
+  }
+
+  private void logBufferSizeInFrames() {
+    if (Build.VERSION.SDK_INT >= 23) {
+      Logging.d(TAG, "AudioTrack: "
+              // The effective size of the AudioTrack buffer that the app writes to.
+              + "buffer size in frames: " + audioTrack.getBufferSizeInFrames());
+    }
+  }
+
+  private void logBufferCapacityInFrames() {
+    if (Build.VERSION.SDK_INT >= 24) {
+      Logging.d(TAG,
+          "AudioTrack: "
+              // Maximum size of the AudioTrack buffer in frames.
+              + "buffer capacity in frames: " + audioTrack.getBufferCapacityInFrames());
+    }
+  }
+
+  private void logMainParametersExtended() {
+    logBufferSizeInFrames();
+    logBufferCapacityInFrames();
+  }
+
+  // Prints the number of underrun occurrences in the application-level write
+  // buffer since the AudioTrack was created. An underrun occurs if the app does
+  // not write audio data quickly enough, causing the buffer to underflow and a
+  // potential audio glitch.
+  // TODO(henrika): keep track of this value in the field and possibly add new
+  // UMA stat if needed.
+  private void logUnderrunCount() {
+    if (Build.VERSION.SDK_INT >= 24) {
+      Logging.d(TAG, "underrun count: " + audioTrack.getUnderrunCount());
+    }
+  }
+
+  // Helper method which throws an exception when an assertion has failed.
+  private static void assertTrue(boolean condition) {
+    if (!condition) {
+      throw new AssertionError("Expected condition to be true");
+    }
+  }
+
+  private int channelCountToConfiguration(int channels) {
+    return (channels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO);
+  }
+
+  private native void nativeCacheDirectBufferAddress(ByteBuffer byteBuffer, long nativeAudioRecord);
+
+  private native void nativeGetPlayoutData(int bytes, long nativeAudioRecord);
+
+  // Sets all samples to be played out to zero if |mute| is true, i.e.,
+  // ensures that the speaker is muted.
+  public static void setSpeakerMute(boolean mute) {
+    Logging.w(TAG, "setSpeakerMute(" + mute + ")");
+    speakerMute = mute;
+  }
+
+  // Releases the native AudioTrack resources.
+  private void releaseAudioResources() {
+    Logging.d(TAG, "releaseAudioResources");
+    if (audioTrack != null) {
+      audioTrack.release();
+      audioTrack = null;
+    }
+  }
+
+  private void reportWebRtcAudioTrackInitError(String errorMessage) {
+    Logging.e(TAG, "Init playout error: " + errorMessage);
+    WebRtcAudioUtils.logAudioState(TAG);
+    if (errorCallbackOld != null) {
+      errorCallbackOld.onWebRtcAudioTrackInitError(errorMessage);
+    }
+    if (errorCallback != null) {
+      errorCallback.onWebRtcAudioTrackInitError(errorMessage);
+    }
+  }
+
+  private void reportWebRtcAudioTrackStartError(
+      AudioTrackStartErrorCode errorCode, String errorMessage) {
+    Logging.e(TAG, "Start playout error: "  + errorCode + ". " + errorMessage);
+    WebRtcAudioUtils.logAudioState(TAG);
+    if (errorCallbackOld != null) {
+      errorCallbackOld.onWebRtcAudioTrackStartError(errorMessage);
+    }
+    if (errorCallback != null) {
+      errorCallback.onWebRtcAudioTrackStartError(errorCode, errorMessage);
+    }
+  }
+
+  private void reportWebRtcAudioTrackError(String errorMessage) {
+    Logging.e(TAG, "Run-time playback error: " + errorMessage);
+    WebRtcAudioUtils.logAudioState(TAG);
+    if (errorCallbackOld != null) {
+      errorCallbackOld.onWebRtcAudioTrackError(errorMessage);
+    }
+    if (errorCallback != null) {
+      errorCallback.onWebRtcAudioTrackError(errorMessage);
+    }
+  }
+}
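
Illustrative usage (not part of this commit): a sketch of wiring up the
extended ErrorCallback and the speaker-mute hook declared above. The class
TrackConfigExample is hypothetical; note the warning above that a non-default
usage attribute will most likely break existing VoIP tuning.

import org.webrtc.voiceengine.WebRtcAudioTrack;

public final class TrackConfigExample {
  public static void install() {
    WebRtcAudioTrack.setErrorCallback(new WebRtcAudioTrack.ErrorCallback() {
      @Override
      public void onWebRtcAudioTrackInitError(String errorMessage) {
        // Report to a crash/metrics pipeline of your choice.
      }
      @Override
      public void onWebRtcAudioTrackStartError(
          WebRtcAudioTrack.AudioTrackStartErrorCode errorCode, String errorMessage) {
        // errorCode distinguishes start exceptions from state mismatches.
      }
      @Override
      public void onWebRtcAudioTrackError(String errorMessage) {
        // Run-time playback failure.
      }
    });
    // Replace played samples with zeros, i.e. fully mute the speaker.
    WebRtcAudioTrack.setSpeakerMute(true);
  }
}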

+ 388 - 0
libwebrtc/src/main/java/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java

@@ -0,0 +1,388 @@
+/*
+ *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.voiceengine;
+
+import static android.media.AudioManager.MODE_IN_CALL;
+import static android.media.AudioManager.MODE_IN_COMMUNICATION;
+import static android.media.AudioManager.MODE_NORMAL;
+import static android.media.AudioManager.MODE_RINGTONE;
+
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.media.AudioDeviceInfo;
+import android.media.AudioManager;
+import android.media.AudioRecordingConfiguration;
+import android.media.MediaRecorder.AudioSource;
+import android.os.Build;
+import android.os.Process;
+import java.lang.Thread;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+import org.webrtc.ContextUtils;
+import org.webrtc.Logging;
+
+public final class WebRtcAudioUtils {
+  private static final String TAG = "WebRtcAudioUtils";
+
+  // List of devices where we have seen issues (e.g. bad audio quality) using
+  // the low latency output mode in combination with OpenSL ES.
+  // The device name is given by Build.MODEL.
+  private static final String[] BLACKLISTED_OPEN_SL_ES_MODELS = new String[] {
+      // It is recommended to maintain a list of blacklisted models outside
+      // this package and instead call
+      // WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(true)
+      // from the client for devices where OpenSL ES shall be disabled.
+  };
+
+  // List of devices where it has been verified that the built-in effect is
+  // bad and where it makes sense to avoid using it and rely on the native
+  // WebRTC version instead. The device name is given by Build.MODEL.
+  private static final String[] BLACKLISTED_AEC_MODELS = new String[] {
+      // It is recommended to maintain a list of blacklisted models outside
+      // this package and instead call setWebRtcBasedAcousticEchoCanceler(true)
+      // from the client for devices where the built-in AEC shall be disabled.
+  };
+  private static final String[] BLACKLISTED_NS_MODELS = new String[] {
+    // It is recommended to maintain a list of blacklisted models outside
+    // this package and instead call setWebRtcBasedNoiseSuppressor(true)
+    // from the client for devices where the built-in NS shall be disabled.
+  };
+
+  // Use 16kHz as the default sample rate. A higher sample rate might prevent
+  // us from supporting communication mode on some older (e.g. ICS) devices.
+  private static final int DEFAULT_SAMPLE_RATE_HZ = 16000;
+  private static int defaultSampleRateHz = DEFAULT_SAMPLE_RATE_HZ;
+  // Set to true if setDefaultSampleRateHz() has been called.
+  private static boolean isDefaultSampleRateOverridden;
+
+  // By default, utilize hardware-based audio effects for AEC and NS when
+  // available.
+  private static boolean useWebRtcBasedAcousticEchoCanceler;
+  private static boolean useWebRtcBasedNoiseSuppressor;
+
+  // Call these methods if any hardware based effect shall be replaced by a
+  // software based version provided by the WebRTC stack instead.
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized void setWebRtcBasedAcousticEchoCanceler(boolean enable) {
+    useWebRtcBasedAcousticEchoCanceler = enable;
+  }
+
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized void setWebRtcBasedNoiseSuppressor(boolean enable) {
+    useWebRtcBasedNoiseSuppressor = enable;
+  }
+
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized void setWebRtcBasedAutomaticGainControl(boolean enable) {
+    // TODO(henrika): deprecated; remove when no longer used by any client.
+    Logging.w(TAG, "setWebRtcBasedAutomaticGainControl() is deprecated");
+  }
+
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized boolean useWebRtcBasedAcousticEchoCanceler() {
+    if (useWebRtcBasedAcousticEchoCanceler) {
+      Logging.w(TAG, "Overriding default behavior; now using WebRTC AEC!");
+    }
+    return useWebRtcBasedAcousticEchoCanceler;
+  }
+
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized boolean useWebRtcBasedNoiseSuppressor() {
+    if (useWebRtcBasedNoiseSuppressor) {
+      Logging.w(TAG, "Overriding default behavior; now using WebRTC NS!");
+    }
+    return useWebRtcBasedNoiseSuppressor;
+  }
+
+  // TODO(henrika): deprecated; remove when no longer used by any client.
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized boolean useWebRtcBasedAutomaticGainControl() {
+    // Always return true here to avoid trying to use any built-in AGC.
+    return true;
+  }
+
+  // Returns true if the device supports an audio effect (AEC or NS).
+  // Four conditions must be fulfilled for these functions to return true:
+  // 1) the platform must support the built-in (HW) effect,
+  // 2) explicit use (override) of a WebRTC based version must not be set,
+  // 3) the device must not be blacklisted for use of the effect, and
+  // 4) the UUID of the effect must be approved (some UUIDs can be excluded).
+  public static boolean isAcousticEchoCancelerSupported() {
+    return WebRtcAudioEffects.canUseAcousticEchoCanceler();
+  }
+  public static boolean isNoiseSuppressorSupported() {
+    return WebRtcAudioEffects.canUseNoiseSuppressor();
+  }
+  // TODO(henrika): deprecated; remove when no longer used by any client.
+  public static boolean isAutomaticGainControlSupported() {
+    // Always return false here to avoid trying to use any built-in AGC.
+    return false;
+  }
+
+  // Call this method if the default handling of querying the native sample
+  // rate shall be overridden. Can be useful on some devices where the
+  // available Android APIs are known to return invalid results.
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized void setDefaultSampleRateHz(int sampleRateHz) {
+    isDefaultSampleRateOverridden = true;
+    defaultSampleRateHz = sampleRateHz;
+  }
+
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized boolean isDefaultSampleRateOverridden() {
+    return isDefaultSampleRateOverridden;
+  }
+
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized int getDefaultSampleRateHz() {
+    return defaultSampleRateHz;
+  }
+
+  public static List<String> getBlackListedModelsForAecUsage() {
+    return Arrays.asList(WebRtcAudioUtils.BLACKLISTED_AEC_MODELS);
+  }
+
+  public static List<String> getBlackListedModelsForNsUsage() {
+    return Arrays.asList(WebRtcAudioUtils.BLACKLISTED_NS_MODELS);
+  }
+
+  // Helper method for building a string of thread information.
+  public static String getThreadInfo() {
+    return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId()
+        + "]";
+  }
+
+  // Returns true if we're running on emulator.
+  public static boolean runningOnEmulator() {
+    return Build.HARDWARE.equals("goldfish") && Build.BRAND.startsWith("generic_");
+  }
+
+  // Returns true if the device is blacklisted for OpenSL ES usage.
+  public static boolean deviceIsBlacklistedForOpenSLESUsage() {
+    List<String> blackListedModels = Arrays.asList(BLACKLISTED_OPEN_SL_ES_MODELS);
+    return blackListedModels.contains(Build.MODEL);
+  }
+
+  // Information about the current build, taken from system properties.
+  static void logDeviceInfo(String tag) {
+    Logging.d(tag, "Android SDK: " + Build.VERSION.SDK_INT + ", "
+            + "Release: " + Build.VERSION.RELEASE + ", "
+            + "Brand: " + Build.BRAND + ", "
+            + "Device: " + Build.DEVICE + ", "
+            + "Id: " + Build.ID + ", "
+            + "Hardware: " + Build.HARDWARE + ", "
+            + "Manufacturer: " + Build.MANUFACTURER + ", "
+            + "Model: " + Build.MODEL + ", "
+            + "Product: " + Build.PRODUCT);
+  }
+
+  // Logs information about the current audio state. The idea is to call this
+  // method when errors are detected to log under what conditions the error
+  // occurred. Hopefully it will provide clues to what might be the root cause.
+  static void logAudioState(String tag) {
+    logDeviceInfo(tag);
+    final Context context = ContextUtils.getApplicationContext();
+    final AudioManager audioManager =
+        (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+    logAudioStateBasic(tag, audioManager);
+    logAudioStateVolume(tag, audioManager);
+    logAudioDeviceInfo(tag, audioManager);
+  }
+
+  // Reports basic audio statistics.
+  private static void logAudioStateBasic(String tag, AudioManager audioManager) {
+    Logging.d(tag, "Audio State: "
+            + "audio mode: " + modeToString(audioManager.getMode()) + ", "
+            + "has mic: " + hasMicrophone() + ", "
+            + "mic muted: " + audioManager.isMicrophoneMute() + ", "
+            + "music active: " + audioManager.isMusicActive() + ", "
+            + "speakerphone: " + audioManager.isSpeakerphoneOn() + ", "
+            + "BT SCO: " + audioManager.isBluetoothScoOn());
+  }
+
+  private static boolean isVolumeFixed(AudioManager audioManager) {
+    if (Build.VERSION.SDK_INT < 21) {
+      return false;
+    }
+    return audioManager.isVolumeFixed();
+  }
+
+  // Adds volume information for all possible stream types.
+  private static void logAudioStateVolume(String tag, AudioManager audioManager) {
+    final int[] streams = {
+        AudioManager.STREAM_VOICE_CALL,
+        AudioManager.STREAM_MUSIC,
+        AudioManager.STREAM_RING,
+        AudioManager.STREAM_ALARM,
+        AudioManager.STREAM_NOTIFICATION,
+        AudioManager.STREAM_SYSTEM
+    };
+    Logging.d(tag, "Audio State: ");
+    // Some devices may not have volume controls and might use a fixed volume.
+    boolean fixedVolume = isVolumeFixed(audioManager);
+    Logging.d(tag, "  fixed volume=" + fixedVolume);
+    if (!fixedVolume) {
+      for (int stream : streams) {
+        StringBuilder info = new StringBuilder();
+        info.append("  " + streamTypeToString(stream) + ": ");
+        info.append("volume=").append(audioManager.getStreamVolume(stream));
+        info.append(", max=").append(audioManager.getStreamMaxVolume(stream));
+        logIsStreamMute(tag, audioManager, stream, info);
+        Logging.d(tag, info.toString());
+      }
+    }
+  }
+
+  private static void logIsStreamMute(
+      String tag, AudioManager audioManager, int stream, StringBuilder info) {
+    if (Build.VERSION.SDK_INT >= 23) {
+      info.append(", muted=").append(audioManager.isStreamMute(stream));
+    }
+  }
+
+  private static void logAudioDeviceInfo(String tag, AudioManager audioManager) {
+    if (Build.VERSION.SDK_INT < 23) {
+      return;
+    }
+    final AudioDeviceInfo[] devices =
+        audioManager.getDevices(AudioManager.GET_DEVICES_ALL);
+    if (devices.length == 0) {
+      return;
+    }
+    Logging.d(tag, "Audio Devices: ");
+    for (AudioDeviceInfo device : devices) {
+      StringBuilder info = new StringBuilder();
+      info.append("  ").append(deviceTypeToString(device.getType()));
+      info.append(device.isSource() ? "(in): " : "(out): ");
+      // An empty array indicates that the device supports arbitrary channel counts.
+      if (device.getChannelCounts().length > 0) {
+        info.append("channels=").append(Arrays.toString(device.getChannelCounts()));
+        info.append(", ");
+      }
+      if (device.getEncodings().length > 0) {
+        // Examples: ENCODING_PCM_16BIT = 2, ENCODING_PCM_FLOAT = 4.
+        info.append("encodings=").append(Arrays.toString(device.getEncodings()));
+        info.append(", ");
+      }
+      if (device.getSampleRates().length > 0) {
+        info.append("sample rates=").append(Arrays.toString(device.getSampleRates()));
+        info.append(", ");
+      }
+      info.append("id=").append(device.getId());
+      Logging.d(tag, info.toString());
+    }
+  }
+
+  // Converts media.AudioManager modes into local string representation.
+  static String modeToString(int mode) {
+    switch (mode) {
+      case MODE_IN_CALL:
+        return "MODE_IN_CALL";
+      case MODE_IN_COMMUNICATION:
+        return "MODE_IN_COMMUNICATION";
+      case MODE_NORMAL:
+        return "MODE_NORMAL";
+      case MODE_RINGTONE:
+        return "MODE_RINGTONE";
+      default:
+        return "MODE_INVALID";
+    }
+  }
+
+  private static String streamTypeToString(int stream) {
+    switch (stream) {
+      case AudioManager.STREAM_VOICE_CALL:
+        return "STREAM_VOICE_CALL";
+      case AudioManager.STREAM_MUSIC:
+        return "STREAM_MUSIC";
+      case AudioManager.STREAM_RING:
+        return "STREAM_RING";
+      case AudioManager.STREAM_ALARM:
+        return "STREAM_ALARM";
+      case AudioManager.STREAM_NOTIFICATION:
+        return "STREAM_NOTIFICATION";
+      case AudioManager.STREAM_SYSTEM:
+        return "STREAM_SYSTEM";
+      default:
+        return "STREAM_INVALID";
+    }
+  }
+
+  // Converts AudioDeviceInfo types to local string representation.
+  private static String deviceTypeToString(int type) {
+    switch (type) {
+      case AudioDeviceInfo.TYPE_UNKNOWN:
+        return "TYPE_UNKNOWN";
+      case AudioDeviceInfo.TYPE_BUILTIN_EARPIECE:
+        return "TYPE_BUILTIN_EARPIECE";
+      case AudioDeviceInfo.TYPE_BUILTIN_SPEAKER:
+        return "TYPE_BUILTIN_SPEAKER";
+      case AudioDeviceInfo.TYPE_WIRED_HEADSET:
+        return "TYPE_WIRED_HEADSET";
+      case AudioDeviceInfo.TYPE_WIRED_HEADPHONES:
+        return "TYPE_WIRED_HEADPHONES";
+      case AudioDeviceInfo.TYPE_LINE_ANALOG:
+        return "TYPE_LINE_ANALOG";
+      case AudioDeviceInfo.TYPE_LINE_DIGITAL:
+        return "TYPE_LINE_DIGITAL";
+      case AudioDeviceInfo.TYPE_BLUETOOTH_SCO:
+        return "TYPE_BLUETOOTH_SCO";
+      case AudioDeviceInfo.TYPE_BLUETOOTH_A2DP:
+        return "TYPE_BLUETOOTH_A2DP";
+      case AudioDeviceInfo.TYPE_HDMI:
+        return "TYPE_HDMI";
+      case AudioDeviceInfo.TYPE_HDMI_ARC:
+        return "TYPE_HDMI_ARC";
+      case AudioDeviceInfo.TYPE_USB_DEVICE:
+        return "TYPE_USB_DEVICE";
+      case AudioDeviceInfo.TYPE_USB_ACCESSORY:
+        return "TYPE_USB_ACCESSORY";
+      case AudioDeviceInfo.TYPE_DOCK:
+        return "TYPE_DOCK";
+      case AudioDeviceInfo.TYPE_FM:
+        return "TYPE_FM";
+      case AudioDeviceInfo.TYPE_BUILTIN_MIC:
+        return "TYPE_BUILTIN_MIC";
+      case AudioDeviceInfo.TYPE_FM_TUNER:
+        return "TYPE_FM_TUNER";
+      case AudioDeviceInfo.TYPE_TV_TUNER:
+        return "TYPE_TV_TUNER";
+      case AudioDeviceInfo.TYPE_TELEPHONY:
+        return "TYPE_TELEPHONY";
+      case AudioDeviceInfo.TYPE_AUX_LINE:
+        return "TYPE_AUX_LINE";
+      case AudioDeviceInfo.TYPE_IP:
+        return "TYPE_IP";
+      case AudioDeviceInfo.TYPE_BUS:
+        return "TYPE_BUS";
+      case AudioDeviceInfo.TYPE_USB_HEADSET:
+        return "TYPE_USB_HEADSET";
+      default:
+        return "TYPE_UNKNOWN";
+    }
+  }
+
+  // Returns true if the device can record audio via a microphone.
+  private static boolean hasMicrophone() {
+    return ContextUtils.getApplicationContext().getPackageManager().hasSystemFeature(
+        PackageManager.FEATURE_MICROPHONE);
+  }
+}
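
Illustrative usage (not part of this commit): applying the per-device
overrides exposed above, e.g. for a model where the hardware effects are
known to be bad or the sample-rate query returns invalid values. The class
AudioTuningExample is hypothetical.

import org.webrtc.voiceengine.WebRtcAudioUtils;

public final class AudioTuningExample {
  public static void applyDeviceQuirks() {
    // Replace the hardware AEC/NS with the WebRTC software versions.
    WebRtcAudioUtils.setWebRtcBasedAcousticEchoCanceler(true);
    WebRtcAudioUtils.setWebRtcBasedNoiseSuppressor(true);
    // Override the native sample-rate query on devices known to misreport it.
    WebRtcAudioUtils.setDefaultSampleRateHz(48000);
  }
}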

+ 46 - 0
libwebrtc/src/main/java/rtc_base/java/src/org/webrtc/ContextUtils.java

@@ -0,0 +1,46 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import org.webrtc.Logging;
+
+/**
+ * Class for storing the application context and retrieving it in a static context. Similar to
+ * org.chromium.base.ContextUtils.
+ */
+public class ContextUtils {
+  private static final String TAG = "ContextUtils";
+  private static Context applicationContext;
+
+  /**
+   * Stores the application context that will be returned by getApplicationContext. This is called
+   * by PeerConnectionFactory.initialize. The application context must be set before creating
+   * a PeerConnectionFactory and must not be modified while it is alive.
+   */
+  public static void initialize(Context applicationContext) {
+    if (applicationContext == null) {
+      throw new IllegalArgumentException(
+          "Application context cannot be null for ContextUtils.initialize.");
+    }
+    ContextUtils.applicationContext = applicationContext;
+  }
+
+  /**
+   * Returns the stored application context.
+   *
+   * @deprecated crbug.com/webrtc/8937
+   */
+  @Deprecated
+  public static Context getApplicationContext() {
+    return applicationContext;
+  }
+}
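
Illustrative usage (not part of this commit): the context is normally stored
by PeerConnectionFactory.initialize, but the contract documented above amounts
to the sketch below; the Application subclass MyApp is hypothetical.

import android.app.Application;
import org.webrtc.ContextUtils;

public class MyApp extends Application {
  @Override
  public void onCreate() {
    super.onCreate();
    // Must happen before any PeerConnectionFactory is created and must not
    // change while the factory is alive.
    ContextUtils.initialize(getApplicationContext());
  }
}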

+ 22 - 0
libwebrtc/src/main/java/rtc_base/java/src/org/webrtc/Loggable.java

@@ -0,0 +1,22 @@
+/*
+ *  Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.Logging.Severity;
+
+/**
+ * Java interface for WebRTC logging. The default implementation uses webrtc.Logging.
+ *
+ * When injected, the Loggable will receive logging from both Java and native.
+ */
+public interface Loggable {
+  public void onLogMessage(String message, Severity severity, String tag);
+}
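
Illustrative implementation (not part of this commit): a Loggable that
forwards both Java and native messages to Logcat. Injection itself happens
during PeerConnectionFactory initialization, not through this interface.

import org.webrtc.Loggable;
import org.webrtc.Logging.Severity;

public class LogcatLoggable implements Loggable {
  @Override
  public void onLogMessage(String message, Severity severity, String tag) {
    // Prefix each line with the WebRTC severity so it survives the transport.
    android.util.Log.println(android.util.Log.INFO, tag, severity + ": " + message);
  }
}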

+ 199 - 0
libwebrtc/src/main/java/rtc_base/java/src/org/webrtc/Logging.java

@@ -0,0 +1,199 @@
+/*
+ *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.util.EnumSet;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+/**
+ * Java wrapper for WebRTC logging. Logging defaults to java.util.logging.Logger, but a custom
+ * logger implementing the Loggable interface can be injected along with a Severity. All subsequent
+ * log messages will then be redirected to the injected Loggable, except those with a severity lower
+ * than the specified severity, which will be discarded.
+ *
+ * It is also possible to switch to native logging (rtc::LogMessage) if one of the following static
+ * functions are called from the app:
+ * - Logging.enableLogThreads
+ * - Logging.enableLogTimeStamps
+ * - Logging.enableLogToDebugOutput
+ *
+ * The priority goes:
+ * 1. Injected loggable
+ * 2. Native logging
+ * 3. Fallback logging.
+ * Only one method will be used at a time.
+ *
+ * Injecting a Loggable or using any of the enable... methods requires that the native library is
+ * loaded, using PeerConnectionFactory.initialize.
+ */
+public class Logging {
+  private static final Logger fallbackLogger = createFallbackLogger();
+  private static volatile boolean loggingEnabled;
+  private static Loggable loggable;
+  private static Severity loggableSeverity;
+
+  private static Logger createFallbackLogger() {
+    final Logger fallbackLogger = Logger.getLogger("org.webrtc.Logging");
+    fallbackLogger.setLevel(Level.ALL);
+    return fallbackLogger;
+  }
+
+  static void injectLoggable(Loggable injectedLoggable, Severity severity) {
+    if (injectedLoggable != null) {
+      loggable = injectedLoggable;
+      loggableSeverity = severity;
+    }
+  }
+
+  static void deleteInjectedLoggable() {
+    loggable = null;
+  }
+
+  // TODO(solenberg): Remove once dependent projects updated.
+  @Deprecated
+  public enum TraceLevel {
+    TRACE_NONE(0x0000),
+    TRACE_STATEINFO(0x0001),
+    TRACE_WARNING(0x0002),
+    TRACE_ERROR(0x0004),
+    TRACE_CRITICAL(0x0008),
+    TRACE_APICALL(0x0010),
+    TRACE_DEFAULT(0x00ff),
+    TRACE_MODULECALL(0x0020),
+    TRACE_MEMORY(0x0100),
+    TRACE_TIMER(0x0200),
+    TRACE_STREAM(0x0400),
+    TRACE_DEBUG(0x0800),
+    TRACE_INFO(0x1000),
+    TRACE_TERSEINFO(0x2000),
+    TRACE_ALL(0xffff);
+
+    public final int level;
+    TraceLevel(int level) {
+      this.level = level;
+    }
+  }
+
+  // Keep in sync with webrtc/rtc_base/logging.h:LoggingSeverity.
+  public enum Severity { LS_VERBOSE, LS_INFO, LS_WARNING, LS_ERROR, LS_NONE }
+
+  public static void enableLogThreads() {
+    nativeEnableLogThreads();
+  }
+
+  public static void enableLogTimeStamps() {
+    nativeEnableLogTimeStamps();
+  }
+
+  // TODO(solenberg): Remove once dependent projects updated.
+  @Deprecated
+  public static void enableTracing(String path, EnumSet<TraceLevel> levels) {}
+
+  // Enable diagnostic logging for messages of |severity| to the platform debug
+  // output. On Android, the output will be directed to Logcat.
+  // Note: this function starts collecting the output of the RTC_LOG() macros.
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized void enableLogToDebugOutput(Severity severity) {
+    if (loggable != null) {
+      throw new IllegalStateException(
+          "Logging to native debug output not supported while Loggable is injected. "
+          + "Delete the Loggable before calling this method.");
+    }
+    nativeEnableLogToDebugOutput(severity.ordinal());
+    loggingEnabled = true;
+  }
+
+  public static void log(Severity severity, String tag, String message) {
+    if (tag == null || message == null) {
+      throw new IllegalArgumentException("Logging tag or message may not be null.");
+    }
+    if (loggable != null) {
+      // Filter log messages below loggableSeverity.
+      if (severity.ordinal() < loggableSeverity.ordinal()) {
+        return;
+      }
+      loggable.onLogMessage(message, severity, tag);
+      return;
+    }
+
+    // Try native logging if no loggable is injected.
+    if (loggingEnabled) {
+      nativeLog(severity.ordinal(), tag, message);
+      return;
+    }
+
+    // Fallback to system log.
+    Level level;
+    switch (severity) {
+      case LS_ERROR:
+        level = Level.SEVERE;
+        break;
+      case LS_WARNING:
+        level = Level.WARNING;
+        break;
+      case LS_INFO:
+        level = Level.INFO;
+        break;
+      default:
+        level = Level.FINE;
+        break;
+    }
+    fallbackLogger.log(level, tag + ": " + message);
+  }
+
+  public static void d(String tag, String message) {
+    log(Severity.LS_INFO, tag, message);
+  }
+
+  public static void e(String tag, String message) {
+    log(Severity.LS_ERROR, tag, message);
+  }
+
+  public static void w(String tag, String message) {
+    log(Severity.LS_WARNING, tag, message);
+  }
+
+  public static void e(String tag, String message, Throwable e) {
+    log(Severity.LS_ERROR, tag, message);
+    log(Severity.LS_ERROR, tag, e.toString());
+    log(Severity.LS_ERROR, tag, getStackTraceString(e));
+  }
+
+  public static void w(String tag, String message, Throwable e) {
+    log(Severity.LS_WARNING, tag, message);
+    log(Severity.LS_WARNING, tag, e.toString());
+    log(Severity.LS_WARNING, tag, getStackTraceString(e));
+  }
+
+  public static void v(String tag, String message) {
+    log(Severity.LS_VERBOSE, tag, message);
+  }
+
+  private static String getStackTraceString(Throwable e) {
+    if (e == null) {
+      return "";
+    }
+
+    StringWriter sw = new StringWriter();
+    PrintWriter pw = new PrintWriter(sw);
+    e.printStackTrace(pw);
+    return sw.toString();
+  }
+
+  private static native void nativeEnableLogToDebugOutput(int nativeSeverity);
+  private static native void nativeEnableLogThreads();
+  private static native void nativeEnableLogTimeStamps();
+  private static native void nativeLog(int severity, String tag, String message);
+}
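
Illustrative usage (not part of this commit): switching to native logging as
described in the class comment; this requires the native library to be loaded
first. The class LoggingSetupExample is hypothetical.

import org.webrtc.Logging;

public final class LoggingSetupExample {
  public static void enableNativeLogs() {
    // Starts collecting RTC_LOG() output at LS_INFO and above on Logcat.
    Logging.enableLogToDebugOutput(Logging.Severity.LS_INFO);
    Logging.d("LoggingSetupExample", "native logging enabled");
  }
}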

+ 2 - 0
libwebrtc/src/main/java/rtc_base/java/src/org/webrtc/OWNERS

@@ -0,0 +1,2 @@
+magjed@webrtc.org
+sakal@webrtc.org

+ 45 - 0
libwebrtc/src/main/java/rtc_base/java/src/org/webrtc/Size.java

@@ -0,0 +1,45 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Class for representing size of an object. Very similar to android.util.Size but available on all
+ * devices.
+ */
+public class Size {
+  public int width;
+  public int height;
+
+  public Size(int width, int height) {
+    this.width = width;
+    this.height = height;
+  }
+
+  @Override
+  public String toString() {
+    return width + "x" + height;
+  }
+
+  @Override
+  public boolean equals(Object other) {
+    if (!(other instanceof Size)) {
+      return false;
+    }
+    final Size otherSize = (Size) other;
+    return width == otherSize.width && height == otherSize.height;
+  }
+
+  @Override
+  public int hashCode() {
+    // Use prime close to 2^16 to avoid collisions for normal values less than 2^16.
+    return 1 + 65537 * width + height;
+  }
+}

+ 214 - 0
libwebrtc/src/main/java/rtc_base/java/src/org/webrtc/ThreadUtils.java

@@ -0,0 +1,214 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.os.Handler;
+import android.os.Looper;
+import android.os.SystemClock;
+
+import java.util.concurrent.Callable;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+public class ThreadUtils {
+    /**
+     * Utility class to be used for checking that a method is called on the correct thread.
+     */
+    public static class ThreadChecker {
+        private Thread thread = Thread.currentThread();
+
+        public void checkIsOnValidThread() {
+            if (thread == null) {
+                thread = Thread.currentThread();
+            }
+            if (Thread.currentThread() != thread) {
+                throw new IllegalStateException("Wrong thread");
+            }
+        }
+
+        public void detachThread() {
+            thread = null;
+        }
+    }
+
+    /**
+     * Throws exception if called from other than main thread.
+     */
+    public static void checkIsOnMainThread() {
+        if (Thread.currentThread() != Looper.getMainLooper().getThread()) {
+            throw new IllegalStateException("Not on main thread!");
+        }
+    }
+
+    /**
+     * Utility interface to be used with executeUninterruptibly() to wait for blocking operations
+     * to complete without getting interrupted.
+     */
+    public interface BlockingOperation {
+        void run() throws InterruptedException;
+    }
+
+    /**
+     * Utility method to make sure a blocking operation is executed to completion without getting
+     * interrupted. This should be used in cases where the operation is waiting for some critical
+     * work, e.g. cleanup, that must complete before returning. If the thread is interrupted during
+     * the blocking operation, this function will re-run the operation until completion, and only then
+     * re-interrupt the thread.
+     */
+    public static void executeUninterruptibly(BlockingOperation operation) {
+        boolean wasInterrupted = false;
+        while (true) {
+            try {
+                operation.run();
+                break;
+            } catch (InterruptedException e) {
+                // Someone is asking us to return early at our convenience. We can't cancel this operation,
+                // but we should preserve the information and pass it along.
+                wasInterrupted = true;
+            }
+        }
+        // Pass interruption information along.
+        if (wasInterrupted) {
+            Thread.currentThread().interrupt();
+        }
+    }
+
+    public static boolean joinUninterruptibly(final Thread thread, long timeoutMs) {
+        final long startTimeMs = SystemClock.elapsedRealtime();
+        long timeRemainingMs = timeoutMs;
+        boolean wasInterrupted = false;
+        while (timeRemainingMs > 0) {
+            try {
+                thread.join(timeRemainingMs);
+                break;
+            } catch (InterruptedException e) {
+                // Someone is asking us to return early at our convenience. We can't cancel this operation,
+                // but we should preserve the information and pass it along.
+                wasInterrupted = true;
+                final long elapsedTimeMs = SystemClock.elapsedRealtime() - startTimeMs;
+                timeRemainingMs = timeoutMs - elapsedTimeMs;
+            }
+        }
+        // Pass interruption information along.
+        if (wasInterrupted) {
+            Thread.currentThread().interrupt();
+        }
+        return !thread.isAlive();
+    }
+
+    public static void joinUninterruptibly(final Thread thread) {
+        executeUninterruptibly(new BlockingOperation() {
+            @Override
+            public void run() throws InterruptedException {
+                thread.join();
+            }
+        });
+    }
+
+    public static void awaitUninterruptibly(final CountDownLatch latch) {
+        executeUninterruptibly(new BlockingOperation() {
+            @Override
+            public void run() throws InterruptedException {
+                latch.await();
+            }
+        });
+    }
+
+    public static boolean awaitUninterruptibly(CountDownLatch barrier, long timeoutMs) {
+        final long startTimeMs = SystemClock.elapsedRealtime();
+        long timeRemainingMs = timeoutMs;
+        boolean wasInterrupted = false;
+        boolean result = false;
+        do {
+            try {
+                result = barrier.await(timeRemainingMs, TimeUnit.MILLISECONDS);
+                break;
+            } catch (InterruptedException e) {
+                // Someone is asking us to return early at our convenience. We can't cancel this operation,
+                // but we should preserve the information and pass it along.
+                wasInterrupted = true;
+                final long elapsedTimeMs = SystemClock.elapsedRealtime() - startTimeMs;
+                timeRemainingMs = timeoutMs - elapsedTimeMs;
+            }
+        } while (timeRemainingMs > 0);
+        // Pass interruption information along.
+        if (wasInterrupted) {
+            Thread.currentThread().interrupt();
+        }
+        return result;
+    }
+
+    /**
+     * Post |callable| to |handler| and wait for the result.
+     */
+    public static <V> V invokeAtFrontUninterruptibly(
+            final Handler handler, final Callable<V> callable) {
+        if (handler.getLooper().getThread() == Thread.currentThread()) {
+            try {
+                return callable.call();
+            } catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+        }
+        // Placeholder classes whose fields can be assigned from inside the nested class.
+        class CaughtException {
+            Exception e;
+        }
+        class Result {
+            public V value;
+        }
+        final Result result = new Result();
+        final CaughtException caughtException = new CaughtException();
+        final CountDownLatch barrier = new CountDownLatch(1);
+        handler.post(new Runnable() {
+            @Override
+            public void run() {
+                try {
+                    result.value = callable.call();
+                } catch (Exception e) {
+                    caughtException.e = e;
+                }
+                barrier.countDown();
+            }
+        });
+        awaitUninterruptibly(barrier);
+        // Re-throw any runtime exception caught inside the other thread. Since this is an invoke, add
+        // stack trace for the waiting thread as well.
+        if (caughtException.e != null) {
+            final RuntimeException runtimeException = new RuntimeException(caughtException.e);
+            runtimeException.setStackTrace(
+                    concatStackTraces(caughtException.e.getStackTrace(), runtimeException.getStackTrace()));
+            throw runtimeException;
+        }
+        return result.value;
+    }
+
+    /**
+     * Post |runner| to |handler|, at the front, and wait for completion.
+     */
+    public static void invokeAtFrontUninterruptibly(final Handler handler, final Runnable runner) {
+        invokeAtFrontUninterruptibly(handler, new Callable<Void>() {
+            @Override
+            public Void call() {
+                runner.run();
+                return null;
+            }
+        });
+    }
+
+    static StackTraceElement[] concatStackTraces(
+            StackTraceElement[] inner, StackTraceElement[] outer) {
+        final StackTraceElement[] combined = new StackTraceElement[inner.length + outer.length];
+        System.arraycopy(inner, 0, combined, 0, inner.length);
+        System.arraycopy(outer, 0, combined, inner.length, outer.length);
+        return combined;
+    }
+}

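A minimal usage sketch for the helpers above; ThreadUtilsDemo and the thread name "demo" are hypothetical, not part of the SDK. invokeAtFrontUninterruptibly() blocks the caller until the callable has run on the handler's thread (note that, as implemented above, it uses handler.post() rather than postAtFrontOfQueue()), and joinUninterruptibly() preserves the caller's interrupt status:

    import android.os.Handler;
    import android.os.HandlerThread;

    final class ThreadUtilsDemo { // hypothetical helper, not part of the SDK
        static String nameOfWorkerThread() {
            final HandlerThread worker = new HandlerThread("demo");
            worker.start();
            final Handler handler = new Handler(worker.getLooper());
            // Blocks until the callable has run on |worker|; the interrupt flag of
            // the calling thread is restored afterwards if it was set meanwhile.
            final String name = ThreadUtils.invokeAtFrontUninterruptibly(
                    handler, () -> Thread.currentThread().getName());
            worker.quitSafely();
            // Join without swallowing the caller's interrupt status.
            ThreadUtils.joinUninterruptibly(worker);
            return name;
        }
    }
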
+ 21 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/AudioDecoderFactoryFactory.java

@@ -0,0 +1,21 @@
+/*
+ *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Implementations of this interface can create a native {@code webrtc::AudioDecoderFactory}.
+ */
+public interface AudioDecoderFactoryFactory {
+  /**
+   * Returns a pointer to a {@code webrtc::AudioDecoderFactory}. The caller takes ownership.
+   */
+  long createNativeAudioDecoderFactory();
+}

+ 21 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/AudioEncoderFactoryFactory.java

@@ -0,0 +1,21 @@
+/*
+ *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Implementations of this interface can create a native {@code webrtc::AudioEncoderFactory}.
+ */
+public interface AudioEncoderFactoryFactory {
+  /**
+   * Returns a pointer to a {@code webrtc::AudioEncoderFactory}. The caller takes ownership.
+   */
+  long createNativeAudioEncoderFactory();
+}

+ 20 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/AudioProcessingFactory.java

@@ -0,0 +1,20 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Factory for creating webrtc::AudioProcessing instances. */
+public interface AudioProcessingFactory {
+  /**
+   * Dynamically allocates a webrtc::AudioProcessing instance and returns a pointer to it.
+   * The caller takes ownership of the object.
+   */
+  public long createNative();
+}

+ 26 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/AudioSource.java

@@ -0,0 +1,26 @@
+/*
+ *  Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Java wrapper for a C++ AudioSourceInterface.  Used as the source for one or
+ * more {@code AudioTrack} objects.
+ */
+public class AudioSource extends MediaSource {
+  public AudioSource(long nativeSource) {
+    super(nativeSource);
+  }
+
+  /** Returns a pointer to webrtc::AudioSourceInterface. */
+  long getNativeAudioSource() {
+    return getNativeMediaSource();
+  }
+}

+ 32 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/AudioTrack.java

@@ -0,0 +1,32 @@
+/*
+ *  Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ AudioTrackInterface */
+public class AudioTrack extends MediaStreamTrack {
+  public AudioTrack(long nativeTrack) {
+    super(nativeTrack);
+  }
+
+  /** Sets the volume for the underlying MediaSource. Volume is a gain value in the range
+   *  0 to 10.
+   */
+  public void setVolume(double volume) {
+    nativeSetVolume(getNativeAudioTrack(), volume);
+  }
+
+  /** Returns a pointer to webrtc::AudioTrackInterface. */
+  long getNativeAudioTrack() {
+    return getNativeMediaStreamTrack();
+  }
+
+  private static native void nativeSetVolume(long track, double volume);
+}

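For illustration, a one-line sketch of the volume API; `remoteAudioTrack` is a hypothetical AudioTrack obtained elsewhere (e.g. from a receiver), and the argument is a gain multiplier in the 0 to 10 range described above:

    remoteAudioTrack.setVolume(0.5); // attenuate to half gain; 1.0 leaves it unchanged
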
+ 23 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/BuiltinAudioDecoderFactoryFactory.java

@@ -0,0 +1,23 @@
+/*
+ *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Creates a native {@code webrtc::AudioDecoderFactory} with the builtin audio decoders.
+ */
+public class BuiltinAudioDecoderFactoryFactory implements AudioDecoderFactoryFactory {
+  @Override
+  public long createNativeAudioDecoderFactory() {
+    return nativeCreateBuiltinAudioDecoderFactory();
+  }
+
+  private static native long nativeCreateBuiltinAudioDecoderFactory();
+}

+ 23 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/BuiltinAudioEncoderFactoryFactory.java

@@ -0,0 +1,23 @@
+/*
+ *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * This class creates a native {@code webrtc::AudioEncoderFactory} with the builtin audio encoders.
+ */
+public class BuiltinAudioEncoderFactoryFactory implements AudioEncoderFactoryFactory {
+  @Override
+  public long createNativeAudioEncoderFactory() {
+    return nativeCreateBuiltinAudioEncoderFactory();
+  }
+
+  private static native long nativeCreateBuiltinAudioEncoderFactory();
+}

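A minimal sketch of how these two builtin factory factories are typically wired up, assuming the PeerConnectionFactory.Builder from the same SDK: the objects are handed to the builder, and the native side takes ownership of the pointers their create methods return.

    PeerConnectionFactory factory = PeerConnectionFactory.builder()
            .setAudioDecoderFactoryFactory(new BuiltinAudioDecoderFactoryFactory())
            .setAudioEncoderFactoryFactory(new BuiltinAudioEncoderFactoryFactory())
            .createPeerConnectionFactory();
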
+ 41 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/CallSessionFileRotatingLogSink.java

@@ -0,0 +1,41 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class CallSessionFileRotatingLogSink {
+  private long nativeSink;
+
+  public static byte[] getLogData(String dirPath) {
+    if (dirPath == null) {
+      throw new IllegalArgumentException("dirPath may not be null.");
+    }
+    return nativeGetLogData(dirPath);
+  }
+
+  public CallSessionFileRotatingLogSink(
+      String dirPath, int maxFileSize, Logging.Severity severity) {
+    if (dirPath == null) {
+      throw new IllegalArgumentException("dirPath may not be null.");
+    }
+    nativeSink = nativeAddSink(dirPath, maxFileSize, severity.ordinal());
+  }
+
+  public void dispose() {
+    if (nativeSink != 0) {
+      nativeDeleteSink(nativeSink);
+      nativeSink = 0;
+    }
+  }
+
+  private static native long nativeAddSink(String dirPath, int maxFileSize, int severity);
+  private static native void nativeDeleteSink(long sink);
+  private static native byte[] nativeGetLogData(String dirPath);
+}

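A minimal sketch of the sink's lifecycle; the directory path and the 10 MB size cap are placeholder values, and dispose() must be called to free the native sink.

    String logDir = context.getFilesDir().getAbsolutePath(); // hypothetical location
    CallSessionFileRotatingLogSink sink = new CallSessionFileRotatingLogSink(
            logDir, 10 * 1024 * 1024, Logging.Severity.LS_INFO);
    // ... the call session runs and logs rotate inside |logDir| ...
    byte[] logData = CallSessionFileRotatingLogSink.getLogData(logDir);
    sink.dispose(); // frees the native sink
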
+ 35 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/Camera1Capturer.java

@@ -0,0 +1,35 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+
+
+public class Camera1Capturer extends CameraCapturer {
+  private final boolean captureToTexture;
+
+  public Camera1Capturer(
+      String cameraName, CameraEventsHandler eventsHandler, boolean captureToTexture) {
+    super(cameraName, eventsHandler, new Camera1Enumerator(captureToTexture));
+
+    this.captureToTexture = captureToTexture;
+  }
+
+  @Override
+  protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
+      CameraSession.Events events, Context applicationContext,
+      SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
+      int framerate) {
+    Camera1Session.create(createSessionCallback, events, captureToTexture, applicationContext,
+        surfaceTextureHelper, Camera1Enumerator.getCameraIndex(cameraName), width, height,
+        framerate);
+  }
+}

+ 186 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/Camera1Enumerator.java

@@ -0,0 +1,186 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.os.SystemClock;
+
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+import java.util.ArrayList;
+import java.util.List;
+
+@SuppressWarnings("deprecation")
+public class Camera1Enumerator implements CameraEnumerator {
+    private final static String TAG = "Camera1Enumerator";
+    // Each entry contains the supported formats for corresponding camera index. The formats for all
+    // cameras are enumerated on the first call to getSupportedFormats(), and cached for future
+    // reference.
+    private static List<List<CaptureFormat>> cachedSupportedFormats;
+
+    private final boolean captureToTexture;
+
+    public Camera1Enumerator() {
+        this(true /* captureToTexture */);
+    }
+
+    public Camera1Enumerator(boolean captureToTexture) {
+        this.captureToTexture = captureToTexture;
+    }
+
+    // Returns device names that can be used to create a new VideoCapturerAndroid.
+    @Override
+    public String[] getDeviceNames() {
+        ArrayList<String> namesList = new ArrayList<>();
+        for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
+            String name = getDeviceName(i);
+            if (name != null) {
+                namesList.add(name);
+                Logging.d(TAG, "Index: " + i + ". " + name);
+            } else {
+                Logging.e(TAG, "Index: " + i + ". Failed to query camera name.");
+            }
+        }
+        String[] namesArray = new String[namesList.size()];
+        return namesList.toArray(namesArray);
+    }
+
+    @Override
+    public boolean isFrontFacing(String deviceName) {
+        android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
+        return info != null && info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT;
+    }
+
+    @Override
+    public boolean isBackFacing(String deviceName) {
+        android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
+        return info != null && info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK;
+    }
+
+    @Override
+    public List<CaptureFormat> getSupportedFormats(String deviceName) {
+        return getSupportedFormats(getCameraIndex(deviceName));
+    }
+
+    @Override
+    public CameraVideoCapturer createCapturer(
+            String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
+        return new Camera1Capturer(deviceName, eventsHandler, captureToTexture);
+    }
+
+    private static android.hardware.Camera.CameraInfo getCameraInfo(int index) {
+        android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
+        try {
+            android.hardware.Camera.getCameraInfo(index, info);
+        } catch (Exception e) {
+            Logging.e(TAG, "getCameraInfo failed on index " + index, e);
+            return null;
+        }
+        return info;
+    }
+
+    static synchronized List<CaptureFormat> getSupportedFormats(int cameraId) {
+        if (cachedSupportedFormats == null) {
+            cachedSupportedFormats = new ArrayList<List<CaptureFormat>>();
+            for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
+                cachedSupportedFormats.add(enumerateFormats(i));
+            }
+        }
+        return cachedSupportedFormats.get(cameraId);
+    }
+
+    private static List<CaptureFormat> enumerateFormats(int cameraId) {
+        Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
+        final long startTimeMs = SystemClock.elapsedRealtime();
+        final android.hardware.Camera.Parameters parameters;
+        android.hardware.Camera camera = null;
+        try {
+            Logging.d(TAG, "Opening camera with index " + cameraId);
+            camera = android.hardware.Camera.open(cameraId);
+            parameters = camera.getParameters();
+        } catch (RuntimeException e) {
+            Logging.e(TAG, "Open camera failed on camera index " + cameraId, e);
+            return new ArrayList<CaptureFormat>();
+        } finally {
+            if (camera != null) {
+                camera.release();
+            }
+        }
+
+        final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
+        try {
+            int minFps = 0;
+            int maxFps = 0;
+            final List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
+            if (listFpsRange != null) {
+                // getSupportedPreviewFpsRange() returns a sorted list. Take the fps range
+                // corresponding to the highest fps.
+                final int[] range = listFpsRange.get(listFpsRange.size() - 1);
+                minFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
+                maxFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
+            }
+            for (android.hardware.Camera.Size size : parameters.getSupportedPreviewSizes()) {
+                formatList.add(new CaptureFormat(size.width, size.height, minFps, maxFps));
+            }
+        } catch (Exception e) {
+            Logging.e(TAG, "getSupportedFormats() failed on camera index " + cameraId, e);
+        }
+
+        final long endTimeMs = SystemClock.elapsedRealtime();
+        Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+                + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
+        return formatList;
+    }
+
+    // Convert from android.hardware.Camera.Size to Size.
+    static List<Size> convertSizes(List<android.hardware.Camera.Size> cameraSizes) {
+        final List<Size> sizes = new ArrayList<Size>();
+        for (android.hardware.Camera.Size size : cameraSizes) {
+            sizes.add(new Size(size.width, size.height));
+        }
+        return sizes;
+    }
+
+    // Convert from int[2] to CaptureFormat.FramerateRange.
+    static List<CaptureFormat.FramerateRange> convertFramerates(List<int[]> arrayRanges) {
+        final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
+        for (int[] range : arrayRanges) {
+            ranges.add(new CaptureFormat.FramerateRange(
+                    range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
+                    range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]));
+        }
+        return ranges;
+    }
+
+    // Returns the camera index for camera with name |deviceName|, or throws IllegalArgumentException
+    // if no such camera can be found.
+    static int getCameraIndex(String deviceName) {
+        Logging.d(TAG, "getCameraIndex: " + deviceName);
+        for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
+            if (deviceName.equals(getDeviceName(i))) {
+                return i;
+            }
+        }
+        throw new IllegalArgumentException("No such camera: " + deviceName);
+    }
+
+    // Returns the name of the camera with camera index. Returns null if the
+    // camera can not be used.
+    static String getDeviceName(int index) {
+        android.hardware.Camera.CameraInfo info = getCameraInfo(index);
+        if (info == null) {
+            return null;
+        }
+
+        String facing =
+                (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
+        return "Camera " + index + ", Facing " + facing + ", Orientation " + info.orientation;
+    }
+}

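A minimal sketch of the intended enumeration flow: list the device names, pick the first front-facing camera, and create a capturer for it (the events handler may be null if no callbacks are needed).

    CameraEnumerator enumerator = new Camera1Enumerator(/* captureToTexture= */ true);
    CameraVideoCapturer capturer = null;
    for (String deviceName : enumerator.getDeviceNames()) {
        if (enumerator.isFrontFacing(deviceName)) {
            capturer = enumerator.createCapturer(deviceName, /* eventsHandler= */ null);
            break;
        }
    }
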
+ 37 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/Camera2Capturer.java

@@ -0,0 +1,37 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.hardware.camera2.CameraManager;
+
+@TargetApi(21)
+public class Camera2Capturer extends CameraCapturer {
+  private final Context context;
+  private final CameraManager cameraManager;
+
+  public Camera2Capturer(Context context, String cameraName, CameraEventsHandler eventsHandler) {
+    super(cameraName, eventsHandler, new Camera2Enumerator(context));
+
+    this.context = context;
+    cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+  }
+
+  @Override
+  protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
+      CameraSession.Events events, Context applicationContext,
+      SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
+      int framerate) {
+    Camera2Session.create(createSessionCallback, events, applicationContext, cameraManager,
+        surfaceTextureHelper, cameraName, width, height, framerate);
+  }
+}

+ 245 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/Camera2Enumerator.java

@@ -0,0 +1,245 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.graphics.Rect;
+import android.graphics.SurfaceTexture;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.os.Build;
+import android.os.SystemClock;
+import android.util.AndroidException;
+import android.util.Range;
+
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+@TargetApi(21)
+public class Camera2Enumerator implements CameraEnumerator {
+  private final static String TAG = "Camera2Enumerator";
+  private final static double NANO_SECONDS_PER_SECOND = 1.0e9;
+
+  // Each entry contains the supported formats for a given camera index. The formats are enumerated
+  // lazily in getSupportedFormats(), and cached for future reference.
+  private static final Map<String, List<CaptureFormat>> cachedSupportedFormats =
+      new HashMap<String, List<CaptureFormat>>();
+
+  final Context context;
+  final CameraManager cameraManager;
+
+  public Camera2Enumerator(Context context) {
+    this.context = context;
+    this.cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+  }
+
+  @Override
+  public String[] getDeviceNames() {
+    try {
+      return cameraManager.getCameraIdList();
+      // On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
+      // catch statement with an Exception from a newer API, even if the code is never executed.
+      // https://code.google.com/p/android/issues/detail?id=209129
+    } catch (/* CameraAccessException */ AndroidException e) {
+      Logging.e(TAG, "Camera access exception: " + e);
+      return new String[] {};
+    }
+  }
+
+  @Override
+  public boolean isFrontFacing(String deviceName) {
+    CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
+
+    return characteristics != null
+        && characteristics.get(CameraCharacteristics.LENS_FACING)
+        == CameraMetadata.LENS_FACING_FRONT;
+  }
+
+  @Override
+  public boolean isBackFacing(String deviceName) {
+    CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
+
+    return characteristics != null
+        && characteristics.get(CameraCharacteristics.LENS_FACING)
+        == CameraMetadata.LENS_FACING_BACK;
+  }
+
+  @Override
+  public List<CaptureFormat> getSupportedFormats(String deviceName) {
+    return getSupportedFormats(context, deviceName);
+  }
+
+  @Override
+  public CameraVideoCapturer createCapturer(
+      String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
+    return new Camera2Capturer(context, deviceName, eventsHandler);
+  }
+
+  private CameraCharacteristics getCameraCharacteristics(String deviceName) {
+    try {
+      return cameraManager.getCameraCharacteristics(deviceName);
+      // On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
+      // catch statement with an Exception from a newer API, even if the code is never executed.
+      // https://code.google.com/p/android/issues/detail?id=209129
+    } catch (/* CameraAccessException */ AndroidException e) {
+      Logging.e(TAG, "Camera access exception: " + e);
+      return null;
+    }
+  }
+
+  /**
+   * Checks if API is supported and all cameras have better than legacy support.
+   */
+  public static boolean isSupported(Context context) {
+    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
+      return false;
+    }
+
+    CameraManager cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+    try {
+      String[] cameraIds = cameraManager.getCameraIdList();
+      for (String id : cameraIds) {
+        CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(id);
+        if (characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)
+            == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
+          return false;
+        }
+      }
+    } catch (AndroidException e) {
+      Logging.e(TAG, "Camera access exception: " + e);
+      return false;
+    }
+    return true;
+  }
+
+  static int getFpsUnitFactor(Range<Integer>[] fpsRanges) {
+    if (fpsRanges.length == 0) {
+      return 1000;
+    }
+    return fpsRanges[0].getUpper() < 1000 ? 1000 : 1;
+  }
+
+  static List<Size> getSupportedSizes(CameraCharacteristics cameraCharacteristics) {
+    final StreamConfigurationMap streamMap =
+        cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+    final int supportLevel =
+        cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
+
+    final android.util.Size[] nativeSizes = streamMap.getOutputSizes(SurfaceTexture.class);
+    final List<Size> sizes = convertSizes(nativeSizes);
+
+    // Video may be stretched pre LMR1 on legacy implementations.
+    // Filter out formats that have different aspect ratio than the sensor array.
+    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1
+        && supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
+      final Rect activeArraySize =
+          cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+      final ArrayList<Size> filteredSizes = new ArrayList<Size>();
+
+      for (Size size : sizes) {
+        if (activeArraySize.width() * size.height == activeArraySize.height() * size.width) {
+          filteredSizes.add(size);
+        }
+      }
+
+      return filteredSizes;
+    } else {
+      return sizes;
+    }
+  }
+
+  static List<CaptureFormat> getSupportedFormats(Context context, String cameraId) {
+    return getSupportedFormats(
+        (CameraManager) context.getSystemService(Context.CAMERA_SERVICE), cameraId);
+  }
+
+  static List<CaptureFormat> getSupportedFormats(CameraManager cameraManager, String cameraId) {
+    synchronized (cachedSupportedFormats) {
+      if (cachedSupportedFormats.containsKey(cameraId)) {
+        return cachedSupportedFormats.get(cameraId);
+      }
+
+      Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
+      final long startTimeMs = SystemClock.elapsedRealtime();
+
+      final CameraCharacteristics cameraCharacteristics;
+      try {
+        cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
+      } catch (Exception ex) {
+        Logging.e(TAG, "getCameraCharacteristics(): " + ex);
+        return new ArrayList<CaptureFormat>();
+      }
+
+      final StreamConfigurationMap streamMap =
+          cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+
+      Range<Integer>[] fpsRanges =
+          cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
+      List<CaptureFormat.FramerateRange> framerateRanges =
+          convertFramerates(fpsRanges, getFpsUnitFactor(fpsRanges));
+      List<Size> sizes = getSupportedSizes(cameraCharacteristics);
+
+      int defaultMaxFps = 0;
+      for (CaptureFormat.FramerateRange framerateRange : framerateRanges) {
+        defaultMaxFps = Math.max(defaultMaxFps, framerateRange.max);
+      }
+
+      final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
+      for (Size size : sizes) {
+        long minFrameDurationNs = 0;
+        try {
+          minFrameDurationNs = streamMap.getOutputMinFrameDuration(
+              SurfaceTexture.class, new android.util.Size(size.width, size.height));
+        } catch (Exception e) {
+          // getOutputMinFrameDuration() is not supported on all devices. Ignore silently.
+        }
+        final int maxFps = (minFrameDurationNs == 0)
+            ? defaultMaxFps
+            : (int) Math.round(NANO_SECONDS_PER_SECOND / minFrameDurationNs) * 1000;
+        formatList.add(new CaptureFormat(size.width, size.height, 0, maxFps));
+        Logging.d(TAG, "Format: " + size.width + "x" + size.height + "@" + maxFps);
+      }
+
+      cachedSupportedFormats.put(cameraId, formatList);
+      final long endTimeMs = SystemClock.elapsedRealtime();
+      Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+              + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
+      return formatList;
+    }
+  }
+
+  // Convert from android.util.Size to Size.
+  private static List<Size> convertSizes(android.util.Size[] cameraSizes) {
+    final List<Size> sizes = new ArrayList<Size>();
+    for (android.util.Size size : cameraSizes) {
+      sizes.add(new Size(size.getWidth(), size.getHeight()));
+    }
+    return sizes;
+  }
+
+  // Convert from android.util.Range<Integer> to CaptureFormat.FramerateRange.
+  static List<CaptureFormat.FramerateRange> convertFramerates(
+      Range<Integer>[] arrayRanges, int unitFactor) {
+    final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
+    for (Range<Integer> range : arrayRanges) {
+      ranges.add(new CaptureFormat.FramerateRange(
+          range.getLower() * unitFactor, range.getUpper() * unitFactor));
+    }
+    return ranges;
+  }
+}

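isSupported() above enables the usual selection pattern: prefer Camera2 only when every device reports better-than-legacy support, and fall back to Camera1 otherwise. A minimal sketch, with `context` assumed to be an application Context:

    CameraEnumerator enumerator = Camera2Enumerator.isSupported(context)
            ? new Camera2Enumerator(context)
            : new Camera1Enumerator(/* captureToTexture= */ true);
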
+ 206 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/CameraEnumerationAndroid.java

@@ -0,0 +1,206 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static java.lang.Math.abs;
+
+import android.graphics.ImageFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+
+@SuppressWarnings("deprecation")
+public class CameraEnumerationAndroid {
+  private final static String TAG = "CameraEnumerationAndroid";
+
+  static final ArrayList<Size> COMMON_RESOLUTIONS = new ArrayList<Size>(Arrays.asList(
+      // 0, Unknown resolution
+      new Size(160, 120), // 1, QQVGA
+      new Size(240, 160), // 2, HQVGA
+      new Size(320, 240), // 3, QVGA
+      new Size(400, 240), // 4, WQVGA
+      new Size(480, 320), // 5, HVGA
+      new Size(640, 360), // 6, nHD
+      new Size(640, 480), // 7, VGA
+      new Size(768, 480), // 8, WVGA
+      new Size(854, 480), // 9, FWVGA
+      new Size(800, 600), // 10, SVGA
+      new Size(960, 540), // 11, qHD
+      new Size(960, 640), // 12, DVGA
+      new Size(1024, 576), // 13, WSVGA
+      new Size(1024, 600), // 14, WVSGA
+      new Size(1280, 720), // 15, HD
+      new Size(1280, 1024), // 16, SXGA
+      new Size(1920, 1080), // 17, Full HD
+      new Size(1920, 1440), // 18, Full HD 4:3
+      new Size(2560, 1440), // 19, QHD
+      new Size(3840, 2160) // 20, UHD
+      ));
+
+  public static class CaptureFormat {
+    // Class to represent a framerate range. The framerate varies because of lighting conditions.
+    // The values are multiplied by 1000, so 1000 represents one frame per second.
+    public static class FramerateRange {
+      public int min;
+      public int max;
+
+      public FramerateRange(int min, int max) {
+        this.min = min;
+        this.max = max;
+      }
+
+      @Override
+      public String toString() {
+        return "[" + (min / 1000.0f) + ":" + (max / 1000.0f) + "]";
+      }
+
+      @Override
+      public boolean equals(Object other) {
+        if (!(other instanceof FramerateRange)) {
+          return false;
+        }
+        final FramerateRange otherFramerate = (FramerateRange) other;
+        return min == otherFramerate.min && max == otherFramerate.max;
+      }
+
+      @Override
+      public int hashCode() {
+        // Use prime close to 2^16 to avoid collisions for normal values less than 2^16.
+        return 1 + 65537 * min + max;
+      }
+    }
+
+    public final int width;
+    public final int height;
+    public final FramerateRange framerate;
+
+    // TODO(hbos): If VideoCapturer.startCapture is updated to support other image formats then this
+    // needs to be updated and VideoCapturer.getSupportedFormats need to return CaptureFormats of
+    // all imageFormats.
+    public final int imageFormat = ImageFormat.NV21;
+
+    public CaptureFormat(int width, int height, int minFramerate, int maxFramerate) {
+      this.width = width;
+      this.height = height;
+      this.framerate = new FramerateRange(minFramerate, maxFramerate);
+    }
+
+    public CaptureFormat(int width, int height, FramerateRange framerate) {
+      this.width = width;
+      this.height = height;
+      this.framerate = framerate;
+    }
+
+    // Calculates the frame size of this capture format.
+    public int frameSize() {
+      return frameSize(width, height, imageFormat);
+    }
+
+    // Calculates the frame size of the specified image format. Currently only
+    // supporting ImageFormat.NV21.
+    // The size is width * height * number of bytes per pixel.
+    // http://developer.android.com/reference/android/hardware/Camera.html#addCallbackBuffer(byte[])
+    public static int frameSize(int width, int height, int imageFormat) {
+      if (imageFormat != ImageFormat.NV21) {
+        throw new UnsupportedOperationException("Don't know how to calculate "
+            + "the frame size of non-NV21 image formats.");
+      }
+      return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
+    }
+
+    @Override
+    public String toString() {
+      return width + "x" + height + "@" + framerate;
+    }
+
+    @Override
+    public boolean equals(Object other) {
+      if (!(other instanceof CaptureFormat)) {
+        return false;
+      }
+      final CaptureFormat otherFormat = (CaptureFormat) other;
+      return width == otherFormat.width && height == otherFormat.height
+          && framerate.equals(otherFormat.framerate);
+    }
+
+    @Override
+    public int hashCode() {
+      return 1 + (width * 65497 + height) * 251 + framerate.hashCode();
+    }
+  }
+
+  // Helper class for finding the closest supported format for the two functions below. It creates a
+  // comparator based on the difference to some requested parameters, where the element with the
+  // minimum difference is the element that is closest to the requested parameters.
+  private static abstract class ClosestComparator<T> implements Comparator<T> {
+    // Difference between supported and requested parameter.
+    abstract int diff(T supportedParameter);
+
+    @Override
+    public int compare(T t1, T t2) {
+      return diff(t1) - diff(t2);
+    }
+  }
+
+  // Prefer a fps range with an upper bound close to |framerate|. Also prefer a fps range with a low
+  // lower bound, to allow the framerate to fluctuate based on lighting conditions.
+  public static CaptureFormat.FramerateRange getClosestSupportedFramerateRange(
+      List<CaptureFormat.FramerateRange> supportedFramerates, final int requestedFps) {
+    return Collections.min(
+        supportedFramerates, new ClosestComparator<CaptureFormat.FramerateRange>() {
+          // Progressive penalty if the upper bound is further away than |MAX_FPS_DIFF_THRESHOLD|
+          // from requested.
+          private static final int MAX_FPS_DIFF_THRESHOLD = 5000;
+          private static final int MAX_FPS_LOW_DIFF_WEIGHT = 1;
+          private static final int MAX_FPS_HIGH_DIFF_WEIGHT = 3;
+
+          // Progressive penalty if the lower bound is bigger than |MIN_FPS_THRESHOLD|.
+          private static final int MIN_FPS_THRESHOLD = 8000;
+          private static final int MIN_FPS_LOW_VALUE_WEIGHT = 1;
+          private static final int MIN_FPS_HIGH_VALUE_WEIGHT = 4;
+
+          // Use one weight for small |value| less than |threshold|, and another weight above.
+          private int progressivePenalty(int value, int threshold, int lowWeight, int highWeight) {
+            return (value < threshold) ? value * lowWeight
+                                       : threshold * lowWeight + (value - threshold) * highWeight;
+          }
+
+          @Override
+          int diff(CaptureFormat.FramerateRange range) {
+            final int minFpsError = progressivePenalty(
+                range.min, MIN_FPS_THRESHOLD, MIN_FPS_LOW_VALUE_WEIGHT, MIN_FPS_HIGH_VALUE_WEIGHT);
+            final int maxFpsError = progressivePenalty(Math.abs(requestedFps * 1000 - range.max),
+                MAX_FPS_DIFF_THRESHOLD, MAX_FPS_LOW_DIFF_WEIGHT, MAX_FPS_HIGH_DIFF_WEIGHT);
+            return minFpsError + maxFpsError;
+          }
+        });
+  }
+
+  public static Size getClosestSupportedSize(
+      List<Size> supportedSizes, final int requestedWidth, final int requestedHeight) {
+    return Collections.min(supportedSizes, new ClosestComparator<Size>() {
+      @Override
+      int diff(Size size) {
+        return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
+      }
+    });
+  }
+
+  // Helper method for camera classes.
+  static void reportCameraResolution(Histogram histogram, Size resolution) {
+    int index = COMMON_RESOLUTIONS.indexOf(resolution);
+    // 0 is reserved for unknown resolution, so add 1.
+    // indexOf returns -1 for unknown resolutions so it becomes 0 automatically.
+    histogram.addSample(index + 1);
+  }
+}

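A minimal sketch of how the two "closest" helpers map a requested 1280x720@30 capture onto what a device actually supports; `enumerator` and `deviceName` are assumed from the surrounding code, and the requested fps is passed in whole frames per second (the comparator scales it by 1000 internally to match FramerateRange units).

    List<CameraEnumerationAndroid.CaptureFormat> formats =
            enumerator.getSupportedFormats(deviceName);
    List<CameraEnumerationAndroid.CaptureFormat.FramerateRange> fpsRanges = new ArrayList<>();
    List<Size> sizes = new ArrayList<>();
    for (CameraEnumerationAndroid.CaptureFormat format : formats) {
        fpsRanges.add(format.framerate);
        sizes.add(new Size(format.width, format.height));
    }
    CameraEnumerationAndroid.CaptureFormat.FramerateRange fps =
            CameraEnumerationAndroid.getClosestSupportedFramerateRange(fpsRanges, 30);
    Size size = CameraEnumerationAndroid.getClosestSupportedSize(sizes, 1280, 720);
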
+ 25 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/CameraEnumerator.java

@@ -0,0 +1,25 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+import java.util.List;
+
+public interface CameraEnumerator {
+  public String[] getDeviceNames();
+  public boolean isFrontFacing(String deviceName);
+  public boolean isBackFacing(String deviceName);
+  public List<CaptureFormat> getSupportedFormats(String deviceName);
+
+  public CameraVideoCapturer createCapturer(
+      String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler);
+}

+ 167 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/CameraVideoCapturer.java

@@ -0,0 +1,167 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaRecorder;
+
+
+/**
+ * Base interface for camera1 and camera2 implementations. Extends VideoCapturer with a
+ * switchCamera() function. Also provides subinterfaces for handling camera events, and a helper
+ * class for detecting camera freezes.
+ */
+public interface CameraVideoCapturer extends VideoCapturer {
+  /**
+   * Camera events handler - can be used to get notified about camera events. The callbacks are
+   * executed from an arbitrary thread.
+   */
+  public interface CameraEventsHandler {
+    // Camera error handler - invoked when camera can not be opened
+    // or any camera exception happens on camera thread.
+    void onCameraError(String errorDescription);
+
+    // Called when camera is disconnected.
+    void onCameraDisconnected();
+
+    // Invoked when camera stops receiving frames.
+    void onCameraFreezed(String errorDescription);
+
+    // Callback invoked when camera is opening.
+    void onCameraOpening(String cameraName);
+
+    // Callback invoked when first camera frame is available after camera is started.
+    void onFirstFrameAvailable();
+
+    // Callback invoked when camera is closed.
+    void onCameraClosed();
+  }
+
+  /**
+   * Camera switch handler - one of these functions is invoked with the result of switchCamera().
+   * The callback may be called on an arbitrary thread.
+   */
+  public interface CameraSwitchHandler {
+    // Invoked on success. |isFrontCamera| is true if the new camera is front facing.
+    void onCameraSwitchDone(boolean isFrontCamera);
+
+    // Invoked on failure, e.g. camera is stopped or only one camera available.
+    void onCameraSwitchError(String errorDescription);
+  }
+
+  /**
+   * Switch camera to the next valid camera id. This can only be called while the camera is running.
+   * This function can be called from any thread.
+   */
+  void switchCamera(CameraSwitchHandler switchEventsHandler);
+
+  /**
+   * MediaRecorder add/remove handler - one of these functions is invoked with the result of
+   * an addMediaRecorderToCamera() or removeMediaRecorderFromCamera() call.
+   * The callback may be called on an arbitrary thread.
+   */
+  @Deprecated
+  public interface MediaRecorderHandler {
+    // Invoked on success.
+    void onMediaRecorderSuccess();
+
+    // Invoked on failure, e.g. camera is stopped or any exception happens.
+    void onMediaRecorderError(String errorDescription);
+  }
+
+  /**
+   * Add MediaRecorder to camera pipeline. This can only be called while the camera is running.
+   * Once MediaRecorder is added to camera pipeline camera switch is not allowed.
+   * This function can be called from any thread.
+   */
+  @Deprecated
+  default void addMediaRecorderToCamera(
+      MediaRecorder mediaRecorder, MediaRecorderHandler resultHandler) {
+    throw new UnsupportedOperationException("Deprecated and not implemented.");
+  }
+
+  /**
+   * Remove MediaRecorder from camera pipeline. This can only be called while the camera is running.
+   * This function can be called from any thread.
+   */
+  @Deprecated
+  default void removeMediaRecorderFromCamera(MediaRecorderHandler resultHandler) {
+    throw new UnsupportedOperationException("Deprecated and not implemented.");
+  }
+
+  /**
+   * Helper class to log framerate and detect if the camera freezes. It will run periodic callbacks
+   * on the SurfaceTextureHelper thread passed in the ctor, and should only be operated from that
+   * thread.
+   */
+  public static class CameraStatistics {
+    private final static String TAG = "CameraStatistics";
+    private final static int CAMERA_OBSERVER_PERIOD_MS = 2000;
+    private final static int CAMERA_FREEZE_REPORT_TIMEOUT_MS = 4000;
+
+    private final SurfaceTextureHelper surfaceTextureHelper;
+    private final CameraEventsHandler eventsHandler;
+    private int frameCount;
+    private int freezePeriodCount;
+    // Camera observer - monitors camera framerate. Observer is executed on camera thread.
+    private final Runnable cameraObserver = new Runnable() {
+      @Override
+      public void run() {
+        final int cameraFps = Math.round(frameCount * 1000.0f / CAMERA_OBSERVER_PERIOD_MS);
+        Logging.d(TAG, "Camera fps: " + cameraFps + ".");
+        if (frameCount == 0) {
+          ++freezePeriodCount;
+          if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount >= CAMERA_FREEZE_REPORT_TIMEOUT_MS
+              && eventsHandler != null) {
+            Logging.e(TAG, "Camera freezed.");
+            if (surfaceTextureHelper.isTextureInUse()) {
+              // This can only happen if we are capturing to textures.
+              eventsHandler.onCameraFreezed("Camera failure. Client must return video buffers.");
+            } else {
+              eventsHandler.onCameraFreezed("Camera failure.");
+            }
+            return;
+          }
+        } else {
+          freezePeriodCount = 0;
+        }
+        frameCount = 0;
+        surfaceTextureHelper.getHandler().postDelayed(this, CAMERA_OBSERVER_PERIOD_MS);
+      }
+    };
+
+    public CameraStatistics(
+        SurfaceTextureHelper surfaceTextureHelper, CameraEventsHandler eventsHandler) {
+      if (surfaceTextureHelper == null) {
+        throw new IllegalArgumentException("SurfaceTextureHelper is null");
+      }
+      this.surfaceTextureHelper = surfaceTextureHelper;
+      this.eventsHandler = eventsHandler;
+      this.frameCount = 0;
+      this.freezePeriodCount = 0;
+      surfaceTextureHelper.getHandler().postDelayed(cameraObserver, CAMERA_OBSERVER_PERIOD_MS);
+    }
+
+    private void checkThread() {
+      if (Thread.currentThread() != surfaceTextureHelper.getHandler().getLooper().getThread()) {
+        throw new IllegalStateException("Wrong thread");
+      }
+    }
+
+    public void addFrame() {
+      checkThread();
+      ++frameCount;
+    }
+
+    public void release() {
+      surfaceTextureHelper.getHandler().removeCallbacks(cameraObserver);
+    }
+  }
+}

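A minimal sketch of a camera switch, assuming `capturer` is a running CameraVideoCapturer; as documented above, the handler may be invoked on an arbitrary thread. The tag "CameraDemo" is a placeholder.

    capturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() {
        @Override
        public void onCameraSwitchDone(boolean isFrontCamera) {
            Logging.d("CameraDemo", "Switched; front facing: " + isFrontCamera);
        }

        @Override
        public void onCameraSwitchError(String errorDescription) {
            Logging.e("CameraDemo", "Switch failed: " + errorDescription);
        }
    });
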
+ 27 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/CapturerObserver.java

@@ -0,0 +1,27 @@
+/*
+ *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Interface for observing a capturer. Passed to {@link VideoCapturer#initialize}. Provided by
+ * {@link VideoSource#getCapturerObserver}.
+ *
+ * All callbacks must be executed on a single thread.
+ */
+public interface CapturerObserver {
+  /** Notifies whether the capturer has been started successfully or not. */
+  void onCapturerStarted(boolean success);
+  /** Notify that the capturer has been stopped. */
+  void onCapturerStopped();
+
+  /** Delivers a captured frame. */
+  void onFrameCaptured(VideoFrame frame);
+}

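For illustration only, a pass-through implementation that logs each callback; real pipelines normally use the observer returned by VideoSource.getCapturerObserver() rather than a hand-written one, and the class name and tag below are hypothetical.

    class LoggingCapturerObserver implements CapturerObserver {
        private static final String TAG = "LoggingCapturerObserver"; // hypothetical tag

        @Override
        public void onCapturerStarted(boolean success) {
            Logging.d(TAG, "Capturer started: " + success);
        }

        @Override
        public void onCapturerStopped() {
            Logging.d(TAG, "Capturer stopped");
        }

        @Override
        public void onFrameCaptured(VideoFrame frame) {
            Logging.v(TAG, "Frame: " + frame.getRotatedWidth() + "x" + frame.getRotatedHeight());
        }
    }
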
+ 145 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/CryptoOptions.java

@@ -0,0 +1,145 @@
+/*
+ *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * CryptoOptions defines advanced cryptographic settings for native WebRTC.
+ * These settings must be passed into RTCConfiguration. WebRTC is secure by
+ * default and you should not need to set any of these options unless you are
+ * specifically looking for an additional crypto feature such as AES_GCM
+ * support. This class is the Java binding of native api/crypto/cryptooptions.h
+ */
+public final class CryptoOptions {
+  /**
+   * SRTP Related Peer Connection Options.
+   */
+  public final class Srtp {
+    /**
+     * Enable GCM crypto suites from RFC 7714 for SRTP. GCM will only be used
+     * if both sides enable it
+     */
+    private final boolean enableGcmCryptoSuites;
+    /**
+     * If set to true, the (potentially insecure) crypto cipher
+     * SRTP_AES128_CM_SHA1_32 will be included in the list of supported ciphers
+     * during negotiation. It will only be used if both peers support it and no
+     * other ciphers get preferred.
+     */
+    private final boolean enableAes128Sha1_32CryptoCipher;
+    /**
+     * If set to true, encrypted RTP header extensions as defined in RFC 6904
+     * will be negotiated. They will only be used if both peers support them.
+     */
+    private final boolean enableEncryptedRtpHeaderExtensions;
+
+    private Srtp(boolean enableGcmCryptoSuites, boolean enableAes128Sha1_32CryptoCipher,
+        boolean enableEncryptedRtpHeaderExtensions) {
+      this.enableGcmCryptoSuites = enableGcmCryptoSuites;
+      this.enableAes128Sha1_32CryptoCipher = enableAes128Sha1_32CryptoCipher;
+      this.enableEncryptedRtpHeaderExtensions = enableEncryptedRtpHeaderExtensions;
+    }
+
+    @CalledByNative("Srtp")
+    public boolean getEnableGcmCryptoSuites() {
+      return enableGcmCryptoSuites;
+    }
+
+    @CalledByNative("Srtp")
+    public boolean getEnableAes128Sha1_32CryptoCipher() {
+      return enableAes128Sha1_32CryptoCipher;
+    }
+
+    @CalledByNative("Srtp")
+    public boolean getEnableEncryptedRtpHeaderExtensions() {
+      return enableEncryptedRtpHeaderExtensions;
+    }
+  }
+
+  /**
+   * Options to be used when the FrameEncryptor / FrameDecryptor APIs are used.
+   */
+  public final class SFrame {
+    /**
+     * If set, all RtpSenders must have a FrameEncryptor attached to them before
+     * they are allowed to send packets. All RtpReceivers must have a
+     * FrameDecryptor attached to them before they are able to receive packets.
+     */
+    private final boolean requireFrameEncryption;
+
+    private SFrame(boolean requireFrameEncryption) {
+      this.requireFrameEncryption = requireFrameEncryption;
+    }
+
+    @CalledByNative("SFrame")
+    public boolean getRequireFrameEncryption() {
+      return requireFrameEncryption;
+    }
+  }
+
+  private final Srtp srtp;
+  private final SFrame sframe;
+
+  private CryptoOptions(boolean enableGcmCryptoSuites, boolean enableAes128Sha1_32CryptoCipher,
+      boolean enableEncryptedRtpHeaderExtensions, boolean requireFrameEncryption) {
+    this.srtp = new Srtp(
+        enableGcmCryptoSuites, enableAes128Sha1_32CryptoCipher, enableEncryptedRtpHeaderExtensions);
+    this.sframe = new SFrame(requireFrameEncryption);
+  }
+
+  public static Builder builder() {
+    return new Builder();
+  }
+
+  @CalledByNative
+  public Srtp getSrtp() {
+    return srtp;
+  }
+
+  @CalledByNative
+  public SFrame getSFrame() {
+    return sframe;
+  }
+
+  public static class Builder {
+    private boolean enableGcmCryptoSuites;
+    private boolean enableAes128Sha1_32CryptoCipher;
+    private boolean enableEncryptedRtpHeaderExtensions;
+    private boolean requireFrameEncryption;
+
+    private Builder() {}
+
+    public Builder setEnableGcmCryptoSuites(boolean enableGcmCryptoSuites) {
+      this.enableGcmCryptoSuites = enableGcmCryptoSuites;
+      return this;
+    }
+
+    public Builder setEnableAes128Sha1_32CryptoCipher(boolean enableAes128Sha1_32CryptoCipher) {
+      this.enableAes128Sha1_32CryptoCipher = enableAes128Sha1_32CryptoCipher;
+      return this;
+    }
+
+    public Builder setEnableEncryptedRtpHeaderExtensions(
+        boolean enableEncryptedRtpHeaderExtensions) {
+      this.enableEncryptedRtpHeaderExtensions = enableEncryptedRtpHeaderExtensions;
+      return this;
+    }
+
+    public Builder setRequireFrameEncryption(boolean requireFrameEncryption) {
+      this.requireFrameEncryption = requireFrameEncryption;
+      return this;
+    }
+
+    public CryptoOptions createCryptoOptions() {
+      return new CryptoOptions(enableGcmCryptoSuites, enableAes128Sha1_32CryptoCipher,
+          enableEncryptedRtpHeaderExtensions, requireFrameEncryption);
+    }
+  }
+}

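A minimal sketch of the builder: enable the GCM suites and keep the remaining defaults. The result is meant to be assigned to a peer connection configuration before the connection is created; `rtcConfig` below is an assumed PeerConnection.RTCConfiguration from the same SDK.

    CryptoOptions cryptoOptions = CryptoOptions.builder()
            .setEnableGcmCryptoSuites(true)
            .createCryptoOptions();
    rtcConfig.cryptoOptions = cryptoOptions; // rtcConfig: assumed RTCConfiguration instance
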
+ 195 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/DataChannel.java

@@ -0,0 +1,195 @@
+/*
+ *  Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+
+/** Java wrapper for a C++ DataChannelInterface. */
+public class DataChannel {
+  /** Java wrapper for WebIDL RTCDataChannel. */
+  public static class Init {
+    public boolean ordered = true;
+    // Optional unsigned short in WebIDL, -1 means unspecified.
+    public int maxRetransmitTimeMs = -1;
+    // Optional unsigned short in WebIDL, -1 means unspecified.
+    public int maxRetransmits = -1;
+    public String protocol = "";
+    public boolean negotiated;
+    // Optional unsigned short in WebIDL, -1 means unspecified.
+    public int id = -1;
+
+    @CalledByNative("Init")
+    boolean getOrdered() {
+      return ordered;
+    }
+
+    @CalledByNative("Init")
+    int getMaxRetransmitTimeMs() {
+      return maxRetransmitTimeMs;
+    }
+
+    @CalledByNative("Init")
+    int getMaxRetransmits() {
+      return maxRetransmits;
+    }
+
+    @CalledByNative("Init")
+    String getProtocol() {
+      return protocol;
+    }
+
+    @CalledByNative("Init")
+    boolean getNegotiated() {
+      return negotiated;
+    }
+
+    @CalledByNative("Init")
+    int getId() {
+      return id;
+    }
+  }
+
+  /** Java version of C++ DataBuffer.  The atom of data in a DataChannel. */
+  public static class Buffer {
+    /** The underlying data. */
+    public final ByteBuffer data;
+
+    /**
+     * Indicates whether |data| contains UTF-8 text or "binary data"
+     * (i.e. anything else).
+     */
+    public final boolean binary;
+
+    @CalledByNative("Buffer")
+    public Buffer(ByteBuffer data, boolean binary) {
+      this.data = data;
+      this.binary = binary;
+    }
+  }
+
+  /** Java version of C++ DataChannelObserver. */
+  public interface Observer {
+    /** The data channel's bufferedAmount has changed. */
+    @CalledByNative("Observer") public void onBufferedAmountChange(long previousAmount);
+    /** The data channel state has changed. */
+    @CalledByNative("Observer") public void onStateChange();
+    /**
+     * A data buffer was successfully received.  NOTE: |buffer.data| will be
+     * freed once this function returns so callers who want to use the data
+     * asynchronously must make sure to copy it first.
+     */
+    @CalledByNative("Observer") public void onMessage(Buffer buffer);
+  }
+
+  /** Keep in sync with DataChannelInterface::DataState. */
+  public enum State {
+    CONNECTING,
+    OPEN,
+    CLOSING,
+    CLOSED;
+
+    @CalledByNative("State")
+    static State fromNativeIndex(int nativeIndex) {
+      return values()[nativeIndex];
+    }
+  }
+
+  private long nativeDataChannel;
+  private long nativeObserver;
+
+  @CalledByNative
+  public DataChannel(long nativeDataChannel) {
+    this.nativeDataChannel = nativeDataChannel;
+  }
+
+  /** Register |observer|, replacing any previously-registered observer. */
+  public void registerObserver(Observer observer) {
+    checkDataChannelExists();
+    if (nativeObserver != 0) {
+      nativeUnregisterObserver(nativeObserver);
+    }
+    nativeObserver = nativeRegisterObserver(observer);
+  }
+
+  /** Unregister the (only) observer. */
+  public void unregisterObserver() {
+    checkDataChannelExists();
+    nativeUnregisterObserver(nativeObserver);
+  }
+
+  public String label() {
+    checkDataChannelExists();
+    return nativeLabel();
+  }
+
+  public int id() {
+    checkDataChannelExists();
+    return nativeId();
+  }
+
+  public State state() {
+    checkDataChannelExists();
+    return nativeState();
+  }
+
+  /**
+   * Return the number of bytes of application data (UTF-8 text and binary data)
+   * that have been queued using SendBuffer but have not yet been transmitted
+   * to the network.
+   */
+  public long bufferedAmount() {
+    checkDataChannelExists();
+    return nativeBufferedAmount();
+  }
+
+  /** Close the channel. */
+  public void close() {
+    checkDataChannelExists();
+    nativeClose();
+  }
+
+  /** Send |data| to the remote peer; return success. */
+  public boolean send(Buffer buffer) {
+    checkDataChannelExists();
+    // TODO(fischman): this could be cleverer about avoiding copies if the
+    // ByteBuffer is direct and/or is backed by an array.
+    byte[] data = new byte[buffer.data.remaining()];
+    buffer.data.get(data);
+    return nativeSend(data, buffer.binary);
+  }
+
+  /** Dispose of native resources attached to this channel. */
+  public void dispose() {
+    checkDataChannelExists();
+    JniCommon.nativeReleaseRef(nativeDataChannel);
+    nativeDataChannel = 0;
+  }
+
+  @CalledByNative
+  long getNativeDataChannel() {
+    return nativeDataChannel;
+  }
+
+  private void checkDataChannelExists() {
+    if (nativeDataChannel == 0) {
+      throw new IllegalStateException("DataChannel has been disposed.");
+    }
+  }
+
+  private native long nativeRegisterObserver(Observer observer);
+  private native void nativeUnregisterObserver(long observer);
+  private native String nativeLabel();
+  private native int nativeId();
+  private native State nativeState();
+  private native long nativeBufferedAmount();
+  private native void nativeClose();
+  private native boolean nativeSend(byte[] data, boolean binary);
+}

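Usage sketch for the DataChannel wrapper above (reviewer note, not part of the diff; assumes a PeerConnection named pc created elsewhere, plus java.nio.ByteBuffer):

    // Open an in-band negotiated channel and echo each message back to the sender.
    DataChannel.Init init = new DataChannel.Init();
    init.ordered = true;
    init.protocol = "demo";
    final DataChannel channel = pc.createDataChannel("demo", init);
    channel.registerObserver(new DataChannel.Observer() {
      @Override public void onBufferedAmountChange(long previousAmount) {}
      @Override public void onStateChange() {}
      @Override public void onMessage(DataChannel.Buffer buffer) {
        // |buffer.data| is freed when this callback returns, so copy it first.
        ByteBuffer copy = ByteBuffer.allocate(buffer.data.remaining());
        copy.put(buffer.data).flip();
        channel.send(new DataChannel.Buffer(copy, buffer.binary));
      }
    });
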
+ 68 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/DefaultVideoDecoderFactory.java

@@ -0,0 +1,68 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.Arrays;
+import java.util.LinkedHashSet;
+
+/**
+ * Helper class that combines HW and SW decoders.
+ */
+public class DefaultVideoDecoderFactory implements VideoDecoderFactory {
+  private final VideoDecoderFactory hardwareVideoDecoderFactory;
+  private final VideoDecoderFactory softwareVideoDecoderFactory = new SoftwareVideoDecoderFactory();
+  private final VideoDecoderFactory platformSoftwareVideoDecoderFactory;
+
+  /**
+   * Create decoder factory using default hardware decoder factory.
+   */
+  public DefaultVideoDecoderFactory(EglBase.Context eglContext) {
+    this.hardwareVideoDecoderFactory = new HardwareVideoDecoderFactory(eglContext);
+    this.platformSoftwareVideoDecoderFactory = new PlatformSoftwareVideoDecoderFactory(eglContext);
+  }
+
+  /**
+   * Create decoder factory using explicit hardware decoder factory.
+   */
+  DefaultVideoDecoderFactory(VideoDecoderFactory hardwareVideoDecoderFactory) {
+    this.hardwareVideoDecoderFactory = hardwareVideoDecoderFactory;
+    this.platformSoftwareVideoDecoderFactory = null;
+  }
+
+  @Override
+  public VideoDecoder createDecoder(VideoCodecInfo codecType) {
+    VideoDecoder softwareDecoder = softwareVideoDecoderFactory.createDecoder(codecType);
+    final VideoDecoder hardwareDecoder = hardwareVideoDecoderFactory.createDecoder(codecType);
+    if (softwareDecoder == null && platformSoftwareVideoDecoderFactory != null) {
+      softwareDecoder = platformSoftwareVideoDecoderFactory.createDecoder(codecType);
+    }
+    if (hardwareDecoder != null && softwareDecoder != null) {
+      // Both hardware and software supported, wrap it in a software fallback
+      return new VideoDecoderFallback(
+          /* fallback= */ softwareDecoder, /* primary= */ hardwareDecoder);
+    }
+    return hardwareDecoder != null ? hardwareDecoder : softwareDecoder;
+  }
+
+  @Override
+  public VideoCodecInfo[] getSupportedCodecs() {
+    LinkedHashSet<VideoCodecInfo> supportedCodecInfos = new LinkedHashSet<VideoCodecInfo>();
+
+    supportedCodecInfos.addAll(Arrays.asList(softwareVideoDecoderFactory.getSupportedCodecs()));
+    supportedCodecInfos.addAll(Arrays.asList(hardwareVideoDecoderFactory.getSupportedCodecs()));
+    if (platformSoftwareVideoDecoderFactory != null) {
+      supportedCodecInfos.addAll(
+          Arrays.asList(platformSoftwareVideoDecoderFactory.getSupportedCodecs()));
+    }
+
+    return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]);
+  }
+}

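A quick probe of the combined factory above (reviewer sketch; eglBase is an EglBase created elsewhere). createDecoder() prefers hardware and wraps it with the software decoder as fallback when both exist:

    DefaultVideoDecoderFactory decoderFactory =
        new DefaultVideoDecoderFactory(eglBase.getEglBaseContext());
    for (VideoCodecInfo codec : decoderFactory.getSupportedCodecs()) {
      Logging.d("CodecProbe", "Supported decoder: " + codec.name);
    }
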
+ 60 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/DefaultVideoEncoderFactory.java

@@ -0,0 +1,60 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.Arrays;
+import java.util.LinkedHashSet;
+
+/**
+ * Helper class that combines HW and SW encoders.
+ */
+public class DefaultVideoEncoderFactory implements VideoEncoderFactory {
+  private final VideoEncoderFactory hardwareVideoEncoderFactory;
+  private final VideoEncoderFactory softwareVideoEncoderFactory = new SoftwareVideoEncoderFactory();
+
+  /**
+   * Create encoder factory using default hardware encoder factory.
+   */
+  public DefaultVideoEncoderFactory(
+      EglBase.Context eglContext, boolean enableIntelVp8Encoder, boolean enableH264HighProfile) {
+    this.hardwareVideoEncoderFactory =
+        new HardwareVideoEncoderFactory(eglContext, enableIntelVp8Encoder, enableH264HighProfile);
+  }
+
+  /**
+   * Create encoder factory using explicit hardware encoder factory.
+   */
+  DefaultVideoEncoderFactory(VideoEncoderFactory hardwareVideoEncoderFactory) {
+    this.hardwareVideoEncoderFactory = hardwareVideoEncoderFactory;
+  }
+
+  @Override
+  public VideoEncoder createEncoder(VideoCodecInfo info) {
+    final VideoEncoder softwareEncoder = softwareVideoEncoderFactory.createEncoder(info);
+    final VideoEncoder hardwareEncoder = hardwareVideoEncoderFactory.createEncoder(info);
+    if (hardwareEncoder != null && softwareEncoder != null) {
+      // Both hardware and software supported, wrap it in a software fallback
+      return new VideoEncoderFallback(
+          /* fallback= */ softwareEncoder, /* primary= */ hardwareEncoder);
+    }
+    return hardwareEncoder != null ? hardwareEncoder : softwareEncoder;
+  }
+
+  @Override
+  public VideoCodecInfo[] getSupportedCodecs() {
+    LinkedHashSet<VideoCodecInfo> supportedCodecInfos = new LinkedHashSet<>();
+
+    supportedCodecInfos.addAll(Arrays.asList(softwareVideoEncoderFactory.getSupportedCodecs()));
+    supportedCodecInfos.addAll(Arrays.asList(hardwareVideoEncoderFactory.getSupportedCodecs()));
+
+    return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]);
+  }
+}

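Sketch wiring both default factories into a PeerConnectionFactory (reviewer note; assumes PeerConnectionFactory.initialize() has already been called and that the upstream builder API is available):

    EglBase eglBase = EglBase.create();
    VideoEncoderFactory encoderFactory = new DefaultVideoEncoderFactory(
        eglBase.getEglBaseContext(),
        /* enableIntelVp8Encoder= */ true, /* enableH264HighProfile= */ true);
    VideoDecoderFactory decoderFactory =
        new DefaultVideoDecoderFactory(eglBase.getEglBaseContext());
    PeerConnectionFactory factory = PeerConnectionFactory.builder()
        .setVideoEncoderFactory(encoderFactory)
        .setVideoDecoderFactory(decoderFactory)
        .createPeerConnectionFactory();
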
+ 96 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/DtmfSender.java

@@ -0,0 +1,96 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ DtmfSenderInterface. */
+public class DtmfSender {
+  private long nativeDtmfSender;
+
+  public DtmfSender(long nativeDtmfSender) {
+    this.nativeDtmfSender = nativeDtmfSender;
+  }
+
+  /**
+   * @return true if this DtmfSender is capable of sending DTMF. Otherwise false.
+   */
+  public boolean canInsertDtmf() {
+    checkDtmfSenderExists();
+    return nativeCanInsertDtmf(nativeDtmfSender);
+  }
+
+  /**
+   * Queues a task that sends the provided DTMF tones.
+   * <p>
+   * If insertDtmf is called on the same object while an existing task for this
+   * object to generate DTMF is still running, the previous task is canceled.
+   *
+   * @param tones        This parameter is treated as a series of characters. The characters 0
+   *                     through 9, A through D, #, and * generate the associated DTMF tones. The
+   *                     characters a to d are equivalent to A to D. The character ',' indicates a
+   *                     delay of 2 seconds before processing the next character in the tones
+   *                     parameter. Unrecognized characters are ignored.
+   * @param duration     Indicates the duration in ms to use for each character passed in the tones
+   *                     parameter. The duration cannot be more than 6000 or less than 70.
+   * @param interToneGap Indicates the gap between tones in ms. Must be at least 50 ms but should be
+   *                     as short as possible.
+   * @return             true on success and false on failure.
+   */
+  public boolean insertDtmf(String tones, int duration, int interToneGap) {
+    checkDtmfSenderExists();
+    return nativeInsertDtmf(nativeDtmfSender, tones, duration, interToneGap);
+  }
+
+  /**
+   * @return The tones remaining to be played out
+   */
+  public String tones() {
+    checkDtmfSenderExists();
+    return nativeTones(nativeDtmfSender);
+  }
+
+  /**
+   * @return The current tone duration value in ms. This value will be the value last set via the
+   *         insertDtmf() method, or the default value of 100 ms if insertDtmf() was never called.
+   */
+  public int duration() {
+    checkDtmfSenderExists();
+    return nativeDuration(nativeDtmfSender);
+  }
+
+  /**
+   * @return The current value of the between-tone gap in ms. This value will be the value last set
+   *         via the insertDtmf() method, or the default value of 50 ms if insertDtmf() was never
+   *         called.
+   */
+  public int interToneGap() {
+    checkDtmfSenderExists();
+    return nativeInterToneGap(nativeDtmfSender);
+  }
+
+  public void dispose() {
+    checkDtmfSenderExists();
+    JniCommon.nativeReleaseRef(nativeDtmfSender);
+    nativeDtmfSender = 0;
+  }
+
+  private void checkDtmfSenderExists() {
+    if (nativeDtmfSender == 0) {
+      throw new IllegalStateException("DtmfSender has been disposed.");
+    }
+  }
+
+  private static native boolean nativeCanInsertDtmf(long dtmfSender);
+  private static native boolean nativeInsertDtmf(
+      long dtmfSender, String tones, int duration, int interToneGap);
+  private static native String nativeTones(long dtmfSender);
+  private static native int nativeDuration(long dtmfSender);
+  private static native int nativeInterToneGap(long dtmfSender);
+}

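Usage sketch for DtmfSender (reviewer note; assumes audioSender is an RtpSender for an audio track, obtained elsewhere, whose dtmf() accessor returns this wrapper):

    DtmfSender dtmf = audioSender.dtmf();
    if (dtmf != null && dtmf.canInsertDtmf()) {
      // 200 ms per tone (allowed range 70..6000), 70 ms gap (minimum 50).
      dtmf.insertDtmf("1234#", 200, 70);
    }
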
+ 202 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/EglBase.java

@@ -0,0 +1,202 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.SurfaceTexture;
+
+import android.view.Surface;
+import javax.microedition.khronos.egl.EGL10;
+
+/**
+ * Holds EGL state and utility methods for handling an EGL 1.0 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+public interface EglBase {
+  // EGL wrapper for an actual EGLContext.
+  public interface Context {
+    public final static long NO_CONTEXT = 0;
+
+    /**
+     * Returns an EGL context that can be used by native code. Returns NO_CONTEXT if the method is
+     * unsupported.
+     *
+     * @note This is currently only supported for EGL 1.4 and not for EGL 1.0.
+     */
+    long getNativeEglContext();
+  }
+
+  // According to the documentation, EGL can be used from multiple threads at the same time if each
+  // thread has its own EGLContext, but in practice it deadlocks on some devices when doing this.
+  // Therefore, synchronize on this global lock before calling dangerous EGL functions that might
+  // deadlock. See https://bugs.chromium.org/p/webrtc/issues/detail?id=5702 for more info.
+  public static final Object lock = new Object();
+
+  // These constants are taken from EGL14.EGL_OPENGL_ES2_BIT and EGL14.EGL_CONTEXT_CLIENT_VERSION.
+  // https://android.googlesource.com/platform/frameworks/base/+/master/opengl/java/android/opengl/EGL14.java
+  // This is similar to how GlSurfaceView does:
+  // http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/opengl/GLSurfaceView.java#760
+  public static final int EGL_OPENGL_ES2_BIT = 4;
+  // Android-specific extension.
+  public static final int EGL_RECORDABLE_ANDROID = 0x3142;
+
+  // clang-format off
+  public static final int[] CONFIG_PLAIN = {
+    EGL10.EGL_RED_SIZE, 8,
+    EGL10.EGL_GREEN_SIZE, 8,
+    EGL10.EGL_BLUE_SIZE, 8,
+    EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+    EGL10.EGL_NONE
+  };
+  public static final int[] CONFIG_RGBA = {
+    EGL10.EGL_RED_SIZE, 8,
+    EGL10.EGL_GREEN_SIZE, 8,
+    EGL10.EGL_BLUE_SIZE, 8,
+    EGL10.EGL_ALPHA_SIZE, 8,
+    EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+    EGL10.EGL_NONE
+  };
+  public static final int[] CONFIG_PIXEL_BUFFER = {
+    EGL10.EGL_RED_SIZE, 8,
+    EGL10.EGL_GREEN_SIZE, 8,
+    EGL10.EGL_BLUE_SIZE, 8,
+    EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+    EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
+    EGL10.EGL_NONE
+  };
+  public static final int[] CONFIG_PIXEL_RGBA_BUFFER = {
+    EGL10.EGL_RED_SIZE, 8,
+    EGL10.EGL_GREEN_SIZE, 8,
+    EGL10.EGL_BLUE_SIZE, 8,
+    EGL10.EGL_ALPHA_SIZE, 8,
+    EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+    EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
+    EGL10.EGL_NONE
+  };
+  public static final int[] CONFIG_RECORDABLE = {
+    EGL10.EGL_RED_SIZE, 8,
+    EGL10.EGL_GREEN_SIZE, 8,
+    EGL10.EGL_BLUE_SIZE, 8,
+    EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+    EGL_RECORDABLE_ANDROID, 1,
+    EGL10.EGL_NONE
+  };
+  // clang-format on
+
+  /**
+   * Create a new context with the specified config attributes, sharing data with |sharedContext|.
+   * If |sharedContext| is null, a root context is created. This function will try to create an EGL
+   * 1.4 context if possible, and an EGL 1.0 context otherwise.
+   */
+  public static EglBase create(Context sharedContext, int[] configAttributes) {
+    if (sharedContext == null) {
+      return EglBase14Impl.isEGL14Supported() ? createEgl14(configAttributes)
+                                              : createEgl10(configAttributes);
+    } else if (sharedContext instanceof EglBase14.Context) {
+      return createEgl14((EglBase14.Context) sharedContext, configAttributes);
+    } else if (sharedContext instanceof EglBase10.Context) {
+      return createEgl10((EglBase10.Context) sharedContext, configAttributes);
+    }
+    throw new IllegalArgumentException("Unrecognized Context");
+  }
+
+  /**
+   * Helper function for creating a plain root context. This function will try to create an EGL 1.4
+   * context if possible, and an EGL 1.0 context otherwise.
+   */
+  public static EglBase create() {
+    return create(null /* sharedContext */, CONFIG_PLAIN);
+  }
+
+  /**
+   * Helper function for creating a plain context, sharing data with |sharedContext|. This function
+   * will try to create an EGL 1.4 context if possible, and an EGL 1.0 context otherwise.
+   */
+  public static EglBase create(Context sharedContext) {
+    return create(sharedContext, CONFIG_PLAIN);
+  }
+
+  /** Explicitly create a root EGL 1.0 context with the specified config attributes. */
+  public static EglBase10 createEgl10(int[] configAttributes) {
+    return new EglBase10Impl(/* sharedContext= */ null, configAttributes);
+  }
+
+  /**
+   * Explicitly create a root EGL 1.0 context with the specified config attributes and shared
+   * context.
+   */
+  public static EglBase10 createEgl10(EglBase10.Context sharedContext, int[] configAttributes) {
+    return new EglBase10Impl(
+        sharedContext == null ? null : sharedContext.getRawContext(), configAttributes);
+  }
+
+  /**
+   * Explicitly create a root EGL 1.0 context with the specified config attributes
+   * and shared context.
+   */
+  public static EglBase10 createEgl10(
+      javax.microedition.khronos.egl.EGLContext sharedContext, int[] configAttributes) {
+    return new EglBase10Impl(sharedContext, configAttributes);
+  }
+
+  /** Explicitly create a root EGL 1.4 context with the specified config attributes. */
+  public static EglBase14 createEgl14(int[] configAttributes) {
+    return new EglBase14Impl(/* sharedContext= */ null, configAttributes);
+  }
+
+  /**
+   * Explicitly create a root EGL 1.4 context with the specified config attributes and shared
+   * context.
+   */
+  public static EglBase14 createEgl14(EglBase14.Context sharedContext, int[] configAttributes) {
+    return new EglBase14Impl(
+        sharedContext == null ? null : sharedContext.getRawContext(), configAttributes);
+  }
+
+  /**
+   * Explicitly create a root EGL 1.4 context with the specified config attributes
+   * and shared context.
+   */
+  public static EglBase14 createEgl14(
+      android.opengl.EGLContext sharedContext, int[] configAttributes) {
+    return new EglBase14Impl(sharedContext, configAttributes);
+  }
+
+  void createSurface(Surface surface);
+
+  // Create EGLSurface from the Android SurfaceTexture.
+  void createSurface(SurfaceTexture surfaceTexture);
+
+  // Create dummy 1x1 pixel buffer surface so the context can be made current.
+  void createDummyPbufferSurface();
+
+  void createPbufferSurface(int width, int height);
+
+  Context getEglBaseContext();
+
+  boolean hasSurface();
+
+  int surfaceWidth();
+
+  int surfaceHeight();
+
+  void releaseSurface();
+
+  void release();
+
+  void makeCurrent();
+
+  // Detach the current EGL context, so that it can be made current on another thread.
+  void detachCurrent();
+
+  void swapBuffers();
+
+  void swapBuffers(long presentationTimeStampNs);
+}

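Minimal off-screen bring-up using only the interface methods declared above (reviewer sketch):

    EglBase eglBase = EglBase.create(); // Root context; EGL 1.4 when supported.
    eglBase.createDummyPbufferSurface(); // 1x1 pbuffer so makeCurrent() can succeed.
    eglBase.makeCurrent();
    // ... issue GL calls on this thread ...
    eglBase.releaseSurface();
    eglBase.release();
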
+ 20 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/EglBase10.java

@@ -0,0 +1,20 @@
+/*
+ *  Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import javax.microedition.khronos.egl.EGLContext;
+
+/** EGL 1.0 implementation of EglBase. */
+public interface EglBase10 extends EglBase {
+  interface Context extends EglBase.Context {
+    EGLContext getRawContext();
+  }
+}

+ 20 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/EglBase14.java

@@ -0,0 +1,20 @@
+/*
+ *  Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.opengl.EGLContext;
+
+/** EGL 1.4 implementation of EglBase. */
+public interface EglBase14 extends EglBase {
+  interface Context extends EglBase.Context {
+    EGLContext getRawContext();
+  }
+}

+ 753 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/EglRenderer.java

@@ -0,0 +1,753 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.Bitmap;
+import android.graphics.Matrix;
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES20;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.Looper;
+import android.os.Message;
+import android.view.Surface;
+
+import java.nio.ByteBuffer;
+import java.text.DecimalFormat;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Implements VideoSink by displaying the video stream on an EGL Surface. This class is intended to
+ * be used as a helper class for rendering on SurfaceViews and TextureViews.
+ */
+public class EglRenderer implements VideoSink {
+  private static final String TAG = "EglRenderer";
+  private static final long LOG_INTERVAL_SEC = 4;
+
+  public interface FrameListener { void onFrame(Bitmap frame); }
+
+  private static class FrameListenerAndParams {
+    public final FrameListener listener;
+    public final float scale;
+    public final RendererCommon.GlDrawer drawer;
+    public final boolean applyFpsReduction;
+
+    public FrameListenerAndParams(FrameListener listener, float scale,
+        RendererCommon.GlDrawer drawer, boolean applyFpsReduction) {
+      this.listener = listener;
+      this.scale = scale;
+      this.drawer = drawer;
+      this.applyFpsReduction = applyFpsReduction;
+    }
+  }
+
+  private class EglSurfaceCreation implements Runnable {
+    private Object surface;
+
+    // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+    @SuppressWarnings("NoSynchronizedMethodCheck")
+    public synchronized void setSurface(Object surface) {
+      this.surface = surface;
+    }
+
+    @Override
+    // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+    @SuppressWarnings("NoSynchronizedMethodCheck")
+    public synchronized void run() {
+      if (surface != null && eglBase != null && !eglBase.hasSurface()) {
+        if (surface instanceof Surface) {
+          eglBase.createSurface((Surface) surface);
+        } else if (surface instanceof SurfaceTexture) {
+          eglBase.createSurface((SurfaceTexture) surface);
+        } else {
+          throw new IllegalStateException("Invalid surface: " + surface);
+        }
+        eglBase.makeCurrent();
+        // Necessary for YUV frames with odd width.
+        GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
+      }
+    }
+  }
+
+  /**
+   * Handler that triggers a callback when an uncaught exception happens when handling a message.
+   */
+  private static class HandlerWithExceptionCallback extends Handler {
+    private final Runnable exceptionCallback;
+
+    public HandlerWithExceptionCallback(Looper looper, Runnable exceptionCallback) {
+      super(looper);
+      this.exceptionCallback = exceptionCallback;
+    }
+
+    @Override
+    public void dispatchMessage(Message msg) {
+      try {
+        super.dispatchMessage(msg);
+      } catch (Exception e) {
+        Logging.e(TAG, "Exception on EglRenderer thread", e);
+        exceptionCallback.run();
+        throw e;
+      }
+    }
+  }
+
+  protected final String name;
+
+  // |renderThreadHandler| is a handler for communicating with |renderThread|, and is synchronized
+  // on |handlerLock|.
+  private final Object handlerLock = new Object();
+  private Handler renderThreadHandler;
+
+  private final ArrayList<FrameListenerAndParams> frameListeners = new ArrayList<>();
+
+  // Variables for fps reduction.
+  private final Object fpsReductionLock = new Object();
+  // Time for when next frame should be rendered.
+  private long nextFrameTimeNs;
+  // Minimum duration between frames when fps reduction is active, or -1 if video is completely
+  // paused.
+  private long minRenderPeriodNs;
+
+  // EGL and GL resources for drawing YUV/OES textures. After initialization, these are only accessed
+  // from the render thread.
+  private EglBase eglBase;
+  private final VideoFrameDrawer frameDrawer;
+  private RendererCommon.GlDrawer drawer;
+  private boolean usePresentationTimeStamp;
+  private final Matrix drawMatrix = new Matrix();
+
+  // Pending frame to render. Serves as a queue with size 1. Synchronized on |frameLock|.
+  private final Object frameLock = new Object();
+  private VideoFrame pendingFrame;
+
+  // These variables are synchronized on |layoutLock|.
+  private final Object layoutLock = new Object();
+  private float layoutAspectRatio;
+  // If true, mirrors the video stream horizontally.
+  private boolean mirrorHorizontally;
+  // If true, mirrors the video stream vertically.
+  private boolean mirrorVertically;
+
+  // These variables are synchronized on |statisticsLock|.
+  private final Object statisticsLock = new Object();
+  // Total number of video frames received in renderFrame() call.
+  private int framesReceived;
+  // Number of video frames dropped by renderFrame() because previous frame has not been rendered
+  // yet.
+  private int framesDropped;
+  // Number of rendered video frames.
+  private int framesRendered;
+  // Start time for counting these statistics, or 0 if we haven't started measuring yet.
+  private long statisticsStartTimeNs;
+  // Time in ns spent in renderFrameOnRenderThread() function.
+  private long renderTimeNs;
+  // Time in ns spent by the render thread in the swapBuffers() function.
+  private long renderSwapBufferTimeNs;
+
+  // Used for bitmap capturing.
+  private final GlTextureFrameBuffer bitmapTextureFramebuffer =
+      new GlTextureFrameBuffer(GLES20.GL_RGBA);
+
+  private final Runnable logStatisticsRunnable = new Runnable() {
+    @Override
+    public void run() {
+      logStatistics();
+      synchronized (handlerLock) {
+        if (renderThreadHandler != null) {
+          renderThreadHandler.removeCallbacks(logStatisticsRunnable);
+          renderThreadHandler.postDelayed(
+              logStatisticsRunnable, TimeUnit.SECONDS.toMillis(LOG_INTERVAL_SEC));
+        }
+      }
+    }
+  };
+
+  private final EglSurfaceCreation eglSurfaceCreationRunnable = new EglSurfaceCreation();
+
+  /**
+   * Standard constructor. The name will be used for the render thread name and included when
+   * logging. In order to render something, you must first call init() and createEglSurface.
+   */
+  public EglRenderer(String name) {
+    this(name, new VideoFrameDrawer());
+  }
+
+  public EglRenderer(String name, VideoFrameDrawer videoFrameDrawer) {
+    this.name = name;
+    this.frameDrawer = videoFrameDrawer;
+  }
+
+  /**
+   * Initialize this class, sharing resources with |sharedContext|. The custom |drawer| will be used
+   * for drawing frames on the EGLSurface. This class is responsible for calling release() on
+   * |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
+   * init()/release() cycle. If usePresentationTimeStamp is true, eglPresentationTimeANDROID will be
+   * set with the frame timestamps, which specifies desired presentation time and might be useful
+   * for e.g. syncing audio and video.
+   */
+  public void init(final EglBase.Context sharedContext, final int[] configAttributes,
+      RendererCommon.GlDrawer drawer, boolean usePresentationTimeStamp) {
+    synchronized (handlerLock) {
+      if (renderThreadHandler != null) {
+        throw new IllegalStateException(name + "Already initialized");
+      }
+      logD("Initializing EglRenderer");
+      this.drawer = drawer;
+      this.usePresentationTimeStamp = usePresentationTimeStamp;
+
+      final HandlerThread renderThread = new HandlerThread(name + "EglRenderer");
+      renderThread.start();
+      renderThreadHandler =
+          new HandlerWithExceptionCallback(renderThread.getLooper(), new Runnable() {
+            @Override
+            public void run() {
+              synchronized (handlerLock) {
+                renderThreadHandler = null;
+              }
+            }
+          });
+      // Create EGL context on the newly created render thread. It should be possible to create the
+      // context on this thread and make it current on the render thread, but this causes failure on
+      // some Marvell-based JB devices. https://bugs.chromium.org/p/webrtc/issues/detail?id=6350.
+      ThreadUtils.invokeAtFrontUninterruptibly(renderThreadHandler, () -> {
+        // If sharedContext is null, then texture frames are disabled. This is typically for old
+        // devices that might not be fully spec compliant, so force EGL 1.0 since EGL 1.4 has
+        // caused trouble on some weird devices.
+        if (sharedContext == null) {
+          logD("EglBase10.create context");
+          eglBase = EglBase.createEgl10(configAttributes);
+        } else {
+          logD("EglBase.create shared context");
+          eglBase = EglBase.create(sharedContext, configAttributes);
+        }
+      });
+      renderThreadHandler.post(eglSurfaceCreationRunnable);
+      final long currentTimeNs = System.nanoTime();
+      resetStatistics(currentTimeNs);
+      renderThreadHandler.postDelayed(
+          logStatisticsRunnable, TimeUnit.SECONDS.toMillis(LOG_INTERVAL_SEC));
+    }
+  }
+
+  /**
+   * Same as above with usePresentationTimeStamp set to false.
+   *
+   * @see #init(EglBase.Context, int[], RendererCommon.GlDrawer, boolean)
+   */
+  public void init(final EglBase.Context sharedContext, final int[] configAttributes,
+      RendererCommon.GlDrawer drawer) {
+    init(sharedContext, configAttributes, drawer, /* usePresentationTimeStamp= */ false);
+  }
+
+  public void createEglSurface(Surface surface) {
+    createEglSurfaceInternal(surface);
+  }
+
+  public void createEglSurface(SurfaceTexture surfaceTexture) {
+    createEglSurfaceInternal(surfaceTexture);
+  }
+
+  private void createEglSurfaceInternal(Object surface) {
+    eglSurfaceCreationRunnable.setSurface(surface);
+    postToRenderThread(eglSurfaceCreationRunnable);
+  }
+
+  /**
+   * Block until any pending frame is returned and all GL resources released, even if an interrupt
+   * occurs. If an interrupt occurs during release(), the interrupt flag will be set. This function
+   * should be called before the Activity is destroyed and the EGLContext is still valid. If you
+   * don't call this function, the GL resources might leak.
+   */
+  public void release() {
+    logD("Releasing.");
+    final CountDownLatch eglCleanupBarrier = new CountDownLatch(1);
+    synchronized (handlerLock) {
+      if (renderThreadHandler == null) {
+        logD("Already released");
+        return;
+      }
+      renderThreadHandler.removeCallbacks(logStatisticsRunnable);
+      // Release EGL and GL resources on render thread.
+      renderThreadHandler.postAtFrontOfQueue(() -> {
+        // Detach current shader program.
+        GLES20.glUseProgram(/* program= */ 0);
+        if (drawer != null) {
+          drawer.release();
+          drawer = null;
+        }
+        frameDrawer.release();
+        bitmapTextureFramebuffer.release();
+        if (eglBase != null) {
+          logD("eglBase detach and release.");
+          eglBase.detachCurrent();
+          eglBase.release();
+          eglBase = null;
+        }
+        frameListeners.clear();
+        eglCleanupBarrier.countDown();
+      });
+      final Looper renderLooper = renderThreadHandler.getLooper();
+      // TODO(magjed): Replace this post() with renderLooper.quitSafely() when API support >= 18.
+      renderThreadHandler.post(() -> {
+        logD("Quitting render thread.");
+        renderLooper.quit();
+      });
+      // Don't accept any more frames or messages to the render thread.
+      renderThreadHandler = null;
+    }
+    // Make sure the EGL/GL cleanup posted above is executed.
+    ThreadUtils.awaitUninterruptibly(eglCleanupBarrier);
+    synchronized (frameLock) {
+      if (pendingFrame != null) {
+        pendingFrame.release();
+        pendingFrame = null;
+      }
+    }
+    logD("Releasing done.");
+  }
+
+  /**
+   * Reset the statistics logged in logStatistics().
+   */
+  private void resetStatistics(long currentTimeNs) {
+    synchronized (statisticsLock) {
+      statisticsStartTimeNs = currentTimeNs;
+      framesReceived = 0;
+      framesDropped = 0;
+      framesRendered = 0;
+      renderTimeNs = 0;
+      renderSwapBufferTimeNs = 0;
+    }
+  }
+
+  public void printStackTrace() {
+    synchronized (handlerLock) {
+      final Thread renderThread =
+          (renderThreadHandler == null) ? null : renderThreadHandler.getLooper().getThread();
+      if (renderThread != null) {
+        final StackTraceElement[] renderStackTrace = renderThread.getStackTrace();
+        if (renderStackTrace.length > 0) {
+          logW("EglRenderer stack trace:");
+          for (StackTraceElement traceElem : renderStackTrace) {
+            logW(traceElem.toString());
+          }
+        }
+      }
+    }
+  }
+
+  /**
+   * Set if the video stream should be mirrored horizontally or not.
+   */
+  public void setMirror(final boolean mirror) {
+    logD("setMirrorHorizontally: " + mirror);
+    synchronized (layoutLock) {
+      this.mirrorHorizontally = mirror;
+    }
+  }
+
+  /**
+   * Set if the video stream should be mirrored vertically or not.
+   */
+  public void setMirrorVertically(final boolean mirrorVertically) {
+    logD("setMirrorVertically: " + mirrorVertically);
+    synchronized (layoutLock) {
+      this.mirrorVertically = mirrorVertically;
+    }
+  }
+
+  /**
+   * Set layout aspect ratio. This is used to crop frames when rendering to avoid stretched video.
+   * Set this to 0 to disable cropping.
+   */
+  public void setLayoutAspectRatio(float layoutAspectRatio) {
+    logD("setLayoutAspectRatio: " + layoutAspectRatio);
+    synchronized (layoutLock) {
+      this.layoutAspectRatio = layoutAspectRatio;
+    }
+  }
+
+  /**
+   * Limit render framerate.
+   *
+   * @param fps Limit render framerate to this value, or use Float.POSITIVE_INFINITY to disable fps
+   *            reduction.
+   */
+  public void setFpsReduction(float fps) {
+    logD("setFpsReduction: " + fps);
+    synchronized (fpsReductionLock) {
+      final long previousRenderPeriodNs = minRenderPeriodNs;
+      if (fps <= 0) {
+        minRenderPeriodNs = Long.MAX_VALUE;
+      } else {
+        minRenderPeriodNs = (long) (TimeUnit.SECONDS.toNanos(1) / fps);
+      }
+      if (minRenderPeriodNs != previousRenderPeriodNs) {
+        // Fps reduction changed - reset frame time.
+        nextFrameTimeNs = System.nanoTime();
+      }
+    }
+  }
+
+  public void disableFpsReduction() {
+    setFpsReduction(Float.POSITIVE_INFINITY /* fps */);
+  }
+
+  public void pauseVideo() {
+    setFpsReduction(0 /* fps */);
+  }
+
+  /**
+   * Register a callback to be invoked when a new video frame has been received. This version uses
+   * the drawer of the EglRenderer that was passed in init.
+   *
+   * @param listener The callback to be invoked. The callback will be invoked on the render thread.
+   *                 It should be lightweight and must not call removeFrameListener.
+   * @param scale    The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
+   *                 required.
+   */
+  public void addFrameListener(final FrameListener listener, final float scale) {
+    addFrameListener(listener, scale, null, false /* applyFpsReduction */);
+  }
+
+  /**
+   * Register a callback to be invoked when a new video frame has been received.
+   *
+   * @param listener The callback to be invoked. The callback will be invoked on the render thread.
+   *                 It should be lightweight and must not call removeFrameListener.
+   * @param scale    The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
+   *                 required.
+   * @param drawerParam Custom drawer to use for this frame listener or null to use the default one.
+   */
+  public void addFrameListener(
+      final FrameListener listener, final float scale, final RendererCommon.GlDrawer drawerParam) {
+    addFrameListener(listener, scale, drawerParam, false /* applyFpsReduction */);
+  }
+
+  /**
+   * Register a callback to be invoked when a new video frame has been received.
+   *
+   * @param listener The callback to be invoked. The callback will be invoked on the render thread.
+   *                 It should be lightweight and must not call removeFrameListener.
+   * @param scale    The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
+   *                 required.
+   * @param drawerParam Custom drawer to use for this frame listener or null to use the default one.
+   * @param applyFpsReduction This callback will not be called for frames that have been dropped by
+   *                          FPS reduction.
+   */
+  public void addFrameListener(final FrameListener listener, final float scale,
+      final RendererCommon.GlDrawer drawerParam, final boolean applyFpsReduction) {
+    postToRenderThread(() -> {
+      final RendererCommon.GlDrawer listenerDrawer = drawerParam == null ? drawer : drawerParam;
+      frameListeners.add(
+          new FrameListenerAndParams(listener, scale, listenerDrawer, applyFpsReduction));
+    });
+  }
+
+  /**
+   * Remove any pending callback that was added with addFrameListener. If the callback is not in
+   * the queue, nothing happens. It is ensured that callback won't be called after this method
+   * returns.
+   *
+   * @param listener The callback to remove.
+   */
+  public void removeFrameListener(final FrameListener listener) {
+    final CountDownLatch latch = new CountDownLatch(1);
+    synchronized (handlerLock) {
+      if (renderThreadHandler == null) {
+        return;
+      }
+      if (Thread.currentThread() == renderThreadHandler.getLooper().getThread()) {
+        throw new RuntimeException("removeFrameListener must not be called on the render thread.");
+      }
+      postToRenderThread(() -> {
+        latch.countDown();
+        final Iterator<FrameListenerAndParams> iter = frameListeners.iterator();
+        while (iter.hasNext()) {
+          if (iter.next().listener == listener) {
+            iter.remove();
+          }
+        }
+      });
+    }
+    ThreadUtils.awaitUninterruptibly(latch);
+  }
+
+  // VideoSink interface.
+  @Override
+  public void onFrame(VideoFrame frame) {
+    synchronized (statisticsLock) {
+      ++framesReceived;
+    }
+    final boolean dropOldFrame;
+    synchronized (handlerLock) {
+      if (renderThreadHandler == null) {
+        logD("Dropping frame - Not initialized or already released.");
+        return;
+      }
+      synchronized (frameLock) {
+        dropOldFrame = (pendingFrame != null);
+        if (dropOldFrame) {
+          pendingFrame.release();
+        }
+        pendingFrame = frame;
+        pendingFrame.retain();
+        renderThreadHandler.post(this::renderFrameOnRenderThread);
+      }
+    }
+    if (dropOldFrame) {
+      synchronized (statisticsLock) {
+        ++framesDropped;
+      }
+    }
+  }
+
+  /**
+   * Release EGL surface. This function will block until the EGL surface is released.
+   */
+  public void releaseEglSurface(final Runnable completionCallback) {
+    // Ensure that the render thread is no longer touching the Surface before returning from this
+    // function.
+    eglSurfaceCreationRunnable.setSurface(null /* surface */);
+    synchronized (handlerLock) {
+      if (renderThreadHandler != null) {
+        renderThreadHandler.removeCallbacks(eglSurfaceCreationRunnable);
+        renderThreadHandler.postAtFrontOfQueue(() -> {
+          if (eglBase != null) {
+            eglBase.detachCurrent();
+            eglBase.releaseSurface();
+          }
+          completionCallback.run();
+        });
+        return;
+      }
+    }
+    completionCallback.run();
+  }
+
+  /**
+   * Private helper function to post tasks safely.
+   */
+  private void postToRenderThread(Runnable runnable) {
+    synchronized (handlerLock) {
+      if (renderThreadHandler != null) {
+        renderThreadHandler.post(runnable);
+      }
+    }
+  }
+
+  private void clearSurfaceOnRenderThread(float r, float g, float b, float a) {
+    if (eglBase != null && eglBase.hasSurface()) {
+      logD("clearSurface");
+      GLES20.glClearColor(r, g, b, a);
+      GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+      eglBase.swapBuffers();
+    }
+  }
+
+  /**
+   * Post a task to clear the surface to a transparent uniform color.
+   */
+  public void clearImage() {
+    clearImage(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
+  }
+
+  /**
+   * Post a task to clear the surface to a specific color.
+   */
+  public void clearImage(final float r, final float g, final float b, final float a) {
+    synchronized (handlerLock) {
+      if (renderThreadHandler == null) {
+        return;
+      }
+      renderThreadHandler.postAtFrontOfQueue(() -> clearSurfaceOnRenderThread(r, g, b, a));
+    }
+  }
+
+  /**
+   * Renders and releases |pendingFrame|.
+   */
+  private void renderFrameOnRenderThread() {
+    // Fetch and render |pendingFrame|.
+    final VideoFrame frame;
+    synchronized (frameLock) {
+      if (pendingFrame == null) {
+        return;
+      }
+      frame = pendingFrame;
+      pendingFrame = null;
+    }
+    if (eglBase == null || !eglBase.hasSurface()) {
+      logD("Dropping frame - No surface");
+      frame.release();
+      return;
+    }
+    // Check if fps reduction is active.
+    final boolean shouldRenderFrame;
+    synchronized (fpsReductionLock) {
+      if (minRenderPeriodNs == Long.MAX_VALUE) {
+        // Rendering is paused.
+        shouldRenderFrame = false;
+      } else if (minRenderPeriodNs <= 0) {
+        // FPS reduction is disabled.
+        shouldRenderFrame = true;
+      } else {
+        final long currentTimeNs = System.nanoTime();
+        if (currentTimeNs < nextFrameTimeNs) {
+          logD("Skipping frame rendering - fps reduction is active.");
+          shouldRenderFrame = false;
+        } else {
+          nextFrameTimeNs += minRenderPeriodNs;
+          // The time for the next frame should always be in the future.
+          nextFrameTimeNs = Math.max(nextFrameTimeNs, currentTimeNs);
+          shouldRenderFrame = true;
+        }
+      }
+    }
+
+    final long startTimeNs = System.nanoTime();
+
+    final float frameAspectRatio = frame.getRotatedWidth() / (float) frame.getRotatedHeight();
+    final float drawnAspectRatio;
+    synchronized (layoutLock) {
+      drawnAspectRatio = layoutAspectRatio != 0f ? layoutAspectRatio : frameAspectRatio;
+    }
+
+    final float scaleX;
+    final float scaleY;
+
+    if (frameAspectRatio > drawnAspectRatio) {
+      scaleX = drawnAspectRatio / frameAspectRatio;
+      scaleY = 1f;
+    } else {
+      scaleX = 1f;
+      scaleY = frameAspectRatio / drawnAspectRatio;
+    }
+
+    drawMatrix.reset();
+    drawMatrix.preTranslate(0.5f, 0.5f);
+    drawMatrix.preScale(mirrorHorizontally ? -1f : 1f, mirrorVertically ? -1f : 1f);
+    drawMatrix.preScale(scaleX, scaleY);
+    drawMatrix.preTranslate(-0.5f, -0.5f);
+
+    if (shouldRenderFrame) {
+      GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
+      GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+      frameDrawer.drawFrame(frame, drawer, drawMatrix, 0 /* viewportX */, 0 /* viewportY */,
+          eglBase.surfaceWidth(), eglBase.surfaceHeight());
+
+      final long swapBuffersStartTimeNs = System.nanoTime();
+      if (usePresentationTimeStamp) {
+        eglBase.swapBuffers(frame.getTimestampNs());
+      } else {
+        eglBase.swapBuffers();
+      }
+
+      final long currentTimeNs = System.nanoTime();
+      synchronized (statisticsLock) {
+        ++framesRendered;
+        renderTimeNs += (currentTimeNs - startTimeNs);
+        renderSwapBufferTimeNs += (currentTimeNs - swapBuffersStartTimeNs);
+      }
+    }
+
+    notifyCallbacks(frame, shouldRenderFrame);
+    frame.release();
+  }
+
+  private void notifyCallbacks(VideoFrame frame, boolean wasRendered) {
+    if (frameListeners.isEmpty())
+      return;
+
+    drawMatrix.reset();
+    drawMatrix.preTranslate(0.5f, 0.5f);
+    drawMatrix.preScale(mirrorHorizontally ? -1f : 1f, mirrorVertically ? -1f : 1f);
+    drawMatrix.preScale(1f, -1f); // We want the output to be upside down for Bitmap.
+    drawMatrix.preTranslate(-0.5f, -0.5f);
+
+    Iterator<FrameListenerAndParams> it = frameListeners.iterator();
+    while (it.hasNext()) {
+      FrameListenerAndParams listenerAndParams = it.next();
+      if (!wasRendered && listenerAndParams.applyFpsReduction) {
+        continue;
+      }
+      it.remove();
+
+      final int scaledWidth = (int) (listenerAndParams.scale * frame.getRotatedWidth());
+      final int scaledHeight = (int) (listenerAndParams.scale * frame.getRotatedHeight());
+
+      if (scaledWidth == 0 || scaledHeight == 0) {
+        listenerAndParams.listener.onFrame(null);
+        continue;
+      }
+
+      bitmapTextureFramebuffer.setSize(scaledWidth, scaledHeight);
+
+      GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, bitmapTextureFramebuffer.getFrameBufferId());
+      GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
+          GLES20.GL_TEXTURE_2D, bitmapTextureFramebuffer.getTextureId(), 0);
+
+      GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
+      GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+      frameDrawer.drawFrame(frame, listenerAndParams.drawer, drawMatrix, 0 /* viewportX */,
+          0 /* viewportY */, scaledWidth, scaledHeight);
+
+      final ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(scaledWidth * scaledHeight * 4);
+      GLES20.glViewport(0, 0, scaledWidth, scaledHeight);
+      GLES20.glReadPixels(
+          0, 0, scaledWidth, scaledHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, bitmapBuffer);
+
+      GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+      GlUtil.checkNoGLES2Error("EglRenderer.notifyCallbacks");
+
+      final Bitmap bitmap = Bitmap.createBitmap(scaledWidth, scaledHeight, Bitmap.Config.ARGB_8888);
+      bitmap.copyPixelsFromBuffer(bitmapBuffer);
+      listenerAndParams.listener.onFrame(bitmap);
+    }
+  }
+
+  private String averageTimeAsString(long sumTimeNs, int count) {
+    return (count <= 0) ? "NA" : TimeUnit.NANOSECONDS.toMicros(sumTimeNs / count) + " us";
+  }
+
+  private void logStatistics() {
+    final DecimalFormat fpsFormat = new DecimalFormat("#.0");
+    final long currentTimeNs = System.nanoTime();
+    synchronized (statisticsLock) {
+      final long elapsedTimeNs = currentTimeNs - statisticsStartTimeNs;
+      if (elapsedTimeNs <= 0) {
+        return;
+      }
+      final float renderFps = framesRendered * TimeUnit.SECONDS.toNanos(1) / (float) elapsedTimeNs;
+      logD("Duration: " + TimeUnit.NANOSECONDS.toMillis(elapsedTimeNs) + " ms."
+          + " Frames received: " + framesReceived + "."
+          + " Dropped: " + framesDropped + "."
+          + " Rendered: " + framesRendered + "."
+          + " Render fps: " + fpsFormat.format(renderFps) + "."
+          + " Average render time: " + averageTimeAsString(renderTimeNs, framesRendered) + "."
+          + " Average swapBuffer time: "
+          + averageTimeAsString(renderSwapBufferTimeNs, framesRendered) + ".");
+      resetStatistics(currentTimeNs);
+    }
+  }
+
+  private void logD(String string) {
+    Logging.d(TAG, name + string);
+  }
+
+  private void logW(String string) {
+    Logging.w(TAG, name + string);
+  }
+}

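Sketch of a one-shot Bitmap snapshot with the renderer above (reviewer note; surface, eglBase, and videoTrack come from elsewhere, and saveBitmap is a hypothetical helper). Note that notifyCallbacks() removes each listener after it fires, so the callback runs at most once:

    EglRenderer renderer = new EglRenderer("snapshot: ");
    renderer.init(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN, new GlRectDrawer());
    renderer.createEglSurface(surface);
    videoTrack.addSink(renderer); // Feeds frames into onFrame(VideoFrame).
    renderer.addFrameListener(bitmap -> saveBitmap(bitmap), /* scale= */ 1f);
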
+ 139 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/EncodedImage.java

@@ -0,0 +1,139 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * An encoded frame from a video stream. Used as an input for decoders and as an output for
+ * encoders.
+ */
+public class EncodedImage {
+  // Must be kept in sync with common_types.h FrameType.
+  public enum FrameType {
+    EmptyFrame(0),
+    VideoFrameKey(3),
+    VideoFrameDelta(4);
+
+    private final int nativeIndex;
+
+    private FrameType(int nativeIndex) {
+      this.nativeIndex = nativeIndex;
+    }
+
+    public int getNative() {
+      return nativeIndex;
+    }
+
+    @CalledByNative("FrameType")
+    static FrameType fromNativeIndex(int nativeIndex) {
+      for (FrameType type : FrameType.values()) {
+        if (type.getNative() == nativeIndex) {
+          return type;
+        }
+      }
+      throw new IllegalArgumentException("Unknown native frame type: " + nativeIndex);
+    }
+  }
+
+  public final ByteBuffer buffer;
+  public final int encodedWidth;
+  public final int encodedHeight;
+  public final long captureTimeMs; // Deprecated
+  public final long captureTimeNs;
+  public final FrameType frameType;
+  public final int rotation;
+  public final boolean completeFrame;
+  public final Integer qp;
+
+  @CalledByNative
+  private EncodedImage(ByteBuffer buffer, int encodedWidth, int encodedHeight, long captureTimeNs,
+      FrameType frameType, int rotation, boolean completeFrame, Integer qp) {
+    this.buffer = buffer;
+    this.encodedWidth = encodedWidth;
+    this.encodedHeight = encodedHeight;
+    this.captureTimeMs = TimeUnit.NANOSECONDS.toMillis(captureTimeNs);
+    this.captureTimeNs = captureTimeNs;
+    this.frameType = frameType;
+    this.rotation = rotation;
+    this.completeFrame = completeFrame;
+    this.qp = qp;
+  }
+
+  public static Builder builder() {
+    return new Builder();
+  }
+
+  public static class Builder {
+    private ByteBuffer buffer;
+    private int encodedWidth;
+    private int encodedHeight;
+    private long captureTimeNs;
+    private EncodedImage.FrameType frameType;
+    private int rotation;
+    private boolean completeFrame;
+    private Integer qp;
+
+    private Builder() {}
+
+    public Builder setBuffer(ByteBuffer buffer) {
+      this.buffer = buffer;
+      return this;
+    }
+
+    public Builder setEncodedWidth(int encodedWidth) {
+      this.encodedWidth = encodedWidth;
+      return this;
+    }
+
+    public Builder setEncodedHeight(int encodedHeight) {
+      this.encodedHeight = encodedHeight;
+      return this;
+    }
+
+    @Deprecated
+    public Builder setCaptureTimeMs(long captureTimeMs) {
+      this.captureTimeNs = TimeUnit.MILLISECONDS.toNanos(captureTimeMs);
+      return this;
+    }
+
+    public Builder setCaptureTimeNs(long captureTimeNs) {
+      this.captureTimeNs = captureTimeNs;
+      return this;
+    }
+
+    public Builder setFrameType(EncodedImage.FrameType frameType) {
+      this.frameType = frameType;
+      return this;
+    }
+
+    public Builder setRotation(int rotation) {
+      this.rotation = rotation;
+      return this;
+    }
+
+    public Builder setCompleteFrame(boolean completeFrame) {
+      this.completeFrame = completeFrame;
+      return this;
+    }
+
+    public Builder setQp(Integer qp) {
+      this.qp = qp;
+      return this;
+    }
+
+    public EncodedImage createEncodedImage() {
+      return new EncodedImage(buffer, encodedWidth, encodedHeight, captureTimeNs, frameType,
+          rotation, completeFrame, qp);
+    }
+  }
+}

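Builder usage sketch (reviewer note; buffer is a ByteBuffer holding an encoded payload produced elsewhere):

    EncodedImage image = EncodedImage.builder()
        .setBuffer(buffer)
        .setEncodedWidth(640)
        .setEncodedHeight(480)
        .setCaptureTimeNs(System.nanoTime())
        .setFrameType(EncodedImage.FrameType.VideoFrameKey)
        .setRotation(0)
        .setCompleteFrame(true)
        .setQp(null) // QP unknown.
        .createEncodedImage();
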
+ 22 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/FecControllerFactoryFactoryInterface.java

@@ -0,0 +1,22 @@
+/*
+ *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Factory for creating webrtc::FecControllerFactory instances.
+ */
+public interface FecControllerFactoryFactoryInterface {
+  /**
+   * Dynamically allocates a webrtc::FecControllerFactory instance and returns a pointer to it.
+   * The caller takes ownership of the object.
+   */
+  public long createNative();
+}

+ 201 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/FileVideoCapturer.java

@@ -0,0 +1,201 @@
+/*
+ *  Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.os.SystemClock;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
+import java.nio.charset.Charset;
+import java.util.Timer;
+import java.util.TimerTask;
+import java.util.concurrent.TimeUnit;
+
+public class FileVideoCapturer implements VideoCapturer {
+  private interface VideoReader {
+    VideoFrame getNextFrame();
+    void close();
+  }
+
+  /**
+   * Read video data from file for the .y4m container.
+   */
+  @SuppressWarnings("StringSplitter")
+  private static class VideoReaderY4M implements VideoReader {
+    private static final String TAG = "VideoReaderY4M";
+    private static final String Y4M_FRAME_DELIMETER = "FRAME";
+    private static final int FRAME_DELIMETER_LENGTH = Y4M_FRAME_DELIMETER.length() + 1;
+
+    private final int frameWidth;
+    private final int frameHeight;
+    // First char after header
+    private final long videoStart;
+    private final RandomAccessFile mediaFile;
+    private final FileChannel mediaFileChannel;
+
+    public VideoReaderY4M(String file) throws IOException {
+      mediaFile = new RandomAccessFile(file, "r");
+      mediaFileChannel = mediaFile.getChannel();
+      StringBuilder builder = new StringBuilder();
+      for (;;) {
+        int c = mediaFile.read();
+        if (c == -1) {
+          // End of file reached.
+          throw new RuntimeException("Found end of file before end of header for file: " + file);
+        }
+        if (c == '\n') {
+          // End of header found.
+          break;
+        }
+        builder.append((char) c);
+      }
+      videoStart = mediaFileChannel.position();
+      String header = builder.toString();
+      String[] headerTokens = header.split("[ ]");
+      int w = 0;
+      int h = 0;
+      String colorSpace = "";
+      for (String tok : headerTokens) {
+        char c = tok.charAt(0);
+        switch (c) {
+          case 'W':
+            w = Integer.parseInt(tok.substring(1));
+            break;
+          case 'H':
+            h = Integer.parseInt(tok.substring(1));
+            break;
+          case 'C':
+            colorSpace = tok.substring(1);
+            break;
+        }
+      }
+      Logging.d(TAG, "Color space: " + colorSpace);
+      if (!colorSpace.equals("420") && !colorSpace.equals("420mpeg2")) {
+        throw new IllegalArgumentException(
+            "Does not support any other color space than I420 or I420mpeg2");
+      }
+      if ((w % 2) == 1 || (h % 2) == 1) {
+        throw new IllegalArgumentException("Does not support odd width or height");
+      }
+      frameWidth = w;
+      frameHeight = h;
+      Logging.d(TAG, "frame dim: (" + w + ", " + h + ")");
+    }
+
+    @Override
+    public VideoFrame getNextFrame() {
+      final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
+      final JavaI420Buffer buffer = JavaI420Buffer.allocate(frameWidth, frameHeight);
+      final ByteBuffer dataY = buffer.getDataY();
+      final ByteBuffer dataU = buffer.getDataU();
+      final ByteBuffer dataV = buffer.getDataV();
+      final int chromaHeight = (frameHeight + 1) / 2;
+      final int sizeY = frameHeight * buffer.getStrideY();
+      final int sizeU = chromaHeight * buffer.getStrideU();
+      final int sizeV = chromaHeight * buffer.getStrideV();
+
+      try {
+        ByteBuffer frameDelim = ByteBuffer.allocate(FRAME_DELIMETER_LENGTH);
+        if (mediaFileChannel.read(frameDelim) < FRAME_DELIMETER_LENGTH) {
+          // We reached the end of the file; loop back to the start of the video.
+          mediaFileChannel.position(videoStart);
+          if (mediaFileChannel.read(frameDelim) < FRAME_DELIMETER_LENGTH) {
+            throw new RuntimeException("Error looping video");
+          }
+        }
+        String frameDelimStr = new String(frameDelim.array(), Charset.forName("US-ASCII"));
+        if (!frameDelimStr.equals(Y4M_FRAME_DELIMETER + "\n")) {
+          throw new RuntimeException(
+              "Frames should be delimited by FRAME plus newline, found delimter was: '"
+              + frameDelimStr + "'");
+        }
+
+        mediaFileChannel.read(dataY);
+        mediaFileChannel.read(dataU);
+        mediaFileChannel.read(dataV);
+      } catch (IOException e) {
+        throw new RuntimeException(e);
+      }
+
+      return new VideoFrame(buffer, 0 /* rotation */, captureTimeNs);
+    }
+
+    @Override
+    public void close() {
+      try {
+        // Closing a file also closes the channel.
+        mediaFile.close();
+      } catch (IOException e) {
+        Logging.e(TAG, "Problem closing file", e);
+      }
+    }
+  }
+
+  private static final String TAG = "FileVideoCapturer";
+  private final VideoReader videoReader;
+  private CapturerObserver capturerObserver;
+  private final Timer timer = new Timer();
+
+  private final TimerTask tickTask = new TimerTask() {
+    @Override
+    public void run() {
+      tick();
+    }
+  };
+
+  public FileVideoCapturer(String inputFile) throws IOException {
+    try {
+      videoReader = new VideoReaderY4M(inputFile);
+    } catch (IOException e) {
+      Logging.d(TAG, "Could not open video file: " + inputFile);
+      throw e;
+    }
+  }
+
+  public void tick() {
+    VideoFrame videoFrame = videoReader.getNextFrame();
+    capturerObserver.onFrameCaptured(videoFrame);
+    videoFrame.release();
+  }
+
+  @Override
+  public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
+      CapturerObserver capturerObserver) {
+    this.capturerObserver = capturerObserver;
+  }
+
+  @Override
+  public void startCapture(int width, int height, int framerate) {
+    timer.schedule(tickTask, 0, 1000 / framerate);
+  }
+
+  @Override
+  public void stopCapture() throws InterruptedException {
+    timer.cancel();
+  }
+
+  @Override
+  public void changeCaptureFormat(int width, int height, int framerate) {
+    // Empty on purpose
+  }
+
+  @Override
+  public void dispose() {
+    videoReader.close();
+  }
+
+  @Override
+  public boolean isScreencast() {
+    return false;
+  }
+}
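
A minimal usage sketch for the capturer above, assuming an EglBase, PeerConnectionFactory, and application Context already exist (those names and the file path are illustrative):

  // Feed a looping .y4m clip into a VideoSource; setup objects are assumed.
  FileVideoCapturer capturer = new FileVideoCapturer("/sdcard/clip.y4m");
  SurfaceTextureHelper helper =
      SurfaceTextureHelper.create("FileCaptureThread", eglBase.getEglBaseContext());
  VideoSource source = factory.createVideoSource(capturer.isScreencast());
  capturer.initialize(helper, appContext, source.getCapturerObserver());
  // Width/height are effectively ignored here; frame size comes from the Y4M header.
  capturer.startCapture(640, 480, 30);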

+ 26 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/FrameDecryptor.java

@@ -0,0 +1,26 @@
+/*
+ *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * The FrameDecryptor interface allows Java API users to provide a
+ * pointer to their native implementation of the FrameDecryptorInterface.
+ * FrameDecryptors are extremely performance sensitive as they must process all
+ * incoming video and audio frames. For this reason they should always be
+ * backed by a native implementation.
+ * @note Not ready for production use.
+ */
+public interface FrameDecryptor {
+  /**
+   * @return A FrameDecryptorInterface pointer.
+   */
+  long getNativeFrameDecryptor();
+}

+ 26 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/FrameEncryptor.java

@@ -0,0 +1,26 @@
+/*
+ *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * The FrameEncryptor interface allows Java API users to provide a pointer to
+ * their native implementation of the FrameEncryptorInterface.
+ * FrameEncryptors are extremely performance sensitive as they must process all
+ * outgoing video and audio frames. For this reason they should always be
+ * backed by a native implementation.
+ * @note Not ready for production use.
+ */
+public interface FrameEncryptor {
+  /**
+   * @return A FrameEncryptorInterface pointer.
+   */
+  long getNativeFrameEncryptor();
+}
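
As with FrameDecryptor above, a Java implementation is only a handle to native code. A hedged sketch, where nativeCreateTestEncryptor is a hypothetical JNI hook the application's native library would have to provide (it is not a WebRTC API):

  public class TestFrameEncryptor implements FrameEncryptor {
    private final long nativePtr = nativeCreateTestEncryptor();

    @Override
    public long getNativeFrameEncryptor() {
      // The returned pointer is consumed on the native side, e.g. when
      // passed to RtpSender.setFrameEncryptor(this).
      return nativePtr;
    }

    // Hypothetical JNI function; must be implemented in the app's native library.
    private static native long nativeCreateTestEncryptor();
  }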

+ 31 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/GlRectDrawer.java

@@ -0,0 +1,31 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Simplest possible GL shader that just draws frames as opaque quads. */
+public class GlRectDrawer extends GlGenericDrawer {
+  private static final String FRAGMENT_SHADER = "void main() {\n"
+      + "  gl_FragColor = sample(tc);\n"
+      + "}\n";
+
+  private static class ShaderCallbacks implements GlGenericDrawer.ShaderCallbacks {
+    @Override
+    public void onNewShader(GlShader shader) {}
+
+    @Override
+    public void onPrepareShader(GlShader shader, float[] texMatrix, int frameWidth, int frameHeight,
+        int viewportWidth, int viewportHeight) {}
+  }
+
+  public GlRectDrawer() {
+    super(FRAGMENT_SHADER, new ShaderCallbacks());
+  }
+}

+ 129 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/GlShader.java

@@ -0,0 +1,129 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES20;
+
+import java.nio.FloatBuffer;
+
+// Helper class for handling OpenGL shaders and shader programs.
+public class GlShader {
+  private static final String TAG = "GlShader";
+
+  private static int compileShader(int shaderType, String source) {
+    final int shader = GLES20.glCreateShader(shaderType);
+    if (shader == 0) {
+      throw new RuntimeException("glCreateShader() failed. GLES20 error: " + GLES20.glGetError());
+    }
+    GLES20.glShaderSource(shader, source);
+    GLES20.glCompileShader(shader);
+    int[] compileStatus = new int[] {GLES20.GL_FALSE};
+    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
+    if (compileStatus[0] != GLES20.GL_TRUE) {
+      Logging.e(
+          TAG, "Compile error " + GLES20.glGetShaderInfoLog(shader) + " in shader:\n" + source);
+      throw new RuntimeException(GLES20.glGetShaderInfoLog(shader));
+    }
+    GlUtil.checkNoGLES2Error("compileShader");
+    return shader;
+  }
+
+  private int program;
+
+  public GlShader(String vertexSource, String fragmentSource) {
+    final int vertexShader = compileShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+    final int fragmentShader = compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+    program = GLES20.glCreateProgram();
+    if (program == 0) {
+      throw new RuntimeException("glCreateProgram() failed. GLES20 error: " + GLES20.glGetError());
+    }
+    GLES20.glAttachShader(program, vertexShader);
+    GLES20.glAttachShader(program, fragmentShader);
+    GLES20.glLinkProgram(program);
+    int[] linkStatus = new int[] {GLES20.GL_FALSE};
+    GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+    if (linkStatus[0] != GLES20.GL_TRUE) {
+      Logging.e(TAG, "Could not link program: " + GLES20.glGetProgramInfoLog(program));
+      throw new RuntimeException(GLES20.glGetProgramInfoLog(program));
+    }
+    // According to the documentation of glLinkProgram():
+    // "After the link operation, applications are free to modify attached shader objects, compile
+    // attached shader objects, detach shader objects, delete shader objects, and attach additional
+    // shader objects. None of these operations affects the information log or the program that is
+    // part of the program object."
+    // But in practice, detaching shaders from the program seems to break some devices. Deleting the
+    // shaders is fine, however - GL defers the deletion until they are no longer attached to a program.
+    GLES20.glDeleteShader(vertexShader);
+    GLES20.glDeleteShader(fragmentShader);
+    GlUtil.checkNoGLES2Error("Creating GlShader");
+  }
+
+  public int getAttribLocation(String label) {
+    if (program == -1) {
+      throw new RuntimeException("The program has been released");
+    }
+    int location = GLES20.glGetAttribLocation(program, label);
+    if (location < 0) {
+      throw new RuntimeException("Could not locate '" + label + "' in program");
+    }
+    return location;
+  }
+
+  /**
+   * Enable and upload a vertex array for attribute |label|. The vertex data is specified in
+   * |buffer| with |dimension| number of components per vertex.
+   */
+  public void setVertexAttribArray(String label, int dimension, FloatBuffer buffer) {
+    setVertexAttribArray(label, dimension, 0 /* stride */, buffer);
+  }
+
+  /**
+   * Enable and upload a vertex array for attribute |label|. The vertex data is specified in
+   * |buffer| with |dimension| number of components per vertex and specified |stride|.
+   */
+  public void setVertexAttribArray(String label, int dimension, int stride, FloatBuffer buffer) {
+    if (program == -1) {
+      throw new RuntimeException("The program has been released");
+    }
+    int location = getAttribLocation(label);
+    GLES20.glEnableVertexAttribArray(location);
+    GLES20.glVertexAttribPointer(location, dimension, GLES20.GL_FLOAT, false, stride, buffer);
+    GlUtil.checkNoGLES2Error("setVertexAttribArray");
+  }
+
+  public int getUniformLocation(String label) {
+    if (program == -1) {
+      throw new RuntimeException("The program has been released");
+    }
+    int location = GLES20.glGetUniformLocation(program, label);
+    if (location < 0) {
+      throw new RuntimeException("Could not locate uniform '" + label + "' in program");
+    }
+    return location;
+  }
+
+  public void useProgram() {
+    if (program == -1) {
+      throw new RuntimeException("The program has been released");
+    }
+    GLES20.glUseProgram(program);
+    GlUtil.checkNoGLES2Error("glUseProgram");
+  }
+
+  public void release() {
+    Logging.d(TAG, "Deleting shader.");
+    // Delete program, automatically detaching any shaders from it.
+    if (program != -1) {
+      GLES20.glDeleteProgram(program);
+      program = -1;
+    }
+  }
+}
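
A short sketch of driving the class above from a thread with a current GL context; the shader sources are illustrative:

  String vertex = "attribute vec4 in_pos;\n"
      + "void main() { gl_Position = in_pos; }\n";
  String fragment = "precision mediump float;\n"
      + "uniform vec4 color;\n"
      + "void main() { gl_FragColor = color; }\n";
  GlShader shader = new GlShader(vertex, fragment);
  shader.useProgram();
  // Full-screen quad in clip space, two components per vertex.
  FloatBuffer quad = GlUtil.createFloatBuffer(
      new float[] {-1f, -1f, 1f, -1f, -1f, 1f, 1f, 1f});
  shader.setVertexAttribArray("in_pos", 2, quad);
  GLES20.glUniform4f(shader.getUniformLocation("color"), 1f, 0f, 0f, 1f);
  GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
  shader.release();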

+ 122 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/GlTextureFrameBuffer.java

@@ -0,0 +1,122 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES20;
+
+/**
+ * Helper class for handling OpenGL framebuffer with only color attachment and no depth or stencil
+ * buffer. Intended for simple tasks such as texture copy, texture downscaling, and texture color
+ * conversion. This class is not thread safe and must be used by a thread with an active GL context.
+ */
+// TODO(magjed): Add unittests for this class.
+public class GlTextureFrameBuffer {
+  private final int pixelFormat;
+  private int frameBufferId;
+  private int textureId;
+  private int width;
+  private int height;
+
+  /**
+   * Generate texture and framebuffer resources. An EGLContext must be bound on the current thread
+   * when calling this function. The framebuffer is not complete until setSize() is called.
+   */
+  public GlTextureFrameBuffer(int pixelFormat) {
+    switch (pixelFormat) {
+      case GLES20.GL_LUMINANCE:
+      case GLES20.GL_RGB:
+      case GLES20.GL_RGBA:
+        this.pixelFormat = pixelFormat;
+        break;
+      default:
+        throw new IllegalArgumentException("Invalid pixel format: " + pixelFormat);
+    }
+    this.width = 0;
+    this.height = 0;
+  }
+
+  /**
+   * (Re)allocate texture. Will do nothing if the requested size equals the current size. An
+   * EGLContext must be bound on the current thread when calling this function. Must be called at
+   * least once before using the framebuffer. May be called multiple times to change size.
+   */
+  public void setSize(int width, int height) {
+    if (width <= 0 || height <= 0) {
+      throw new IllegalArgumentException("Invalid size: " + width + "x" + height);
+    }
+    if (width == this.width && height == this.height) {
+      return;
+    }
+    this.width = width;
+    this.height = height;
+    // Lazily allocate on the first call to setSize().
+    if (textureId == 0) {
+      textureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+    }
+    if (frameBufferId == 0) {
+      final int frameBuffers[] = new int[1];
+      GLES20.glGenFramebuffers(1, frameBuffers, 0);
+      frameBufferId = frameBuffers[0];
+    }
+
+    // Allocate texture.
+    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
+    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, pixelFormat, width, height, 0, pixelFormat,
+        GLES20.GL_UNSIGNED_BYTE, null);
+    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+    GlUtil.checkNoGLES2Error("GlTextureFrameBuffer setSize");
+
+    // Attach the texture to the framebuffer as color attachment.
+    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
+    GLES20.glFramebufferTexture2D(
+        GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureId, 0);
+
+    // Check that the framebuffer is in a good state.
+    final int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
+    if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) {
+      throw new IllegalStateException("Framebuffer not complete, status: " + status);
+    }
+
+    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+  }
+
+  public int getWidth() {
+    return width;
+  }
+
+  public int getHeight() {
+    return height;
+  }
+
+  /** Gets the OpenGL frame buffer id. This value is only valid after setSize() has been called. */
+  public int getFrameBufferId() {
+    return frameBufferId;
+  }
+
+  /** Gets the OpenGL texture id. This value is only valid after setSize() has been called. */
+  public int getTextureId() {
+    return textureId;
+  }
+
+  /**
+   * Release texture and framebuffer. An EGLContext must be bound on the current thread when calling
+   * this function. This object should not be used after this call.
+   */
+  public void release() {
+    GLES20.glDeleteTextures(1, new int[] {textureId}, 0);
+    textureId = 0;
+    GLES20.glDeleteFramebuffers(1, new int[] {frameBufferId}, 0);
+    frameBufferId = 0;
+    width = 0;
+    height = 0;
+  }
+}
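
A hedged render-to-texture sketch using the helper above (an EGL context is assumed to be current on this thread):

  GlTextureFrameBuffer fbo = new GlTextureFrameBuffer(GLES20.GL_RGBA);
  fbo.setSize(1280, 720); // allocates the texture and framebuffer on first call
  GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fbo.getFrameBufferId());
  GLES20.glViewport(0, 0, fbo.getWidth(), fbo.getHeight());
  // ... issue draw calls; they render into the attached texture ...
  GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
  int texId = fbo.getTextureId(); // sample from this in a later pass
  fbo.release(); // when the texture is no longer needed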

+ 58 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/GlUtil.java

@@ -0,0 +1,58 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES20;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+/**
+ * Some OpenGL static utility functions.
+ */
+public class GlUtil {
+  private GlUtil() {}
+
+  // Assert that no OpenGL ES 2.0 error has been raised.
+  public static void checkNoGLES2Error(String msg) {
+    int error = GLES20.glGetError();
+    if (error != GLES20.GL_NO_ERROR) {
+      throw new RuntimeException(msg + ": GLES20 error: " + error);
+    }
+  }
+
+  public static FloatBuffer createFloatBuffer(float[] coords) {
+    // Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it.
+    ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * 4);
+    bb.order(ByteOrder.nativeOrder());
+    FloatBuffer fb = bb.asFloatBuffer();
+    fb.put(coords);
+    fb.position(0);
+    return fb;
+  }
+
+  /**
+   * Generate texture with standard parameters.
+   */
+  public static int generateTexture(int target) {
+    final int textureArray[] = new int[1];
+    GLES20.glGenTextures(1, textureArray, 0);
+    final int textureId = textureArray[0];
+    GLES20.glBindTexture(target, textureId);
+    GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
+    GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
+    GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+    GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+    checkNoGLES2Error("generateTexture");
+    return textureId;
+  }
+}
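
For example, the helpers above replace the usual glGenTextures/glTexParameterf boilerplate (GL context assumed current):

  int oesTexture = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
  int rgbTexture = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
  FloatBuffer texCoords = GlUtil.createFloatBuffer(
      new float[] {0f, 0f, 1f, 0f, 0f, 1f, 1f, 1f});
  GlUtil.checkNoGLES2Error("texture setup"); // throws if any call above failed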

+ 72 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java

@@ -0,0 +1,72 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodecInfo;
+
+import java.util.Arrays;
+
+/**
+ * Factory for Android hardware VideoDecoders.
+ */
+public class HardwareVideoDecoderFactory extends MediaCodecVideoDecoderFactory {
+    private static final Predicate<MediaCodecInfo> defaultAllowedPredicate =
+            new Predicate<MediaCodecInfo>() {
+                private String[] prefixBlacklist =
+                        Arrays.copyOf(MediaCodecUtils.SOFTWARE_IMPLEMENTATION_PREFIXES,
+                                MediaCodecUtils.SOFTWARE_IMPLEMENTATION_PREFIXES.length);
+
+                @Override
+                public boolean test(MediaCodecInfo arg) {
+                    final String name = arg.getName();
+                    for (String prefix : prefixBlacklist) {
+                        if (name.startsWith(prefix)) {
+                            return false;
+                        }
+                    }
+                    return true;
+                }
+            };
+
+    /**
+     * Creates a HardwareVideoDecoderFactory that does not use surface textures.
+     */
+    @Deprecated // Not removed yet to avoid breaking callers.
+    public HardwareVideoDecoderFactory() {
+        this(null);
+    }
+
+    /**
+     * Creates a HardwareVideoDecoderFactory that supports surface texture rendering.
+     *
+     * @param sharedContext The textures generated will be accessible from this context. May be
+     *                      null; if so, texture support is disabled.
+     */
+    public HardwareVideoDecoderFactory(EglBase.Context sharedContext) {
+        this(sharedContext, /* codecAllowedPredicate= */ null);
+    }
+
+    /**
+     * Creates a HardwareVideoDecoderFactory that supports surface texture rendering.
+     *
+     * @param sharedContext         The textures generated will be accessible from this context.
+     *                              May be null; if so, texture support is disabled.
+     * @param codecAllowedPredicate predicate to filter codecs. It is combined with the default
+     *                              predicate that only allows hardware codecs.
+     */
+    public HardwareVideoDecoderFactory(EglBase.Context sharedContext,
+                                         Predicate<MediaCodecInfo> codecAllowedPredicate) {
+        super(sharedContext,
+                (codecAllowedPredicate == null ? defaultAllowedPredicate
+                        : codecAllowedPredicate.and(defaultAllowedPredicate)));
+    }
+}
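
A sketch of narrowing the factory with a custom predicate; the prefix below is an arbitrary example, and the predicate is and-combined with the software-codec blacklist shown above:

  VideoDecoderFactory decoderFactory = new HardwareVideoDecoderFactory(
      eglBase.getEglBaseContext(), // null would disable texture support
      info -> info.getName().startsWith("OMX.qcom.")); // illustrative filter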

+ 295 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java

@@ -0,0 +1,295 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecList;
+import android.os.Build;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import static org.webrtc.MediaCodecUtils.EXYNOS_PREFIX;
+import static org.webrtc.MediaCodecUtils.INTEL_PREFIX;
+import static org.webrtc.MediaCodecUtils.QCOM_PREFIX;
+
+/**
+ * Factory for android hardware video encoders.
+ */
+@SuppressWarnings("deprecation") // API 16 requires the use of deprecated methods.
+public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
+    private static final String TAG = "HardwareVideoEncoderFactory";
+
+    // Forced key frame interval - used to reduce color distortions on Qualcomm platforms.
+    private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_L_MS = 15000;
+    private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS = 20000;
+    private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS = 15000;
+
+    // List of devices with poor H.264 encoder quality.
+    // HW H.264 encoder on below devices has poor bitrate control - actual
+    // bitrates deviates a lot from the target value.
+    private static final List<String> H264_HW_EXCEPTION_MODELS =
+            Arrays.asList("SAMSUNG-SGH-I337", "Nexus 7", "Nexus 4");
+
+    private final EglBase14.Context sharedContext;
+    private final boolean enableIntelVp8Encoder;
+    private final boolean enableH264HighProfile;
+
+    private final Predicate<MediaCodecInfo> codecAllowedPredicate;
+
+    /**
+     * Creates a HardwareVideoEncoderFactory that supports surface texture encoding.
+     *
+     * @param sharedContext         The textures generated will be accessible from this context.
+     *                              May be null; if so, texture support is disabled.
+     * @param enableIntelVp8Encoder true if Intel's VP8 encoder is enabled.
+     * @param enableH264HighProfile true if H264 High Profile is enabled.
+     */
+    public HardwareVideoEncoderFactory(
+            EglBase.Context sharedContext, boolean enableIntelVp8Encoder, boolean enableH264HighProfile) {
+        this(sharedContext, enableIntelVp8Encoder, enableH264HighProfile,
+                /* codecAllowedPredicate= */ null);
+    }
+
+    /**
+     * Creates a HardwareVideoEncoderFactory that supports surface texture encoding.
+     *
+     * @param sharedContext         The textures generated will be accessible from this context.
+     *                              May be null; if so, texture support is disabled.
+     * @param enableIntelVp8Encoder true if Intel's VP8 encoder is enabled.
+     * @param enableH264HighProfile true if H264 High Profile is enabled.
+     * @param codecAllowedPredicate optional predicate to filter codecs. All codecs are allowed
+     *                              when predicate is not provided.
+     */
+    public HardwareVideoEncoderFactory(EglBase.Context sharedContext, boolean enableIntelVp8Encoder,
+                                       boolean enableH264HighProfile, Predicate<MediaCodecInfo> codecAllowedPredicate) {
+        // Texture mode requires EglBase14.
+        if (sharedContext instanceof EglBase14.Context) {
+            this.sharedContext = (EglBase14.Context) sharedContext;
+        } else {
+            Logging.w(TAG, "No shared EglBase.Context.  Encoders will not use texture mode.");
+            this.sharedContext = null;
+        }
+        this.enableIntelVp8Encoder = enableIntelVp8Encoder;
+        this.enableH264HighProfile = enableH264HighProfile;
+        this.codecAllowedPredicate = codecAllowedPredicate;
+    }
+
+    @Deprecated
+    public HardwareVideoEncoderFactory(boolean enableIntelVp8Encoder, boolean enableH264HighProfile) {
+        this(null, enableIntelVp8Encoder, enableH264HighProfile);
+    }
+
+    @Override
+    public VideoEncoder createEncoder(VideoCodecInfo input) {
+
+        // HW encoding is not supported below Android Kitkat.
+        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
+            return null;
+        }
+
+        VideoCodecType type = VideoCodecType.valueOf(input.name);
+        MediaCodecInfo info = findCodecForType(type);
+
+        if (info == null) {
+            return null;
+        }
+
+        String codecName = info.getName();
+        String mime = type.mimeType();
+
+        Integer surfaceColorFormat = MediaCodecUtils.selectColorFormat(
+                MediaCodecUtils.TEXTURE_COLOR_FORMATS, info.getCapabilitiesForType(mime));
+        Integer yuvColorFormat = MediaCodecUtils.selectColorFormat(
+                MediaCodecUtils.ENCODER_COLOR_FORMATS, info.getCapabilitiesForType(mime));
+
+        if (type == VideoCodecType.H264) {
+            boolean isHighProfile = H264Utils.isSameH264Profile(
+                    input.params, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true));
+            boolean isBaselineProfile = H264Utils.isSameH264Profile(
+                    input.params, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false));
+
+            if (!isHighProfile && !isBaselineProfile) {
+                return null;
+            }
+            if (isHighProfile && !isH264HighProfileSupported(info)) {
+                return null;
+            }
+        }
+
+        return new HardwareVideoEncoder(new MediaCodecWrapperFactoryImpl(), codecName, type,
+                surfaceColorFormat, yuvColorFormat, input.params, getKeyFrameIntervalSec(type),
+                getForcedKeyFrameIntervalMs(type, codecName), createBitrateAdjuster(type, codecName),
+                sharedContext);
+    }
+
+    @Override
+    public VideoCodecInfo[] getSupportedCodecs() {
+        // HW encoding is not supported below Android Kitkat.
+        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
+            return new VideoCodecInfo[0];
+        }
+
+        List<VideoCodecInfo> supportedCodecInfos = new ArrayList<>();
+        // Generate a list of supported codecs in order of preference:
+        // VP8, VP9, H264 (high profile), and H264 (baseline profile).
+        for (VideoCodecType type : new VideoCodecType[]{VideoCodecType.VP8, VideoCodecType.VP9, VideoCodecType.H264}) {
+            MediaCodecInfo codec = findCodecForType(type);
+            if (codec != null) {
+                String name = type.name();
+                // Add the H264 high profile variant only when the codec supports it.
+                if (type == VideoCodecType.H264 && isH264HighProfileSupported(codec)) {
+                    supportedCodecInfos.add(new VideoCodecInfo(
+                            name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true)));
+                }
+
+                supportedCodecInfos.add(new VideoCodecInfo(
+                        name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false)));
+            }
+        }
+
+        return supportedCodecInfos.toArray(new VideoCodecInfo[0]);
+    }
+
+    private MediaCodecInfo findCodecForType(VideoCodecType type) {
+        for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
+            MediaCodecInfo info = null;
+            try {
+                info = MediaCodecList.getCodecInfoAt(i);
+            } catch (IllegalArgumentException e) {
+                Logging.e(TAG, "Cannot retrieve encoder codec info", e);
+            }
+
+            if (info == null || !info.isEncoder()) {
+                continue;
+            }
+
+            if (isSupportedCodec(info, type)) {
+                return info;
+            }
+        }
+        return null; // No support for this type.
+    }
+
+    // Returns true if the given MediaCodecInfo indicates a supported encoder for the given type.
+    private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecType type) {
+        if (!MediaCodecUtils.codecSupportsType(info, type)) {
+            return false;
+        }
+        // Check for a supported color format.
+        if (MediaCodecUtils.selectColorFormat(
+                MediaCodecUtils.ENCODER_COLOR_FORMATS, info.getCapabilitiesForType(type.mimeType()))
+                == null) {
+            return false;
+        }
+        return isHardwareSupportedInCurrentSdk(info, type) && isMediaCodecAllowed(info);
+    }
+
+    // Returns true if the given MediaCodecInfo indicates a hardware module that is supported on the
+    // current SDK.
+    private boolean isHardwareSupportedInCurrentSdk(MediaCodecInfo info, VideoCodecType type) {
+        switch (type) {
+            case VP8:
+                return isHardwareSupportedInCurrentSdkVp8(info);
+            case VP9:
+                return isHardwareSupportedInCurrentSdkVp9(info);
+            case H264:
+                return isHardwareSupportedInCurrentSdkH264(info);
+        }
+        return false;
+    }
+
+    private boolean isHardwareSupportedInCurrentSdkVp8(MediaCodecInfo info) {
+        String name = info.getName();
+        // QCOM Vp8 encoder is supported in KITKAT or later.
+        return (name.startsWith(QCOM_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT)
+                // Exynos VP8 encoder is supported in M or later.
+                || (name.startsWith(EXYNOS_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M)
+                // Intel Vp8 encoder is supported in LOLLIPOP or later, with the intel encoder enabled.
+                || (name.startsWith(INTEL_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP
+                && enableIntelVp8Encoder);
+    }
+
+    private boolean isHardwareSupportedInCurrentSdkVp9(MediaCodecInfo info) {
+        String name = info.getName();
+        return (name.startsWith(QCOM_PREFIX) || name.startsWith(EXYNOS_PREFIX))
+                // Both QCOM and Exynos VP9 encoders are supported in N or later.
+                && Build.VERSION.SDK_INT >= Build.VERSION_CODES.N;
+    }
+
+    private boolean isHardwareSupportedInCurrentSdkH264(MediaCodecInfo info) {
+        // First, H264 hardware might perform poorly on this model.
+        if (H264_HW_EXCEPTION_MODELS.contains(Build.MODEL)) {
+            return false;
+        }
+        String name = info.getName();
+        // QCOM H264 encoder is supported in KITKAT or later.
+        return (name.startsWith(QCOM_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) ||
+                // Exynos H264 encoder is supported in LOLLIPOP or later.
+                (name.startsWith(EXYNOS_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) ||
+                // dds_modify @ fix: Huawei phones failing to use the hardware H264 encoder
+                (name.startsWith("OMX.google.") && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP);
+    }
+
+    private boolean isMediaCodecAllowed(MediaCodecInfo info) {
+        if (codecAllowedPredicate == null) {
+            return true;
+        }
+        return codecAllowedPredicate.test(info);
+    }
+
+    private int getKeyFrameIntervalSec(VideoCodecType type) {
+        switch (type) {
+            case VP8: // Fallthrough intended.
+            case VP9:
+                return 100;
+            case H264:
+                return 20;
+        }
+        throw new IllegalArgumentException("Unsupported VideoCodecType " + type);
+    }
+
+    private int getForcedKeyFrameIntervalMs(VideoCodecType type, String codecName) {
+        if (type == VideoCodecType.VP8 && codecName.startsWith(QCOM_PREFIX)) {
+            if (Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP
+                    || Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP_MR1) {
+                return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_L_MS;
+            } else if (Build.VERSION.SDK_INT == Build.VERSION_CODES.M) {
+                return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS;
+            } else if (Build.VERSION.SDK_INT > Build.VERSION_CODES.M) {
+                return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS;
+            }
+        }
+        // Other codecs don't need key frame forcing.
+        return 0;
+    }
+
+    private BitrateAdjuster createBitrateAdjuster(VideoCodecType type, String codecName) {
+        if (codecName.startsWith(EXYNOS_PREFIX)) {
+            if (type == VideoCodecType.VP8) {
+                // Exynos VP8 encoders need dynamic bitrate adjustment.
+                return new DynamicBitrateAdjuster();
+            } else {
+                // Exynos VP9 and H264 encoders need framerate-based bitrate adjustment.
+                return new FramerateBitrateAdjuster();
+            }
+        }
+        // Other codecs don't need bitrate adjustment.
+        return new BaseBitrateAdjuster();
+    }
+
+    private boolean isH264HighProfileSupported(MediaCodecInfo info) {
+        return enableH264HighProfile && Build.VERSION.SDK_INT > Build.VERSION_CODES.M && info.getName().startsWith(EXYNOS_PREFIX);
+    }
+}
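
A small probe sketch that builds the factory above and logs what the device can hardware-encode (eglBase is assumed to exist):

  HardwareVideoEncoderFactory encoderFactory = new HardwareVideoEncoderFactory(
      eglBase.getEglBaseContext(),
      /* enableIntelVp8Encoder= */ true,
      /* enableH264HighProfile= */ true);
  for (VideoCodecInfo codec : encoderFactory.getSupportedCodecs()) {
    Logging.d("EncoderProbe", codec.name + " " + codec.params);
  }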

+ 52 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/IceCandidate.java

@@ -0,0 +1,52 @@
+/*
+ *  Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Representation of a single ICE Candidate, mirroring
+ * {@code IceCandidateInterface} in the C++ API.
+ */
+public class IceCandidate {
+  public final String sdpMid;
+  public final int sdpMLineIndex;
+  public final String sdp;
+  public final String serverUrl;
+
+  public IceCandidate(String sdpMid, int sdpMLineIndex, String sdp) {
+    this.sdpMid = sdpMid;
+    this.sdpMLineIndex = sdpMLineIndex;
+    this.sdp = sdp;
+    this.serverUrl = "";
+  }
+
+  @CalledByNative
+  IceCandidate(String sdpMid, int sdpMLineIndex, String sdp, String serverUrl) {
+    this.sdpMid = sdpMid;
+    this.sdpMLineIndex = sdpMLineIndex;
+    this.sdp = sdp;
+    this.serverUrl = serverUrl;
+  }
+
+  @Override
+  public String toString() {
+    return sdpMid + ":" + sdpMLineIndex + ":" + sdp + ":" + serverUrl;
+  }
+
+  @CalledByNative
+  String getSdpMid() {
+    return sdpMid;
+  }
+
+  @CalledByNative
+  String getSdp() {
+    return sdp;
+  }
+}
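
Candidates of this class typically arrive in PeerConnection.Observer.onIceCandidate() and are forwarded over the application's own signaling channel. A hedged sketch; the JSON keys and signalingChannel are app-level conventions, not part of WebRTC:

  @Override
  public void onIceCandidate(IceCandidate candidate) {
    try {
      JSONObject msg = new JSONObject();
      msg.put("sdpMid", candidate.sdpMid);
      msg.put("sdpMLineIndex", candidate.sdpMLineIndex);
      msg.put("candidate", candidate.sdp);
      signalingChannel.send(msg.toString()); // hypothetical transport
    } catch (JSONException e) {
      throw new RuntimeException(e);
    }
  }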

+ 200 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/JavaI420Buffer.java

@@ -0,0 +1,200 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+import org.webrtc.VideoFrame.I420Buffer;
+
+/** Implementation of VideoFrame.I420Buffer backed by Java direct byte buffers. */
+public class JavaI420Buffer implements VideoFrame.I420Buffer {
+  private final int width;
+  private final int height;
+  private final ByteBuffer dataY;
+  private final ByteBuffer dataU;
+  private final ByteBuffer dataV;
+  private final int strideY;
+  private final int strideU;
+  private final int strideV;
+  private final RefCountDelegate refCountDelegate;
+
+  private JavaI420Buffer(int width, int height, ByteBuffer dataY, int strideY, ByteBuffer dataU,
+      int strideU, ByteBuffer dataV, int strideV, Runnable releaseCallback) {
+    this.width = width;
+    this.height = height;
+    this.dataY = dataY;
+    this.dataU = dataU;
+    this.dataV = dataV;
+    this.strideY = strideY;
+    this.strideU = strideU;
+    this.strideV = strideV;
+    this.refCountDelegate = new RefCountDelegate(releaseCallback);
+  }
+
+  private static void checkCapacity(ByteBuffer data, int width, int height, int stride) {
+    // The last row does not necessarily need padding.
+    final int minCapacity = stride * (height - 1) + width;
+    if (data.capacity() < minCapacity) {
+      throw new IllegalArgumentException(
+          "Buffer must be at least " + minCapacity + " bytes, but was " + data.capacity());
+    }
+  }
+
+  /** Wraps existing ByteBuffers into JavaI420Buffer object without copying the contents. */
+  public static JavaI420Buffer wrap(int width, int height, ByteBuffer dataY, int strideY,
+      ByteBuffer dataU, int strideU, ByteBuffer dataV, int strideV,
+      Runnable releaseCallback) {
+    if (dataY == null || dataU == null || dataV == null) {
+      throw new IllegalArgumentException("Data buffers cannot be null.");
+    }
+    if (!dataY.isDirect() || !dataU.isDirect() || !dataV.isDirect()) {
+      throw new IllegalArgumentException("Data buffers must be direct byte buffers.");
+    }
+
+    // Slice the buffers to prevent external modifications to the position / limit of the buffer.
+    // Note that this doesn't protect the contents of the buffers from modifications.
+    dataY = dataY.slice();
+    dataU = dataU.slice();
+    dataV = dataV.slice();
+
+    final int chromaWidth = (width + 1) / 2;
+    final int chromaHeight = (height + 1) / 2;
+    checkCapacity(dataY, width, height, strideY);
+    checkCapacity(dataU, chromaWidth, chromaHeight, strideU);
+    checkCapacity(dataV, chromaWidth, chromaHeight, strideV);
+
+    return new JavaI420Buffer(
+        width, height, dataY, strideY, dataU, strideU, dataV, strideV, releaseCallback);
+  }
+
+  /** Allocates an empty I420Buffer suitable for an image of the given dimensions. */
+  public static JavaI420Buffer allocate(int width, int height) {
+    int chromaHeight = (height + 1) / 2;
+    int strideUV = (width + 1) / 2;
+    int yPos = 0;
+    int uPos = yPos + width * height;
+    int vPos = uPos + strideUV * chromaHeight;
+
+    ByteBuffer buffer =
+        JniCommon.nativeAllocateByteBuffer(width * height + 2 * strideUV * chromaHeight);
+
+    buffer.position(yPos);
+    buffer.limit(uPos);
+    ByteBuffer dataY = buffer.slice();
+
+    buffer.position(uPos);
+    buffer.limit(vPos);
+    ByteBuffer dataU = buffer.slice();
+
+    buffer.position(vPos);
+    buffer.limit(vPos + strideUV * chromaHeight);
+    ByteBuffer dataV = buffer.slice();
+
+    return new JavaI420Buffer(width, height, dataY, width, dataU, strideUV, dataV, strideUV,
+        () -> { JniCommon.nativeFreeByteBuffer(buffer); });
+  }
+
+  @Override
+  public int getWidth() {
+    return width;
+  }
+
+  @Override
+  public int getHeight() {
+    return height;
+  }
+
+  @Override
+  public ByteBuffer getDataY() {
+    // Return a slice to prevent relative reads from changing the position.
+    return dataY.slice();
+  }
+
+  @Override
+  public ByteBuffer getDataU() {
+    // Return a slice to prevent relative reads from changing the position.
+    return dataU.slice();
+  }
+
+  @Override
+  public ByteBuffer getDataV() {
+    // Return a slice to prevent relative reads from changing the position.
+    return dataV.slice();
+  }
+
+  @Override
+  public int getStrideY() {
+    return strideY;
+  }
+
+  @Override
+  public int getStrideU() {
+    return strideU;
+  }
+
+  @Override
+  public int getStrideV() {
+    return strideV;
+  }
+
+  @Override
+  public I420Buffer toI420() {
+    retain();
+    return this;
+  }
+
+  @Override
+  public void retain() {
+    refCountDelegate.retain();
+  }
+
+  @Override
+  public void release() {
+    refCountDelegate.release();
+  }
+
+  @Override
+  public VideoFrame.Buffer cropAndScale(
+      int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+    return cropAndScaleI420(this, cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
+  }
+
+  public static VideoFrame.Buffer cropAndScaleI420(final I420Buffer buffer, int cropX, int cropY,
+      int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+    if (cropWidth == scaleWidth && cropHeight == scaleHeight) {
+      // No scaling.
+      ByteBuffer dataY = buffer.getDataY();
+      ByteBuffer dataU = buffer.getDataU();
+      ByteBuffer dataV = buffer.getDataV();
+
+      dataY.position(cropX + cropY * buffer.getStrideY());
+      dataU.position(cropX / 2 + cropY / 2 * buffer.getStrideU());
+      dataV.position(cropX / 2 + cropY / 2 * buffer.getStrideV());
+
+      buffer.retain();
+      return JavaI420Buffer.wrap(scaleWidth, scaleHeight, dataY.slice(), buffer.getStrideY(),
+          dataU.slice(), buffer.getStrideU(), dataV.slice(), buffer.getStrideV(), buffer::release);
+    }
+
+    JavaI420Buffer newBuffer = JavaI420Buffer.allocate(scaleWidth, scaleHeight);
+    nativeCropAndScaleI420(buffer.getDataY(), buffer.getStrideY(), buffer.getDataU(),
+        buffer.getStrideU(), buffer.getDataV(), buffer.getStrideV(), cropX, cropY, cropWidth,
+        cropHeight, newBuffer.getDataY(), newBuffer.getStrideY(), newBuffer.getDataU(),
+        newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV(), scaleWidth,
+        scaleHeight);
+    return newBuffer;
+  }
+
+  private static native void nativeCropAndScaleI420(ByteBuffer srcY, int srcStrideY,
+      ByteBuffer srcU, int srcStrideU, ByteBuffer srcV, int srcStrideV, int cropX, int cropY,
+      int cropWidth, int cropHeight, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU,
+      int dstStrideU, ByteBuffer dstV, int dstStrideV, int scaleWidth, int scaleHeight);
+}
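
A minimal sketch of allocating and handing off a buffer with the class above, here filled with a flat gray frame:

  JavaI420Buffer buffer = JavaI420Buffer.allocate(640, 480);
  ByteBuffer y = buffer.getDataY(); // fresh slice, position 0
  ByteBuffer u = buffer.getDataU();
  ByteBuffer v = buffer.getDataV();
  while (y.hasRemaining()) y.put((byte) 128);
  while (u.hasRemaining()) u.put((byte) 128);
  while (v.hasRemaining()) v.put((byte) 128);
  // The frame holds one reference to the buffer; release it when done.
  VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, System.nanoTime());
  // ... deliver the frame to a sink or CapturerObserver ...
  frame.release();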

+ 20 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/LibvpxVp8Decoder.java

@@ -0,0 +1,20 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class LibvpxVp8Decoder extends WrappedNativeVideoDecoder {
+  @Override
+  public long createNativeVideoDecoder() {
+    return nativeCreateDecoder();
+  }
+
+  static native long nativeCreateDecoder();
+}

+ 25 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/LibvpxVp8Encoder.java

@@ -0,0 +1,25 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class LibvpxVp8Encoder extends WrappedNativeVideoEncoder {
+  @Override
+  public long createNativeVideoEncoder() {
+    return nativeCreateEncoder();
+  }
+
+  static native long nativeCreateEncoder();
+
+  @Override
+  public boolean isHardwareEncoder() {
+    return false;
+  }
+}

+ 22 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/LibvpxVp9Decoder.java

@@ -0,0 +1,22 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class LibvpxVp9Decoder extends WrappedNativeVideoDecoder {
+  @Override
+  public long createNativeVideoDecoder() {
+    return nativeCreateDecoder();
+  }
+
+  static native long nativeCreateDecoder();
+
+  static native boolean nativeIsSupported();
+}

+ 27 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/LibvpxVp9Encoder.java

@@ -0,0 +1,27 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class LibvpxVp9Encoder extends WrappedNativeVideoEncoder {
+  @Override
+  public long createNativeVideoEncoder() {
+    return nativeCreateEncoder();
+  }
+
+  static native long nativeCreateEncoder();
+
+  @Override
+  public boolean isHardwareEncoder() {
+    return false;
+  }
+
+  static native boolean nativeIsSupported();
+}

File diff suppressed because it is too large
+ 1021 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/MediaCodecVideoDecoder.java


File diff suppressed because it is too large
+ 1124 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java


+ 98 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/MediaConstraints.java

@@ -0,0 +1,98 @@
+/*
+ *  Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Description of media constraints for {@code MediaStream} and
+ * {@code PeerConnection}.
+ */
+public class MediaConstraints {
+  /** Simple String key/value pair. */
+  public static class KeyValuePair {
+    private final String key;
+    private final String value;
+
+    public KeyValuePair(String key, String value) {
+      this.key = key;
+      this.value = value;
+    }
+
+    @CalledByNative("KeyValuePair")
+    public String getKey() {
+      return key;
+    }
+
+    @CalledByNative("KeyValuePair")
+    public String getValue() {
+      return value;
+    }
+
+    @Override
+    public String toString() {
+      return key + ": " + value;
+    }
+
+    @Override
+    public boolean equals(Object other) {
+      if (this == other) {
+        return true;
+      }
+      if (other == null || getClass() != other.getClass()) {
+        return false;
+      }
+      KeyValuePair that = (KeyValuePair) other;
+      return key.equals(that.key) && value.equals(that.value);
+    }
+
+    @Override
+    public int hashCode() {
+      return key.hashCode() + value.hashCode();
+    }
+  }
+
+  public final List<KeyValuePair> mandatory;
+  public final List<KeyValuePair> optional;
+
+  public MediaConstraints() {
+    mandatory = new ArrayList<KeyValuePair>();
+    optional = new ArrayList<KeyValuePair>();
+  }
+
+  private static String stringifyKeyValuePairList(List<KeyValuePair> list) {
+    StringBuilder builder = new StringBuilder("[");
+    for (KeyValuePair pair : list) {
+      if (builder.length() > 1) {
+        builder.append(", ");
+      }
+      builder.append(pair.toString());
+    }
+    return builder.append("]").toString();
+  }
+
+  @Override
+  public String toString() {
+    return "mandatory: " + stringifyKeyValuePairList(mandatory) + ", optional: "
+        + stringifyKeyValuePairList(optional);
+  }
+
+  @CalledByNative
+  List<KeyValuePair> getMandatory() {
+    return mandatory;
+  }
+
+  @CalledByNative
+  List<KeyValuePair> getOptional() {
+    return optional;
+  }
+}
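
Typical usage of the class above when creating an offer; these key strings are the conventional constraint names used with PeerConnection:

  MediaConstraints constraints = new MediaConstraints();
  constraints.mandatory.add(
      new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
  constraints.mandatory.add(
      new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
  constraints.optional.add(
      new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
  peerConnection.createOffer(sdpObserver, constraints); // observer assumed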

+ 58 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/MediaSource.java

@@ -0,0 +1,58 @@
+/*
+ *  Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ MediaSourceInterface. */
+public class MediaSource {
+  /** Tracks MediaSourceInterface.SourceState */
+  public enum State {
+    INITIALIZING,
+    LIVE,
+    ENDED,
+    MUTED;
+
+    @CalledByNative("State")
+    static State fromNativeIndex(int nativeIndex) {
+      return values()[nativeIndex];
+    }
+  }
+
+  private long nativeSource;
+
+  public MediaSource(long nativeSource) {
+    this.nativeSource = nativeSource;
+  }
+
+  public State state() {
+    checkMediaSourceExists();
+    return nativeGetState(nativeSource);
+  }
+
+  public void dispose() {
+    checkMediaSourceExists();
+    JniCommon.nativeReleaseRef(nativeSource);
+    nativeSource = 0;
+  }
+
+  /** Returns a pointer to webrtc::MediaSourceInterface. */
+  protected long getNativeMediaSource() {
+    checkMediaSourceExists();
+    return nativeSource;
+  }
+
+  private void checkMediaSourceExists() {
+    if (nativeSource == 0) {
+      throw new IllegalStateException("MediaSource has been disposed.");
+    }
+  }
+
+  private static native State nativeGetState(long pointer);
+}

+ 159 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/MediaStream.java

@@ -0,0 +1,159 @@
+/*
+ *  Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+/** Java wrapper for a C++ MediaStreamInterface. */
+public class MediaStream {
+  private static final String TAG = "MediaStream";
+
+  public final List<AudioTrack> audioTracks = new ArrayList<>();
+  public final List<VideoTrack> videoTracks = new ArrayList<>();
+  public final List<VideoTrack> preservedVideoTracks = new ArrayList<>();
+  private long nativeStream;
+
+  @CalledByNative
+  public MediaStream(long nativeStream) {
+    this.nativeStream = nativeStream;
+  }
+
+  public boolean addTrack(AudioTrack track) {
+    checkMediaStreamExists();
+    if (nativeAddAudioTrackToNativeStream(nativeStream, track.getNativeAudioTrack())) {
+      audioTracks.add(track);
+      return true;
+    }
+    return false;
+  }
+
+  public boolean addTrack(VideoTrack track) {
+    checkMediaStreamExists();
+    if (nativeAddVideoTrackToNativeStream(nativeStream, track.getNativeVideoTrack())) {
+      videoTracks.add(track);
+      return true;
+    }
+    return false;
+  }
+
+  // Tracks added via addTrack() are automatically released once MediaStream.dispose()
+  // is called. If a video track needs to be preserved after the MediaStream is destroyed,
+  // it should be added with addPreservedTrack() instead.
+  public boolean addPreservedTrack(VideoTrack track) {
+    checkMediaStreamExists();
+    if (nativeAddVideoTrackToNativeStream(nativeStream, track.getNativeVideoTrack())) {
+      preservedVideoTracks.add(track);
+      return true;
+    }
+    return false;
+  }
+
+  public boolean removeTrack(AudioTrack track) {
+    checkMediaStreamExists();
+    audioTracks.remove(track);
+    return nativeRemoveAudioTrack(nativeStream, track.getNativeAudioTrack());
+  }
+
+  public boolean removeTrack(VideoTrack track) {
+    checkMediaStreamExists();
+    videoTracks.remove(track);
+    preservedVideoTracks.remove(track);
+    return nativeRemoveVideoTrack(nativeStream, track.getNativeVideoTrack());
+  }
+
+  @CalledByNative
+  public void dispose() {
+    checkMediaStreamExists();
+    // Remove and release previously added audio and video tracks.
+    while (!audioTracks.isEmpty()) {
+      AudioTrack track = audioTracks.get(0 /* index */);
+      removeTrack(track);
+      track.dispose();
+    }
+    while (!videoTracks.isEmpty()) {
+      VideoTrack track = videoTracks.get(0 /* index */);
+      removeTrack(track);
+      track.dispose();
+    }
+    // Remove, but do not release preserved video tracks.
+    while (!preservedVideoTracks.isEmpty()) {
+      removeTrack(preservedVideoTracks.get(0 /* index */));
+    }
+    JniCommon.nativeReleaseRef(nativeStream);
+    nativeStream = 0;
+  }
+
+  public String getId() {
+    checkMediaStreamExists();
+    return nativeGetId(nativeStream);
+  }
+
+  @Override
+  public String toString() {
+    return "[" + getId() + ":A=" + audioTracks.size() + ":V=" + videoTracks.size() + "]";
+  }
+
+  @CalledByNative
+  void addNativeAudioTrack(long nativeTrack) {
+    audioTracks.add(new AudioTrack(nativeTrack));
+  }
+
+  @CalledByNative
+  void addNativeVideoTrack(long nativeTrack) {
+    videoTracks.add(new VideoTrack(nativeTrack));
+  }
+
+  @CalledByNative
+  void removeAudioTrack(long nativeTrack) {
+    removeMediaStreamTrack(audioTracks, nativeTrack);
+  }
+
+  @CalledByNative
+  void removeVideoTrack(long nativeTrack) {
+    removeMediaStreamTrack(videoTracks, nativeTrack);
+  }
+
+  /** Returns a pointer to webrtc::MediaStreamInterface. */
+  long getNativeMediaStream() {
+    checkMediaStreamExists();
+    return nativeStream;
+  }
+
+  private void checkMediaStreamExists() {
+    if (nativeStream == 0) {
+      throw new IllegalStateException("MediaStream has been disposed.");
+    }
+  }
+
+  private static void removeMediaStreamTrack(
+      List<? extends MediaStreamTrack> tracks, long nativeTrack) {
+    final Iterator<? extends MediaStreamTrack> it = tracks.iterator();
+    while (it.hasNext()) {
+      MediaStreamTrack track = it.next();
+      if (track.getNativeMediaStreamTrack() == nativeTrack) {
+        track.dispose();
+        it.remove();
+        return;
+      }
+    }
+    Logging.e(TAG, "Couldn't not find track");
+  }
+
+  private static native boolean nativeAddAudioTrackToNativeStream(
+      long stream, long nativeAudioTrack);
+  private static native boolean nativeAddVideoTrackToNativeStream(
+      long stream, long nativeVideoTrack);
+  private static native boolean nativeRemoveAudioTrack(long stream, long nativeAudioTrack);
+  private static native boolean nativeRemoveVideoTrack(long stream, long nativeVideoTrack);
+  private static native String nativeGetId(long stream);
+}
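
A short composition sketch for the class above; the factory, sources, and ids are assumed to exist:

  MediaStream stream = factory.createLocalMediaStream("ARDAMS");
  AudioTrack audio = factory.createAudioTrack("audio0", audioSource);
  VideoTrack video = factory.createVideoTrack("video0", videoSource);
  stream.addTrack(audio);
  stream.addTrack(video);
  // Later: dispose() removes and releases every track added via addTrack().
  stream.dispose();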

+ 129 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/MediaStreamTrack.java

@@ -0,0 +1,129 @@
+/*
+ *  Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ MediaStreamTrackInterface. */
+public class MediaStreamTrack {
+  public static final String AUDIO_TRACK_KIND = "audio";
+  public static final String VIDEO_TRACK_KIND = "video";
+
+  /** Tracks MediaStreamTrackInterface.TrackState */
+  public enum State {
+    LIVE,
+    ENDED;
+
+    @CalledByNative("State")
+    static State fromNativeIndex(int nativeIndex) {
+      return values()[nativeIndex];
+    }
+  }
+
+  // Must be kept in sync with cricket::MediaType.
+  public enum MediaType {
+    MEDIA_TYPE_AUDIO(0),
+    MEDIA_TYPE_VIDEO(1);
+
+    private final int nativeIndex;
+
+    private MediaType(int nativeIndex) {
+      this.nativeIndex = nativeIndex;
+    }
+
+    @CalledByNative("MediaType")
+    int getNative() {
+      return nativeIndex;
+    }
+
+    @CalledByNative("MediaType")
+    static MediaType fromNativeIndex(int nativeIndex) {
+      for (MediaType type : MediaType.values()) {
+        if (type.getNative() == nativeIndex) {
+          return type;
+        }
+      }
+      throw new IllegalArgumentException("Unknown native media type: " + nativeIndex);
+    }
+  }
+
+  /** Factory method to create an AudioTrack or VideoTrack subclass. */
+  static MediaStreamTrack createMediaStreamTrack(long nativeTrack) {
+    if (nativeTrack == 0) {
+      return null;
+    }
+    String trackKind = nativeGetKind(nativeTrack);
+    if (trackKind.equals(AUDIO_TRACK_KIND)) {
+      return new AudioTrack(nativeTrack);
+    } else if (trackKind.equals(VIDEO_TRACK_KIND)) {
+      return new VideoTrack(nativeTrack);
+    } else {
+      return null;
+    }
+  }
+
+  private long nativeTrack;
+
+  public MediaStreamTrack(long nativeTrack) {
+    if (nativeTrack == 0) {
+      throw new IllegalArgumentException("nativeTrack may not be null");
+    }
+    this.nativeTrack = nativeTrack;
+  }
+
+  public String id() {
+    checkMediaStreamTrackExists();
+    return nativeGetId(nativeTrack);
+  }
+
+  public String kind() {
+    checkMediaStreamTrackExists();
+    return nativeGetKind(nativeTrack);
+  }
+
+  public boolean enabled() {
+    checkMediaStreamTrackExists();
+    return nativeGetEnabled(nativeTrack);
+  }
+
+  public boolean setEnabled(boolean enable) {
+    checkMediaStreamTrackExists();
+    return nativeSetEnabled(nativeTrack, enable);
+  }
+
+  public State state() {
+    checkMediaStreamTrackExists();
+    return nativeGetState(nativeTrack);
+  }
+
+  public void dispose() {
+    checkMediaStreamTrackExists();
+    JniCommon.nativeReleaseRef(nativeTrack);
+    nativeTrack = 0;
+  }
+
+  long getNativeMediaStreamTrack() {
+    checkMediaStreamTrackExists();
+    return nativeTrack;
+  }
+
+  private void checkMediaStreamTrackExists() {
+    if (nativeTrack == 0) {
+      throw new IllegalStateException("MediaStreamTrack has been disposed.");
+    }
+  }
+
+  private static native String nativeGetId(long track);
+  private static native String nativeGetKind(long track);
+  private static native boolean nativeGetEnabled(long track);
+  private static native boolean nativeSetEnabled(long track, boolean enabled);
+  private static native State nativeGetState(long track);
+}
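
A quick sketch of the lifecycle contract above (illustrative; the helper name is a
placeholder): every accessor routes through checkMediaStreamTrackExists(), so any
call after dispose() throws IllegalStateException.

    // Sketch: mute a live track, then release its native reference.
    void muteAndRelease(MediaStreamTrack track) {
      if (track.state() == MediaStreamTrack.State.LIVE) {
        track.setEnabled(false);  // stops media flow without ending the track
      }
      track.dispose();            // nativeTrack becomes 0; further calls now throw
    }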

+ 22 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/MediaTransportFactoryFactory.java

@@ -0,0 +1,22 @@
+/*
+ *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Factory for creating webrtc::MediaTransportFactory instances.
+ */
+public interface MediaTransportFactoryFactory {
+  /**
+   * Dynamically allocates a webrtc::MediaTransportFactory instance and returns a pointer to it.
+   * The caller takes ownership of the object.
+   */
+  public long createNativeMediaTransportFactory();
+}

+ 81 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/Metrics.java

@@ -0,0 +1,81 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.HashMap;
+import java.util.Map;
+
+// Java-side of androidmetrics.cc
+//
+// Rtc histograms can be queried through the API, getAndReset().
+// The returned map holds the name of a histogram and its samples.
+//
+// Example of |map| with one histogram:
+// |name|: "WebRTC.Video.InputFramesPerSecond"
+//     |min|: 1
+//     |max|: 100
+//     |bucketCount|: 50
+//     |samples|: [30]:1
+//
+// Most histograms are not updated frequently (e.g. most video metrics are an
+// average over the call and recorded when a stream is removed).
+// The metrics can for example be retrieved when a peer connection is closed.
+public class Metrics {
+  private static final String TAG = "Metrics";
+
+  public final Map<String, HistogramInfo> map =
+      new HashMap<String, HistogramInfo>(); // <name, HistogramInfo>
+
+  @CalledByNative
+  Metrics() {}
+
+  /**
+   * Class holding histogram information.
+   */
+  public static class HistogramInfo {
+    public final int min;
+    public final int max;
+    public final int bucketCount;
+    public final Map<Integer, Integer> samples =
+        new HashMap<Integer, Integer>(); // <value, # of events>
+
+    @CalledByNative("HistogramInfo")
+    public HistogramInfo(int min, int max, int bucketCount) {
+      this.min = min;
+      this.max = max;
+      this.bucketCount = bucketCount;
+    }
+
+    @CalledByNative("HistogramInfo")
+    public void addSample(int value, int numEvents) {
+      samples.put(value, numEvents);
+    }
+  }
+
+  @CalledByNative
+  private void add(String name, HistogramInfo info) {
+    map.put(name, info);
+  }
+
+  // Enables gathering of metrics (which can be fetched with getAndReset()).
+  // Must be called before PeerConnectionFactory is created.
+  public static void enable() {
+    nativeEnable();
+  }
+
+  // Gets and clears native histograms.
+  public static Metrics getAndReset() {
+    return nativeGetAndReset();
+  }
+
+  private static native void nativeEnable();
+  private static native Metrics nativeGetAndReset();
+}
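
A sketch of the intended call pattern (illustrative; requires java.util.Map and
assumes a call has just ended):

    // Sketch: enable before creating the PeerConnectionFactory,
    // then dump histograms once the call is over.
    Metrics.enable();
    // ... create the factory, run the call, close the peer connection ...
    Metrics metrics = Metrics.getAndReset();
    for (Map.Entry<String, Metrics.HistogramInfo> entry : metrics.map.entrySet()) {
      Metrics.HistogramInfo info = entry.getValue();
      Logging.d("Stats", entry.getKey() + " min=" + info.min + " max=" + info.max
              + " samples=" + info.samples);
    }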

+ 24 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/NativeLibraryLoader.java

@@ -0,0 +1,24 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Interface for loading native libraries. A custom loader can be passed to
+ * PeerConnectionFactory.initialize.
+ */
+public interface NativeLibraryLoader {
+  /**
+   * Loads a native library with the given name.
+   *
+   * @return True on success
+   */
+  boolean load(String name);
+}
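
A custom loader is a small class; the sketch below does essentially what the
library's default loader does (class name and log tag are illustrative).

    // Sketch: load via System.loadLibrary and report failures instead of crashing.
    class LoggingLibraryLoader implements NativeLibraryLoader {
      @Override
      public boolean load(String name) {
        try {
          System.loadLibrary(name);  // e.g. "jingle_peerconnection_so"
          return true;
        } catch (UnsatisfiedLinkError e) {
          Logging.e("LibraryLoader", "Failed to load native library: " + name, e);
          return false;
        }
      }
    }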

+ 20 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/NativePeerConnectionFactory.java

@@ -0,0 +1,20 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Factory for creating webrtc::jni::OwnedPeerConnection instances. */
+public interface NativePeerConnectionFactory {
+  /**
+   * Create a new webrtc::jni::OwnedPeerConnection instance and returns a pointer to it.
+   * The caller takes ownership of the object.
+   */
+  long createNativePeerConnection();
+}

+ 314 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/NetworkMonitor.java

@@ -0,0 +1,314 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.os.Build;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.webrtc.NetworkMonitorAutoDetect.INVALID_NET_ID;
+
+/**
+ * Borrowed from Chromium's
+ * src/net/android/java/src/org/chromium/net/NetworkChangeNotifier.java
+ *
+ * <p>Triggers updates to the underlying network state from OS networking events.
+ *
+ * <p>This class is thread-safe.
+ */
+public class NetworkMonitor {
+  /**
+   * Alerted when the connection type of the network changes. The alert is fired on the UI thread.
+   */
+  public interface NetworkObserver {
+    public void onConnectionTypeChanged(NetworkMonitorAutoDetect.ConnectionType connectionType);
+  }
+
+  private static final String TAG = "NetworkMonitor";
+
+  // Lazy initialization holder class idiom for static fields.
+  private static class InstanceHolder {
+    // We are storing application context so it is okay.
+    static final NetworkMonitor instance = new NetworkMonitor();
+  }
+
+  // Native observers of the connection type changes.
+  private final ArrayList<Long> nativeNetworkObservers;
+  // Java observers of the connection type changes.
+  private final ArrayList<NetworkObserver> networkObservers;
+
+  private final Object autoDetectLock = new Object();
+  // Object that detects the connection type changes and brings up mobile networks.
+  private NetworkMonitorAutoDetect autoDetect;
+  // Also guarded by autoDetectLock.
+  private int numObservers;
+
+  private volatile NetworkMonitorAutoDetect.ConnectionType currentConnectionType;
+
+  private NetworkMonitor() {
+    nativeNetworkObservers = new ArrayList<Long>();
+    networkObservers = new ArrayList<NetworkObserver>();
+    numObservers = 0;
+    currentConnectionType = NetworkMonitorAutoDetect.ConnectionType.CONNECTION_UNKNOWN;
+  }
+
+  // TODO(sakal): Remove once downstream dependencies have been updated.
+  @Deprecated
+  public static void init(Context context) {}
+
+  /** Returns the singleton instance. This may be called from native or from Java code. */
+  @CalledByNative
+  public static NetworkMonitor getInstance() {
+    return InstanceHolder.instance;
+  }
+
+  private static void assertIsTrue(boolean condition) {
+    if (!condition) {
+      throw new AssertionError("Expected to be true");
+    }
+  }
+
+  /**
+   * Enables auto detection of the network state change and brings up mobile networks for using
+   * multi-networking. This requires that the embedding app has the platform
+   * ACCESS_NETWORK_STATE and CHANGE_NETWORK_STATE permissions.
+   */
+  public void startMonitoring(Context applicationContext) {
+    synchronized (autoDetectLock) {
+      ++numObservers;
+      if (autoDetect == null) {
+        autoDetect = createAutoDetect(applicationContext);
+      }
+      currentConnectionType =
+          NetworkMonitorAutoDetect.getConnectionType(autoDetect.getCurrentNetworkState());
+    }
+  }
+
+  /** Deprecated, pass in application context in startMonitoring instead. */
+  @Deprecated
+  public void startMonitoring() {
+    startMonitoring(ContextUtils.getApplicationContext());
+  }
+
+  /**
+   * Enables auto detection of the network state change and brings up mobile networks for using
+   * multi-networking. This requires that the embedding app has the platform
+   * ACCESS_NETWORK_STATE and CHANGE_NETWORK_STATE permissions.
+   */
+  @CalledByNative
+  private void startMonitoring(Context applicationContext, long nativeObserver) {
+    Logging.d(TAG, "Start monitoring with native observer " + nativeObserver);
+
+    startMonitoring(
+        applicationContext != null ? applicationContext : ContextUtils.getApplicationContext());
+    // The native observers expect a network list update after they call startMonitoring.
+    synchronized (nativeNetworkObservers) {
+      nativeNetworkObservers.add(nativeObserver);
+    }
+    updateObserverActiveNetworkList(nativeObserver);
+    // currentConnectionType was updated in startMonitoring().
+    // Need to notify the native observers here.
+    notifyObserversOfConnectionTypeChange(currentConnectionType);
+  }
+
+  /** Stop network monitoring. If no one is monitoring networks, destroy and reset autoDetect. */
+  public void stopMonitoring() {
+    synchronized (autoDetectLock) {
+      if (--numObservers == 0) {
+        autoDetect.destroy();
+        autoDetect = null;
+      }
+    }
+  }
+
+  @CalledByNative
+  private void stopMonitoring(long nativeObserver) {
+    Logging.d(TAG, "Stop monitoring with native observer " + nativeObserver);
+    stopMonitoring();
+    synchronized (nativeNetworkObservers) {
+      nativeNetworkObservers.remove(nativeObserver);
+    }
+  }
+
+  // Returns true if network binding is supported on this platform.
+  @CalledByNative
+  private boolean networkBindingSupported() {
+    synchronized (autoDetectLock) {
+      return autoDetect != null && autoDetect.supportNetworkCallback();
+    }
+  }
+
+  @CalledByNative
+  private static int androidSdkInt() {
+    return Build.VERSION.SDK_INT;
+  }
+
+  private NetworkMonitorAutoDetect.ConnectionType getCurrentConnectionType() {
+    return currentConnectionType;
+  }
+
+  private long getCurrentDefaultNetId() {
+    synchronized (autoDetectLock) {
+      return autoDetect == null ? INVALID_NET_ID : autoDetect.getDefaultNetId();
+    }
+  }
+
+  private NetworkMonitorAutoDetect createAutoDetect(Context appContext) {
+    return new NetworkMonitorAutoDetect(new NetworkMonitorAutoDetect.Observer() {
+
+      @Override
+      public void onConnectionTypeChanged(
+          NetworkMonitorAutoDetect.ConnectionType newConnectionType) {
+        updateCurrentConnectionType(newConnectionType);
+      }
+
+      @Override
+      public void onNetworkConnect(NetworkMonitorAutoDetect.NetworkInformation networkInfo) {
+        notifyObserversOfNetworkConnect(networkInfo);
+      }
+
+      @Override
+      public void onNetworkDisconnect(long networkHandle) {
+        notifyObserversOfNetworkDisconnect(networkHandle);
+      }
+    }, appContext);
+  }
+
+  private void updateCurrentConnectionType(
+      NetworkMonitorAutoDetect.ConnectionType newConnectionType) {
+    currentConnectionType = newConnectionType;
+    notifyObserversOfConnectionTypeChange(newConnectionType);
+  }
+
+  /** Alerts all observers of a connection change. */
+  private void notifyObserversOfConnectionTypeChange(
+      NetworkMonitorAutoDetect.ConnectionType newConnectionType) {
+    List<Long> nativeObservers = getNativeNetworkObserversSync();
+    for (Long nativeObserver : nativeObservers) {
+      nativeNotifyConnectionTypeChanged(nativeObserver);
+    }
+    // This avoids calling external methods while locking on an object.
+    List<NetworkObserver> javaObservers;
+    synchronized (networkObservers) {
+      javaObservers = new ArrayList<>(networkObservers);
+    }
+    for (NetworkObserver observer : javaObservers) {
+      observer.onConnectionTypeChanged(newConnectionType);
+    }
+  }
+
+  private void notifyObserversOfNetworkConnect(
+      NetworkMonitorAutoDetect.NetworkInformation networkInfo) {
+    List<Long> nativeObservers = getNativeNetworkObserversSync();
+    for (Long nativeObserver : nativeObservers) {
+      nativeNotifyOfNetworkConnect(nativeObserver, networkInfo);
+    }
+  }
+
+  private void notifyObserversOfNetworkDisconnect(long networkHandle) {
+    List<Long> nativeObservers = getNativeNetworkObserversSync();
+    for (Long nativeObserver : nativeObservers) {
+      nativeNotifyOfNetworkDisconnect(nativeObserver, networkHandle);
+    }
+  }
+
+  private void updateObserverActiveNetworkList(long nativeObserver) {
+    List<NetworkMonitorAutoDetect.NetworkInformation> networkInfoList;
+    synchronized (autoDetectLock) {
+      networkInfoList = (autoDetect == null) ? null : autoDetect.getActiveNetworkList();
+    }
+    if (networkInfoList == null || networkInfoList.size() == 0) {
+      return;
+    }
+
+    NetworkMonitorAutoDetect.NetworkInformation[] networkInfos =
+        new NetworkMonitorAutoDetect.NetworkInformation[networkInfoList.size()];
+    networkInfos = networkInfoList.toArray(networkInfos);
+    nativeNotifyOfActiveNetworkList(nativeObserver, networkInfos);
+  }
+
+  private List<Long> getNativeNetworkObserversSync() {
+    synchronized (nativeNetworkObservers) {
+      return new ArrayList<>(nativeNetworkObservers);
+    }
+  }
+
+  /**
+   * Adds an observer for any connection type changes.
+   *
+   * @deprecated Use getInstance(appContext).addObserver instead.
+   */
+  @Deprecated
+  public static void addNetworkObserver(NetworkObserver observer) {
+    getInstance().addObserver(observer);
+  }
+
+  public void addObserver(NetworkObserver observer) {
+    synchronized (networkObservers) {
+      networkObservers.add(observer);
+    }
+  }
+
+  /**
+   * Removes an observer for any connection type changes.
+   *
+   * @deprecated Use getInstance(appContext).removeObserver instead.
+   */
+  @Deprecated
+  public static void removeNetworkObserver(NetworkObserver observer) {
+    getInstance().removeObserver(observer);
+  }
+
+  public void removeObserver(NetworkObserver observer) {
+    synchronized (networkObservers) {
+      networkObservers.remove(observer);
+    }
+  }
+
+  /** Checks if there currently is connectivity. */
+  public static boolean isOnline() {
+    NetworkMonitorAutoDetect.ConnectionType connectionType =
+        getInstance().getCurrentConnectionType();
+    return connectionType != NetworkMonitorAutoDetect.ConnectionType.CONNECTION_NONE;
+  }
+
+  private native void nativeNotifyConnectionTypeChanged(long nativeAndroidNetworkMonitor);
+  private native void nativeNotifyOfNetworkConnect(
+      long nativeAndroidNetworkMonitor, NetworkMonitorAutoDetect.NetworkInformation networkInfo);
+  private native void nativeNotifyOfNetworkDisconnect(
+      long nativeAndroidNetworkMonitor, long networkHandle);
+  private native void nativeNotifyOfActiveNetworkList(
+      long nativeAndroidNetworkMonitor, NetworkMonitorAutoDetect.NetworkInformation[] networkInfos);
+
+  // For testing only.
+
+  NetworkMonitorAutoDetect getNetworkMonitorAutoDetect() {
+    synchronized (autoDetectLock) {
+      return autoDetect;
+    }
+  }
+
+  // For testing only.
+  int getNumObservers() {
+    synchronized (autoDetectLock) {
+      return numObservers;
+    }
+  }
+
+  // For testing only.
+  static NetworkMonitorAutoDetect createAndSetAutoDetectForTest(Context context) {
+    NetworkMonitor networkMonitor = getInstance();
+    NetworkMonitorAutoDetect autoDetect = networkMonitor.createAutoDetect(context);
+    return networkMonitor.autoDetect = autoDetect;
+  }
+}
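
Typical wiring from application code, sketched under the assumption that the app
holds ACCESS_NETWORK_STATE (and CHANGE_NETWORK_STATE for cellular binding);
appContext stands for the application Context.

    // Sketch: observe connectivity for the duration of a call.
    NetworkMonitor monitor = NetworkMonitor.getInstance();
    NetworkMonitor.NetworkObserver observer = connectionType ->
        Logging.d("App", "Connection type changed: " + connectionType);
    monitor.addObserver(observer);
    monitor.startMonitoring(appContext);
    // ... when the call ends:
    monitor.stopMonitoring();
    monitor.removeObserver(observer);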

+ 870 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/NetworkMonitorAutoDetect.java

@@ -0,0 +1,870 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.annotation.SuppressLint;
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.net.ConnectivityManager;
+import android.net.ConnectivityManager.NetworkCallback;
+import android.net.LinkAddress;
+import android.net.LinkProperties;
+import android.net.Network;
+import android.net.NetworkCapabilities;
+import android.net.NetworkInfo;
+import android.net.NetworkRequest;
+import android.net.wifi.WifiInfo;
+import android.net.wifi.WifiManager;
+import android.net.wifi.p2p.WifiP2pGroup;
+import android.net.wifi.p2p.WifiP2pManager;
+import android.os.Build;
+import android.telephony.TelephonyManager;
+import java.net.InetAddress;
+import java.net.NetworkInterface;
+import java.net.SocketException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Borrowed from Chromium's
+ * src/net/android/java/src/org/chromium/net/NetworkChangeNotifierAutoDetect.java
+ *
+ * Used by the NetworkMonitor to listen to platform changes in connectivity.
+ * Note that use of this class requires that the app have the platform
+ * ACCESS_NETWORK_STATE permission.
+ */
+public class NetworkMonitorAutoDetect extends BroadcastReceiver {
+  public static enum ConnectionType {
+    CONNECTION_UNKNOWN,
+    CONNECTION_ETHERNET,
+    CONNECTION_WIFI,
+    CONNECTION_4G,
+    CONNECTION_3G,
+    CONNECTION_2G,
+    CONNECTION_UNKNOWN_CELLULAR,
+    CONNECTION_BLUETOOTH,
+    CONNECTION_VPN,
+    CONNECTION_NONE
+  }
+
+  public static class IPAddress {
+    public final byte[] address;
+    public IPAddress(byte[] address) {
+      this.address = address;
+    }
+
+    @CalledByNative("IPAddress")
+    private byte[] getAddress() {
+      return address;
+    }
+  }
+
+  /** Java version of NetworkMonitor.NetworkInformation */
+  public static class NetworkInformation {
+    public final String name;
+    public final ConnectionType type;
+    // Used to specify the underlying network type if the type is CONNECTION_VPN.
+    public final ConnectionType underlyingTypeForVpn;
+    public final long handle;
+    public final IPAddress[] ipAddresses;
+    public NetworkInformation(String name, ConnectionType type, ConnectionType underlyingTypeForVpn,
+        long handle, IPAddress[] addresses) {
+      this.name = name;
+      this.type = type;
+      this.underlyingTypeForVpn = underlyingTypeForVpn;
+      this.handle = handle;
+      this.ipAddresses = addresses;
+    }
+
+    @CalledByNative("NetworkInformation")
+    private IPAddress[] getIpAddresses() {
+      return ipAddresses;
+    }
+
+    @CalledByNative("NetworkInformation")
+    private ConnectionType getConnectionType() {
+      return type;
+    }
+
+    @CalledByNative("NetworkInformation")
+    private ConnectionType getUnderlyingConnectionTypeForVpn() {
+      return underlyingTypeForVpn;
+    }
+
+    @CalledByNative("NetworkInformation")
+    private long getHandle() {
+      return handle;
+    }
+
+    @CalledByNative("NetworkInformation")
+    private String getName() {
+      return name;
+    }
+  }
+
+  static class NetworkState {
+    private final boolean connected;
+    // Defined from ConnectivityManager.TYPE_XXX for non-mobile; for mobile, it is
+    // further divided into 2G, 3G, or 4G from the subtype.
+    private final int type;
+    // Defined from NetworkInfo.subtype, which is one of the TelephonyManager.NETWORK_TYPE_XXXs.
+    // Will be useful to find the maximum bandwidth.
+    private final int subtype;
+    // When the type is TYPE_VPN, the following two fields specify the similar type and subtype as
+    // above for the underlying network that is used by the VPN.
+    private final int underlyingNetworkTypeForVpn;
+    private final int underlyingNetworkSubtypeForVpn;
+
+    public NetworkState(boolean connected, int type, int subtype, int underlyingNetworkTypeForVpn,
+        int underlyingNetworkSubtypeForVpn) {
+      this.connected = connected;
+      this.type = type;
+      this.subtype = subtype;
+      this.underlyingNetworkTypeForVpn = underlyingNetworkTypeForVpn;
+      this.underlyingNetworkSubtypeForVpn = underlyingNetworkSubtypeForVpn;
+    }
+
+    public boolean isConnected() {
+      return connected;
+    }
+
+    public int getNetworkType() {
+      return type;
+    }
+
+    public int getNetworkSubType() {
+      return subtype;
+    }
+
+    public int getUnderlyingNetworkTypeForVpn() {
+      return underlyingNetworkTypeForVpn;
+    }
+
+    public int getUnderlyingNetworkSubtypeForVpn() {
+      return underlyingNetworkSubtypeForVpn;
+    }
+  }
+  /**
+   * The methods in this class get called when the network changes if the callback
+   * is registered with a proper network request. It is only available in Android Lollipop
+   * and above.
+   */
+  @SuppressLint("NewApi")
+  private class SimpleNetworkCallback extends NetworkCallback {
+    @Override
+    public void onAvailable(Network network) {
+      Logging.d(TAG, "Network becomes available: " + network.toString());
+      onNetworkChanged(network);
+    }
+
+    @Override
+    public void onCapabilitiesChanged(Network network, NetworkCapabilities networkCapabilities) {
+      // A capabilities change may indicate the ConnectionType has changed,
+      // so forward the new NetworkInformation along to the observer.
+      Logging.d(TAG, "capabilities changed: " + networkCapabilities.toString());
+      onNetworkChanged(network);
+    }
+
+    @Override
+    public void onLinkPropertiesChanged(Network network, LinkProperties linkProperties) {
+      // A link property change may indicate that the IP address has changed,
+      // so forward the new NetworkInformation to the observer.
+      Logging.d(TAG, "link properties changed: " + linkProperties.toString());
+      onNetworkChanged(network);
+    }
+
+    @Override
+    public void onLosing(Network network, int maxMsToLive) {
+      // Signals that the network is about to be lost within maxMsToLive milliseconds.
+      // We may use this signal later.
+      Logging.d(
+          TAG, "Network " + network.toString() + " is about to be lost in " + maxMsToLive + "ms");
+    }
+
+    @Override
+    public void onLost(Network network) {
+      Logging.d(TAG, "Network " + network.toString() + " is disconnected");
+      observer.onNetworkDisconnect(networkToNetId(network));
+    }
+
+    private void onNetworkChanged(Network network) {
+      NetworkInformation networkInformation = connectivityManagerDelegate.networkToInfo(network);
+      if (networkInformation != null) {
+        observer.onNetworkConnect(networkInformation);
+      }
+    }
+  }
+
+  /** Queries the ConnectivityManager for information about the current connection. */
+  static class ConnectivityManagerDelegate {
+    /**
+     *  Note: In some rare Android systems connectivityManager is null.  We handle that
+     *  gracefully below.
+     */
+    private final ConnectivityManager connectivityManager;
+
+    ConnectivityManagerDelegate(Context context) {
+      connectivityManager =
+          (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
+    }
+
+    // For testing.
+    ConnectivityManagerDelegate() {
+      // All the methods below should be overridden.
+      connectivityManager = null;
+    }
+
+    /**
+     * Returns connection type and status information about the current
+     * default network.
+     */
+    NetworkState getNetworkState() {
+      if (connectivityManager == null) {
+        return new NetworkState(false, -1, -1, -1, -1);
+      }
+      return getNetworkState(connectivityManager.getActiveNetworkInfo());
+    }
+
+    /**
+     * Returns connection type and status information about |network|.
+     * Only callable on Lollipop and newer releases.
+     */
+    @SuppressLint("NewApi")
+    NetworkState getNetworkState(Network network) {
+      if (network == null || connectivityManager == null) {
+        return new NetworkState(false, -1, -1, -1, -1);
+      }
+      NetworkInfo networkInfo = connectivityManager.getNetworkInfo(network);
+      if (networkInfo == null) {
+        Logging.w(TAG, "Couldn't retrieve information from network " + network.toString());
+        return new NetworkState(false, -1, -1, -1, -1);
+      }
+      // The general logic of handling a VPN in this method is as follows. getNetworkInfo will
+      // return the info of the network with the same id as in |network| when it is registered via
+      // ConnectivityManager.registerNetworkAgent in Android. |networkInfo| may or may not indicate
+      // the type TYPE_VPN if |network| is a VPN. To reliably detect the VPN interface, we need to
+      // query the network capability as below in the case when networkInfo.getType() is not
+      // TYPE_VPN. On the other hand when networkInfo.getType() is TYPE_VPN, the only solution so
+      // far to obtain the underlying network information is to query the active network interface.
+      // However, the active network interface may not be used for the VPN, for example, if the VPN
+      // is restricted to WiFi by the implementation but the WiFi interface is currently turned
+      // off and the active interface is the Cell. Using directly the result from
+      // getActiveNetworkInfo may thus give the wrong interface information, and one should note
+      // that getActiveNetworkInfo would return the default network interface if the VPN does not
+      // specify its underlying networks in the implementation. Therefore, we need further compare
+      // |network| to the active network. If they are not the same network, we will have to fall
+      // back to report an unknown network.
+
+      if (networkInfo.getType() != ConnectivityManager.TYPE_VPN) {
+        // Note that getNetworkCapabilities returns null if the network is unknown.
+        NetworkCapabilities networkCapabilities =
+            connectivityManager.getNetworkCapabilities(network);
+        if (networkCapabilities == null
+            || !networkCapabilities.hasTransport(NetworkCapabilities.TRANSPORT_VPN)) {
+          return getNetworkState(networkInfo);
+        }
+        // When |network| is in fact a VPN after querying its capability but |networkInfo| is not of
+        // type TYPE_VPN, |networkInfo| contains the info for the underlying network, and we return
+        // a NetworkState constructed from it.
+        return new NetworkState(networkInfo.isConnected(), ConnectivityManager.TYPE_VPN, -1,
+            networkInfo.getType(), networkInfo.getSubtype());
+      }
+
+      // When |networkInfo| is of type TYPE_VPN, which implies |network| is a VPN, we return the
+      // NetworkState of the active network via getActiveNetworkInfo(), if |network| is the active
+      // network that supports the VPN. Otherwise, NetworkState of an unknown network with type -1
+      // will be returned.
+      //
+      // Note that getActiveNetwork and getActiveNetworkInfo return null if no default network is
+      // currently active.
+      if (networkInfo.getType() == ConnectivityManager.TYPE_VPN) {
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M
+            && network.equals(connectivityManager.getActiveNetwork())) {
+          // If a VPN network is in place, we can find the underlying network type via querying the
+          // active network info thanks to
+          // https://android.googlesource.com/platform/frameworks/base/+/d6a7980d
+          NetworkInfo underlyingActiveNetworkInfo = connectivityManager.getActiveNetworkInfo();
+          // We use the NetworkInfo of the underlying network if it is not of TYPE_VPN itself.
+          if (underlyingActiveNetworkInfo != null
+              && underlyingActiveNetworkInfo.getType() != ConnectivityManager.TYPE_VPN) {
+            return new NetworkState(networkInfo.isConnected(), ConnectivityManager.TYPE_VPN, -1,
+                underlyingActiveNetworkInfo.getType(), underlyingActiveNetworkInfo.getSubtype());
+          }
+        }
+        return new NetworkState(
+            networkInfo.isConnected(), ConnectivityManager.TYPE_VPN, -1, -1, -1);
+      }
+
+      return getNetworkState(networkInfo);
+    }
+
+    /**
+     * Returns connection type and status information gleaned from networkInfo. Note that to obtain
+     * the complete information about a VPN including the type of the underlying network, one should
+     * use the above method getNetworkState with a Network object.
+     */
+    private NetworkState getNetworkState(NetworkInfo networkInfo) {
+      if (networkInfo == null || !networkInfo.isConnected()) {
+        return new NetworkState(false, -1, -1, -1, -1);
+      }
+      return new NetworkState(true, networkInfo.getType(), networkInfo.getSubtype(), -1, -1);
+    }
+
+    /**
+     * Returns all connected networks.
+     * Only callable on Lollipop and newer releases.
+     */
+    @SuppressLint("NewApi")
+    Network[] getAllNetworks() {
+      if (connectivityManager == null) {
+        return new Network[0];
+      }
+      return connectivityManager.getAllNetworks();
+    }
+
+    List<NetworkInformation> getActiveNetworkList() {
+      if (!supportNetworkCallback()) {
+        return null;
+      }
+      ArrayList<NetworkInformation> netInfoList = new ArrayList<NetworkInformation>();
+      for (Network network : getAllNetworks()) {
+        NetworkInformation info = networkToInfo(network);
+        if (info != null) {
+          netInfoList.add(info);
+        }
+      }
+      return netInfoList;
+    }
+
+    /**
+     * Returns the NetID of the current default network. Returns
+     * INVALID_NET_ID if no current default network connected.
+     * Only callable on Lollipop and newer releases.
+     */
+    @SuppressLint("NewApi")
+    long getDefaultNetId() {
+      if (!supportNetworkCallback()) {
+        return INVALID_NET_ID;
+      }
+      // Android Lollipop had no API to get the default network; only an
+      // API to return the NetworkInfo for the default network. To
+      // determine the default network one can find the network with
+      // type matching that of the default network.
+      final NetworkInfo defaultNetworkInfo = connectivityManager.getActiveNetworkInfo();
+      if (defaultNetworkInfo == null) {
+        return INVALID_NET_ID;
+      }
+      final Network[] networks = getAllNetworks();
+      long defaultNetId = INVALID_NET_ID;
+      for (Network network : networks) {
+        if (!hasInternetCapability(network)) {
+          continue;
+        }
+        final NetworkInfo networkInfo = connectivityManager.getNetworkInfo(network);
+        if (networkInfo != null && networkInfo.getType() == defaultNetworkInfo.getType()) {
+          // There should not be multiple connected networks of the
+          // same type. At least as of Android Marshmallow this is
+          // not supported. If this becomes supported this assertion
+          // may trigger. At that point we could consider using
+          // ConnectivityManager.getDefaultNetwork() though this
+          // may give confusing results with VPNs and is only
+          // available with Android Marshmallow.
+          if (defaultNetId != INVALID_NET_ID) {
+            throw new RuntimeException(
+                "Multiple connected networks of same type are not supported.");
+          }
+          defaultNetId = networkToNetId(network);
+        }
+      }
+      return defaultNetId;
+    }
+
+    @SuppressLint("NewApi")
+    private NetworkInformation networkToInfo(Network network) {
+      if (network == null || connectivityManager == null) {
+        return null;
+      }
+      LinkProperties linkProperties = connectivityManager.getLinkProperties(network);
+      // getLinkProperties will return null if the network is unknown.
+      if (linkProperties == null) {
+        Logging.w(TAG, "Detected unknown network: " + network.toString());
+        return null;
+      }
+      if (linkProperties.getInterfaceName() == null) {
+        Logging.w(TAG, "Null interface name for network " + network.toString());
+        return null;
+      }
+
+      NetworkState networkState = getNetworkState(network);
+      ConnectionType connectionType = getConnectionType(networkState);
+      if (connectionType == ConnectionType.CONNECTION_NONE) {
+        // This may not be an error. The OS may signal a network event with connection type
+        // NONE when the network disconnects.
+        Logging.d(TAG, "Network " + network.toString() + " is disconnected");
+        return null;
+      }
+
+      // Some Android devices may return a CONNECTION_UNKNOWN_CELLULAR or CONNECTION_UNKNOWN type,
+      // which appears to be usable. Just log such cases here.
+      if (connectionType == ConnectionType.CONNECTION_UNKNOWN
+          || connectionType == ConnectionType.CONNECTION_UNKNOWN_CELLULAR) {
+        Logging.d(TAG, "Network " + network.toString() + " connection type is " + connectionType
+                + " because it has type " + networkState.getNetworkType() + " and subtype "
+                + networkState.getNetworkSubType());
+      }
+      // ConnectionType.CONNECTION_UNKNOWN if the network is not a VPN or the underlying network is
+      // unknown.
+      ConnectionType underlyingConnectionTypeForVpn =
+          getUnderlyingConnectionTypeForVpn(networkState);
+
+      NetworkInformation networkInformation = new NetworkInformation(
+          linkProperties.getInterfaceName(), connectionType, underlyingConnectionTypeForVpn,
+          networkToNetId(network), getIPAddresses(linkProperties));
+      return networkInformation;
+    }
+
+    /**
+     * Returns true if {@code network} can provide Internet access. Can be used to
+     * ignore specialized networks (e.g. IMS, FOTA).
+     */
+    @SuppressLint("NewApi")
+    boolean hasInternetCapability(Network network) {
+      if (connectivityManager == null) {
+        return false;
+      }
+      final NetworkCapabilities capabilities = connectivityManager.getNetworkCapabilities(network);
+      return capabilities != null
+          && capabilities.hasCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET);
+    }
+
+    /** Only callable on Lollipop and newer releases. */
+    @SuppressLint("NewApi")
+    public void registerNetworkCallback(NetworkCallback networkCallback) {
+      connectivityManager.registerNetworkCallback(
+          new NetworkRequest.Builder()
+              .addCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET)
+              .build(),
+          networkCallback);
+    }
+
+    /** Only callable on Lollipop and newer releases. */
+    @SuppressLint("NewApi")
+    public void requestMobileNetwork(NetworkCallback networkCallback) {
+      NetworkRequest.Builder builder = new NetworkRequest.Builder();
+      builder.addCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET)
+          .addTransportType(NetworkCapabilities.TRANSPORT_CELLULAR);
+      connectivityManager.requestNetwork(builder.build(), networkCallback);
+    }
+
+    @SuppressLint("NewApi")
+    IPAddress[] getIPAddresses(LinkProperties linkProperties) {
+      IPAddress[] ipAddresses = new IPAddress[linkProperties.getLinkAddresses().size()];
+      int i = 0;
+      for (LinkAddress linkAddress : linkProperties.getLinkAddresses()) {
+        ipAddresses[i] = new IPAddress(linkAddress.getAddress().getAddress());
+        ++i;
+      }
+      return ipAddresses;
+    }
+
+    @SuppressLint("NewApi")
+    public void releaseCallback(NetworkCallback networkCallback) {
+      if (supportNetworkCallback()) {
+        Logging.d(TAG, "Unregister network callback");
+        connectivityManager.unregisterNetworkCallback(networkCallback);
+      }
+    }
+
+    public boolean supportNetworkCallback() {
+      return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && connectivityManager != null;
+    }
+  }
+
+  /** Queries the WifiManager for SSID of the current Wifi connection. */
+  static class WifiManagerDelegate {
+    private final Context context;
+    WifiManagerDelegate(Context context) {
+      this.context = context;
+    }
+
+    // For testing.
+    WifiManagerDelegate() {
+      // All the methods below should be overridden.
+      context = null;
+    }
+
+    String getWifiSSID() {
+      final Intent intent = context.registerReceiver(
+          null, new IntentFilter(WifiManager.NETWORK_STATE_CHANGED_ACTION));
+      if (intent != null) {
+        final WifiInfo wifiInfo = intent.getParcelableExtra(WifiManager.EXTRA_WIFI_INFO);
+        if (wifiInfo != null) {
+          final String ssid = wifiInfo.getSSID();
+          if (ssid != null) {
+            return ssid;
+          }
+        }
+      }
+      return "";
+    }
+  }
+
+  /** Maintains the information about wifi direct (aka WifiP2p) networks. */
+  static class WifiDirectManagerDelegate extends BroadcastReceiver {
+    // Network "handle" for the Wifi P2p network. We have to bind to the default network id
+    // (NETWORK_UNSPECIFIED) for these addresses.
+    private static final int WIFI_P2P_NETWORK_HANDLE = 0;
+    private final Context context;
+    private final Observer observer;
+    // Network information about a WifiP2p (aka WiFi-Direct) network, or null if no such network is
+    // connected.
+    private NetworkInformation wifiP2pNetworkInfo;
+
+    WifiDirectManagerDelegate(Observer observer, Context context) {
+      this.context = context;
+      this.observer = observer;
+      IntentFilter intentFilter = new IntentFilter();
+      intentFilter.addAction(WifiP2pManager.WIFI_P2P_STATE_CHANGED_ACTION);
+      intentFilter.addAction(WifiP2pManager.WIFI_P2P_CONNECTION_CHANGED_ACTION);
+      context.registerReceiver(this, intentFilter);
+    }
+
+    // BroadcastReceiver
+    @Override
+    @SuppressLint("InlinedApi")
+    public void onReceive(Context context, Intent intent) {
+      if (WifiP2pManager.WIFI_P2P_CONNECTION_CHANGED_ACTION.equals(intent.getAction())) {
+        WifiP2pGroup wifiP2pGroup = intent.getParcelableExtra(WifiP2pManager.EXTRA_WIFI_P2P_GROUP);
+        onWifiP2pGroupChange(wifiP2pGroup);
+      } else if (WifiP2pManager.WIFI_P2P_STATE_CHANGED_ACTION.equals(intent.getAction())) {
+        int state = intent.getIntExtra(WifiP2pManager.EXTRA_WIFI_STATE, 0 /* default to unknown */);
+        onWifiP2pStateChange(state);
+      }
+    }
+
+    /** Releases the broadcast receiver. */
+    public void release() {
+      context.unregisterReceiver(this);
+    }
+
+    public List<NetworkInformation> getActiveNetworkList() {
+      if (wifiP2pNetworkInfo != null) {
+        return Collections.singletonList(wifiP2pNetworkInfo);
+      }
+
+      return Collections.emptyList();
+    }
+
+    /** Handle a change notification about the wifi p2p group. */
+    private void onWifiP2pGroupChange(WifiP2pGroup wifiP2pGroup) {
+      if (wifiP2pGroup == null || wifiP2pGroup.getInterface() == null) {
+        return;
+      }
+
+      NetworkInterface wifiP2pInterface;
+      try {
+        wifiP2pInterface = NetworkInterface.getByName(wifiP2pGroup.getInterface());
+      } catch (SocketException e) {
+        Logging.e(TAG, "Unable to get WifiP2p network interface", e);
+        return;
+      }
+
+      List<InetAddress> interfaceAddresses = Collections.list(wifiP2pInterface.getInetAddresses());
+      IPAddress[] ipAddresses = new IPAddress[interfaceAddresses.size()];
+      for (int i = 0; i < interfaceAddresses.size(); ++i) {
+        ipAddresses[i] = new IPAddress(interfaceAddresses.get(i).getAddress());
+      }
+
+      wifiP2pNetworkInfo =
+          new NetworkInformation(wifiP2pGroup.getInterface(), ConnectionType.CONNECTION_WIFI,
+              ConnectionType.CONNECTION_NONE, WIFI_P2P_NETWORK_HANDLE, ipAddresses);
+      observer.onNetworkConnect(wifiP2pNetworkInfo);
+    }
+
+    /** Handle a state change notification about wifi p2p. */
+    private void onWifiP2pStateChange(int state) {
+      if (state == WifiP2pManager.WIFI_P2P_STATE_DISABLED) {
+        wifiP2pNetworkInfo = null;
+        observer.onNetworkDisconnect(WIFI_P2P_NETWORK_HANDLE);
+      }
+    }
+  }
+
+  static final long INVALID_NET_ID = -1;
+  private static final String TAG = "NetworkMonitorAutoDetect";
+
+  // Observer for the connection type change.
+  private final Observer observer;
+  private final IntentFilter intentFilter;
+  private final Context context;
+  // Used to request mobile network. It does not do anything except for keeping
+  // the callback for releasing the request.
+  private final NetworkCallback mobileNetworkCallback;
+  // Used to receive updates on all networks.
+  private final NetworkCallback allNetworkCallback;
+  // connectivityManagerDelegate and wifiManagerDelegate are only non-final for testing.
+  private ConnectivityManagerDelegate connectivityManagerDelegate;
+  private WifiManagerDelegate wifiManagerDelegate;
+  private WifiDirectManagerDelegate wifiDirectManagerDelegate;
+
+  private boolean isRegistered;
+  private ConnectionType connectionType;
+  private String wifiSSID;
+
+  /**
+   * Observer interface by which observer is notified of network changes.
+   */
+  public static interface Observer {
+    /**
+     * Called when default network changes.
+     */
+    public void onConnectionTypeChanged(ConnectionType newConnectionType);
+    public void onNetworkConnect(NetworkInformation networkInfo);
+    public void onNetworkDisconnect(long networkHandle);
+  }
+
+  /**
+   * Constructs a NetworkMonitorAutoDetect. Should only be called on UI thread.
+   */
+  @SuppressLint("NewApi")
+  public NetworkMonitorAutoDetect(Observer observer, Context context) {
+    this.observer = observer;
+    this.context = context;
+    connectivityManagerDelegate = new ConnectivityManagerDelegate(context);
+    wifiManagerDelegate = new WifiManagerDelegate(context);
+
+    final NetworkState networkState = connectivityManagerDelegate.getNetworkState();
+    connectionType = getConnectionType(networkState);
+    wifiSSID = getWifiSSID(networkState);
+    intentFilter = new IntentFilter(ConnectivityManager.CONNECTIVITY_ACTION);
+
+    if (PeerConnectionFactory.fieldTrialsFindFullName("IncludeWifiDirect").equals("Enabled")) {
+      wifiDirectManagerDelegate = new WifiDirectManagerDelegate(observer, context);
+    }
+
+    registerReceiver();
+    if (connectivityManagerDelegate.supportNetworkCallback()) {
+      // On Android 6.0.0, the WRITE_SETTINGS permission is necessary for
+      // requestNetwork, so it will fail. This was fixed in Android 6.0.1.
+      NetworkCallback tempNetworkCallback = new NetworkCallback();
+      try {
+        connectivityManagerDelegate.requestMobileNetwork(tempNetworkCallback);
+      } catch (java.lang.SecurityException e) {
+        Logging.w(TAG, "Unable to obtain permission to request a cellular network.");
+        tempNetworkCallback = null;
+      }
+      mobileNetworkCallback = tempNetworkCallback;
+      allNetworkCallback = new SimpleNetworkCallback();
+      connectivityManagerDelegate.registerNetworkCallback(allNetworkCallback);
+    } else {
+      mobileNetworkCallback = null;
+      allNetworkCallback = null;
+    }
+  }
+
+  public boolean supportNetworkCallback() {
+    return connectivityManagerDelegate.supportNetworkCallback();
+  }
+
+  /**
+   * Allows overriding the ConnectivityManagerDelegate for tests.
+   */
+  void setConnectivityManagerDelegateForTests(ConnectivityManagerDelegate delegate) {
+    connectivityManagerDelegate = delegate;
+  }
+
+  /**
+   * Allows overriding the WifiManagerDelegate for tests.
+   */
+  void setWifiManagerDelegateForTests(WifiManagerDelegate delegate) {
+    wifiManagerDelegate = delegate;
+  }
+
+  /**
+   * Returns whether the object has registered to receive network connectivity intents.
+   * Visible for testing.
+   */
+  boolean isReceiverRegisteredForTesting() {
+    return isRegistered;
+  }
+
+  List<NetworkInformation> getActiveNetworkList() {
+    List<NetworkInformation> connectivityManagerList =
+        connectivityManagerDelegate.getActiveNetworkList();
+    if (connectivityManagerList == null) {
+      return null;
+    }
+    ArrayList<NetworkInformation> result =
+        new ArrayList<NetworkInformation>(connectivityManagerList);
+    if (wifiDirectManagerDelegate != null) {
+      result.addAll(wifiDirectManagerDelegate.getActiveNetworkList());
+    }
+    return result;
+  }
+
+  public void destroy() {
+    if (allNetworkCallback != null) {
+      connectivityManagerDelegate.releaseCallback(allNetworkCallback);
+    }
+    if (mobileNetworkCallback != null) {
+      connectivityManagerDelegate.releaseCallback(mobileNetworkCallback);
+    }
+    if (wifiDirectManagerDelegate != null) {
+      wifiDirectManagerDelegate.release();
+    }
+    unregisterReceiver();
+  }
+
+  /**
+   * Registers a BroadcastReceiver in the given context.
+   */
+  private void registerReceiver() {
+    if (isRegistered)
+      return;
+
+    isRegistered = true;
+    context.registerReceiver(this, intentFilter);
+  }
+
+  /**
+   * Unregisters the BroadcastReceiver in the given context.
+   */
+  private void unregisterReceiver() {
+    if (!isRegistered)
+      return;
+
+    isRegistered = false;
+    context.unregisterReceiver(this);
+  }
+
+  public NetworkState getCurrentNetworkState() {
+    return connectivityManagerDelegate.getNetworkState();
+  }
+
+  /**
+   * Returns NetID of device's current default connected network used for
+   * communication.
+   * Only implemented on Lollipop and newer releases, returns INVALID_NET_ID
+   * when not implemented.
+   */
+  public long getDefaultNetId() {
+    return connectivityManagerDelegate.getDefaultNetId();
+  }
+
+  private static ConnectionType getConnectionType(
+      boolean isConnected, int networkType, int networkSubtype) {
+    if (!isConnected) {
+      return ConnectionType.CONNECTION_NONE;
+    }
+
+    switch (networkType) {
+      case ConnectivityManager.TYPE_ETHERNET:
+        return ConnectionType.CONNECTION_ETHERNET;
+      case ConnectivityManager.TYPE_WIFI:
+        return ConnectionType.CONNECTION_WIFI;
+      case ConnectivityManager.TYPE_WIMAX:
+        return ConnectionType.CONNECTION_4G;
+      case ConnectivityManager.TYPE_BLUETOOTH:
+        return ConnectionType.CONNECTION_BLUETOOTH;
+      case ConnectivityManager.TYPE_MOBILE:
+        // Use information from TelephonyManager to classify the connection.
+        switch (networkSubtype) {
+          case TelephonyManager.NETWORK_TYPE_GPRS:
+          case TelephonyManager.NETWORK_TYPE_EDGE:
+          case TelephonyManager.NETWORK_TYPE_CDMA:
+          case TelephonyManager.NETWORK_TYPE_1xRTT:
+          case TelephonyManager.NETWORK_TYPE_IDEN:
+            return ConnectionType.CONNECTION_2G;
+          case TelephonyManager.NETWORK_TYPE_UMTS:
+          case TelephonyManager.NETWORK_TYPE_EVDO_0:
+          case TelephonyManager.NETWORK_TYPE_EVDO_A:
+          case TelephonyManager.NETWORK_TYPE_HSDPA:
+          case TelephonyManager.NETWORK_TYPE_HSUPA:
+          case TelephonyManager.NETWORK_TYPE_HSPA:
+          case TelephonyManager.NETWORK_TYPE_EVDO_B:
+          case TelephonyManager.NETWORK_TYPE_EHRPD:
+          case TelephonyManager.NETWORK_TYPE_HSPAP:
+            return ConnectionType.CONNECTION_3G;
+          case TelephonyManager.NETWORK_TYPE_LTE:
+            return ConnectionType.CONNECTION_4G;
+          default:
+            return ConnectionType.CONNECTION_UNKNOWN_CELLULAR;
+        }
+      case ConnectivityManager.TYPE_VPN:
+        return ConnectionType.CONNECTION_VPN;
+      default:
+        return ConnectionType.CONNECTION_UNKNOWN;
+    }
+  }
+
+  public static ConnectionType getConnectionType(NetworkState networkState) {
+    return getConnectionType(networkState.isConnected(), networkState.getNetworkType(),
+        networkState.getNetworkSubType());
+  }
+
+  private static ConnectionType getUnderlyingConnectionTypeForVpn(NetworkState networkState) {
+    if (networkState.getNetworkType() != ConnectivityManager.TYPE_VPN) {
+      return ConnectionType.CONNECTION_NONE;
+    }
+    return getConnectionType(networkState.isConnected(),
+        networkState.getUnderlyingNetworkTypeForVpn(),
+        networkState.getUnderlyingNetworkSubtypeForVpn());
+  }
+
+  private String getWifiSSID(NetworkState networkState) {
+    if (getConnectionType(networkState) != ConnectionType.CONNECTION_WIFI)
+      return "";
+    return wifiManagerDelegate.getWifiSSID();
+  }
+
+  // BroadcastReceiver
+  @Override
+  public void onReceive(Context context, Intent intent) {
+    final NetworkState networkState = getCurrentNetworkState();
+    if (ConnectivityManager.CONNECTIVITY_ACTION.equals(intent.getAction())) {
+      connectionTypeChanged(networkState);
+    }
+  }
+
+  private void connectionTypeChanged(NetworkState networkState) {
+    ConnectionType newConnectionType = getConnectionType(networkState);
+    String newWifiSSID = getWifiSSID(networkState);
+    if (newConnectionType == connectionType && newWifiSSID.equals(wifiSSID))
+      return;
+
+    connectionType = newConnectionType;
+    wifiSSID = newWifiSSID;
+    Logging.d(TAG, "Network connectivity changed, type is: " + connectionType);
+    observer.onConnectionTypeChanged(newConnectionType);
+  }
+
+  /**
+   * Extracts the NetID of the network on Lollipop and the NetworkHandle (a munged
+   * NetID) on Marshmallow and newer releases. Only available on Lollipop and
+   * newer releases. Returns long since getNetworkHandle returns long.
+   */
+  @SuppressLint("NewApi")
+  private static long networkToNetId(Network network) {
+    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+      return network.getNetworkHandle();
+    }
+
+    // NOTE(honghaiz): This depends on Android framework implementation details.
+    // These details cannot change because Lollipop has been released.
+    return Integer.parseInt(network.toString());
+  }
+}
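
NetworkMonitor owns an instance of this class in normal use, but the Observer
contract is easy to see in isolation; a sketch (create and destroy on the UI thread,
per the constructor's comment; appContext is illustrative):

    // Sketch: a standalone Observer that only logs network events.
    NetworkMonitorAutoDetect autoDetect = new NetworkMonitorAutoDetect(
        new NetworkMonitorAutoDetect.Observer() {
          @Override
          public void onConnectionTypeChanged(
              NetworkMonitorAutoDetect.ConnectionType newConnectionType) {
            Logging.d("App", "Connection type: " + newConnectionType);
          }

          @Override
          public void onNetworkConnect(
              NetworkMonitorAutoDetect.NetworkInformation networkInfo) {
            Logging.d("App", "Network connected: " + networkInfo.name);
          }

          @Override
          public void onNetworkDisconnect(long networkHandle) {
            Logging.d("App", "Network disconnected, handle: " + networkHandle);
          }
        },
        appContext);
    // ... later, on the same thread:
    autoDetect.destroy();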

+ 3 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/OWNERS

@@ -0,0 +1,3 @@
+per-file Camera*=sakal@webrtc.org
+per-file Histogram.java=sakal@webrtc.org
+per-file Metrics.java=sakal@webrtc.org

Diff file suppressed because it is too large
+ 1244 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/PeerConnection.java


+ 65 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/PeerConnectionDependencies.java

@@ -0,0 +1,65 @@
+/*
+ *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * PeerConnectionDependencies holds all PeerConnection dependencies that are
+ * applied per PeerConnection. A dependency is distinct from a configuration
+ * as it defines significant executable code that can be provided by a user of
+ * the API.
+ */
+public final class PeerConnectionDependencies {
+  // Mandatory dependencies.
+  private final PeerConnection.Observer observer;
+
+  // Optional fields.
+  private final SSLCertificateVerifier sslCertificateVerifier;
+
+  public static class Builder {
+    private PeerConnection.Observer observer;
+    private SSLCertificateVerifier sslCertificateVerifier;
+
+    private Builder(PeerConnection.Observer observer) {
+      this.observer = observer;
+    }
+
+    public Builder setSSLCertificateVerifier(SSLCertificateVerifier sslCertificateVerifier) {
+      this.sslCertificateVerifier = sslCertificateVerifier;
+      return this;
+    }
+
+    // Observer is a required dependency and so is forced in the construction of the object.
+    public PeerConnectionDependencies createPeerConnectionDependencies() {
+      return new PeerConnectionDependencies(observer, sslCertificateVerifier);
+    }
+  }
+
+  public static Builder builder(PeerConnection.Observer observer) {
+    return new Builder(observer);
+  }
+
+  PeerConnection.Observer getObserver() {
+    return observer;
+  }
+
+  SSLCertificateVerifier getSSLCertificateVerifier() {
+    return sslCertificateVerifier;
+  }
+
+  private PeerConnectionDependencies(
+      PeerConnection.Observer observer, SSLCertificateVerifier sslCertificateVerifier) {
+    this.observer = observer;
+    this.sslCertificateVerifier = sslCertificateVerifier;
+  }
+}
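
A minimal usage sketch for the builder above, assuming an application-provided observer and an optional verifier; createPeerConnection(RTCConfiguration, PeerConnectionDependencies) is defined in PeerConnectionFactory.java below.

    // Sketch: wiring per-connection dependencies (observer is required, the verifier is optional).
    PeerConnectionDependencies dependencies =
        PeerConnectionDependencies.builder(observer) // observer: an application PeerConnection.Observer
            .setSSLCertificateVerifier(verifier)     // verifier: an optional SSLCertificateVerifier
            .createPeerConnectionDependencies();
    PeerConnection peerConnection = factory.createPeerConnection(rtcConfig, dependencies);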

+ 598 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/PeerConnectionFactory.java

@@ -0,0 +1,598 @@
+/*
+ *  Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.os.Process;
+
+import java.util.List;
+import org.webrtc.Logging.Severity;
+import org.webrtc.audio.AudioDeviceModule;
+import org.webrtc.audio.JavaAudioDeviceModule;
+
+/**
+ * Java wrapper for a C++ PeerConnectionFactoryInterface.  Main entry point to
+ * the PeerConnection API for clients.
+ */
+public class PeerConnectionFactory {
+  public static final String TRIAL_ENABLED = "Enabled";
+  @Deprecated public static final String VIDEO_FRAME_EMIT_TRIAL = "VideoFrameEmit";
+
+  private static final String TAG = "PeerConnectionFactory";
+  private static final String VIDEO_CAPTURER_THREAD_NAME = "VideoCapturerThread";
+
+  /** Helper class holding both Java and C++ thread info. */
+  private static class ThreadInfo {
+    final Thread thread;
+    final int tid;
+
+    public static ThreadInfo getCurrent() {
+      return new ThreadInfo(Thread.currentThread(), Process.myTid());
+    }
+
+    private ThreadInfo(Thread thread, int tid) {
+      this.thread = thread;
+      this.tid = tid;
+    }
+  }
+
+  private static volatile boolean internalTracerInitialized;
+
+  // Remove these once deprecated static printStackTrace() is gone.
+  private static ThreadInfo staticNetworkThread;
+  private static ThreadInfo staticWorkerThread;
+  private static ThreadInfo staticSignalingThread;
+
+  private long nativeFactory;
+  private volatile ThreadInfo networkThread;
+  private volatile ThreadInfo workerThread;
+  private volatile ThreadInfo signalingThread;
+
+  public static class InitializationOptions {
+    final Context applicationContext;
+    final String fieldTrials;
+    final boolean enableInternalTracer;
+    final NativeLibraryLoader nativeLibraryLoader;
+    final String nativeLibraryName;
+    Loggable loggable;
+    Severity loggableSeverity;
+
+    private InitializationOptions(Context applicationContext, String fieldTrials,
+        boolean enableInternalTracer, NativeLibraryLoader nativeLibraryLoader,
+        String nativeLibraryName, Loggable loggable, Severity loggableSeverity) {
+      this.applicationContext = applicationContext;
+      this.fieldTrials = fieldTrials;
+      this.enableInternalTracer = enableInternalTracer;
+      this.nativeLibraryLoader = nativeLibraryLoader;
+      this.nativeLibraryName = nativeLibraryName;
+      this.loggable = loggable;
+      this.loggableSeverity = loggableSeverity;
+    }
+
+    public static Builder builder(Context applicationContext) {
+      return new Builder(applicationContext);
+    }
+
+    public static class Builder {
+      private final Context applicationContext;
+      private String fieldTrials = "";
+      private boolean enableInternalTracer;
+      private NativeLibraryLoader nativeLibraryLoader = new NativeLibrary.DefaultLoader();
+      private String nativeLibraryName = "jingle_peerconnection_so";
+      private Loggable loggable;
+      private Severity loggableSeverity;
+
+      Builder(Context applicationContext) {
+        this.applicationContext = applicationContext;
+      }
+
+      public Builder setFieldTrials(String fieldTrials) {
+        this.fieldTrials = fieldTrials;
+        return this;
+      }
+
+      public Builder setEnableInternalTracer(boolean enableInternalTracer) {
+        this.enableInternalTracer = enableInternalTracer;
+        return this;
+      }
+
+      public Builder setNativeLibraryLoader(NativeLibraryLoader nativeLibraryLoader) {
+        this.nativeLibraryLoader = nativeLibraryLoader;
+        return this;
+      }
+
+      public Builder setNativeLibraryName(String nativeLibraryName) {
+        this.nativeLibraryName = nativeLibraryName;
+        return this;
+      }
+
+      public Builder setInjectableLogger(Loggable loggable, Severity severity) {
+        this.loggable = loggable;
+        this.loggableSeverity = severity;
+        return this;
+      }
+
+      public PeerConnectionFactory.InitializationOptions createInitializationOptions() {
+        return new PeerConnectionFactory.InitializationOptions(applicationContext, fieldTrials,
+            enableInternalTracer, nativeLibraryLoader, nativeLibraryName, loggable,
+            loggableSeverity);
+      }
+    }
+  }
+
+  public static class Options {
+    // Keep in sync with webrtc/rtc_base/network.h!
+    //
+    // These bit fields are defined for |networkIgnoreMask| below.
+    static final int ADAPTER_TYPE_UNKNOWN = 0;
+    static final int ADAPTER_TYPE_ETHERNET = 1 << 0;
+    static final int ADAPTER_TYPE_WIFI = 1 << 1;
+    static final int ADAPTER_TYPE_CELLULAR = 1 << 2;
+    static final int ADAPTER_TYPE_VPN = 1 << 3;
+    static final int ADAPTER_TYPE_LOOPBACK = 1 << 4;
+    static final int ADAPTER_TYPE_ANY = 1 << 5;
+
+    public int networkIgnoreMask;
+    public boolean disableEncryption;
+    public boolean disableNetworkMonitor;
+
+    @CalledByNative("Options")
+    int getNetworkIgnoreMask() {
+      return networkIgnoreMask;
+    }
+
+    @CalledByNative("Options")
+    boolean getDisableEncryption() {
+      return disableEncryption;
+    }
+
+    @CalledByNative("Options")
+    boolean getDisableNetworkMonitor() {
+      return disableNetworkMonitor;
+    }
+  }
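
A short sketch of how Options is typically filled in; note that the ADAPTER_TYPE_* constants are package-private in this copy, so application code outside org.webrtc has to use the documented bit values directly.

    // Sketch: ignore VPN and loopback interfaces during ICE gathering.
    PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
    options.networkIgnoreMask = (1 << 3) | (1 << 4); // ADAPTER_TYPE_VPN | ADAPTER_TYPE_LOOPBACK
    // options is later passed to PeerConnectionFactory.builder().setOptions(options).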
+
+  public static class Builder {
+    private Options options;
+    private AudioDeviceModule audioDeviceModule;
+    private AudioEncoderFactoryFactory audioEncoderFactoryFactory =
+        new BuiltinAudioEncoderFactoryFactory();
+    private AudioDecoderFactoryFactory audioDecoderFactoryFactory =
+        new BuiltinAudioDecoderFactoryFactory();
+    private VideoEncoderFactory videoEncoderFactory;
+    private VideoDecoderFactory videoDecoderFactory;
+    private AudioProcessingFactory audioProcessingFactory;
+    private FecControllerFactoryFactoryInterface fecControllerFactoryFactory;
+    private MediaTransportFactoryFactory mediaTransportFactoryFactory;
+
+    private Builder() {}
+
+    public Builder setOptions(Options options) {
+      this.options = options;
+      return this;
+    }
+
+    public Builder setAudioDeviceModule(AudioDeviceModule audioDeviceModule) {
+      this.audioDeviceModule = audioDeviceModule;
+      return this;
+    }
+
+    public Builder setAudioEncoderFactoryFactory(
+        AudioEncoderFactoryFactory audioEncoderFactoryFactory) {
+      if (audioEncoderFactoryFactory == null) {
+        throw new IllegalArgumentException(
+            "PeerConnectionFactory.Builder does not accept a null AudioEncoderFactoryFactory.");
+      }
+      this.audioEncoderFactoryFactory = audioEncoderFactoryFactory;
+      return this;
+    }
+
+    public Builder setAudioDecoderFactoryFactory(
+        AudioDecoderFactoryFactory audioDecoderFactoryFactory) {
+      if (audioDecoderFactoryFactory == null) {
+        throw new IllegalArgumentException(
+            "PeerConnectionFactory.Builder does not accept a null AudioDecoderFactoryFactory.");
+      }
+      this.audioDecoderFactoryFactory = audioDecoderFactoryFactory;
+      return this;
+    }
+
+    public Builder setVideoEncoderFactory(VideoEncoderFactory videoEncoderFactory) {
+      this.videoEncoderFactory = videoEncoderFactory;
+      return this;
+    }
+
+    public Builder setVideoDecoderFactory(VideoDecoderFactory videoDecoderFactory) {
+      this.videoDecoderFactory = videoDecoderFactory;
+      return this;
+    }
+
+    public Builder setAudioProcessingFactory(AudioProcessingFactory audioProcessingFactory) {
+      if (audioProcessingFactory == null) {
+        throw new NullPointerException(
+            "PeerConnectionFactory builder does not accept a null AudioProcessingFactory.");
+      }
+      this.audioProcessingFactory = audioProcessingFactory;
+      return this;
+    }
+
+    public Builder setFecControllerFactoryFactoryInterface(
+        FecControllerFactoryFactoryInterface fecControllerFactoryFactory) {
+      this.fecControllerFactoryFactory = fecControllerFactoryFactory;
+      return this;
+    }
+
+    /** Sets a MediaTransportFactoryFactory for a PeerConnectionFactory. */
+    public Builder setMediaTransportFactoryFactory(
+        MediaTransportFactoryFactory mediaTransportFactoryFactory) {
+      this.mediaTransportFactoryFactory = mediaTransportFactoryFactory;
+      return this;
+    }
+
+    public PeerConnectionFactory createPeerConnectionFactory() {
+      checkInitializeHasBeenCalled();
+      if (audioDeviceModule == null) {
+        audioDeviceModule = JavaAudioDeviceModule.builder(ContextUtils.getApplicationContext())
+                                .createAudioDeviceModule();
+      }
+      return nativeCreatePeerConnectionFactory(ContextUtils.getApplicationContext(), options,
+          audioDeviceModule.getNativeAudioDeviceModulePointer(),
+          audioEncoderFactoryFactory.createNativeAudioEncoderFactory(),
+          audioDecoderFactoryFactory.createNativeAudioDecoderFactory(), videoEncoderFactory,
+          videoDecoderFactory,
+          audioProcessingFactory == null ? 0 : audioProcessingFactory.createNative(),
+          fecControllerFactoryFactory == null ? 0 : fecControllerFactoryFactory.createNative(),
+          mediaTransportFactoryFactory == null
+              ? 0
+              : mediaTransportFactoryFactory.createNativeMediaTransportFactory());
+    }
+  }
+
+  public static Builder builder() {
+    return new Builder();
+  }
+
+  /**
+   * Loads and initializes WebRTC. This must be called at least once before creating a
+   * PeerConnectionFactory. Replaces all the old initialization methods. Must not be called while
+   * a PeerConnectionFactory is alive.
+   */
+  public static void initialize(InitializationOptions options) {
+    ContextUtils.initialize(options.applicationContext);
+    NativeLibrary.initialize(options.nativeLibraryLoader, options.nativeLibraryName);
+    nativeInitializeAndroidGlobals();
+    nativeInitializeFieldTrials(options.fieldTrials);
+    if (options.enableInternalTracer && !internalTracerInitialized) {
+      initializeInternalTracer();
+    }
+    if (options.loggable != null) {
+      Logging.injectLoggable(options.loggable, options.loggableSeverity);
+      nativeInjectLoggable(new JNILogging(options.loggable), options.loggableSeverity.ordinal());
+    } else {
+      Logging.d(TAG,
+          "PeerConnectionFactory was initialized without an injected Loggable. "
+              + "Any existing Loggable will be deleted.");
+      Logging.deleteInjectedLoggable();
+      nativeDeleteLoggable();
+    }
+  }
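
Putting the pieces above together, startup typically looks like the following sketch; appContext, encoderFactory, and decoderFactory are assumed to be supplied by the application (e.g. the DefaultVideoEncoderFactory/DefaultVideoDecoderFactory added elsewhere in this commit).

    // Sketch: one-time initialization, then factory creation.
    PeerConnectionFactory.initialize(
        PeerConnectionFactory.InitializationOptions.builder(appContext)
            .setEnableInternalTracer(false)
            .createInitializationOptions());
    PeerConnectionFactory factory = PeerConnectionFactory.builder()
        .setVideoEncoderFactory(encoderFactory)
        .setVideoDecoderFactory(decoderFactory)
        .createPeerConnectionFactory();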
+
+  private static void checkInitializeHasBeenCalled() {
+    if (!NativeLibrary.isLoaded() || ContextUtils.getApplicationContext() == null) {
+      throw new IllegalStateException(
+          "PeerConnectionFactory.initialize was not called before creating a "
+          + "PeerConnectionFactory.");
+    }
+  }
+
+  private static void initializeInternalTracer() {
+    internalTracerInitialized = true;
+    nativeInitializeInternalTracer();
+  }
+
+  public static void shutdownInternalTracer() {
+    internalTracerInitialized = false;
+    nativeShutdownInternalTracer();
+  }
+
+  // Field trial initialization. Must be called before PeerConnectionFactory
+  // is created.
+  // Deprecated, use PeerConnectionFactory.initialize instead.
+  @Deprecated
+  public static void initializeFieldTrials(String fieldTrialsInitString) {
+    nativeInitializeFieldTrials(fieldTrialsInitString);
+  }
+
+  // Wrapper of webrtc::field_trial::FindFullName. Develop the feature with default behaviour off.
+  // Example usage:
+  // if (PeerConnectionFactory.fieldTrialsFindFullName("WebRTCExperiment").equals("Enabled")) {
+  //   method1();
+  // } else {
+  //   method2();
+  // }
+  public static String fieldTrialsFindFullName(String name) {
+    return NativeLibrary.isLoaded() ? nativeFindFieldTrialsFullName(name) : "";
+  }
+
+  // Starts/stops capturing of internal tracing.
+  public static boolean startInternalTracingCapture(String tracingFilename) {
+    return nativeStartInternalTracingCapture(tracingFilename);
+  }
+
+  public static void stopInternalTracingCapture() {
+    nativeStopInternalTracingCapture();
+  }
+
+  @CalledByNative
+  PeerConnectionFactory(long nativeFactory) {
+    checkInitializeHasBeenCalled();
+    if (nativeFactory == 0) {
+      throw new RuntimeException("Failed to initialize PeerConnectionFactory!");
+    }
+    this.nativeFactory = nativeFactory;
+  }
+
+  /**
+   * Internal helper function to pass the parameters down into the native JNI bridge.
+   */
+  PeerConnection createPeerConnectionInternal(PeerConnection.RTCConfiguration rtcConfig,
+      MediaConstraints constraints, PeerConnection.Observer observer,
+      SSLCertificateVerifier sslCertificateVerifier) {
+    checkPeerConnectionFactoryExists();
+    long nativeObserver = PeerConnection.createNativePeerConnectionObserver(observer);
+    if (nativeObserver == 0) {
+      return null;
+    }
+    long nativePeerConnection = nativeCreatePeerConnection(
+        nativeFactory, rtcConfig, constraints, nativeObserver, sslCertificateVerifier);
+    if (nativePeerConnection == 0) {
+      return null;
+    }
+    return new PeerConnection(nativePeerConnection);
+  }
+
+  /**
+   * Deprecated. PeerConnection constraints are deprecated. Supply values in rtcConfig struct
+   * instead and use the method without constraints in the signature.
+   */
+  @Deprecated
+  public PeerConnection createPeerConnection(PeerConnection.RTCConfiguration rtcConfig,
+      MediaConstraints constraints, PeerConnection.Observer observer) {
+    return createPeerConnectionInternal(
+        rtcConfig, constraints, observer, /* sslCertificateVerifier= */ null);
+  }
+
+  /**
+   * Deprecated. PeerConnection constraints are deprecated. Supply values in rtcConfig struct
+   * instead and use the method without constraints in the signature.
+   */
+  @Deprecated
+  public PeerConnection createPeerConnection(List<PeerConnection.IceServer> iceServers,
+      MediaConstraints constraints, PeerConnection.Observer observer) {
+    PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
+    return createPeerConnection(rtcConfig, constraints, observer);
+  }
+
+  public PeerConnection createPeerConnection(
+      List<PeerConnection.IceServer> iceServers, PeerConnection.Observer observer) {
+    PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
+    return createPeerConnection(rtcConfig, observer);
+  }
+
+  public PeerConnection createPeerConnection(
+      PeerConnection.RTCConfiguration rtcConfig, PeerConnection.Observer observer) {
+    return createPeerConnection(rtcConfig, null /* constraints */, observer);
+  }
+
+  public PeerConnection createPeerConnection(
+      PeerConnection.RTCConfiguration rtcConfig, PeerConnectionDependencies dependencies) {
+    return createPeerConnectionInternal(rtcConfig, null /* constraints */,
+        dependencies.getObserver(), dependencies.getSSLCertificateVerifier());
+  }
+
+  public MediaStream createLocalMediaStream(String label) {
+    checkPeerConnectionFactoryExists();
+    return new MediaStream(nativeCreateLocalMediaStream(nativeFactory, label));
+  }
+
+  /**
+   * Create video source with given parameters. If alignTimestamps is false, the caller is
+   * responsible for aligning the frame timestamps to rtc::TimeNanos(). This can be used to achieve
+   * higher accuracy if there is a big delay between frame creation and frames being delivered to
+   * the returned video source. If alignTimestamps is true, timestamps will be aligned to
+   * rtc::TimeNanos() when they arrive to the returned video source.
+   */
+  public VideoSource createVideoSource(boolean isScreencast, boolean alignTimestamps) {
+    checkPeerConnectionFactoryExists();
+    return new VideoSource(nativeCreateVideoSource(nativeFactory, isScreencast, alignTimestamps));
+  }
+
+  /**
+   * Same as above with alignTimestamps set to true.
+   *
+   * @see #createVideoSource(boolean, boolean)
+   */
+  public VideoSource createVideoSource(boolean isScreencast) {
+    return createVideoSource(isScreencast, /* alignTimestamps= */ true);
+  }
+
+  public VideoTrack createVideoTrack(String id, VideoSource source) {
+    checkPeerConnectionFactoryExists();
+    return new VideoTrack(
+        nativeCreateVideoTrack(nativeFactory, id, source.getNativeVideoTrackSource()));
+  }
+
+  public AudioSource createAudioSource(MediaConstraints constraints) {
+    checkPeerConnectionFactoryExists();
+    return new AudioSource(nativeCreateAudioSource(nativeFactory, constraints));
+  }
+
+  public AudioTrack createAudioTrack(String id, AudioSource source) {
+    checkPeerConnectionFactoryExists();
+    return new AudioTrack(nativeCreateAudioTrack(nativeFactory, id, source.getNativeAudioSource()));
+  }
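
A sketch of the source/track pairing these four methods imply; the track ids are arbitrary strings, and feeding camera frames into the VideoSource (via its CapturerObserver) is out of scope here.

    // Sketch: create local media sources and wrap them in tracks.
    VideoSource videoSource = factory.createVideoSource(/* isScreencast= */ false);
    VideoTrack videoTrack = factory.createVideoTrack("video0", videoSource);
    AudioSource audioSource = factory.createAudioSource(new MediaConstraints());
    AudioTrack audioTrack = factory.createAudioTrack("audio0", audioSource);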
+
+  // Starts recording an AEC dump. Ownership of the file is transfered to the
+  // native code. If an AEC dump is already in progress, it will be stopped and
+  // a new one will start using the provided file.
+  public boolean startAecDump(int file_descriptor, int filesize_limit_bytes) {
+    checkPeerConnectionFactoryExists();
+    return nativeStartAecDump(nativeFactory, file_descriptor, filesize_limit_bytes);
+  }
+
+  // Stops recording an AEC dump. If no AEC dump is currently being recorded,
+  // this call will have no effect.
+  public void stopAecDump() {
+    checkPeerConnectionFactoryExists();
+    nativeStopAecDump(nativeFactory);
+  }
+
+  public void dispose() {
+    checkPeerConnectionFactoryExists();
+    nativeFreeFactory(nativeFactory);
+    networkThread = null;
+    workerThread = null;
+    signalingThread = null;
+    MediaCodecVideoEncoder.disposeEglContext();
+    MediaCodecVideoDecoder.disposeEglContext();
+    nativeFactory = 0;
+  }
+
+  /** Returns a pointer to the native webrtc::PeerConnectionFactoryInterface. */
+  public long getNativePeerConnectionFactory() {
+    checkPeerConnectionFactoryExists();
+    return nativeGetNativePeerConnectionFactory(nativeFactory);
+  }
+
+  /** Returns a pointer to the native OwnedFactoryAndThreads object */
+  public long getNativeOwnedFactoryAndThreads() {
+    checkPeerConnectionFactoryExists();
+    return nativeFactory;
+  }
+
+  private void checkPeerConnectionFactoryExists() {
+    if (nativeFactory == 0) {
+      throw new IllegalStateException("PeerConnectionFactory has been disposed.");
+    }
+  }
+
+  private static void printStackTrace(
+      ThreadInfo threadInfo, boolean printNativeStackTrace) {
+    if (threadInfo == null) {
+      // Thread callbacks have not been completed yet, ignore call.
+      return;
+    }
+    final String threadName = threadInfo.thread.getName();
+    StackTraceElement[] stackTraces = threadInfo.thread.getStackTrace();
+    if (stackTraces.length > 0) {
+      Logging.w(TAG, threadName + " stacktrace:");
+      for (StackTraceElement stackTrace : stackTraces) {
+        Logging.w(TAG, stackTrace.toString());
+      }
+    }
+    if (printNativeStackTrace) {
+      // Imitate output from debuggerd/tombstone so that stack trace can easily be symbolized with
+      // ndk-stack.
+      Logging.w(TAG, "*** *** *** *** *** *** *** *** *** *** *** *** *** *** *** ***");
+      Logging.w(TAG,
+          "pid: " + Process.myPid() + ", tid: " + threadInfo.tid + ", name: " + threadName
+              + "  >>> WebRTC <<<");
+      nativePrintStackTrace(threadInfo.tid);
+    }
+  }
+
+  /** Deprecated, use non-static version instead. */
+  @Deprecated
+  public static void printStackTraces() {
+    printStackTrace(staticNetworkThread, /* printNativeStackTrace= */ false);
+    printStackTrace(staticWorkerThread, /* printNativeStackTrace= */ false);
+    printStackTrace(staticSignalingThread, /* printNativeStackTrace= */ false);
+  }
+
+  /**
+   * Print the Java stack traces for the critical threads used by PeerConnectionFactory, namely;
+   * signaling thread, worker thread, and network thread. If printNativeStackTraces is true, also
+   * attempt to print the C++ stack traces for these (and some other) threads.
+   */
+  public void printInternalStackTraces(boolean printNativeStackTraces) {
+    printStackTrace(signalingThread, printNativeStackTraces);
+    printStackTrace(workerThread, printNativeStackTraces);
+    printStackTrace(networkThread, printNativeStackTraces);
+    if (printNativeStackTraces) {
+      nativePrintStackTracesOfRegisteredThreads();
+    }
+  }
+
+  @CalledByNative
+  private void onNetworkThreadReady() {
+    networkThread = ThreadInfo.getCurrent();
+    staticNetworkThread = networkThread;
+    Logging.d(TAG, "onNetworkThreadReady");
+  }
+
+  @CalledByNative
+  private void onWorkerThreadReady() {
+    workerThread = ThreadInfo.getCurrent();
+    staticWorkerThread = workerThread;
+    Logging.d(TAG, "onWorkerThreadReady");
+  }
+
+  @CalledByNative
+  private void onSignalingThreadReady() {
+    signalingThread = ThreadInfo.getCurrent();
+    staticSignalingThread = signalingThread;
+    Logging.d(TAG, "onSignalingThreadReady");
+  }
+
+  // Must be called at least once before creating a PeerConnectionFactory
+  // (for example, at application startup time).
+  private static native void nativeInitializeAndroidGlobals();
+  private static native void nativeInitializeFieldTrials(String fieldTrialsInitString);
+  private static native String nativeFindFieldTrialsFullName(String name);
+  private static native void nativeInitializeInternalTracer();
+  // Internal tracing shutdown, called to prevent resource leaks. Must be called after
+  // PeerConnectionFactory is gone to prevent races with code performing tracing.
+  private static native void nativeShutdownInternalTracer();
+  private static native boolean nativeStartInternalTracingCapture(String tracingFilename);
+  private static native void nativeStopInternalTracingCapture();
+
+  private static native PeerConnectionFactory nativeCreatePeerConnectionFactory(Context context,
+      Options options, long nativeAudioDeviceModule, long audioEncoderFactory,
+      long audioDecoderFactory, VideoEncoderFactory encoderFactory,
+      VideoDecoderFactory decoderFactory, long nativeAudioProcessor,
+      long nativeFecControllerFactory, long mediaTransportFactory);
+
+  private static native long nativeCreatePeerConnection(long factory,
+      PeerConnection.RTCConfiguration rtcConfig, MediaConstraints constraints, long nativeObserver,
+      SSLCertificateVerifier sslCertificateVerifier);
+  private static native long nativeCreateLocalMediaStream(long factory, String label);
+  private static native long nativeCreateVideoSource(
+      long factory, boolean is_screencast, boolean alignTimestamps);
+  private static native long nativeCreateVideoTrack(
+      long factory, String id, long nativeVideoSource);
+  private static native long nativeCreateAudioSource(long factory, MediaConstraints constraints);
+  private static native long nativeCreateAudioTrack(long factory, String id, long nativeSource);
+  private static native boolean nativeStartAecDump(
+      long factory, int file_descriptor, int filesize_limit_bytes);
+  private static native void nativeStopAecDump(long factory);
+  private static native void nativeFreeFactory(long factory);
+  private static native long nativeGetNativePeerConnectionFactory(long factory);
+  private static native void nativeInjectLoggable(JNILogging jniLogging, int severity);
+  private static native void nativeDeleteLoggable();
+  private static native void nativePrintStackTrace(int tid);
+  private static native void nativePrintStackTracesOfRegisteredThreads();
+}

+ 49 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/PlatformSoftwareVideoDecoderFactory.java

@@ -0,0 +1,49 @@
+/*
+ *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodecInfo;
+
+import java.util.Arrays;
+
+/** Factory for Android platform software VideoDecoders. */
+public class PlatformSoftwareVideoDecoderFactory extends MediaCodecVideoDecoderFactory {
+  /**
+   * Default allowed predicate.
+   */
+  private static final Predicate<MediaCodecInfo> defaultAllowedPredicate =
+      new Predicate<MediaCodecInfo>() {
+        private String[] prefixWhitelist =
+            Arrays.copyOf(MediaCodecUtils.SOFTWARE_IMPLEMENTATION_PREFIXES,
+                MediaCodecUtils.SOFTWARE_IMPLEMENTATION_PREFIXES.length);
+
+        @Override
+        public boolean test(MediaCodecInfo arg) {
+          final String name = arg.getName();
+          for (String prefix : prefixWhitelist) {
+            if (name.startsWith(prefix)) {
+              return true;
+            }
+          }
+          return false;
+        }
+      };
+
+  /**
+   * Creates a PlatformSoftwareVideoDecoderFactory that supports surface texture rendering.
+   *
+   * @param sharedContext The textures generated will be accessible from this context. May be null,
+   *                      this disables texture support.
+   */
+  public PlatformSoftwareVideoDecoderFactory(EglBase.Context sharedContext) {
+    super(sharedContext, defaultAllowedPredicate);
+  }
+}

+ 73 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/Predicate.java

@@ -0,0 +1,73 @@
+/*
+ *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Represents a predicate (boolean-valued function) of one argument.
+ */
+public interface Predicate<T> {
+  /**
+   * Evaluates this predicate on the given argument.
+   *
+   * @param arg the input argument
+   * @return true if the input argument matches the predicate, otherwise false
+   */
+  boolean test(T arg);
+
+  /**
+   * Returns a composed predicate that represents a short-circuiting logical OR of this predicate
+   * and another. When evaluating the composed predicate, if this predicate is true, then the other
+   * predicate is not evaluated.
+   *
+   * @param other a predicate that will be logically-ORed with this predicate
+   * @return a composed predicate that represents the short-circuiting logical OR of this predicate
+   *     and the other predicate
+   */
+  default Predicate<T> or(Predicate<? super T> other) {
+    return new Predicate<T>() {
+      @Override
+      public boolean test(T arg) {
+        return Predicate.this.test(arg) || other.test(arg);
+      }
+    };
+  }
+
+  /**
+   * Returns a composed predicate that represents a short-circuiting logical AND of this predicate
+   * and another.
+   *
+   * @param other a predicate that will be logically-ANDed with this predicate
+   * @return a composed predicate that represents the short-circuiting logical AND of this predicate
+   *     and the other predicate
+   */
+  default Predicate<T> and(Predicate<? super T> other) {
+    return new Predicate<T>() {
+      @Override
+      public boolean test(T arg) {
+        return Predicate.this.test(arg) && other.test(arg);
+      }
+    };
+  }
+
+  /**
+   * Returns a predicate that represents the logical negation of this predicate.
+   *
+   * @return a predicate that represents the logical negation of this predicate
+   */
+  default Predicate<T> negate() {
+    return new Predicate<T>() {
+      @Override
+      public boolean test(T arg) {
+        return !Predicate.this.test(arg);
+      }
+    };
+  }
+}
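
Since test() is the interface's only abstract method, it can be implemented with lambdas; a small sketch of the combinators (MediaCodecInfo is just an example argument type):

    // Sketch: composing codec predicates with and()/negate().
    Predicate<MediaCodecInfo> isVp8 = info -> info.getName().toLowerCase().contains("vp8");
    Predicate<MediaCodecInfo> isGoogleSw = info -> info.getName().startsWith("OMX.google.");
    Predicate<MediaCodecInfo> hardwareVp8 = isVp8.and(isGoogleSw.negate());
    boolean accepted = hardwareVp8.test(codecInfo); // codecInfo: some MediaCodecInfo instance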

+ 113 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/RTCStats.java

@@ -0,0 +1,113 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.Map;
+
+/**
+ * Java version of webrtc::RTCStats. Represents an RTCStats object, as
+ * described in https://w3c.github.io/webrtc-stats/. The |id|, |timestampUs|
+ * and |type| accessors have the same meaning for this class as for the
+ * RTCStats dictionary. Each RTCStatsReport produced by getStats contains
+ * multiple RTCStats objects; one for each underlying object (codec, stream,
+ * transport, etc.) that was inspected to produce the stats.
+ */
+public class RTCStats {
+  private final long timestampUs;
+  private final String type;
+  private final String id;
+  private final Map<String, Object> members;
+
+  public RTCStats(long timestampUs, String type, String id, Map<String, Object> members) {
+    this.timestampUs = timestampUs;
+    this.type = type;
+    this.id = id;
+    this.members = members;
+  }
+
+  // Timestamp in microseconds.
+  public double getTimestampUs() {
+    return timestampUs;
+  }
+
+  // Equivalent to RTCStatsType in the stats spec. Indicates the type of the
+  // object that was inspected to produce the stats.
+  public String getType() {
+    return type;
+  }
+
+  // Unique ID representing this stats object. May be referred to by members of
+  // other stats objects.
+  public String getId() {
+    return id;
+  }
+
+  /**
+   * Returns map of member names to values. Returns as an ordered map so that
+   * the stats object can be serialized with a consistent ordering.
+   *
+   * Values will be one of the following objects:
+   * - Boolean
+   * - Integer (for 32-bit signed integers)
+   * - Long (for 32-bit unsigned and 64-bit signed integers)
+   * - BigInteger (for 64-bit unsigned integers)
+   * - Double
+   * - String
+   * - The array form of any of the above (e.g., Integer[])
+   */
+  public Map<String, Object> getMembers() {
+    return members;
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder builder = new StringBuilder();
+    builder.append("{ timestampUs: ")
+        .append(timestampUs)
+        .append(", type: ")
+        .append(type)
+        .append(", id: ")
+        .append(id);
+    for (Map.Entry<String, Object> entry : members.entrySet()) {
+      builder.append(", ").append(entry.getKey()).append(": ");
+      appendValue(builder, entry.getValue());
+    }
+    builder.append(" }");
+    return builder.toString();
+  }
+
+  private static void appendValue(StringBuilder builder, Object value) {
+    if (value instanceof Object[]) {
+      Object[] arrayValue = (Object[]) value;
+      builder.append('[');
+      for (int i = 0; i < arrayValue.length; ++i) {
+        if (i != 0) {
+          builder.append(", ");
+        }
+        appendValue(builder, arrayValue[i]);
+      }
+      builder.append(']');
+    } else if (value instanceof String) {
+      // Enclose strings in quotes to make it clear they're strings.
+      builder.append('"').append(value).append('"');
+    } else {
+      builder.append(value);
+    }
+  }
+
+  // TODO(bugs.webrtc.org/8557) Use ctor directly with full Map type.
+  @SuppressWarnings("unchecked")
+  @CalledByNative
+  static RTCStats create(long timestampUs, String type, String id, Map members) {
+    return new RTCStats(timestampUs, type, id, members);
+  }
+}

+ 17 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/RTCStatsCollectorCallback.java

@@ -0,0 +1,17 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Interface for receiving stats reports (see webrtc::RTCStatsCollectorCallback). */
+public interface RTCStatsCollectorCallback {
+  /** Called when the stats report is ready. */
+  @CalledByNative public void onStatsDelivered(RTCStatsReport report);
+}

+ 62 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/RTCStatsReport.java

@@ -0,0 +1,62 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.Map;
+
+/**
+ * Java version of webrtc::RTCStatsReport. Each RTCStatsReport produced by
+ * getStats contains multiple RTCStats objects; one for each underlying object
+ * (codec, stream, transport, etc.) that was inspected to produce the stats.
+ */
+public class RTCStatsReport {
+  private final long timestampUs;
+  private final Map<String, RTCStats> stats;
+
+  public RTCStatsReport(long timestampUs, Map<String, RTCStats> stats) {
+    this.timestampUs = timestampUs;
+    this.stats = stats;
+  }
+
+  // Timestamp in microseconds.
+  public double getTimestampUs() {
+    return timestampUs;
+  }
+
+  // Map of stats object IDs to stats objects. Can be used to easily look up
+  // other stats objects, when they refer to each other by ID.
+  public Map<String, RTCStats> getStatsMap() {
+    return stats;
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder builder = new StringBuilder();
+    builder.append("{ timestampUs: ").append(timestampUs).append(", stats: [\n");
+    boolean first = true;
+    for (RTCStats stat : stats.values()) {
+      if (!first) {
+        builder.append(",\n");
+      }
+      builder.append(stat);
+      first = false;
+    }
+    builder.append(" ] }");
+    return builder.toString();
+  }
+
+  // TODO(bugs.webrtc.org/8557) Use ctor directly with full Map type.
+  @SuppressWarnings("unchecked")
+  @CalledByNative
+  private static RTCStatsReport create(long timestampUs, Map stats) {
+    return new RTCStatsReport(timestampUs, stats);
+  }
+}
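
A sketch of consuming a report; getStats(RTCStatsCollectorCallback) lives in PeerConnection.java (its diff is suppressed above), and the "outbound-rtp"/"bytesSent" names are taken from the W3C stats spec purely for illustration.

    // Sketch: log bytes sent for each outbound RTP stream.
    peerConnection.getStats(report -> {
      for (RTCStats stats : report.getStatsMap().values()) {
        if ("outbound-rtp".equals(stats.getType())) {
          Logging.d("StatsDemo", stats.getId() + " bytesSent=" + stats.getMembers().get("bytesSent"));
        }
      }
    });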

+ 28 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/RefCounted.java

@@ -0,0 +1,28 @@
+/*
+ *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Interface for ref counted objects in WebRTC. These objects have significant resources that need
+ * to be freed when they are no longer in use. Each object starts with a ref count of one when
+ * created. If a reference is passed as a parameter to a method, the caller has ownership of the
+ * object by default - calling release is not necessary unless retain is called.
+ */
+public interface RefCounted {
+  /** Increases ref count by one. */
+  void retain();
+
+  /**
+   * Decreases ref count by one. When the ref count reaches zero, resources related to the object
+   * will be freed.
+   */
+  void release();
+}
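
A sketch of the ownership rule above, assuming a RefCounted frame type (VideoFrame in this SDK) handed to a callback and processed asynchronously on an application-provided executor:

    // Sketch: keep a ref-counted object alive past the callback with retain()/release().
    public void onFrame(VideoFrame frame) { // VideoFrame implements RefCounted
      frame.retain(); // take an extra reference before handing the frame off
      executor.execute(() -> {
        process(frame);
        frame.release(); // balance the retain() once processing is done
      });
    }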

+ 249 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/RendererCommon.java

@@ -0,0 +1,249 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.Point;
+import android.opengl.Matrix;
+import android.view.View;
+
+/**
+ * Static helper functions for renderer implementations.
+ */
+public class RendererCommon {
+  /** Interface for reporting rendering events. */
+  public static interface RendererEvents {
+    /**
+     * Callback fired once first frame is rendered.
+     */
+    public void onFirstFrameRendered();
+
+    /**
+     * Callback fired when rendered frame resolution or rotation has changed.
+     */
+    public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation);
+  }
+
+  /**
+   * Interface for rendering frames on an EGLSurface with specified viewport location. Rotation,
+   * mirror, and cropping is specified using a 4x4 texture coordinate transform matrix. The frame
+   * input can either be an OES texture, RGB texture, or YUV textures in I420 format. The function
+   * release() must be called manually to free the resources held by this object.
+   */
+  public static interface GlDrawer {
+    /**
+     * Functions for drawing frames with different sources. The rendering surface target is
+     * implied by the current EGL context of the calling thread and requires no explicit argument.
+     * The coordinates specify the viewport location on the surface target.
+     */
+    void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
+        int viewportX, int viewportY, int viewportWidth, int viewportHeight);
+    void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight, int viewportX,
+        int viewportY, int viewportWidth, int viewportHeight);
+    void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
+        int viewportX, int viewportY, int viewportWidth, int viewportHeight);
+
+    /**
+     * Release all GL resources. This needs to be done manually, otherwise resources may leak.
+     */
+    void release();
+  }
+
+  /**
+   * Helper class for determining layout size based on layout requirements, scaling type, and video
+   * aspect ratio.
+   */
+  public static class VideoLayoutMeasure {
+    // The scaling type determines how the video will fill the allowed layout area in measure(). It
+    // can be specified separately for the case when video has matched orientation with layout size
+    // and when there is an orientation mismatch.
+    private ScalingType scalingTypeMatchOrientation = ScalingType.SCALE_ASPECT_BALANCED;
+    private ScalingType scalingTypeMismatchOrientation = ScalingType.SCALE_ASPECT_BALANCED;
+
+    public void setScalingType(ScalingType scalingType) {
+      this.scalingTypeMatchOrientation = scalingType;
+      this.scalingTypeMismatchOrientation = scalingType;
+    }
+
+    public void setScalingType(
+        ScalingType scalingTypeMatchOrientation, ScalingType scalingTypeMismatchOrientation) {
+      this.scalingTypeMatchOrientation = scalingTypeMatchOrientation;
+      this.scalingTypeMismatchOrientation = scalingTypeMismatchOrientation;
+    }
+
+    public Point measure(int widthSpec, int heightSpec, int frameWidth, int frameHeight) {
+      // Calculate max allowed layout size.
+      final int maxWidth = View.getDefaultSize(Integer.MAX_VALUE, widthSpec);
+      final int maxHeight = View.getDefaultSize(Integer.MAX_VALUE, heightSpec);
+      if (frameWidth == 0 || frameHeight == 0 || maxWidth == 0 || maxHeight == 0) {
+        return new Point(maxWidth, maxHeight);
+      }
+      // Calculate desired display size based on scaling type, video aspect ratio,
+      // and maximum layout size.
+      final float frameAspect = frameWidth / (float) frameHeight;
+      final float displayAspect = maxWidth / (float) maxHeight;
+      final ScalingType scalingType = (frameAspect > 1.0f) == (displayAspect > 1.0f)
+          ? scalingTypeMatchOrientation
+          : scalingTypeMismatchOrientation;
+      final Point layoutSize = getDisplaySize(scalingType, frameAspect, maxWidth, maxHeight);
+
+      // If the measure specification is forcing a specific size - yield.
+      if (View.MeasureSpec.getMode(widthSpec) == View.MeasureSpec.EXACTLY) {
+        layoutSize.x = maxWidth;
+      }
+      if (View.MeasureSpec.getMode(heightSpec) == View.MeasureSpec.EXACTLY) {
+        layoutSize.y = maxHeight;
+      }
+      return layoutSize;
+    }
+  }
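
A sketch of how a custom video View typically delegates to VideoLayoutMeasure; frameWidth and frameHeight are assumed to be cached from RendererEvents.onFrameResolutionChanged().

    // Sketch: inside a custom View subclass that renders video.
    private final RendererCommon.VideoLayoutMeasure videoLayoutMeasure =
        new RendererCommon.VideoLayoutMeasure();

    @Override
    protected void onMeasure(int widthSpec, int heightSpec) {
      Point size = videoLayoutMeasure.measure(widthSpec, heightSpec, frameWidth, frameHeight);
      setMeasuredDimension(size.x, size.y);
    }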
+
+  // Types of video scaling:
+  // SCALE_ASPECT_FIT - video frame is scaled to fit the size of the view by
+  //    maintaining the aspect ratio (black borders may be displayed).
+  // SCALE_ASPECT_FILL - video frame is scaled to fill the size of the view by
+  //    maintaining the aspect ratio. Some portion of the video frame may be
+  //    clipped.
+  // SCALE_ASPECT_BALANCED - Compromise between FIT and FILL. Video frame will fill as much as
+  // possible of the view while maintaining aspect ratio, under the constraint that at least
+  // |BALANCED_VISIBLE_FRACTION| of the frame content will be shown.
+  public static enum ScalingType { SCALE_ASPECT_FIT, SCALE_ASPECT_FILL, SCALE_ASPECT_BALANCED }
+  // The minimum fraction of the frame content that will be shown for |SCALE_ASPECT_BALANCED|.
+  // This limits excessive cropping when adjusting display size.
+  private static float BALANCED_VISIBLE_FRACTION = 0.5625f;
+
+  /**
+   * Returns layout transformation matrix that applies an optional mirror effect and compensates
+   * for video vs display aspect ratio.
+   */
+  public static float[] getLayoutMatrix(
+      boolean mirror, float videoAspectRatio, float displayAspectRatio) {
+    float scaleX = 1;
+    float scaleY = 1;
+    // Scale X or Y dimension so that video and display size have same aspect ratio.
+    if (displayAspectRatio > videoAspectRatio) {
+      scaleY = videoAspectRatio / displayAspectRatio;
+    } else {
+      scaleX = displayAspectRatio / videoAspectRatio;
+    }
+    // Apply optional horizontal flip.
+    if (mirror) {
+      scaleX *= -1;
+    }
+    final float matrix[] = new float[16];
+    Matrix.setIdentityM(matrix, 0);
+    Matrix.scaleM(matrix, 0, scaleX, scaleY, 1);
+    adjustOrigin(matrix);
+    return matrix;
+  }
+
+  /** Converts a float[16] matrix array to android.graphics.Matrix. */
+  public static android.graphics.Matrix convertMatrixToAndroidGraphicsMatrix(float[] matrix4x4) {
+    // clang-format off
+    float[] values = {
+        matrix4x4[0 * 4 + 0], matrix4x4[1 * 4 + 0], matrix4x4[3 * 4 + 0],
+        matrix4x4[0 * 4 + 1], matrix4x4[1 * 4 + 1], matrix4x4[3 * 4 + 1],
+        matrix4x4[0 * 4 + 3], matrix4x4[1 * 4 + 3], matrix4x4[3 * 4 + 3],
+    };
+    // clang-format on
+
+    android.graphics.Matrix matrix = new android.graphics.Matrix();
+    matrix.setValues(values);
+    return matrix;
+  }
+
+  /** Converts android.graphics.Matrix to a float[16] matrix array. */
+  public static float[] convertMatrixFromAndroidGraphicsMatrix(android.graphics.Matrix matrix) {
+    float[] values = new float[9];
+    matrix.getValues(values);
+
+    // The android.graphics.Matrix looks like this:
+    // [x1 y1 w1]
+    // [x2 y2 w2]
+    // [x3 y3 w3]
+    // We want to construct a matrix that looks like this:
+    // [x1 y1  0 w1]
+    // [x2 y2  0 w2]
+    // [ 0  0  1  0]
+    // [x3 y3  0 w3]
+    // Since it is stored in column-major order, it looks like this:
+    // [x1 x2 0 x3
+    //  y1 y2 0 y3
+    //   0  0 1  0
+    //  w1 w2 0 w3]
+    // clang-format off
+    float[] matrix4x4 = {
+        values[0 * 3 + 0],  values[1 * 3 + 0], 0,  values[2 * 3 + 0],
+        values[0 * 3 + 1],  values[1 * 3 + 1], 0,  values[2 * 3 + 1],
+        0,                  0,                 1,  0,
+        values[0 * 3 + 2],  values[1 * 3 + 2], 0,  values[2 * 3 + 2],
+    };
+    // clang-format on
+    return matrix4x4;
+  }
+
+  /**
+   * Calculate display size based on scaling type, video aspect ratio, and maximum display size.
+   */
+  public static Point getDisplaySize(
+      ScalingType scalingType, float videoAspectRatio, int maxDisplayWidth, int maxDisplayHeight) {
+    return getDisplaySize(convertScalingTypeToVisibleFraction(scalingType), videoAspectRatio,
+        maxDisplayWidth, maxDisplayHeight);
+  }
+
+  /**
+   * Move |matrix| transformation origin to (0.5, 0.5). This is the origin for texture coordinates
+   * that are in the range 0 to 1.
+   */
+  private static void adjustOrigin(float[] matrix) {
+    // Note that OpenGL is using column-major order.
+    // Pre translate with -0.5 to move coordinates to range [-0.5, 0.5].
+    matrix[12] -= 0.5f * (matrix[0] + matrix[4]);
+    matrix[13] -= 0.5f * (matrix[1] + matrix[5]);
+    // Post translate with 0.5 to move coordinates to range [0, 1].
+    matrix[12] += 0.5f;
+    matrix[13] += 0.5f;
+  }
+
+  /**
+   * Each scaling type has a one-to-one correspondence to a numeric minimum fraction of the video
+   * that must remain visible.
+   */
+  private static float convertScalingTypeToVisibleFraction(ScalingType scalingType) {
+    switch (scalingType) {
+      case SCALE_ASPECT_FIT:
+        return 1.0f;
+      case SCALE_ASPECT_FILL:
+        return 0.0f;
+      case SCALE_ASPECT_BALANCED:
+        return BALANCED_VISIBLE_FRACTION;
+      default:
+        throw new IllegalArgumentException();
+    }
+  }
+
+  /**
+   * Calculate display size based on minimum fraction of the video that must remain visible,
+   * video aspect ratio, and maximum display size.
+   */
+  private static Point getDisplaySize(
+      float minVisibleFraction, float videoAspectRatio, int maxDisplayWidth, int maxDisplayHeight) {
+    // If there is no constraint on the amount of cropping, fill the allowed display area.
+    if (minVisibleFraction == 0 || videoAspectRatio == 0) {
+      return new Point(maxDisplayWidth, maxDisplayHeight);
+    }
+    // Each dimension is constrained on max display size and how much we are allowed to crop.
+    final int width = Math.min(
+        maxDisplayWidth, Math.round(maxDisplayHeight / minVisibleFraction * videoAspectRatio));
+    final int height = Math.min(
+        maxDisplayHeight, Math.round(maxDisplayWidth / minVisibleFraction / videoAspectRatio));
+    return new Point(width, height);
+  }
+}

+ 75 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/RtcCertificatePem.java

@@ -0,0 +1,75 @@
+/*
+ *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.PeerConnection;
+
+/**
+ * Easily storable/serializable version of a native C++ RTCCertificatePEM.
+ */
+public class RtcCertificatePem {
+  /** PEM string representation of the private key. */
+  public final String privateKey;
+  /** PEM string representation of the certificate. */
+  public final String certificate;
+  /** Default expiration time of 30 days. */
+  private static final long DEFAULT_EXPIRY = 60 * 60 * 24 * 30;
+
+  /** Instantiate an RtcCertificatePem object from stored strings. */
+  @CalledByNative
+  public RtcCertificatePem(String privateKey, String certificate) {
+    this.privateKey = privateKey;
+    this.certificate = certificate;
+  }
+
+  @CalledByNative
+  String getPrivateKey() {
+    return privateKey;
+  }
+
+  @CalledByNative
+  String getCertificate() {
+    return certificate;
+  }
+
+  /**
+   * Generate a new RtcCertificatePem with the default settings of KeyType = ECDSA and
+   * expires = 30 days.
+   */
+  public static RtcCertificatePem generateCertificate() {
+    return nativeGenerateCertificate(PeerConnection.KeyType.ECDSA, DEFAULT_EXPIRY);
+  }
+
+  /**
+   * Generate a new RtcCertificatePem with a custom KeyType and the default setting of
+   * expires = 30 days.
+   */
+  public static RtcCertificatePem generateCertificate(PeerConnection.KeyType keyType) {
+    return nativeGenerateCertificate(keyType, DEFAULT_EXPIRY);
+  }
+
+  /**
+   * Generate a new RtcCertificatePem with a custom expires and the default setting of
+   * KeyType = ECDSA.
+   */
+  public static RtcCertificatePem generateCertificate(long expires) {
+    return nativeGenerateCertificate(PeerConnection.KeyType.ECDSA, expires);
+  }
+
+  /** Generate a new RtcCertificatePem with a custom KeyType and a custom expires. */
+  public static RtcCertificatePem generateCertificate(
+      PeerConnection.KeyType keyType, long expires) {
+    return nativeGenerateCertificate(keyType, expires);
+  }
+
+  private static native RtcCertificatePem nativeGenerateCertificate(
+      PeerConnection.KeyType keyType, long expires);
+}
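
A sketch of pre-generating a certificate so per-connection key generation can be skipped; assigning it through RTCConfiguration.certificate is assumed from PeerConnection.java, whose diff is suppressed above.

    // Sketch: one ECDSA certificate, valid 7 days (expires is in seconds), reused for a connection.
    RtcCertificatePem cert =
        RtcCertificatePem.generateCertificate(PeerConnection.KeyType.ECDSA, 60L * 60 * 24 * 7);
    PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
    rtcConfig.certificate = cert; // field assumed on RTCConfiguration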

+ 273 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/RtpParameters.java

@@ -0,0 +1,273 @@
+/*
+ *  Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * The parameters for an {@code RtpSender}, as defined in
+ * http://w3c.github.io/webrtc-pc/#rtcrtpsender-interface.
+ *
+ * Note: These structures use nullable Integer/etc. types because in the
+ * future, they may be used to construct ORTC RtpSender/RtpReceivers, in
+ * which case "null" will be used to represent "choose the implementation
+ * default value".
+ */
+public class RtpParameters {
+  public static class Encoding {
+    // If non-null, this represents the RID that identifies this encoding layer.
+    // RIDs are used to identify layers in simulcast.
+    public String rid;
+    // Set to true to cause this encoding to be sent, and false for it not to
+    // be sent.
+    public boolean active = true;
+    // If non-null, this represents the Transport Independent Application
+    // Specific maximum bandwidth defined in RFC3890. If null, there is no
+    // maximum bitrate.
+    public Integer maxBitrateBps;
+    // The minimum bitrate in bps for video.
+    public Integer minBitrateBps;
+    // The max framerate in fps for video.
+    public Integer maxFramerate;
+    // The number of temporal layers for video.
+    public Integer numTemporalLayers;
+    // If non-null, scale the width and height down by this factor for video. If null,
+    // implementation default scaling factor will be used.
+    public Double scaleResolutionDownBy;
+    // SSRC to be used by this encoding.
+    // Can't be changed between getParameters/setParameters.
+    public Long ssrc;
+
+    // This constructor is useful for creating simulcast layers.
+    Encoding(String rid, boolean active, Double scaleResolutionDownBy) {
+      this.rid = rid;
+      this.active = active;
+      this.scaleResolutionDownBy = scaleResolutionDownBy;
+    }
+
+    @CalledByNative("Encoding")
+    Encoding(String rid, boolean active, Integer maxBitrateBps, Integer minBitrateBps,
+        Integer maxFramerate, Integer numTemporalLayers, Double scaleResolutionDownBy, Long ssrc) {
+      this.rid = rid;
+      this.active = active;
+      this.maxBitrateBps = maxBitrateBps;
+      this.minBitrateBps = minBitrateBps;
+      this.maxFramerate = maxFramerate;
+      this.numTemporalLayers = numTemporalLayers;
+      this.scaleResolutionDownBy = scaleResolutionDownBy;
+      this.ssrc = ssrc;
+    }
+
+
+    @CalledByNative("Encoding")
+    String getRid() {
+      return rid;
+    }
+
+    @CalledByNative("Encoding")
+    boolean getActive() {
+      return active;
+    }
+
+    @CalledByNative("Encoding")
+    Integer getMaxBitrateBps() {
+      return maxBitrateBps;
+    }
+
+    @CalledByNative("Encoding")
+    Integer getMinBitrateBps() {
+      return minBitrateBps;
+    }
+
+    @CalledByNative("Encoding")
+    Integer getMaxFramerate() {
+      return maxFramerate;
+    }
+
+    @CalledByNative("Encoding")
+    Integer getNumTemporalLayers() {
+      return numTemporalLayers;
+    }
+
+    @CalledByNative("Encoding")
+    Double getScaleResolutionDownBy() {
+      return scaleResolutionDownBy;
+    }
+
+    @CalledByNative("Encoding")
+    Long getSsrc() {
+      return ssrc;
+    }
+  }
+
+  public static class Codec {
+    // Payload type used to identify this codec in RTP packets.
+    public int payloadType;
+    // Name used to identify the codec. Equivalent to MIME subtype.
+    public String name;
+    // The media type of this codec. Equivalent to MIME top-level type.
+    MediaStreamTrack.MediaType kind;
+    // Clock rate in Hertz.
+    public Integer clockRate;
+    // The number of audio channels used. Set to null for video codecs.
+    public Integer numChannels;
+    // The "format specific parameters" field from the "a=fmtp" line in the SDP
+    public Map<String, String> parameters;
+
+    @CalledByNative("Codec")
+    Codec(int payloadType, String name, MediaStreamTrack.MediaType kind, Integer clockRate,
+        Integer numChannels, Map<String, String> parameters) {
+      this.payloadType = payloadType;
+      this.name = name;
+      this.kind = kind;
+      this.clockRate = clockRate;
+      this.numChannels = numChannels;
+      this.parameters = parameters;
+    }
+
+    @CalledByNative("Codec")
+    int getPayloadType() {
+      return payloadType;
+    }
+
+    @CalledByNative("Codec")
+    String getName() {
+      return name;
+    }
+
+    @CalledByNative("Codec")
+    MediaStreamTrack.MediaType getKind() {
+      return kind;
+    }
+
+    @CalledByNative("Codec")
+    Integer getClockRate() {
+      return clockRate;
+    }
+
+    @CalledByNative("Codec")
+    Integer getNumChannels() {
+      return numChannels;
+    }
+
+    @CalledByNative("Codec")
+    Map getParameters() {
+      return parameters;
+    }
+  }
+
+  public static class Rtcp {
+    /** The Canonical Name used by RTCP */
+    private final String cname;
+    /** Whether reduced size RTCP is configured or compound RTCP */
+    private final boolean reducedSize;
+
+    @CalledByNative("Rtcp")
+    Rtcp(String cname, boolean reducedSize) {
+      this.cname = cname;
+      this.reducedSize = reducedSize;
+    }
+
+    @CalledByNative("Rtcp")
+    public String getCname() {
+      return cname;
+    }
+
+    @CalledByNative("Rtcp")
+    public boolean getReducedSize() {
+      return reducedSize;
+    }
+  }
+
+  public static class HeaderExtension {
+    /** The URI of the RTP header extension, as defined in RFC5285. */
+    private final String uri;
+    /** The value put in the RTP packet to identify the header extension. */
+    private final int id;
+    /** Whether the header extension is encrypted or not. */
+    private final boolean encrypted;
+
+    @CalledByNative("HeaderExtension")
+    HeaderExtension(String uri, int id, boolean encrypted) {
+      this.uri = uri;
+      this.id = id;
+      this.encrypted = encrypted;
+    }
+
+    @CalledByNative("HeaderExtension")
+    public String getUri() {
+      return uri;
+    }
+
+    @CalledByNative("HeaderExtension")
+    public int getId() {
+      return id;
+    }
+
+    @CalledByNative("HeaderExtension")
+    public boolean getEncrypted() {
+      return encrypted;
+    }
+  }
+
+  public final String transactionId;
+
+  private final Rtcp rtcp;
+
+  private final List<HeaderExtension> headerExtensions;
+
+  public final List<Encoding> encodings;
+  // Codec parameters cannot currently be changed between getParameters and
+  // setParameters, though in the future it may become possible to reorder
+  // or remove them.
+  public final List<Codec> codecs;
+
+  @CalledByNative
+  RtpParameters(String transactionId, Rtcp rtcp, List<HeaderExtension> headerExtensions,
+      List<Encoding> encodings, List<Codec> codecs) {
+    this.transactionId = transactionId;
+    this.rtcp = rtcp;
+    this.headerExtensions = headerExtensions;
+    this.encodings = encodings;
+    this.codecs = codecs;
+  }
+
+  @CalledByNative
+  String getTransactionId() {
+    return transactionId;
+  }
+
+  @CalledByNative
+  public Rtcp getRtcp() {
+    return rtcp;
+  }
+
+  @CalledByNative
+  public List<HeaderExtension> getHeaderExtensions() {
+    return headerExtensions;
+  }
+
+  @CalledByNative
+  List<Encoding> getEncodings() {
+    return encodings;
+  }
+
+  @CalledByNative
+  List<Codec> getCodecs() {
+    return codecs;
+  }
+}

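A minimal usage sketch for the RtpParameters surface above, assuming an already-negotiated RtpSender named sender and the public maxBitrateBps field that upstream WebRTC exposes on Encoding (both are assumptions, not shown in this hunk):

    RtpParameters parameters = sender.getParameters();
    for (RtpParameters.Encoding encoding : parameters.encodings) {
      // Encodings may be modified and written back; the codec list may not.
      encoding.maxBitrateBps = 500_000; // cap each encoding at 500 kbps
    }
    boolean applied = sender.setParameters(parameters);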
+ 97 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/RtpReceiver.java

@@ -0,0 +1,97 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+
+/** Java wrapper for a C++ RtpReceiverInterface. */
+public class RtpReceiver {
+/** Java wrapper for a C++ RtpReceiverObserverInterface. */
+  public static interface Observer {
+    // Called when the first audio or video packet is received.
+    @CalledByNative("Observer")
+    public void onFirstPacketReceived(MediaStreamTrack.MediaType mediaType);
+  }
+
+  private long nativeRtpReceiver;
+  private long nativeObserver;
+
+  private MediaStreamTrack cachedTrack;
+
+  @CalledByNative
+  public RtpReceiver(long nativeRtpReceiver) {
+    this.nativeRtpReceiver = nativeRtpReceiver;
+    long nativeTrack = nativeGetTrack(nativeRtpReceiver);
+    cachedTrack = MediaStreamTrack.createMediaStreamTrack(nativeTrack);
+  }
+
+  public MediaStreamTrack track() {
+    return cachedTrack;
+  }
+
+  public boolean setParameters(RtpParameters parameters) {
+    checkRtpReceiverExists();
+    return parameters == null ? false : nativeSetParameters(nativeRtpReceiver, parameters);
+  }
+
+  public RtpParameters getParameters() {
+    checkRtpReceiverExists();
+    return nativeGetParameters(nativeRtpReceiver);
+  }
+
+  public String id() {
+    checkRtpReceiverExists();
+    return nativeGetId(nativeRtpReceiver);
+  }
+
+  @CalledByNative
+  public void dispose() {
+    checkRtpReceiverExists();
+    cachedTrack.dispose();
+    if (nativeObserver != 0) {
+      nativeUnsetObserver(nativeRtpReceiver, nativeObserver);
+      nativeObserver = 0;
+    }
+    JniCommon.nativeReleaseRef(nativeRtpReceiver);
+    nativeRtpReceiver = 0;
+  }
+
+  public void SetObserver(Observer observer) {
+    checkRtpReceiverExists();
+    // Unset the existing one before setting a new one.
+    if (nativeObserver != 0) {
+      nativeUnsetObserver(nativeRtpReceiver, nativeObserver);
+    }
+    nativeObserver = nativeSetObserver(nativeRtpReceiver, observer);
+  }
+
+  public void setFrameDecryptor(FrameDecryptor frameDecryptor) {
+    checkRtpReceiverExists();
+    nativeSetFrameDecryptor(nativeRtpReceiver, frameDecryptor.getNativeFrameDecryptor());
+  }
+
+  private void checkRtpReceiverExists() {
+    if (nativeRtpReceiver == 0) {
+      throw new IllegalStateException("RtpReceiver has been disposed.");
+    }
+  }
+
+  // This should increment the reference count of the track.
+  // Will be released in dispose().
+  private static native long nativeGetTrack(long rtpReceiver);
+  private static native boolean nativeSetParameters(long rtpReceiver, RtpParameters parameters);
+  private static native RtpParameters nativeGetParameters(long rtpReceiver);
+  private static native String nativeGetId(long rtpReceiver);
+  private static native long nativeSetObserver(long rtpReceiver, Observer observer);
+  private static native void nativeUnsetObserver(long rtpReceiver, long nativeObserver);
+  private static native void nativeSetFrameDecryptor(long rtpReceiver, long nativeFrameDecryptor);
+}

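Observer is a single-method interface, so a lambda suffices; a sketch, assuming receiver was obtained from a PeerConnection.Observer callback such as onAddTrack:

    receiver.SetObserver(mediaType ->
        Logging.d("App", "First " + mediaType + " packet received"));
    MediaStreamTrack track = receiver.track();
    Logging.d("App", "Receiving track " + track.id() + " (" + track.kind() + ")");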
+ 129 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/RtpSender.java

@@ -0,0 +1,129 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ RtpSenderInterface. */
+public class RtpSender {
+  private long nativeRtpSender;
+
+  private MediaStreamTrack cachedTrack;
+  private boolean ownsTrack = true;
+  private final DtmfSender dtmfSender;
+
+  @CalledByNative
+  public RtpSender(long nativeRtpSender) {
+    this.nativeRtpSender = nativeRtpSender;
+    long nativeTrack = nativeGetTrack(nativeRtpSender);
+    cachedTrack = MediaStreamTrack.createMediaStreamTrack(nativeTrack);
+
+    long nativeDtmfSender = nativeGetDtmfSender(nativeRtpSender);
+    dtmfSender = (nativeDtmfSender != 0) ? new DtmfSender(nativeDtmfSender) : null;
+  }
+
+  /**
+   * Starts sending a new track, without requiring additional SDP negotiation.
+   * <p>
+   * Note: This is equivalent to replaceTrack in the official WebRTC API. It
+   * was just implemented before the standards group settled on a name.
+   *
+   * @param takeOwnership If true, the RtpSender takes ownership of the track
+   *                      from the caller, and will auto-dispose of it when no
+   *                      longer needed. |takeOwnership| should only be used if
+   *                      the caller owns the track; it is not appropriate when
+   *                      the track is owned by, for example, another RtpSender
+   *                      or a MediaStream.
+   * @return              true on success and false on failure.
+   */
+  public boolean setTrack(MediaStreamTrack track, boolean takeOwnership) {
+    checkRtpSenderExists();
+    if (!nativeSetTrack(nativeRtpSender, (track == null) ? 0 : track.getNativeMediaStreamTrack())) {
+      return false;
+    }
+    if (cachedTrack != null && ownsTrack) {
+      cachedTrack.dispose();
+    }
+    cachedTrack = track;
+    ownsTrack = takeOwnership;
+    return true;
+  }
+
+  public MediaStreamTrack track() {
+    return cachedTrack;
+  }
+
+  public boolean setParameters(RtpParameters parameters) {
+    checkRtpSenderExists();
+    return nativeSetParameters(nativeRtpSender, parameters);
+  }
+
+  public RtpParameters getParameters() {
+    checkRtpSenderExists();
+    return nativeGetParameters(nativeRtpSender);
+  }
+
+  public String id() {
+    checkRtpSenderExists();
+    return nativeGetId(nativeRtpSender);
+  }
+
+  public DtmfSender dtmf() {
+    return dtmfSender;
+  }
+
+  public void setFrameEncryptor(FrameEncryptor frameEncryptor) {
+    checkRtpSenderExists();
+    nativeSetFrameEncryptor(nativeRtpSender, frameEncryptor.getNativeFrameEncryptor());
+  }
+
+  public void dispose() {
+    checkRtpSenderExists();
+    if (dtmfSender != null) {
+      dtmfSender.dispose();
+    }
+    if (cachedTrack != null && ownsTrack) {
+      cachedTrack.dispose();
+    }
+    JniCommon.nativeReleaseRef(nativeRtpSender);
+    nativeRtpSender = 0;
+  }
+
+  /** Returns a pointer to webrtc::RtpSenderInterface. */
+  long getNativeRtpSender() {
+    checkRtpSenderExists();
+    return nativeRtpSender;
+  }
+
+  private void checkRtpSenderExists() {
+    if (nativeRtpSender == 0) {
+      throw new IllegalStateException("RtpSender has been disposed.");
+    }
+  }
+
+  private static native boolean nativeSetTrack(long rtpSender, long nativeTrack);
+
+  // This should increment the reference count of the track.
+  // Will be released in dispose() or setTrack().
+  private static native long nativeGetTrack(long rtpSender);
+
+  // This should increment the reference count of the DTMF sender.
+  // Will be released in dispose().
+  private static native long nativeGetDtmfSender(long rtpSender);
+
+  private static native boolean nativeSetParameters(long rtpSender, RtpParameters parameters);
+
+  private static native RtpParameters nativeGetParameters(long rtpSender);
+
+  private static native String nativeGetId(long rtpSender);
+
+  private static native void nativeSetFrameEncryptor(long rtpSender, long nativeFrameEncryptor);
+}

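Because setTrack() swaps the outgoing media without SDP renegotiation, switching sources reduces to a sketch like the following (factory, screenSource and sender are assumed to exist already; takeOwnership is false because the application disposes the track itself):

    VideoTrack screenTrack = factory.createVideoTrack("screen0", screenSource);
    if (!sender.setTrack(screenTrack, /* takeOwnership= */ false)) {
      Logging.e("App", "Replacing the outgoing video track failed");
    }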
+ 243 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/RtpTransceiver.java

@@ -0,0 +1,243 @@
+/*
+ *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import org.webrtc.MediaStreamTrack;
+import org.webrtc.RtpParameters;
+
+/**
+ * Java wrapper for a C++ RtpTransceiverInterface.
+ *
+ * <p>The RTCRtpTransceiver maps to the RTCRtpTransceiver defined by the WebRTC
+ * specification. A transceiver represents a combination of an RTCRtpSender
+ * and an RTCRtpReceiver that share a common mid. As defined in JSEP, an
+ * RTCRtpTransceiver is said to be associated with a media description if its
+ * mid property is non-null; otherwise, it is said to be disassociated.
+ * JSEP: https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-24
+ *
+ * <p>Note that RTCRtpTransceivers are only supported when using
+ * RTCPeerConnection with Unified Plan SDP.
+ *
+ * <p>WebRTC specification for RTCRtpTransceiver, the JavaScript analog:
+ * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver
+ */
+public class RtpTransceiver {
+  /** Java version of webrtc::RtpTransceiverDirection - the ordering must be kept in sync. */
+  public enum RtpTransceiverDirection {
+    SEND_RECV(0),
+    SEND_ONLY(1),
+    RECV_ONLY(2),
+    INACTIVE(3);
+
+    private final int nativeIndex;
+
+    private RtpTransceiverDirection(int nativeIndex) {
+      this.nativeIndex = nativeIndex;
+    }
+
+    @CalledByNative("RtpTransceiverDirection")
+    int getNativeIndex() {
+      return nativeIndex;
+    }
+
+    @CalledByNative("RtpTransceiverDirection")
+    static RtpTransceiverDirection fromNativeIndex(int nativeIndex) {
+      for (RtpTransceiverDirection type : RtpTransceiverDirection.values()) {
+        if (type.getNativeIndex() == nativeIndex) {
+          return type;
+        }
+      }
+      throw new IllegalArgumentException(
+          "Unknown native RtpTransceiverDirection type: " + nativeIndex);
+    }
+  }
+
+  /**
+   * Tracks webrtc::RtpTransceiverInit. https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiverinit
+   * A structure for initializing an RtpTransceiver in a call to addTransceiver.
+   * Note: This does not contain a list of encoding parameters, because they are currently
+   * not being used natively.
+   */
+  public static final class RtpTransceiverInit {
+    private final RtpTransceiverDirection direction;
+    private final List<String> streamIds;
+    private final List<RtpParameters.Encoding> sendEncodings;
+
+    public RtpTransceiverInit() {
+      this(RtpTransceiverDirection.SEND_RECV);
+    }
+
+    public RtpTransceiverInit(RtpTransceiverDirection direction) {
+      this(direction, Collections.emptyList(), Collections.emptyList());
+    }
+
+    public RtpTransceiverInit(RtpTransceiverDirection direction, List<String> streamIds) {
+      this(direction, streamIds, Collections.emptyList());
+    }
+
+    public RtpTransceiverInit(RtpTransceiverDirection direction, List<String> streamIds,
+        List<RtpParameters.Encoding> sendEncodings) {
+      this.direction = direction;
+      this.streamIds = new ArrayList<String>(streamIds);
+      this.sendEncodings = new ArrayList<RtpParameters.Encoding>(sendEncodings);
+    }
+
+    @CalledByNative("RtpTransceiverInit")
+    int getDirectionNativeIndex() {
+      return direction.getNativeIndex();
+    }
+
+    @CalledByNative("RtpTransceiverInit")
+    List<String> getStreamIds() {
+      return new ArrayList<String>(this.streamIds);
+    }
+
+    @CalledByNative("RtpTransceiverInit")
+    List<RtpParameters.Encoding> getSendEncodings() {
+      return new ArrayList<RtpParameters.Encoding>(this.sendEncodings);
+    }
+  }
+
+  private long nativeRtpTransceiver;
+  private RtpSender cachedSender;
+  private RtpReceiver cachedReceiver;
+
+  @CalledByNative
+  protected RtpTransceiver(long nativeRtpTransceiver) {
+    this.nativeRtpTransceiver = nativeRtpTransceiver;
+    cachedSender = nativeGetSender(nativeRtpTransceiver);
+    cachedReceiver = nativeGetReceiver(nativeRtpTransceiver);
+  }
+
+  /**
+   * Media type of the transceiver. Any sender(s)/receiver(s) will have this
+   * type as well.
+   */
+  public MediaStreamTrack.MediaType getMediaType() {
+    checkRtpTransceiverExists();
+    return nativeGetMediaType(nativeRtpTransceiver);
+  }
+
+  /**
+   * The mid attribute is the mid negotiated and present in the local and
+   * remote descriptions. Before negotiation is complete, the mid value may be
+   * null. After rollbacks, the value may change from a non-null value to null.
+   * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-mid
+   */
+  public String getMid() {
+    checkRtpTransceiverExists();
+    return nativeGetMid(nativeRtpTransceiver);
+  }
+
+  /**
+   * The sender attribute exposes the RtpSender corresponding to the RTP media
+   * that may be sent with the transceiver's mid. The sender is always present,
+   * regardless of the direction of media.
+   * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-sender
+   */
+  public RtpSender getSender() {
+    return cachedSender;
+  }
+
+  /**
+   * The receiver attribute exposes the RtpReceiver corresponding to the RTP
+   * media that may be received with the transceiver's mid. The receiver is
+   * always present, regardless of the direction of media.
+   * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-receiver
+   */
+  public RtpReceiver getReceiver() {
+    return cachedReceiver;
+  }
+
+  /**
+   * The stopped attribute indicates that the sender of this transceiver will no
+   * longer send, and that the receiver will no longer receive. It is true if
+   * either stop has been called or if setting the local or remote description
+   * has caused the RtpTransceiver to be stopped.
+   * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-stopped
+   */
+  public boolean isStopped() {
+    checkRtpTransceiverExists();
+    return nativeStopped(nativeRtpTransceiver);
+  }
+
+  /**
+   * The direction attribute indicates the preferred direction of this
+   * transceiver, which will be used in calls to CreateOffer and CreateAnswer.
+   * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-direction
+   */
+  public RtpTransceiverDirection getDirection() {
+    checkRtpTransceiverExists();
+    return nativeDirection(nativeRtpTransceiver);
+  }
+
+  /**
+   * The current_direction attribute indicates the current direction negotiated
+   * for this transceiver. If this transceiver has never been represented in an
+   * offer/answer exchange, or if the transceiver is stopped, the value is null.
+   * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-currentdirection
+   */
+  public RtpTransceiverDirection getCurrentDirection() {
+    checkRtpTransceiverExists();
+    return nativeCurrentDirection(nativeRtpTransceiver);
+  }
+
+  /**
+   * Sets the preferred direction of this transceiver. An update of
+   * directionality does not take effect immediately. Instead, future calls to
+   * CreateOffer and CreateAnswer mark the corresponding media descriptions as
+   * sendrecv, sendonly, recvonly, or inactive.
+   * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-direction
+   */
+  public void setDirection(RtpTransceiverDirection rtpTransceiverDirection) {
+    checkRtpTransceiverExists();
+    nativeSetDirection(nativeRtpTransceiver, rtpTransceiverDirection);
+  }
+
+  /**
+   * The Stop method irreversibly stops the RtpTransceiver. The sender of this
+   * transceiver will no longer send, the receiver will no longer receive.
+   * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-stop
+   */
+  public void stop() {
+    checkRtpTransceiverExists();
+    nativeStop(nativeRtpTransceiver);
+  }
+
+  @CalledByNative
+  public void dispose() {
+    checkRtpTransceiverExists();
+    cachedSender.dispose();
+    cachedReceiver.dispose();
+    JniCommon.nativeReleaseRef(nativeRtpTransceiver);
+    nativeRtpTransceiver = 0;
+  }
+
+  private void checkRtpTransceiverExists() {
+    if (nativeRtpTransceiver == 0) {
+      throw new IllegalStateException("RtpTransceiver has been disposed.");
+    }
+  }
+
+  private static native MediaStreamTrack.MediaType nativeGetMediaType(long rtpTransceiver);
+  private static native String nativeGetMid(long rtpTransceiver);
+  private static native RtpSender nativeGetSender(long rtpTransceiver);
+  private static native RtpReceiver nativeGetReceiver(long rtpTransceiver);
+  private static native boolean nativeStopped(long rtpTransceiver);
+  private static native RtpTransceiverDirection nativeDirection(long rtpTransceiver);
+  private static native RtpTransceiverDirection nativeCurrentDirection(long rtpTransceiver);
+  private static native void nativeStop(long rtpTransceiver);
+  private static native void nativeSetDirection(
+      long rtpTransceiver, RtpTransceiverDirection rtpTransceiverDirection);
+}

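Under Unified Plan, transceivers are usually created up front; a sketch, assuming peerConnection was built with UNIFIED_PLAN SDP semantics and localVideoTrack exists:

    RtpTransceiver transceiver = peerConnection.addTransceiver(
        MediaStreamTrack.MediaType.MEDIA_TYPE_VIDEO,
        new RtpTransceiver.RtpTransceiverInit(
            RtpTransceiver.RtpTransceiverDirection.SEND_ONLY,
            Collections.singletonList("stream0")));
    transceiver.getSender().setTrack(localVideoTrack, /* takeOwnership= */ false);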
+ 27 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/SSLCertificateVerifier.java

@@ -0,0 +1,27 @@
+/*
+ *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * The SSLCertificateVerifier interface allows API users to provide custom
+ * logic to verify certificates.
+ */
+public interface SSLCertificateVerifier {
+  /**
+   * Implementations of verify allow applications to provide custom logic for
+   * verifying certificates. This is not required by default and should be used
+   * with care.
+   *
+   * @param certificate A byte array containing a DER encoded X509 certificate.
+   * @return True if the certificate is verified and trusted else false.
+   */
+  @CalledByNative boolean verify(byte[] certificate);
+}

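A deliberately minimal sketch of an implementation that only checks the certificate's validity window using the JDK's CertificateFactory; a real deployment would pin or chain-validate rather than accept any well-formed, unexpired certificate:

    import java.io.ByteArrayInputStream;
    import java.security.cert.CertificateFactory;
    import java.security.cert.X509Certificate;

    class ValidityOnlyVerifier implements SSLCertificateVerifier {
      @Override
      public boolean verify(byte[] certificate) {
        try {
          X509Certificate cert = (X509Certificate) CertificateFactory.getInstance("X.509")
              .generateCertificate(new ByteArrayInputStream(certificate));
          cert.checkValidity(); // throws if expired or not yet valid
          return true;
        } catch (Exception e) {
          return false; // reject unparsable or out-of-date certificates
        }
      }
    }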
+ 211 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java

@@ -0,0 +1,211 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.annotation.TargetApi;
+import android.app.Activity;
+import android.content.Context;
+import android.content.Intent;
+import android.hardware.display.DisplayManager;
+import android.hardware.display.VirtualDisplay;
+import android.media.projection.MediaProjection;
+import android.media.projection.MediaProjectionManager;
+
+import android.view.Surface;
+
+/**
+ * An implementation of VideoCapturer to capture the screen content as a video stream.
+ * Capturing is done by {@code MediaProjection} on a {@code SurfaceTexture}. We interact with this
+ * {@code SurfaceTexture} using a {@code SurfaceTextureHelper}.
+ * The {@code SurfaceTextureHelper} is created by the native code and passed to this capturer in
+ * {@code VideoCapturer.initialize()}. On receiving a new frame, this capturer passes it
+ * as a texture to the native code via {@code CapturerObserver.onFrameCaptured()}. This takes
+ * place on the HandlerThread of the given {@code SurfaceTextureHelper}. When done with each frame,
+ * the native code returns the buffer to the  {@code SurfaceTextureHelper} to be used for new
+ * frames. At any time, at most one frame is being processed.
+ *
+ * @note This class is only supported on Android Lollipop and above.
+ */
+@TargetApi(21)
+public class ScreenCapturerAndroid implements VideoCapturer, VideoSink {
+  private static final int DISPLAY_FLAGS =
+      DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC | DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION;
+  // DPI for VirtualDisplay, does not seem to matter for us.
+  private static final int VIRTUAL_DISPLAY_DPI = 400;
+
+  private final Intent mediaProjectionPermissionResultData;
+  private final MediaProjection.Callback mediaProjectionCallback;
+
+  private int width;
+  private int height;
+  private VirtualDisplay virtualDisplay;
+  private SurfaceTextureHelper surfaceTextureHelper;
+  private CapturerObserver capturerObserver;
+  private long numCapturedFrames;
+  private MediaProjection mediaProjection;
+  private boolean isDisposed;
+  private MediaProjectionManager mediaProjectionManager;
+
+  /**
+   * Constructs a new Screen Capturer.
+   *
+   * @param mediaProjectionPermissionResultData the result data of MediaProjection permission
+   *     activity; the calling app must validate that result code is Activity.RESULT_OK before
+   *     calling this method.
+   * @param mediaProjectionCallback MediaProjection callback to implement application specific
+   *     logic in events such as when the user revokes a previously granted capture permission.
+   */
+  public ScreenCapturerAndroid(Intent mediaProjectionPermissionResultData,
+      MediaProjection.Callback mediaProjectionCallback) {
+    this.mediaProjectionPermissionResultData = mediaProjectionPermissionResultData;
+    this.mediaProjectionCallback = mediaProjectionCallback;
+  }
+
+  private void checkNotDisposed() {
+    if (isDisposed) {
+      throw new RuntimeException("capturer is disposed.");
+    }
+  }
+
+  @Override
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public synchronized void initialize(final SurfaceTextureHelper surfaceTextureHelper,
+      final Context applicationContext, final CapturerObserver capturerObserver) {
+    checkNotDisposed();
+
+    if (capturerObserver == null) {
+      throw new RuntimeException("capturerObserver not set.");
+    }
+    this.capturerObserver = capturerObserver;
+
+    if (surfaceTextureHelper == null) {
+      throw new RuntimeException("surfaceTextureHelper not set.");
+    }
+    this.surfaceTextureHelper = surfaceTextureHelper;
+
+    mediaProjectionManager = (MediaProjectionManager) applicationContext.getSystemService(
+        Context.MEDIA_PROJECTION_SERVICE);
+  }
+
+  @Override
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public synchronized void startCapture(
+      final int width, final int height, final int ignoredFramerate) {
+    checkNotDisposed();
+
+    this.width = width;
+    this.height = height;
+
+    mediaProjection = mediaProjectionManager.getMediaProjection(
+        Activity.RESULT_OK, mediaProjectionPermissionResultData);
+
+    // Let MediaProjection callback use the SurfaceTextureHelper thread.
+    mediaProjection.registerCallback(mediaProjectionCallback, surfaceTextureHelper.getHandler());
+
+    createVirtualDisplay();
+    capturerObserver.onCapturerStarted(true);
+    surfaceTextureHelper.startListening(ScreenCapturerAndroid.this);
+  }
+
+  @Override
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public synchronized void stopCapture() {
+    checkNotDisposed();
+    ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() {
+      @Override
+      public void run() {
+        surfaceTextureHelper.stopListening();
+        capturerObserver.onCapturerStopped();
+
+        if (virtualDisplay != null) {
+          virtualDisplay.release();
+          virtualDisplay = null;
+        }
+
+        if (mediaProjection != null) {
+          // Unregister the callback before stopping, otherwise the callback recursively
+          // calls this method.
+          mediaProjection.unregisterCallback(mediaProjectionCallback);
+          mediaProjection.stop();
+          mediaProjection = null;
+        }
+      }
+    });
+  }
+
+  @Override
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public synchronized void dispose() {
+    isDisposed = true;
+  }
+
+  /**
+   * Changes output video format. This method can be used to scale the output
+   * video, or to change orientation when the captured screen is rotated for example.
+   *
+   * @param width new output video width
+   * @param height new output video height
+   * @param ignoredFramerate ignored
+   */
+  @Override
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public synchronized void changeCaptureFormat(
+      final int width, final int height, final int ignoredFramerate) {
+    checkNotDisposed();
+
+    this.width = width;
+    this.height = height;
+
+    if (virtualDisplay == null) {
+      // Capturer is stopped, the virtual display will be created in startCapture().
+      return;
+    }
+
+    // Create a new virtual display on the surfaceTextureHelper thread to avoid interference
+    // with frame processing, which happens on the same thread (we serialize events by running
+    // them on the same thread).
+    ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() {
+      @Override
+      public void run() {
+        virtualDisplay.release();
+        createVirtualDisplay();
+      }
+    });
+  }
+
+  private void createVirtualDisplay() {
+    surfaceTextureHelper.setTextureSize(width, height);
+    virtualDisplay = mediaProjection.createVirtualDisplay("WebRTC_ScreenCapture", width, height,
+        VIRTUAL_DISPLAY_DPI, DISPLAY_FLAGS, new Surface(surfaceTextureHelper.getSurfaceTexture()),
+        null /* callback */, null /* callback handler */);
+  }
+
+  // This is called on the internal looper thread of {@code SurfaceTextureHelper}.
+  @Override
+  public void onFrame(VideoFrame frame) {
+    numCapturedFrames++;
+    capturerObserver.onFrameCaptured(frame);
+  }
+
+  @Override
+  public boolean isScreencast() {
+    return true;
+  }
+
+  public long getNumCapturedFrames() {
+    return numCapturedFrames;
+  }
+}

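A sketch of the expected call sequence, assuming the app has already received RESULT_OK from the intent returned by MediaProjectionManager.createScreenCaptureIntent(), and that surfaceTextureHelper and videoSource are set up as for any other capturer:

    VideoCapturer capturer = new ScreenCapturerAndroid(
        permissionResultData, new MediaProjection.Callback() {
          @Override
          public void onStop() {
            // The user revoked the capture permission; tear down the stream here.
          }
        });
    capturer.initialize(surfaceTextureHelper, applicationContext,
        videoSource.getCapturerObserver());
    capturer.startCapture(/* width= */ 1280, /* height= */ 720, /* ignoredFramerate= */ 0);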
+ 26 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/SdpObserver.java

@@ -0,0 +1,26 @@
+/*
+ *  Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Interface for observing SDP-related events. */
+public interface SdpObserver {
+  /** Called on success of Create{Offer,Answer}(). */
+  @CalledByNative void onCreateSuccess(SessionDescription sdp);
+
+  /** Called on success of Set{Local,Remote}Description(). */
+  @CalledByNative void onSetSuccess();
+
+  /** Called on error of Create{Offer,Answer}(). */
+  @CalledByNative void onCreateFailure(String error);
+
+  /** Called on error of Set{Local,Remote}Description(). */
+  @CalledByNative void onSetFailure(String error);
+}

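All four callbacks must be implemented even when only one matters, so applications commonly keep a no-op adapter and override selectively; a sketch:

    class SdpObserverAdapter implements SdpObserver {
      @Override public void onCreateSuccess(SessionDescription sdp) {}
      @Override public void onSetSuccess() {}
      @Override public void onCreateFailure(String error) {}
      @Override public void onSetFailure(String error) {}
    }

    // Usage: react only to a successfully created offer.
    peerConnection.createOffer(new SdpObserverAdapter() {
      @Override
      public void onCreateSuccess(SessionDescription sdp) {
        peerConnection.setLocalDescription(new SdpObserverAdapter(), sdp);
      }
    }, new MediaConstraints());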
+ 55 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/SessionDescription.java

@@ -0,0 +1,55 @@
+/*
+ *  Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.Locale;
+
+/**
+ * Description of an RFC 4566 Session.
+ * SDPs are passed as serialized Strings in Java-land and are materialized
+ * to SessionDescriptionInterface as appropriate in the JNI layer.
+ */
+public class SessionDescription {
+  /** Java-land enum version of SessionDescriptionInterface's type() string. */
+  public static enum Type {
+    OFFER,
+    PRANSWER,
+    ANSWER;
+
+    public String canonicalForm() {
+      return name().toLowerCase(Locale.US);
+    }
+
+    @CalledByNative("Type")
+    public static Type fromCanonicalForm(String canonical) {
+      return Type.valueOf(Type.class, canonical.toUpperCase(Locale.US));
+    }
+  }
+
+  public final Type type;
+  public final String description;
+
+  @CalledByNative
+  public SessionDescription(Type type, String description) {
+    this.type = type;
+    this.description = description;
+  }
+
+  @CalledByNative
+  String getDescription() {
+    return description;
+  }
+
+  @CalledByNative
+  String getTypeInCanonicalForm() {
+    return type.canonicalForm();
+  }
+}

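The canonical-form helpers are what let the type survive a string-based signaling channel; a round-trip sketch:

    // Sending side: serialize type and SDP separately.
    String typeString = offer.type.canonicalForm(); // e.g. "offer"
    String sdpString = offer.description;

    // Receiving side: rebuild the description from the two strings.
    SessionDescription remote = new SessionDescription(
        SessionDescription.Type.fromCanonicalForm(typeString), sdpString);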
+ 53 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/SoftwareVideoDecoderFactory.java

@@ -0,0 +1,53 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+public class SoftwareVideoDecoderFactory implements VideoDecoderFactory {
+  @Deprecated
+  @Override
+  public VideoDecoder createDecoder(String codecType) {
+    return createDecoder(new VideoCodecInfo(codecType, new HashMap<>()));
+  }
+
+  @Override
+  public VideoDecoder createDecoder(VideoCodecInfo codecType) {
+    if (codecType.getName().equalsIgnoreCase("VP8")) {
+      return new LibvpxVp8Decoder();
+    }
+    if (codecType.getName().equalsIgnoreCase("VP9") && LibvpxVp9Decoder.nativeIsSupported()) {
+      return new LibvpxVp9Decoder();
+    }
+
+    return null;
+  }
+
+  @Override
+  public VideoCodecInfo[] getSupportedCodecs() {
+    return supportedCodecs();
+  }
+
+  static VideoCodecInfo[] supportedCodecs() {
+    List<VideoCodecInfo> codecs = new ArrayList<VideoCodecInfo>();
+
+    codecs.add(new VideoCodecInfo("VP8", new HashMap<>()));
+    if (LibvpxVp9Decoder.nativeIsSupported()) {
+      codecs.add(new VideoCodecInfo("VP9", new HashMap<>()));
+    }
+
+    return codecs.toArray(new VideoCodecInfo[codecs.size()]);
+  }
+}

+ 47 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/SoftwareVideoEncoderFactory.java

@@ -0,0 +1,47 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+public class SoftwareVideoEncoderFactory implements VideoEncoderFactory {
+
+  @Override
+  public VideoEncoder createEncoder(VideoCodecInfo info) {
+    if (info.name.equalsIgnoreCase("VP8")) {
+      return new LibvpxVp8Encoder();
+    }
+    if (info.name.equalsIgnoreCase("VP9") && LibvpxVp9Encoder.nativeIsSupported()) {
+      return new LibvpxVp9Encoder();
+    }
+
+    return null;
+  }
+
+  @Override
+  public VideoCodecInfo[] getSupportedCodecs() {
+    return supportedCodecs();
+  }
+
+  static VideoCodecInfo[] supportedCodecs() {
+    List<VideoCodecInfo> codecs = new ArrayList<VideoCodecInfo>();
+
+    codecs.add(new VideoCodecInfo("VP8", new HashMap<>()));
+    if (LibvpxVp9Encoder.nativeIsSupported()) {
+      codecs.add(new VideoCodecInfo("VP9", new HashMap<>()));
+    }
+
+    return codecs.toArray(new VideoCodecInfo[codecs.size()]);
+  }
+}

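The two software factories are typically handed to the PeerConnectionFactory builder when hardware codecs are unavailable or unwanted; a sketch, assuming PeerConnectionFactory.initialize() has already been called:

    PeerConnectionFactory factory = PeerConnectionFactory.builder()
        .setVideoEncoderFactory(new SoftwareVideoEncoderFactory())
        .setVideoDecoderFactory(new SoftwareVideoDecoderFactory())
        .createPeerConnectionFactory();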
+ 17 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/StatsObserver.java

@@ -0,0 +1,17 @@
+/*
+ *  Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Interface for observing Stats reports (see webrtc::StatsObservers). */
+public interface StatsObserver {
+  /** Called when the reports are ready. */
+  @CalledByNative public void onComplete(StatsReport[] reports);
+}

+ 63 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/StatsReport.java

@@ -0,0 +1,63 @@
+/*
+ *  Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java version of webrtc::StatsReport. */
+public class StatsReport {
+  /** Java version of webrtc::StatsReport::Value. */
+  public static class Value {
+    public final String name;
+    public final String value;
+
+    @CalledByNative("Value")
+    public Value(String name, String value) {
+      this.name = name;
+      this.value = value;
+    }
+
+    @Override
+    public String toString() {
+      StringBuilder builder = new StringBuilder();
+      builder.append("[").append(name).append(": ").append(value).append("]");
+      return builder.toString();
+    }
+  }
+
+  public final String id;
+  public final String type;
+  // Time since 1970-01-01T00:00:00Z in milliseconds.
+  public final double timestamp;
+  public final Value[] values;
+
+  @CalledByNative
+  public StatsReport(String id, String type, double timestamp, Value[] values) {
+    this.id = id;
+    this.type = type;
+    this.timestamp = timestamp;
+    this.values = values;
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder builder = new StringBuilder();
+    builder.append("id: ")
+        .append(id)
+        .append(", type: ")
+        .append(type)
+        .append(", timestamp: ")
+        .append(timestamp)
+        .append(", values: ");
+    for (int i = 0; i < values.length; ++i) {
+      builder.append(values[i].toString()).append(", ");
+    }
+    return builder.toString();
+  }
+}

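StatsObserver and StatsReport are consumed together through the legacy getStats() API; a sketch, assuming an established peerConnection:

    peerConnection.getStats(reports -> {
      for (StatsReport report : reports) {
        Logging.d("App", report.toString());
      }
    }, /* track= */ null); // null requests stats for all tracks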
+ 160 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/SurfaceEglRenderer.java

@@ -0,0 +1,160 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.view.SurfaceHolder;
+import java.util.concurrent.CountDownLatch;
+
+/**
+ * Display the video stream on a Surface.
+ * renderFrame() is asynchronous to avoid blocking the calling thread.
+ * This class is thread safe and handles access from potentially three different threads:
+ * Interaction from the main app in init, release and setMirror.
+ * Interaction from C++ rtc::VideoSinkInterface in renderFrame.
+ * Interaction from SurfaceHolder lifecycle in surfaceCreated, surfaceChanged, and surfaceDestroyed.
+ */
+public class SurfaceEglRenderer extends EglRenderer implements SurfaceHolder.Callback {
+  private static final String TAG = "SurfaceEglRenderer";
+
+  // Callback for reporting renderer events. Read-only after initialization so no lock required.
+  private RendererCommon.RendererEvents rendererEvents;
+
+  private final Object layoutLock = new Object();
+  private boolean isRenderingPaused;
+  private boolean isFirstFrameRendered;
+  private int rotatedFrameWidth;
+  private int rotatedFrameHeight;
+  private int frameRotation;
+
+  /**
+   * In order to render something, you must first call init().
+   */
+  public SurfaceEglRenderer(String name) {
+    super(name);
+  }
+
+  /**
+   * Initialize this class, sharing resources with |sharedContext|. The custom |drawer| will be used
+   * for drawing frames on the EGLSurface. This class is responsible for calling release() on
+   * |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
+   * init()/release() cycle.
+   */
+  public void init(final EglBase.Context sharedContext,
+      RendererCommon.RendererEvents rendererEvents, final int[] configAttributes,
+      RendererCommon.GlDrawer drawer) {
+    ThreadUtils.checkIsOnMainThread();
+    this.rendererEvents = rendererEvents;
+    synchronized (layoutLock) {
+      isFirstFrameRendered = false;
+      rotatedFrameWidth = 0;
+      rotatedFrameHeight = 0;
+      frameRotation = 0;
+    }
+    super.init(sharedContext, configAttributes, drawer);
+  }
+
+  @Override
+  public void init(final EglBase.Context sharedContext, final int[] configAttributes,
+      RendererCommon.GlDrawer drawer) {
+    init(sharedContext, null /* rendererEvents */, configAttributes, drawer);
+  }
+
+  /**
+   * Limit render framerate.
+   *
+   * @param fps Limit render framerate to this value, or use Float.POSITIVE_INFINITY to disable fps
+   *            reduction.
+   */
+  @Override
+  public void setFpsReduction(float fps) {
+    synchronized (layoutLock) {
+      isRenderingPaused = fps == 0f;
+    }
+    super.setFpsReduction(fps);
+  }
+
+  @Override
+  public void disableFpsReduction() {
+    synchronized (layoutLock) {
+      isRenderingPaused = false;
+    }
+    super.disableFpsReduction();
+  }
+
+  @Override
+  public void pauseVideo() {
+    synchronized (layoutLock) {
+      isRenderingPaused = true;
+    }
+    super.pauseVideo();
+  }
+
+  // VideoSink interface.
+  @Override
+  public void onFrame(VideoFrame frame) {
+    updateFrameDimensionsAndReportEvents(frame);
+    super.onFrame(frame);
+  }
+
+  // SurfaceHolder.Callback interface.
+  @Override
+  public void surfaceCreated(final SurfaceHolder holder) {
+    ThreadUtils.checkIsOnMainThread();
+    createEglSurface(holder.getSurface());
+  }
+
+  @Override
+  public void surfaceDestroyed(SurfaceHolder holder) {
+    ThreadUtils.checkIsOnMainThread();
+    final CountDownLatch completionLatch = new CountDownLatch(1);
+    releaseEglSurface(completionLatch::countDown);
+    ThreadUtils.awaitUninterruptibly(completionLatch);
+  }
+
+  @Override
+  public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
+    ThreadUtils.checkIsOnMainThread();
+    logD("surfaceChanged: format: " + format + " size: " + width + "x" + height);
+  }
+
+  // Update frame dimensions and report any changes to |rendererEvents|.
+  private void updateFrameDimensionsAndReportEvents(VideoFrame frame) {
+    synchronized (layoutLock) {
+      if (isRenderingPaused) {
+        return;
+      }
+      if (!isFirstFrameRendered) {
+        isFirstFrameRendered = true;
+        logD("Reporting first rendered frame.");
+        if (rendererEvents != null) {
+          rendererEvents.onFirstFrameRendered();
+        }
+      }
+      if (rotatedFrameWidth != frame.getRotatedWidth()
+          || rotatedFrameHeight != frame.getRotatedHeight()
+          || frameRotation != frame.getRotation()) {
+        logD("Reporting frame resolution changed to " + frame.getBuffer().getWidth() + "x"
+            + frame.getBuffer().getHeight() + " with rotation " + frame.getRotation());
+        if (rendererEvents != null) {
+          rendererEvents.onFrameResolutionChanged(
+              frame.getBuffer().getWidth(), frame.getBuffer().getHeight(), frame.getRotation());
+        }
+        rotatedFrameWidth = frame.getRotatedWidth();
+        rotatedFrameHeight = frame.getRotatedHeight();
+        frameRotation = frame.getRotation();
+      }
+    }
+  }
+
+  private void logD(String string) {
+    Logging.d(TAG, name + ": " + string);
+  }
+}

+ 327 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/SurfaceTextureHelper.java

@@ -0,0 +1,327 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.annotation.TargetApi;
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.os.Build;
+import android.os.Handler;
+import android.os.HandlerThread;
+
+import java.util.concurrent.Callable;
+
+import org.webrtc.VideoFrame.TextureBuffer;
+
+/**
+ * Helper class for using a SurfaceTexture to create WebRTC VideoFrames. In order to create WebRTC
+ * VideoFrames, render onto the SurfaceTexture. The frames will be delivered to the listener. Only
+ * one texture frame can be in flight at once, so the frame must be released in order to receive a
+ * new frame. Call stopListening() to stop receiving new frames. Call dispose() to release all
+ * resources once the texture frame is released.
+ */
+public class SurfaceTextureHelper {
+  private static final String TAG = "SurfaceTextureHelper";
+  /**
+   * Construct a new SurfaceTextureHelper sharing OpenGL resources with |sharedContext|. A dedicated
+   * thread and handler is created for handling the SurfaceTexture. May return null if EGL fails to
+   * initialize a pixel buffer surface and make it current. If alignTimestamps is true, the frame
+   * timestamps will be aligned to rtc::TimeNanos(). If frame timestamps are aligned to
+   * rtc::TimeNanos() there is no need for aligning timestamps again in
+   * PeerConnectionFactory.createVideoSource(). This makes the timestamps more accurate and
+   * closer to actual creation time.
+   */
+  public static SurfaceTextureHelper create(final String threadName,
+      final EglBase.Context sharedContext, boolean alignTimestamps,
+      final YuvConverter yuvConverter) {
+    final HandlerThread thread = new HandlerThread(threadName);
+    thread.start();
+    final Handler handler = new Handler(thread.getLooper());
+
+    // The onFrameAvailable() callback will be executed on the SurfaceTexture ctor thread. See:
+    // http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/graphics/SurfaceTexture.java#195.
+    // Therefore, in order to control the callback thread on API levels < 21, the
+    // SurfaceTextureHelper
+    // is constructed on the |handler| thread.
+    return ThreadUtils.invokeAtFrontUninterruptibly(handler, new Callable<SurfaceTextureHelper>() {
+
+      @Override
+      public SurfaceTextureHelper call() {
+        try {
+          return new SurfaceTextureHelper(sharedContext, handler, alignTimestamps, yuvConverter);
+        } catch (RuntimeException e) {
+          Logging.e(TAG, threadName + " create failure", e);
+          return null;
+        }
+      }
+    });
+  }
+
+  /**
+   * Same as above with alignTimestamps set to false and yuvConverter set to new YuvConverter.
+   *
+   * @see #create(String, EglBase.Context, boolean, YuvConverter)
+   */
+  public static SurfaceTextureHelper create(
+      final String threadName, final EglBase.Context sharedContext) {
+    return create(threadName, sharedContext, /* alignTimestamps= */ false, new YuvConverter());
+  }
+
+  /**
+   * Same as above with yuvConverter set to new YuvConverter.
+   *
+   * @see #create(String, EglBase.Context, boolean, YuvConverter)
+   */
+  public static SurfaceTextureHelper create(
+      final String threadName, final EglBase.Context sharedContext, boolean alignTimestamps) {
+    return create(threadName, sharedContext, alignTimestamps, new YuvConverter());
+  }
+
+  private final Handler handler;
+  private final EglBase eglBase;
+  private final SurfaceTexture surfaceTexture;
+  private final int oesTextureId;
+  private final YuvConverter yuvConverter;
+  private final TimestampAligner timestampAligner;
+
+  // These variables are only accessed from the |handler| thread.
+  private VideoSink listener;
+  // The possible states of this class.
+  private boolean hasPendingTexture;
+  private volatile boolean isTextureInUse;
+  private boolean isQuitting;
+  private int frameRotation;
+  private int textureWidth;
+  private int textureHeight;
+  // |pendingListener| is set in setListener() and the runnable is posted to the handler thread.
+  // setListener() is not allowed to be called again before stopListening(), so this is thread safe.
+  private VideoSink pendingListener;
+  final Runnable setListenerRunnable = new Runnable() {
+    @Override
+    public void run() {
+      Logging.d(TAG, "Setting listener to " + pendingListener);
+      listener = pendingListener;
+      pendingListener = null;
+      // May have a pending frame from the previous capture session - drop it.
+      if (hasPendingTexture) {
+        // Calling updateTexImage() is necessary in order to receive new frames.
+        updateTexImage();
+        hasPendingTexture = false;
+      }
+    }
+  };
+
+  private SurfaceTextureHelper(EglBase.Context sharedContext, Handler handler,
+      boolean alignTimestamps, YuvConverter yuvConverter) {
+    if (handler.getLooper().getThread() != Thread.currentThread()) {
+      throw new IllegalStateException("SurfaceTextureHelper must be created on the handler thread");
+    }
+    this.handler = handler;
+    this.timestampAligner = alignTimestamps ? new TimestampAligner() : null;
+    this.yuvConverter = yuvConverter;
+
+    eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER);
+    try {
+      // Both these statements have been observed to fail on rare occasions, see BUG=webrtc:5682.
+      eglBase.createDummyPbufferSurface();
+      eglBase.makeCurrent();
+    } catch (RuntimeException e) {
+      // Clean up before rethrowing the exception.
+      eglBase.release();
+      handler.getLooper().quit();
+      throw e;
+    }
+
+    oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+    surfaceTexture = new SurfaceTexture(oesTextureId);
+    setOnFrameAvailableListener(surfaceTexture, (SurfaceTexture st) -> {
+      hasPendingTexture = true;
+      tryDeliverTextureFrame();
+    }, handler);
+  }
+
+  @TargetApi(21)
+  private static void setOnFrameAvailableListener(SurfaceTexture surfaceTexture,
+      SurfaceTexture.OnFrameAvailableListener listener, Handler handler) {
+    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
+      surfaceTexture.setOnFrameAvailableListener(listener, handler);
+    } else {
+      // The documentation states that the listener will be called on an arbitrary thread, but in
+      // practice, it is always the thread on which the SurfaceTexture was constructed. There are
+      // assertions in place in case this ever changes. For API >= 21, we use the new API to
+      // explicitly specify the handler.
+      surfaceTexture.setOnFrameAvailableListener(listener);
+    }
+  }
+
+  /**
+   * Start to stream textures to the given |listener|. If you need to change listener, you need to
+   * call stopListening() first.
+   */
+  public void startListening(final VideoSink listener) {
+    if (this.listener != null || this.pendingListener != null) {
+      throw new IllegalStateException("SurfaceTextureHelper listener has already been set.");
+    }
+    this.pendingListener = listener;
+    handler.post(setListenerRunnable);
+  }
+
+  /**
+   * Stop listening. The listener set in startListening() is guaranteed to not receive any more
+   * onFrame() callbacks after this function returns.
+   */
+  public void stopListening() {
+    Logging.d(TAG, "stopListening()");
+    handler.removeCallbacks(setListenerRunnable);
+    ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> {
+      listener = null;
+      pendingListener = null;
+    });
+  }
+
+  /**
+   * Use this function to set the texture size. Note, do not call setDefaultBufferSize() yourself
+   * since this class needs to be aware of the texture size.
+   */
+  public void setTextureSize(int textureWidth, int textureHeight) {
+    if (textureWidth <= 0) {
+      throw new IllegalArgumentException("Texture width must be positive, but was " + textureWidth);
+    }
+    if (textureHeight <= 0) {
+      throw new IllegalArgumentException(
+          "Texture height must be positive, but was " + textureHeight);
+    }
+    surfaceTexture.setDefaultBufferSize(textureWidth, textureHeight);
+    handler.post(() -> {
+      this.textureWidth = textureWidth;
+      this.textureHeight = textureHeight;
+    });
+  }
+
+  /** Set the rotation of the delivered frames. */
+  public void setFrameRotation(int rotation) {
+    handler.post(() -> this.frameRotation = rotation);
+  }
+
+  /**
+   * Retrieve the underlying SurfaceTexture. The SurfaceTexture should be passed in to a video
+   * producer such as a camera or decoder.
+   */
+  public SurfaceTexture getSurfaceTexture() {
+    return surfaceTexture;
+  }
+
+  /** Retrieve the handler that calls onFrame(). This handler is valid until dispose() is called. */
+  public Handler getHandler() {
+    return handler;
+  }
+
+  /**
+   * This function is called when the texture frame is released. Only one texture frame can be in
+   * flight at once, so this function must be called before a new frame is delivered.
+   */
+  private void returnTextureFrame() {
+    handler.post(() -> {
+      isTextureInUse = false;
+      if (isQuitting) {
+        release();
+      } else {
+        tryDeliverTextureFrame();
+      }
+    });
+  }
+
+  public boolean isTextureInUse() {
+    return isTextureInUse;
+  }
+
+  /**
+   * Call dispose() to stop receiving frames. OpenGL resources are released and the handler is
+   * stopped when the texture frame has been released. You are guaranteed to not receive any more
+   * onFrame() after this function returns.
+   */
+  public void dispose() {
+    Logging.d(TAG, "dispose()");
+    ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> {
+      isQuitting = true;
+      if (!isTextureInUse) {
+        release();
+      }
+    });
+  }
+
+  /**
+   * Posts to the correct thread to convert |textureBuffer| to I420.
+   *
+   * @deprecated Use toI420() instead.
+   */
+  @Deprecated
+  public VideoFrame.I420Buffer textureToYuv(final TextureBuffer textureBuffer) {
+    return textureBuffer.toI420();
+  }
+
+  private void updateTexImage() {
+    // SurfaceTexture.updateTexImage apparently can compete and deadlock with eglSwapBuffers,
+    // as observed on Nexus 5. Therefore, synchronize it with the EGL functions.
+    // See https://bugs.chromium.org/p/webrtc/issues/detail?id=5702 for more info.
+    synchronized (EglBase.lock) {
+      surfaceTexture.updateTexImage();
+    }
+  }
+
+  private void tryDeliverTextureFrame() {
+    if (handler.getLooper().getThread() != Thread.currentThread()) {
+      throw new IllegalStateException("Wrong thread.");
+    }
+    if (isQuitting || !hasPendingTexture || isTextureInUse || listener == null) {
+      return;
+    }
+    isTextureInUse = true;
+    hasPendingTexture = false;
+
+    updateTexImage();
+
+    final float[] transformMatrix = new float[16];
+    surfaceTexture.getTransformMatrix(transformMatrix);
+    long timestampNs = surfaceTexture.getTimestamp();
+    if (timestampAligner != null) {
+      timestampNs = timestampAligner.translateTimestamp(timestampNs);
+    }
+    if (textureWidth == 0 || textureHeight == 0) {
+      throw new RuntimeException("Texture size has not been set.");
+    }
+    final VideoFrame.Buffer buffer =
+        new TextureBufferImpl(textureWidth, textureHeight, TextureBuffer.Type.OES, oesTextureId,
+            RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix), handler,
+            yuvConverter, this::returnTextureFrame);
+    final VideoFrame frame = new VideoFrame(buffer, frameRotation, timestampNs);
+    listener.onFrame(frame);
+    frame.release();
+  }
+
+  private void release() {
+    if (handler.getLooper().getThread() != Thread.currentThread()) {
+      throw new IllegalStateException("Wrong thread.");
+    }
+    if (isTextureInUse || !isQuitting) {
+      throw new IllegalStateException("Unexpected release.");
+    }
+    yuvConverter.release();
+    GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
+    surfaceTexture.release();
+    eglBase.release();
+    handler.getLooper().quit();
+    if (timestampAligner != null) {
+      timestampAligner.dispose();
+    }
+  }
+}

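A capture-pipeline sketch tying the helper to a capturer; eglBase, capturer, videoSource and applicationContext are assumed to exist already, and the thread name is arbitrary:

    SurfaceTextureHelper helper =
        SurfaceTextureHelper.create("CaptureThread", eglBase.getEglBaseContext());
    capturer.initialize(helper, applicationContext, videoSource.getCapturerObserver());
    capturer.startCapture(/* width= */ 640, /* height= */ 480, /* framerate= */ 30);

    // Teardown: stop capturing before releasing the helper.
    try {
      capturer.stopCapture();
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
    }
    helper.dispose();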
+ 300 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/SurfaceViewRenderer.java

@@ -0,0 +1,300 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.content.res.Resources.NotFoundException;
+import android.graphics.Point;
+import android.os.Looper;
+import android.util.AttributeSet;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+
+/**
+ * Display the video stream on a SurfaceView.
+ */
+public class SurfaceViewRenderer extends SurfaceView
+    implements SurfaceHolder.Callback, VideoSink, RendererCommon.RendererEvents {
+  private static final String TAG = "SurfaceViewRenderer";
+
+  // Cached resource name.
+  private final String resourceName;
+  private final RendererCommon.VideoLayoutMeasure videoLayoutMeasure =
+      new RendererCommon.VideoLayoutMeasure();
+  private final SurfaceEglRenderer eglRenderer;
+
+  // Callback for reporting renderer events. Read-only after initialization so no lock required.
+  private RendererCommon.RendererEvents rendererEvents;
+
+  // Accessed only on the main thread.
+  private int rotatedFrameWidth;
+  private int rotatedFrameHeight;
+  private boolean enableFixedSize;
+  private int surfaceWidth;
+  private int surfaceHeight;
+
+  /**
+   * Standard View constructor. In order to render something, you must first call init().
+   */
+  public SurfaceViewRenderer(Context context) {
+    super(context);
+    this.resourceName = getResourceName();
+    eglRenderer = new SurfaceEglRenderer(resourceName);
+    getHolder().addCallback(this);
+    getHolder().addCallback(eglRenderer);
+  }
+
+  /**
+   * Standard View constructor. In order to render something, you must first call init().
+   */
+  public SurfaceViewRenderer(Context context, AttributeSet attrs) {
+    super(context, attrs);
+    this.resourceName = getResourceName();
+    eglRenderer = new SurfaceEglRenderer(resourceName);
+    getHolder().addCallback(this);
+    getHolder().addCallback(eglRenderer);
+  }
+
+  /**
+   * Initialize this class, sharing resources with |sharedContext|. It is allowed to call init() to
+   * reinitialize the renderer after a previous init()/release() cycle.
+   */
+  public void init(EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) {
+    init(sharedContext, rendererEvents, EglBase.CONFIG_PLAIN, new GlRectDrawer());
+  }
+
+  /**
+   * Initialize this class, sharing resources with |sharedContext|. The custom |drawer| will be used
+   * for drawing frames on the EGLSurface. This class is responsible for calling release() on
+   * |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
+   * init()/release() cycle.
+   */
+  public void init(final EglBase.Context sharedContext,
+      RendererCommon.RendererEvents rendererEvents, final int[] configAttributes,
+      RendererCommon.GlDrawer drawer) {
+    ThreadUtils.checkIsOnMainThread();
+    this.rendererEvents = rendererEvents;
+    rotatedFrameWidth = 0;
+    rotatedFrameHeight = 0;
+    eglRenderer.init(sharedContext, this /* rendererEvents */, configAttributes, drawer);
+  }
+
+  /**
+   * Block until any pending frame is returned and all GL resources released, even if an interrupt
+   * occurs. If an interrupt occurs during release(), the interrupt flag will be set. This function
+   * should be called before the Activity is destroyed, while the EGLContext is still valid. If
+   * you don't call this function, the GL resources might leak.
+   */
+  public void release() {
+    eglRenderer.release();
+  }
+
+  /**
+   * Register a callback to be invoked when a new video frame has been received.
+   *
+   * @param listener The callback to be invoked. The callback will be invoked on the render thread.
+   *                 It should be lightweight and must not call removeFrameListener.
+   * @param scale    The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
+   *                 required.
+   * @param drawerParam Custom drawer to use for this frame listener.
+   */
+  public void addFrameListener(
+      EglRenderer.FrameListener listener, float scale, RendererCommon.GlDrawer drawerParam) {
+    eglRenderer.addFrameListener(listener, scale, drawerParam);
+  }
+
+  /**
+   * Register a callback to be invoked when a new video frame has been received. This version uses
+   * the drawer of the EglRenderer that was passed in init.
+   *
+   * @param listener The callback to be invoked. The callback will be invoked on the render thread.
+   *                 It should be lightweight and must not call removeFrameListener.
+   * @param scale    The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
+   *                 required.
+   */
+  public void addFrameListener(EglRenderer.FrameListener listener, float scale) {
+    eglRenderer.addFrameListener(listener, scale);
+  }
+
+  public void removeFrameListener(EglRenderer.FrameListener listener) {
+    eglRenderer.removeFrameListener(listener);
+  }
+
+  /**
+   * Enables fixed size for the surface. This provides better performance but might be buggy on some
+   * devices. By default this is turned off.
+   */
+  public void setEnableHardwareScaler(boolean enabled) {
+    ThreadUtils.checkIsOnMainThread();
+    enableFixedSize = enabled;
+    updateSurfaceSize();
+  }
+
+  /**
+   * Set if the video stream should be mirrored or not.
+   */
+  public void setMirror(final boolean mirror) {
+    eglRenderer.setMirror(mirror);
+  }
+
+  /**
+   * Set how the video will fill the allowed layout area.
+   */
+  public void setScalingType(RendererCommon.ScalingType scalingType) {
+    ThreadUtils.checkIsOnMainThread();
+    videoLayoutMeasure.setScalingType(scalingType);
+    requestLayout();
+  }
+
+  public void setScalingType(RendererCommon.ScalingType scalingTypeMatchOrientation,
+      RendererCommon.ScalingType scalingTypeMismatchOrientation) {
+    ThreadUtils.checkIsOnMainThread();
+    videoLayoutMeasure.setScalingType(scalingTypeMatchOrientation, scalingTypeMismatchOrientation);
+    requestLayout();
+  }
+
+  /**
+   * Limit render framerate.
+   *
+   * @param fps Limit render framerate to this value, or use Float.POSITIVE_INFINITY to disable fps
+   *            reduction.
+   */
+  public void setFpsReduction(float fps) {
+    eglRenderer.setFpsReduction(fps);
+  }
+
+  public void disableFpsReduction() {
+    eglRenderer.disableFpsReduction();
+  }
+
+  public void pauseVideo() {
+    eglRenderer.pauseVideo();
+  }
+
+  // VideoSink interface.
+  @Override
+  public void onFrame(VideoFrame frame) {
+    eglRenderer.onFrame(frame);
+  }
+
+  // View layout interface.
+  @Override
+  protected void onMeasure(int widthSpec, int heightSpec) {
+    ThreadUtils.checkIsOnMainThread();
+    Point size =
+        videoLayoutMeasure.measure(widthSpec, heightSpec, rotatedFrameWidth, rotatedFrameHeight);
+    setMeasuredDimension(size.x, size.y);
+    logD("onMeasure(). New size: " + size.x + "x" + size.y);
+  }
+
+  @Override
+  protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
+    ThreadUtils.checkIsOnMainThread();
+    eglRenderer.setLayoutAspectRatio((right - left) / (float) (bottom - top));
+    updateSurfaceSize();
+  }
+
+  private void updateSurfaceSize() {
+    ThreadUtils.checkIsOnMainThread();
+    if (enableFixedSize && rotatedFrameWidth != 0 && rotatedFrameHeight != 0 && getWidth() != 0
+        && getHeight() != 0) {
+      final float layoutAspectRatio = getWidth() / (float) getHeight();
+      final float frameAspectRatio = rotatedFrameWidth / (float) rotatedFrameHeight;
+      final int drawnFrameWidth;
+      final int drawnFrameHeight;
+      if (frameAspectRatio > layoutAspectRatio) {
+        drawnFrameWidth = (int) (rotatedFrameHeight * layoutAspectRatio);
+        drawnFrameHeight = rotatedFrameHeight;
+      } else {
+        drawnFrameWidth = rotatedFrameWidth;
+        drawnFrameHeight = (int) (rotatedFrameWidth / layoutAspectRatio);
+      }
+      // Aspect ratio of the drawn frame and the view is the same.
+      final int width = Math.min(getWidth(), drawnFrameWidth);
+      final int height = Math.min(getHeight(), drawnFrameHeight);
+      logD("updateSurfaceSize. Layout size: " + getWidth() + "x" + getHeight() + ", frame size: "
+          + rotatedFrameWidth + "x" + rotatedFrameHeight + ", requested surface size: " + width
+          + "x" + height + ", old surface size: " + surfaceWidth + "x" + surfaceHeight);
+      if (width != surfaceWidth || height != surfaceHeight) {
+        surfaceWidth = width;
+        surfaceHeight = height;
+        getHolder().setFixedSize(width, height);
+      }
+    } else {
+      surfaceWidth = surfaceHeight = 0;
+      getHolder().setSizeFromLayout();
+    }
+  }
+
+  // SurfaceHolder.Callback interface.
+  @Override
+  public void surfaceCreated(final SurfaceHolder holder) {
+    ThreadUtils.checkIsOnMainThread();
+    surfaceWidth = surfaceHeight = 0;
+    updateSurfaceSize();
+  }
+
+  @Override
+  public void surfaceDestroyed(SurfaceHolder holder) {}
+
+  @Override
+  public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {}
+
+  private String getResourceName() {
+    try {
+      return getResources().getResourceEntryName(getId());
+    } catch (NotFoundException e) {
+      return "";
+    }
+  }
+
+  /**
+   * Post a task to clear the SurfaceView to a transparent uniform color.
+   */
+  public void clearImage() {
+    eglRenderer.clearImage();
+  }
+
+  @Override
+  public void onFirstFrameRendered() {
+    if (rendererEvents != null) {
+      rendererEvents.onFirstFrameRendered();
+    }
+  }
+
+  @Override
+  public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation) {
+    if (rendererEvents != null) {
+      rendererEvents.onFrameResolutionChanged(videoWidth, videoHeight, rotation);
+    }
+    int rotatedWidth = rotation == 0 || rotation == 180 ? videoWidth : videoHeight;
+    int rotatedHeight = rotation == 0 || rotation == 180 ? videoHeight : videoWidth;
+    // Run immediately if possible, for UI thread tests.
+    postOrRun(() -> {
+      rotatedFrameWidth = rotatedWidth;
+      rotatedFrameHeight = rotatedHeight;
+      updateSurfaceSize();
+      requestLayout();
+    });
+  }
+
+  private void postOrRun(Runnable r) {
+    if (Thread.currentThread() == Looper.getMainLooper().getThread()) {
+      r.run();
+    } else {
+      post(r);
+    }
+  }
+
+  private void logD(String string) {
+    Logging.d(TAG, resourceName + ": " + string);
+  }
+}
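
Taken together, SurfaceViewRenderer expects an init()/release() lifecycle bracketed by a valid EGL context: init() on the main thread before frames arrive, release() before the context goes away. A minimal usage sketch, assuming an app that renders a local VideoTrack (RendererLifecycle and its fields are hypothetical names; the renderer and track calls are the ones documented above):

import org.webrtc.EglBase;
import org.webrtc.RendererCommon;
import org.webrtc.SurfaceViewRenderer;
import org.webrtc.VideoTrack;

// Hypothetical helper showing the documented init()/release() lifecycle.
public class RendererLifecycle {
  private final EglBase eglBase = EglBase.create();

  // Call on the main thread, e.g. from Activity.onCreate().
  public void attach(SurfaceViewRenderer view, VideoTrack track) {
    view.init(eglBase.getEglBaseContext(), null /* rendererEvents */);
    view.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT);
    view.setMirror(true);           // mirror the local camera preview
    view.setEnableHardwareScaler(false);
    track.addSink(view);            // SurfaceViewRenderer implements VideoSink
  }

  // Call before the Activity is destroyed, while the EGL context is valid.
  public void detach(SurfaceViewRenderer view, VideoTrack track) {
    track.removeSink(view);
    view.release();                 // blocks until GL resources are freed
    eglBase.release();
  }
}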

+ 0 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/TextureBufferImpl.java


Some files were not shown because too many files changed in this diff