weizhengliang 3 years ago
parent
commit
9a937fc4d5
100 changed files with 8613 additions and 207 deletions
  1. +10 -7     app/build.gradle
  2. +3 -1      app/src/main/AndroidManifest.xml
  3. +22 -8     app/src/main/code/com/wdkl/app/ncs/application/Application.kt
  4. +3 -6      bedlib/src/main/java/serialporttest/utils/SerialPortUtil.java
  5. +19 -9     build.gradle
  6. +10 -2     callingbed/build.gradle
  7. +460 -137  callingbed/src/main/java/com/wdkl/app/ncs/callingbed/activity/CallingbedActivity.kt
  8. +143 -0    callingbed/src/main/java/com/wdkl/app/ncs/callingbed/fragment/BaseCallFragment.kt
  9. +3 -2      callingbed/src/main/java/com/wdkl/app/ncs/callingbed/fragment/MainFragment.kt
  10. +41 -13   callingbed/src/main/java/com/wdkl/app/ncs/callingbed/fragment/QrCodeFragment.kt
  11. +613 -0   callingbed/src/main/java/com/wdkl/app/ncs/callingbed/fragment/SkyCallFragment.kt
  12. +251 -0   callingbed/src/main/java/com/wdkl/app/ncs/callingbed/helper/AnrFcExceptionUtil.java
  13. +35 -0    callingbed/src/main/java/com/wdkl/app/ncs/callingbed/helper/AppUpdateHelper.java
  14. +16 -8    callingbed/src/main/java/com/wdkl/app/ncs/callingbed/helper/NetHelper.java
  15. +24 -0    callingbed/src/main/java/com/wdkl/app/ncs/callingbed/helper/RingPlayHelper.java
  16. +81 -0    callingbed/src/main/java/com/wdkl/app/ncs/callingbed/helper/ScreenManagerUtil.kt
  17. +12 -0    callingbed/src/main/java/com/wdkl/app/ncs/callingbed/helper/SerialPortHelper.java
  18. +17 -0    callingbed/src/main/java/com/wdkl/app/ncs/callingbed/helper/Utils.java
  19. +184 -0   callingbed/src/main/java/com/wdkl/app/ncs/callingbed/helper/VoiceManagerUtil.java
  20. +51 -0    callingbed/src/main/java/com/wdkl/app/ncs/callingbed/helper/WarningDialogHelper.java
  21. +354 -0   callingbed/src/main/java/com/wdkl/app/ncs/callingbed/settings/SettingConfig.java
  22. +0 -0     callingbed/src/main/java/com/wdkl/app/ncs/callingbed/sip/SipHelper.java.bak
  23. +7 -0     callingbed/src/main/res/anim/slide_down_in.xml
  24. +7 -0     callingbed/src/main/res/anim/slide_left_in.xml
  25. +7 -0     callingbed/src/main/res/anim/slide_right_out.xml
  26. +7 -0     callingbed/src/main/res/anim/slide_up_out.xml
  27. BIN       callingbed/src/main/res/drawable/ic_answer_normal.png
  28. BIN       callingbed/src/main/res/drawable/ic_answer_press.png
  29. BIN       callingbed/src/main/res/drawable/ic_hangup_normal.png
  30. BIN       callingbed/src/main/res/drawable/ic_hangup_press.png
  31. BIN       callingbed/src/main/res/drawable/ic_nurse.png
  32. +5 -0     callingbed/src/main/res/drawable/selector_call_answer.xml
  33. +5 -0     callingbed/src/main/res/drawable/selector_call_hangup.xml
  34. +6 -0     callingbed/src/main/res/layout/callingbed_main_lay.xml
  35. +13 -4    callingbed/src/main/res/layout/qrcode_view.xml
  36. +134 -0   callingbed/src/main/res/layout/sky_voice_call_layout.xml
  37. +4 -4     callingbed/src/main/res/layout/view_title_layout.xml
  38. +27 -0    callingbed/src/main/res/layout/warning_dialog_lay.xml
  39. BIN       callingbed/src/main/res/raw/incoming_call.mp3
  40. BIN       callingbed/src/main/res/raw/ring_back2.wav
  41. +6 -0     common/build.gradle
  42. +16 -3    common/src/main/code/com/wdkl/ncs/android/lib/base/BaseActivity.kt
  43. +5 -0     common/src/main/code/com/wdkl/ncs/android/lib/utils/AppTool.kt
  44. +23 -2    common/src/main/code/com/wdkl/ncs/android/lib/utils/ConnectionObserver.kt
  45. +20 -0    common/src/main/code/com/wdkl/ncs/android/lib/utils/TimeHandle.kt
  46. +57 -0    common/src/main/code/com/wdkl/ncs/android/lib/widget/MenuDialog.kt
  47. +108 -0   common/src/main/res/layout/menu_dialog_lay.xml
  48. +5 -0     extra/build.gradle
  49. +1 -1     gradle.properties
  50. +5 -0     hello/build.gradle
  51. +5 -0     home/build.gradle
  52. +1 -0     libwebrtc/.gitignore
  53. +48 -0    libwebrtc/build.gradle
  54. BIN       libwebrtc/libs/arm64-v8a/libjingle_peerconnection_so.so
  55. BIN       libwebrtc/libs/armeabi-v7a/libjingle_peerconnection_so.so
  56. +21 -0    libwebrtc/proguard-rules.pro
  57. +2 -0     libwebrtc/src/main/AndroidManifest.xml
  58. +51 -0    libwebrtc/src/main/java/modules/audio_device/android/java/src/org/webrtc/voiceengine/BuildInfo.java
  59. +324 -0   libwebrtc/src/main/java/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java
  60. +378 -0   libwebrtc/src/main/java/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java
  61. +409 -0   libwebrtc/src/main/java/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java
  62. +524 -0   libwebrtc/src/main/java/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
  63. +388 -0   libwebrtc/src/main/java/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java
  64. +46 -0    libwebrtc/src/main/java/rtc_base/java/src/org/webrtc/ContextUtils.java
  65. +22 -0    libwebrtc/src/main/java/rtc_base/java/src/org/webrtc/Loggable.java
  66. +199 -0   libwebrtc/src/main/java/rtc_base/java/src/org/webrtc/Logging.java
  67. +2 -0     libwebrtc/src/main/java/rtc_base/java/src/org/webrtc/OWNERS
  68. +45 -0    libwebrtc/src/main/java/rtc_base/java/src/org/webrtc/Size.java
  69. +214 -0   libwebrtc/src/main/java/rtc_base/java/src/org/webrtc/ThreadUtils.java
  70. +21 -0    libwebrtc/src/main/java/sdk/android/api/org/webrtc/AudioDecoderFactoryFactory.java
  71. +21 -0    libwebrtc/src/main/java/sdk/android/api/org/webrtc/AudioEncoderFactoryFactory.java
  72. +20 -0    libwebrtc/src/main/java/sdk/android/api/org/webrtc/AudioProcessingFactory.java
  73. +26 -0    libwebrtc/src/main/java/sdk/android/api/org/webrtc/AudioSource.java
  74. +32 -0    libwebrtc/src/main/java/sdk/android/api/org/webrtc/AudioTrack.java
  75. +23 -0    libwebrtc/src/main/java/sdk/android/api/org/webrtc/BuiltinAudioDecoderFactoryFactory.java
  76. +23 -0    libwebrtc/src/main/java/sdk/android/api/org/webrtc/BuiltinAudioEncoderFactoryFactory.java
  77. +41 -0    libwebrtc/src/main/java/sdk/android/api/org/webrtc/CallSessionFileRotatingLogSink.java
  78. +35 -0    libwebrtc/src/main/java/sdk/android/api/org/webrtc/Camera1Capturer.java
  79. +186 -0   libwebrtc/src/main/java/sdk/android/api/org/webrtc/Camera1Enumerator.java
  80. +37 -0    libwebrtc/src/main/java/sdk/android/api/org/webrtc/Camera2Capturer.java
  81. +245 -0   libwebrtc/src/main/java/sdk/android/api/org/webrtc/Camera2Enumerator.java
  82. +206 -0   libwebrtc/src/main/java/sdk/android/api/org/webrtc/CameraEnumerationAndroid.java
  83. +25 -0    libwebrtc/src/main/java/sdk/android/api/org/webrtc/CameraEnumerator.java
  84. +167 -0   libwebrtc/src/main/java/sdk/android/api/org/webrtc/CameraVideoCapturer.java
  85. +27 -0    libwebrtc/src/main/java/sdk/android/api/org/webrtc/CapturerObserver.java
  86. +145 -0   libwebrtc/src/main/java/sdk/android/api/org/webrtc/CryptoOptions.java
  87. +195 -0   libwebrtc/src/main/java/sdk/android/api/org/webrtc/DataChannel.java
  88. +68 -0    libwebrtc/src/main/java/sdk/android/api/org/webrtc/DefaultVideoDecoderFactory.java
  89. +66 -0    libwebrtc/src/main/java/sdk/android/api/org/webrtc/DefaultVideoEncoderFactory.java
  90. +96 -0    libwebrtc/src/main/java/sdk/android/api/org/webrtc/DtmfSender.java
  91. +202 -0   libwebrtc/src/main/java/sdk/android/api/org/webrtc/EglBase.java
  92. +20 -0    libwebrtc/src/main/java/sdk/android/api/org/webrtc/EglBase10.java
  93. +20 -0    libwebrtc/src/main/java/sdk/android/api/org/webrtc/EglBase14.java
  94. +753 -0   libwebrtc/src/main/java/sdk/android/api/org/webrtc/EglRenderer.java
  95. +139 -0   libwebrtc/src/main/java/sdk/android/api/org/webrtc/EncodedImage.java
  96. +22 -0    libwebrtc/src/main/java/sdk/android/api/org/webrtc/FecControllerFactoryFactoryInterface.java
  97. +201 -0   libwebrtc/src/main/java/sdk/android/api/org/webrtc/FileVideoCapturer.java
  98. +26 -0    libwebrtc/src/main/java/sdk/android/api/org/webrtc/FrameDecryptor.java
  99. +26 -0    libwebrtc/src/main/java/sdk/android/api/org/webrtc/FrameEncryptor.java
  100. +0 -0    libwebrtc/src/main/java/sdk/android/api/org/webrtc/GlRectDrawer.java

+ 10 - 7
app/build.gradle

@@ -31,6 +31,8 @@ android {
     buildToolsVersion build_tools_version
     aaptOptions.cruncherEnabled = false
     aaptOptions.useNewCruncher = false
+    aaptOptions.noCompress("mp3","wav")
+
     defaultConfig {
         applicationId "com.wdkl.app.ncs.callingbed"
         minSdkVersion min_sdk_version
@@ -41,10 +43,10 @@ android {
         dataBinding {
             enabled = true
         }
-        ndk {
+        //ndk {
            //select which CPU ABIs' .so libraries to include
-            abiFilters 'armeabi', 'armeabi-v7a', 'armeabi-v8a' ,'x86', 'x86_64', 'mips', 'mips64'
-        }
+        //    abiFilters 'armeabi', 'armeabi-v7a', 'armeabi-v8a' ,'x86', 'x86_64', 'mips', 'mips64'
+        //}
         testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
     }
 
@@ -85,7 +87,7 @@ android {
 }
 
 dependencies {
-    compile fileTree(include: ['*.jar'], dir: 'libs')
+    //compile fileTree(include: ['*.jar'], dir: 'libs')
     androidTestCompile('com.android.support.test.espresso:espresso-core:2.2.2', {
         exclude group: 'com.android.support', module: 'support-annotations'
     })
@@ -118,11 +120,12 @@ dependencies {
     if (!componentTag) {
        compile project(':welcome') // ===> startup module; ads, welcome/splash pages belong here
        compile project(':home')   // ===> home module; usually the app home page and category floors
-        compile project(':shop')      // ===> shop module; shop list, details, etc.
-        compile project(':setting')   // ===> settings module; settings, cache, app sharing, etc.
+        //compile project(':shop')      // ===> shop module; shop list, details, etc.
+        //compile project(':setting')   // ===> settings module; settings, cache, app sharing, etc.
        compile project(':extra')    // ===> extra pages, e.g. QR-code scanning and other add-ons
-        compile project(':hello')
+        //compile project(':hello')
         compile project(':callingbed')
+        //compile project(':sip')
     }
     /**
     * JavaShopAndroid middleware dependencies

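Note on `aaptOptions.noCompress("mp3","wav")` above: AAPT compresses most assets by default, and compressed APK entries cannot be opened through a raw file descriptor. Keeping the new ring tones (`incoming_call.mp3`, `ring_back2.wav`) uncompressed preserves the cheap descriptor-based playback path. A minimal Kotlin sketch of that path (the function name is illustrative, not from this commit):

```kotlin
import android.content.Context
import android.media.MediaPlayer

// openRawResourceFd only works for APK entries stored uncompressed,
// which is exactly what aaptOptions.noCompress("mp3","wav") guarantees.
// Assumes the app's R class is in scope; R.raw.incoming_call is added by this commit.
fun playIncomingRing(context: Context): MediaPlayer {
    val afd = context.resources.openRawResourceFd(R.raw.incoming_call)
    return MediaPlayer().apply {
        setDataSource(afd.fileDescriptor, afd.startOffset, afd.length)
        afd.close()
        isLooping = true
        prepare()
        start()
    }
}
```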
+ 3 - 1
app/src/main/AndroidManifest.xml

@@ -19,6 +19,7 @@
     <uses-permission android:name="android.permission.CAMERA"/>
     <uses-permission android:name="android.permission.BLUETOOTH"/>
     <uses-permission android:name="android.permission.RECORD_AUDIO"/>
+    <uses-permission android:name="android.permission.WRITE_SETTINGS" />
 
     <uses-permission android:name ="android.permission.WAKE_LOCK" />
     <uses-permission android:name="android.permission.ACCESS_LOCATION_EXTRA_COMMANDS"/>
@@ -28,6 +29,7 @@
         android:label="@string/javashop_app_name"
         android:supportsRtl="true"
         tools:replace="android:label"
+        tools:remove="android:requestLegacyExternalStorage"
         android:networkSecurityConfig="@xml/network_security_config"
         android:name="com.wdkl.app.ncs.application.Application"
         android:theme="@style/MyAppTheme">
@@ -43,7 +45,7 @@
 
         <activity android:name="com.wdkl.ncs.android.component.welcome.activity.WelcomeActivity"
             android:screenOrientation="landscape"
-            >
+            android:launchMode="singleTask">
             <intent-filter>
                 <action android:name="android.intent.action.MAIN"/>
 

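Note on the new `WRITE_SETTINGS` permission: on API 23+ it is a special "appop" permission, so the manifest entry alone is not enough on stock Android; it must be granted before the brightness/volume helpers added elsewhere in this commit can write to `Settings.System`. A sketch of the standard check (not part of the commit; on dedicated bedside hardware the grant may already be baked in):

```kotlin
import android.content.Context
import android.content.Intent
import android.net.Uri
import android.os.Build
import android.provider.Settings

// Routes the user to the system "modify settings" screen when the
// WRITE_SETTINGS appop has not been granted yet (API 23+ only).
fun ensureWriteSettings(context: Context) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && !Settings.System.canWrite(context)) {
        val intent = Intent(Settings.ACTION_MANAGE_WRITE_SETTINGS,
                Uri.parse("package:" + context.packageName))
        intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK)
        context.startActivity(intent)
    }
}
```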
+ 22 - 8
app/src/main/code/com/wdkl/app/ncs/application/Application.kt

@@ -5,7 +5,14 @@ import com.wdkl.ncs.android.lib.base.BaseApplication
 import com.enation.javashop.net.engine.config.NetEngineConfig
 import com.enation.javashop.net.engine.plugin.exception.RestfulExceptionInterceptor
 import com.enation.javashop.utils.base.config.BaseConfig
+import com.wdkl.app.ncs.callingbed.helper.AnrFcExceptionUtil
 import com.wdkl.app.ncs.callingbed.helper.NetHelper
+import com.wdkl.app.ncs.callingbed.helper.Utils
+import com.wdkl.core.socket.SocketManager
+import com.wdkl.core.voip.VoipEvent
+import com.wdkl.net.HttpRequestPresenter
+import com.wdkl.net.urlconn.UrlConnRequest
+import com.wdkl.skywebrtc.SkyEngineKit
 import serialporttest.utils.SerialPortUtil
 
 /**
@@ -43,11 +50,11 @@ class Application : BaseApplication() {
         JRouter.openLog()
         JRouter.prepare().create("/welcome/launch").seek()
         JRouter.prepare().create("/home/launch").seek()
-        JRouter.prepare().create("/setting/launch").seek()
-        JRouter.prepare().create("/shop/launch").seek()
+        //JRouter.prepare().create("/setting/launch").seek()
+        //JRouter.prepare().create("/shop/launch").seek()
         JRouter.prepare().create("/extra/launch").seek()
 
-        JRouter.prepare().create("/hello/launch").seek()
+        //JRouter.prepare().create("/hello/launch").seek()
         //5-inch bedside extension
         JRouter.prepare().create("/callingbed/launch").seek()
     }
@@ -73,11 +80,7 @@ class Application : BaseApplication() {
      * @return Rx observer
      */
     private fun initFrame() {
-        if (android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.LOLLIPOP) {
-            BaseConfig.getInstance().addActivity("WelcomeActivity", "HomeActivity")
-        } else {
-            BaseConfig.getInstance().closeScrollBack()
-        }
+
         NetEngineConfig.init(baseContext)
                 .openLogger()
                 .addNetInterceptor(RestfulExceptionInterceptor())
@@ -86,5 +89,16 @@ class Application : BaseApplication() {
         NetHelper.getInstance().init(applicationContext)
         //open serial port
         SerialPortUtil.getInstance().openSerialPort();
+
+        // initialize the HTTP request layer
+        HttpRequestPresenter.init(UrlConnRequest())
+        // initialize signaling
+        SkyEngineKit.init(VoipEvent())
+        SocketManager.getInstance().init(applicationContext)
+
+        //anr catcher
+        AnrFcExceptionUtil.getInstance(this).initFCException()
+
+        Utils.checkCameraSupport()
     }
 }

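All of the singletons initialized above (serial port, HTTP layer, signaling, socket manager) assume they are set up exactly once. `Application.onCreate` runs once per process, so if a second process were ever added, a main-process guard would avoid double-opening the serial port. A sketch of the usual guard (purely illustrative, not in the commit):

```kotlin
import android.app.ActivityManager
import android.content.Context
import android.os.Process

// True only in the process whose name equals the package name,
// i.e. the default/main process.
fun Context.isMainProcess(): Boolean {
    val am = getSystemService(Context.ACTIVITY_SERVICE) as ActivityManager
    val myPid = Process.myPid()
    return am.runningAppProcesses.orEmpty()
        .any { it.pid == myPid && it.processName == packageName }
}
```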
+ 3 - 6
bedlib/src/main/java/serialporttest/utils/SerialPortUtil.java

@@ -103,7 +103,7 @@ public class SerialPortUtil {
                         int size = inputStream.read(buffer);
                         if (size > 0 && isOpenSerialPortUtil) {
                             data = new String(buffer, 0, size);
-                            Log.d("aaaa", "data==" + data);
+                            //Log.d("aaaa", "data==" + data);
 
                            if (systemVersionIsO()) { //for Android 7.0.1 or 8.1.0 builds
                                 //addByData(data);//my do
@@ -342,13 +342,11 @@ public class SerialPortUtil {
             if (isOpenSerialPortUtil) {
                 byte[] sendData = command.getBytes();
                 outputStream.write(sendData);
-                Log.d("NURSELIGHT","==command=="+"护理灯串口数据发送成功");
-                Log.i(TAG, "串口数据发送成功");
+                Log.d("serialPort","==command==" + command);
             }
         } catch (IOException e) {
             e.printStackTrace();
-            Log.d("NURSELIGHT","==command=="+"护理灯串口数据发送失败");
-            Log.i(TAG, "串口数据发送失败");
+            Log.d("serialPort","==command==" + command);
         }
     }
 
@@ -484,7 +482,6 @@ public class SerialPortUtil {
         if (null != check && !"".equals(check)) {
             check_v = check;
         }
-        Log.d("NURSELIGHT","=="+C_HEARD + command + C_SEPARATE + random_v + check_v + C_END);
         send(C_HEARD + command + C_SEPARATE + random_v + check_v + C_END);
     }
 

+ 19 - 9
build.gradle

@@ -2,7 +2,7 @@ buildscript {
     /**
      * Unified Kotlin version
      */
-    ext.kotlin_version = '1.3.21'
+    ext.kotlin_version = '1.2.40'
 
     /**
      * AOP weaving version
@@ -52,7 +52,7 @@ buildscript {
     /**
      * App version number
      */
-    ext.app_version = "1.0"
+    ext.app_version = "1.0.1"
 
     /**
      * Project dependency repositories
@@ -62,12 +62,11 @@ buildscript {
             /**
              * 依赖仓储
              */
-            /*maven { url 'http://maven.aliyun.com/nexus/content/repositories/google' }
+            maven { url 'http://maven.aliyun.com/nexus/content/repositories/google' }
             maven { url 'http://maven.aliyun.com/nexus/content/repositories/jcenter'}
             maven { url 'http://maven.aliyun.com/nexus/content/groups/public/' }
-            maven { url 'https://jitpack.io' }*/
 
-            jcenter()
+            //jcenter()
             mavenCentral()
             google()
         }
@@ -96,21 +95,32 @@ buildscript {
          */
         classpath "org.aspectj:aspectjtools:$aspectj_version"
     }
+
+    repositories {
+        maven { url 'http://maven.aliyun.com/nexus/content/repositories/google' }
+        maven { url 'http://maven.aliyun.com/nexus/content/repositories/jcenter'}
+        maven { url 'http://maven.aliyun.com/nexus/content/groups/public/' }
+
+        jcenter()
+        mavenCentral()
+        google()
+
+        maven { url "https://jitpack.io" }
+    }
 }
 /**
  * Unified dependency repositories
  */
 allprojects {
     repositories {
-        /*maven { url 'http://maven.aliyun.com/nexus/content/repositories/google' }
+        maven { url 'http://maven.aliyun.com/nexus/content/repositories/google' }
         maven { url 'http://maven.aliyun.com/nexus/content/repositories/jcenter'}
         maven { url 'http://maven.aliyun.com/nexus/content/groups/public/' }
-        maven { url 'https://jitpack.io' }*/
 
-        google()
         jcenter()
         mavenCentral()
-        maven { url 'https://dl.bintray.com/geamtear/maven' }
+        google()
+
         maven { url "https://jitpack.io" }
     }
     tasks.withType(Javadoc) { // newly added

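The repository change above flips resolution order so the Aliyun mirrors are tried before jcenter/google, which matters on networks where the canonical hosts are slow or blocked. For reference, the same ordering in Gradle Kotlin DSL (a sketch, not part of this Groovy-based commit; Gradle 6+ would additionally require `isAllowInsecureProtocol = true` for the plain-http mirrors):

```kotlin
repositories {
    // mirrors first: resolution is ordered, so these shield the canonical hosts
    maven(url = "http://maven.aliyun.com/nexus/content/repositories/google")
    maven(url = "http://maven.aliyun.com/nexus/content/repositories/jcenter")
    maven(url = "http://maven.aliyun.com/nexus/content/groups/public/")
    jcenter()        // read-only since 2021; kept for legacy artifacts
    mavenCentral()
    google()
    maven(url = "https://jitpack.io")
}
```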
+ 10 - 2
callingbed/build.gradle

@@ -80,9 +80,15 @@ dependencies {
     /**
      * Common library dependencies
      */
+    compile project(':welcome')
     compile project(':middleware')
-    compile project(':sip2')
-    compile project(':bedlib')
+    //compile project(':sip2')
+    //compile project(':bedlib')
+
+    //web rtc
+    compile project(':webrtc')
+    //compile project(':libwebrtc')
+    compile project(':rtc-chat')
 
     if(componentTag){
         debugCompile 'com.squareup.leakcanary:leakcanary-android:1.5.1'
@@ -109,6 +115,8 @@ dependencies {
      *  constraint-layout dependency
      */
     compile 'com.android.support.constraint:constraint-layout:1.1.0-beta5'
+
+    compile 'com.github.anrwatchdog:anrwatchdog:1.3.0'
 }
 
 /**

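The new `com.github.anrwatchdog:anrwatchdog:1.3.0` dependency provides a watchdog thread that posts a task to the main looper and raises an `ANRError` when it is not serviced in time; the commit wires it up inside the new `AnrFcExceptionUtil`. A minimal standalone usage sketch (illustrative, not the commit's own wiring):

```kotlin
import com.github.anrwatchdog.ANRWatchDog

// Starts a watchdog that fires when the main thread is blocked for
// more than 5 s; here it logs instead of the default crash behavior.
fun installAnrWatchdog() {
    val watchdog = ANRWatchDog(5000)
    watchdog.setANRListener { error -> error.printStackTrace() }
    watchdog.start()
}
```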
+ 460 - 137
callingbed/src/main/java/com/wdkl/app/ncs/callingbed/activity/CallingbedActivity.kt

@@ -5,10 +5,7 @@ import android.content.Context
 import android.content.Intent
 import android.content.IntentFilter
 import android.net.ConnectivityManager
-import android.os.CountDownTimer
-import android.os.Handler
-import android.os.Message
-import android.os.SystemClock
+import android.os.*
 import android.support.v4.app.Fragment
 import android.support.v7.widget.LinearLayoutManager
 import android.text.TextUtils
@@ -17,26 +14,33 @@ import android.view.View
 import android.view.animation.AnimationUtils
 import com.enation.javashop.android.jrouter.external.annotation.Router
 import com.enation.javashop.net.engine.model.NetState
-import com.wdkl.app.ncs.callingbed.sip.SipHelper
-import com.wdkl.app.ncs.callingbed.sip.SipStatus
-import com.vvsip.ansip.IVvsipServiceListener
-import com.vvsip.ansip.VvsipCall
+import com.google.gson.Gson
+//import com.wdkl.app.ncs.callingbed.sip.SipHelper
+//import com.wdkl.app.ncs.callingbed.sip.SipStatus
+//import com.vvsip.ansip.IVvsipServiceListener
+//import com.vvsip.ansip.VvsipCall
 import com.wdkl.app.ncs.callingbed.R
 import com.wdkl.app.ncs.callingbed.adapter.NurseConfigAdpter
 import com.wdkl.app.ncs.callingbed.databinding.CallingbedMainLayBinding
 import com.wdkl.app.ncs.callingbed.fragment.*
-import com.wdkl.app.ncs.callingbed.helper.NetHelper
-import com.wdkl.app.ncs.callingbed.helper.SerialPortHelper
-import com.wdkl.app.ncs.callingbed.helper.SoundPoolHelper
+import com.wdkl.app.ncs.callingbed.helper.*
 import com.wdkl.app.ncs.callingbed.launch.CallingbedLaunch
+import com.wdkl.app.ncs.callingbed.settings.SettingConfig
 import com.wdkl.app.ncs.callingbed2.agreement.CallingbedAgreement
+import com.wdkl.app.ncs.callingbed2.fragment.SkyCallFragment
+import com.wdkl.app.ncs.callingbed2.helper.ScreenManagerUtil
+import com.wdkl.core.consts.Urls
+import com.wdkl.core.socket.IUserState
+import com.wdkl.core.socket.SocketManager
 //import com.wdkl.app.ncs.sip.service.SipService
 import com.wdkl.ncs.android.lib.base.BaseActivity
 import com.wdkl.ncs.android.lib.utils.TimeHandle
 import com.wdkl.ncs.android.lib.utils.showMessage
 import com.wdkl.ncs.android.lib.vo.filter
+import com.wdkl.ncs.android.lib.widget.MenuDialog
 import com.wdkl.ncs.android.middleware.common.Constant
 import com.wdkl.ncs.android.middleware.common.MessageEvent
+import com.wdkl.ncs.android.middleware.common.SipStatus
 import com.wdkl.ncs.android.middleware.logic.contract.callingbed.CallingbedActivityContract
 import com.wdkl.ncs.android.middleware.logic.presenter.callingbed.CallingbedActivityPresenter
 import com.wdkl.ncs.android.middleware.model.dos.PartSettingDO
@@ -44,8 +48,13 @@ import com.wdkl.ncs.android.middleware.model.dto.NurseConfigDto
 import com.wdkl.ncs.android.middleware.model.dto.TcpSeverDTO
 import com.wdkl.ncs.android.middleware.model.vo.BedDeviceInfoVO
 import com.wdkl.ncs.android.middleware.model.vo.CustomerInfoVO
+import com.wdkl.ncs.android.middleware.model.vo.InteractionVO
 import com.wdkl.ncs.android.middleware.tcp.TcpClient
+import com.wdkl.ncs.android.middleware.tcp.channel.VideoUtil
 import com.wdkl.ncs.android.middleware.tcp.channel.VoiceUtil
+import com.wdkl.ncs.android.middleware.tcp.dto.TcpModel
+import com.wdkl.ncs.android.middleware.tcp.enums.TcpAction
+import com.wdkl.ncs.android.middleware.tcp.enums.TcpType
 import kotlinx.android.synthetic.main.callingbed_main_lay.*
 import kotlinx.android.synthetic.main.view_title_layout.*
 import org.greenrobot.eventbus.EventBus
@@ -64,7 +73,7 @@ import java.lang.ref.WeakReference
 
 @Router(path = "/callingbed/main")
 class CallingbedActivity :BaseActivity<CallingbedActivityPresenter, CallingbedMainLayBinding>(), CallingbedAgreement, CallingbedActivityContract.View, /*IVvsipServiceListener,*/
-        SerialPortUtil.ISerialPortBedOnclickEvent, SerialPortUtil.ISerialPortBedOnclickString{
+        SerialPortUtil.ISerialPortBedOnclickEvent, SerialPortUtil.ISerialPortBedOnclickString, IUserState {
 
     private lateinit var receiver: TimeReceiver
     private lateinit var curFragment: String
@@ -78,9 +87,13 @@ class CallingbedActivity :BaseActivity<CallingbedActivityPresenter, CallingbedMa
     private lateinit var nurseConfigAdpter : NurseConfigAdpter
     private var configList = ArrayList<NurseConfigDto>()
     private var sipServiceRunning :Boolean = false
+    private var initialized :Boolean = false
 
-    //call countdown timer
-    lateinit var countDownTimer: CountDownTimer
+    //in-call UI fragment
+    private var skyCallFragment: Fragment? = null
+
+    //network error counter
+    private var netErrCount : Int = 0
 
     //main info
     private val mainFragment = "main_fragment"
@@ -120,8 +133,7 @@ class CallingbedActivity :BaseActivity<CallingbedActivityPresenter, CallingbedMa
 
         //sip initialization
         //initSip()
-        //show date and time
-        updateDateTime()
+
         //register broadcast receiver
         regReceiver()
         //serial port listener
@@ -130,11 +142,16 @@ class CallingbedActivity :BaseActivity<CallingbedActivityPresenter, CallingbedMa
         SoundPoolHelper.getInstance().init(applicationContext)
         //turn on the handset mic
         SerialPortHelper.setHandsMIC(true)
+        SerialPortHelper.setSosLight("0")
+
+        EventBus.getDefault().register(this)
 
-        initCountDownTimer()
         initNurseConfig()
         updateNetState()
 
+        //add webrtc login-state callback
+        SocketManager.getInstance().addUserStateCallback(this)
+
         initDevice()
 
         //start the clock thread
@@ -149,35 +166,27 @@ class CallingbedActivity :BaseActivity<CallingbedActivityPresenter, CallingbedMa
     }
 
     override fun destory() {
-        countDownTimer.cancel()
         SerialPortUtil.getInstance().closeHeart()
         SerialPortUtil.getInstance().closeSerialPort()
-        SipHelper.getInstance().unRegisterSip()
+        //SipHelper.getInstance().unRegisterSip()
         SoundPoolHelper.getInstance().release()
+        EventBus.getDefault().unregister(this)
         unRegReceiver()
         handler.removeCallbacksAndMessages(null)
         isTimeWorking = false
     }
 
-    fun initDevice() {
-        if (!TextUtils.isEmpty(Constant.LOCAL_MAC)) {
-            presenter.loadDeviceInfo(Constant.LOCAL_MAC)
-        }
+    override fun userLogin() {
+        updateStatus(SipStatus.REGISTERCOM)
     }
 
-    fun initCountDownTimer() {
-        countDownTimer = object: CountDownTimer(Constant.CALL_TIMEOUT*1000L, 1000) {
-            override fun onTick(millisUntilFinished: Long) {
-                //
-            }
+    override fun userLogout() {
+        updateStatus(SipStatus.REGISTERFAIL)
+    }
 
-            override fun onFinish() {
-                //call timed out, return to the main screen
-                showMessage("无人应答...")
-                //MediaPlayHelper.getInstance().stopMusic()
-                VoiceUtil.cancelAudioCall(Constant.DEVICE_ID)
-                endCall()
-            }
+    fun initDevice() {
+        if (!TextUtils.isEmpty(Constant.LOCAL_MAC)) {
+            presenter.loadDeviceInfo(Constant.LOCAL_MAC)
         }
     }
 
@@ -202,7 +211,7 @@ class CallingbedActivity :BaseActivity<CallingbedActivityPresenter, CallingbedMa
         //dismissDialog()
         showMessage(message)
         //load failed, reset customId
-        Constant.CUSTOM_ID = -1
+        //Constant.CUSTOM_ID = -1
     }
 
     //数据加载完成
@@ -220,7 +229,7 @@ class CallingbedActivity :BaseActivity<CallingbedActivityPresenter, CallingbedMa
     override fun onNoNet() {
         showMessage("没有网络")
         //load failed, reset customId
-        Constant.CUSTOM_ID = -1
+        //Constant.CUSTOM_ID = -1
     }
 
     //network listener
@@ -241,24 +250,61 @@ class CallingbedActivity :BaseActivity<CallingbedActivityPresenter, CallingbedMa
         Constant.PART_ID = deviceInfo.partId
         Constant.BED_NAME = deviceInfo.fullName
 
+        Constant.DEVICE_TYPE = deviceInfo.deviceType
+        Constant.DEVICE_CODE = deviceInfo.code
+        Constant.DEVICE_MODEL = deviceInfo.model
+        Constant.DEVICE_SOFT_VER = deviceInfo.softVer
+        Constant.DEVICE_HARD_VER = deviceInfo.hardVer
+        Constant.DEVICE_NAME = deviceInfo.name
+        if (deviceInfo.status != null) {
+            Constant.DEVICE_STATUS = deviceInfo.status
+        }
+
         view_title_layout_tv_hospital_name.text = deviceInfo.hospitalName + deviceInfo.partName
+
         if (deviceInfo.customerId != null) {
             Constant.CUSTOM_ID = deviceInfo.customerId
-            EventBus.getDefault().post(MessageEvent("updateCustom", Constant.EVENT_UPDATE_CUSTOM))
-            presenter.loadPartSettings(Constant.PART_ID)
         }
-
-        //register only after the sip account has been obtained
-        if (!sipServiceRunning && !TextUtils.isEmpty(Constant.SIP_ID)) {
-            sipServiceRunning = true
-            //startSipService()
-            //StarRtcHelper.getInstance().initStarRtc()
-            initSip(Constant.SIP_HOST, Constant.SIP_ID);
+        EventBus.getDefault().post(MessageEvent("updateCustom", Constant.EVENT_UPDATE_CUSTOM))
+
+        if (TextUtils.isEmpty(Constant.SIP_ID)) {
+            showMessage("SIP ID为空")
+            return
+        } else if (Constant.DEVICE_STATUS == 0) {
+            showMessage("设备未启用")
+            return
         }
+
+        initialized = true
+        presenter.loadPartSettings(Constant.PART_ID)
+
+        connectRtcWebSocket()
     }
 
     override fun setPartSettings(partSetting: PartSettingDO) {
-        //Constant.CALL_TIMEOUT = partSetting.sipOvertime
+        //set day start/end times
+        SettingConfig.setInitialDayTime(this, partSetting.dayStart.substring(0,5))
+        SettingConfig.setEndOfDay(this,partSetting.nightStart.substring(0,5))
+
+        //extension day/night screen brightness
+        SettingConfig.setExtensionDaytimeBrightness(this, partSetting.dayLight)
+        SettingConfig.setExtensionNightBrightness(this, partSetting.nightLight)
+
+        //extension day/night nurse light, i.e. LED brightness
+        SettingConfig.setExtensionDaytimeLEDBrightness(this, partSetting.dayNurseLed)
+        SettingConfig.setExtensionNightLEDBrightness(this, partSetting.nightNurseLed)
+
+        //extension day/night system volume
+        SettingConfig.setExtensionDaytimeSystemVolume(this, partSetting.dayBedVol)
+        SettingConfig.setExtensionNightSystemVolume(this, partSetting.nightBedVol)
+
+        //extension call volume; no day/night distinction
+        SettingConfig.setExtensionCallVolume(this, partSetting.dayBedVol)
+
+        SettingConfig.setSipOverTime(this, partSetting.sipOvertime)
+        SettingConfig.setSleepTime(this, partSetting.sleepSecondsBed)
+
+        updateSettings(true)
     }
 
     override fun setTcpServerHost(tcpSeverDTO: TcpSeverDTO) {
@@ -269,10 +315,23 @@ class CallingbedActivity :BaseActivity<CallingbedActivityPresenter, CallingbedMa
         //start the TCP connection
         startTcp()
         showMessage("tcp开始连接...host: " + Constant.TCP_SERVER_URL + ", port: " + Constant.TCP_PORT)
+
+        Thread(Runnable {
+            while (!initialized) {
+                runOnUiThread(Runnable {
+                    initDevice()
+                })
+                try {
+                    Thread.sleep(8000)
+                } catch (e: Exception) {
+                    //
+                }
+            }
+        }).start()
     }
 
     override fun updateNurseConfig(list: List<NurseConfigDto>) {
-        if (list.size > 0) {
+        if (list.isNotEmpty()) {
             for ((index, e) in list.withIndex()) {
                 configList.set(index, e)
             }
@@ -280,14 +339,35 @@ class CallingbedActivity :BaseActivity<CallingbedActivityPresenter, CallingbedMa
         }
     }
 
-    override fun onStart() {
-        EventBus.getDefault().register(this)
-        super.onStart()
+    private fun connectRtcWebSocket(){
+        //register only after the sip account has been obtained
+        if (!TextUtils.isEmpty(Constant.SIP_ID)) {
+            // connect the socket and log in
+            SocketManager.getInstance().connect(Urls.WS, Constant.SIP_ID, 0)
+        }
     }
 
-    override fun onStop() {
-        EventBus.getDefault().unregister(this)
-        super.onStop()
+    fun addCallFragment(fragment: Fragment) {
+        //switch to the main page first
+        if (!mainFragment.equals(curFragment)) {
+            switchToMainFragment()
+        }
+
+        skyCallFragment = fragment
+        supportFragmentManager.beginTransaction()
+            .setCustomAnimations(R.anim.slide_down_in, R.anim.slide_up_out)
+            .add(R.id.call_frame, fragment)
+            .commit()
+    }
+
+    fun removeCallFragment() {
+        if (skyCallFragment != null) {
+            supportFragmentManager.beginTransaction()
+                //.setCustomAnimations(R.anim.slide_down_in, R.anim.slide_down_out)
+                .remove(skyCallFragment)
+                .commit()
+            skyCallFragment = null
+        }
     }
 
     //switch fragments
@@ -327,6 +407,7 @@ class CallingbedActivity :BaseActivity<CallingbedActivityPresenter, CallingbedMa
         intentFilter.addAction(Intent.ACTION_TIME_TICK)
         intentFilter.addAction(Intent.ACTION_TIMEZONE_CHANGED)
         intentFilter.addAction(Intent.ACTION_TIME_CHANGED)
+        intentFilter.addAction(ConnectivityManager.CONNECTIVITY_ACTION)
         registerReceiver(receiver, intentFilter)
     }
 
@@ -334,11 +415,11 @@ class CallingbedActivity :BaseActivity<CallingbedActivityPresenter, CallingbedMa
         unregisterReceiver(receiver)
     }
 
-    private fun initSip(ipStr: String, id: String) {
+/*    private fun initSip(ipStr: String, id: String) {
         //============================================= SIP service startup ===================================//
-        //SipHelper.getInstance().initSip(this@CallingbedActivity,ipStr, id, id)
-        //SipHelper.getInstance().sipStartService()
-        //SipHelper.getInstance().setSipListner(this)
+        SipHelper.getInstance().initSip(this@CallingbedActivity,ipStr, id, id)
+        SipHelper.getInstance().sipStartService()
+        SipHelper.getInstance().setSipListner(this)
     }
 
     fun sendSipCall(sipId: String) {
@@ -351,7 +432,7 @@ class CallingbedActivity :BaseActivity<CallingbedActivityPresenter, CallingbedMa
         SipHelper.getInstance().endCall()
     }
 
-    /*override fun onNewVvsipCallEvent(call: VvsipCall?) {
+    override fun onNewVvsipCallEvent(call: VvsipCall?) {
         //Log.d("sip", "onNewVvsipCallEvent----")
         if (call != null) {
             SipHelper.getInstance().addCallObject(call)
@@ -402,49 +483,111 @@ class CallingbedActivity :BaseActivity<CallingbedActivityPresenter, CallingbedMa
         }
     }*/
 
+    private fun updateSettings(forceSet: Boolean) {
+        val  currentTimestamp = System.currentTimeMillis()
+
+        //timestamp of day start
+        val date = TimeHandle.getDateTime("yyyy-MM-dd")
+        val dayStartTimeStamp = TimeHandle.dateToStamp(date+" "+SettingConfig.getInitialDayTime(this)+":00", "yyyy-MM-dd HH:mm:ss")
+        //timestamp of day end
+        val endOfDayTimeStamp = TimeHandle.dateToStamp(date+" "+SettingConfig.getEndOfDay(this)+":00", "yyyy-MM-dd HH:mm:ss")
+
+        val curInday = dayStartTimeStamp < currentTimestamp && currentTimestamp < endOfDayTimeStamp
+
+        if (curInday) {
+            //now daytime and previously not daytime
+            if (Constant.day_state != 0 || forceSet) {
+                //set daytime brightness
+                ScreenManagerUtil().setScreenBrightness(
+                    this,
+                    Math.ceil(2.54 * SettingConfig.getExtensionDaytimeBrightness(this)).toInt()
+                )
+                //set daytime system volume and ring volume
+                VoiceManagerUtil.setSystemVoice(
+                    this,
+                    SettingConfig.getExtensionDaytimeSystemVolume(this)
+                )
+                VoiceManagerUtil.setMusicVoice(
+                    this,
+                    SettingConfig.getExtensionDaytimeSystemVolume(this)
+                )
+                view_title_layout_iv_day_night.setImageResource(R.mipmap.ic_daylight)
+            }
+            Constant.day_state = 0
+        } else {
+            //now nighttime and previously not nighttime
+            if (Constant.day_state != 1 || forceSet) {
+                //set night brightness
+                ScreenManagerUtil().setScreenBrightness(
+                    this,
+                    Math.ceil(2.54 * SettingConfig.getExtensionNightBrightness(this)).toInt()
+                )
+                //set night system volume and ring volume
+                VoiceManagerUtil.setSystemVoice(
+                    this,
+                    SettingConfig.getExtensionNightSystemVolume(this)
+                )
+                VoiceManagerUtil.setMusicVoice(
+                    this,
+                    SettingConfig.getExtensionNightSystemVolume(this)
+                )
+                view_title_layout_iv_day_night.setImageResource(R.mipmap.ic_night)
+            }
+            Constant.day_state = 1
+        }
+    }
+
     override fun serialPortBedOnclick(buffer: ByteArray) {
         Log.d("serialPortBedOnclick", "buffer[0]:" + buffer[0] + ", buffer[1]:" + buffer[1] + ", buffer[2]:" + buffer[2]
                 + ", buffer[3]:" + buffer[3] + ", buffer[4]:" + buffer[4] + ", buffer[5]:" + buffer[5] + ", buffer[6]:" + buffer[6]
                 + ", buffer[7]:" + buffer[7] + ", buffer[8]:" + buffer[8])
         //home
         if (buffer[0].toInt() == 1 || buffer[0].toInt() == 2) {
-            runOnUiThread {
-                clickVHome()
+            if (skyCallFragment == null) {
+                runOnUiThread {
+                    clickVHome()
+                }
             }
         }
 
         //fees
         else if (buffer[1].toInt() == 1 || buffer[1].toInt() == 2) {
-            runOnUiThread {
-                clickVCost()
+            if (skyCallFragment == null) {
+                runOnUiThread {
+                    clickVCost()
+                }
             }
         }
 
         //more
         else if (buffer[2].toInt() == 1 || buffer[2].toInt() == 2) {
-            runOnUiThread {
-                clickVQrcode()
+            if (skyCallFragment == null) {
+                runOnUiThread {
+                    clickVQrcode()
+                }
             }
         }
 
         //support (reinforcement) request
         else if (buffer[3].toInt() == 1 || buffer[3].toInt() == 2) {
-            runOnUiThread {
-                clickVSupport()
+            if (skyCallFragment == null) {
+                runOnUiThread {
+                    clickVSupport()
+                }
             }
         }
 
         //call
         else if (buffer[4].toInt() == 1 || buffer[4].toInt() == 2) {
             runOnUiThread {
-                clickVCall()
+                clickVCall(true)
             }
         }
 
         //nurse-call key: short press, or long press released
         else if (buffer[5].toInt() == 1 || buffer[5].toInt() == 2) {
             runOnUiThread {
-                clickCall()
+                clickVCall(false)
             }
         }
 
@@ -457,7 +600,15 @@ class CallingbedActivity :BaseActivity<CallingbedActivityPresenter, CallingbedMa
     }
 
     override fun serialPortBedOnclickString(str: String) {
-        //
+        try {
+            val newStr = str.substring(str.indexOf("$") + 1, str.indexOf("#"))
+            Log.d("serialPortBedOnclickString", "newStr==$newStr")
+            if (newStr.startsWith("V")) {
+                Constant.MCU_VERSION_NUMBER = newStr.substring(newStr.indexOf(",") + 1, 16)
+            }
+        } catch (e: Exception) {
+            e.printStackTrace()
+        }
     }
 
     private fun isVisible(v: View) : Boolean {
@@ -526,7 +677,7 @@ class CallingbedActivity :BaseActivity<CallingbedActivityPresenter, CallingbedMa
         backTimeout = 0
         if (isVisible(right_menu)) {
             hideRightMenu(true)
-            showMessage("增援请求已发送!")
+            //showMessage("增援请求已发送!")
             //switchToFragment(R.id.callingbed_main_frame, SupportFragment(), supportFragment)
             //SoundPoolHelper.getInstance().playSound(2)
         } else {
@@ -534,28 +685,46 @@ class CallingbedActivity :BaseActivity<CallingbedActivityPresenter, CallingbedMa
         }
     }
 
-    private fun clickVCall() {
+    private fun clickVCall(checkMenu: Boolean) {
         //reset the timeout
         backTimeout = 0
         //standby state: show the call UI
         if (Constant.CALL_STATE == Constant.CALL_STANDBY) {
-            if (isVisible(right_menu)) {
-                hideRightMenu(true)
-                VoiceUtil.startAudioCall(Constant.DEVICE_ID)
-                sendCall()
-                //play ring tone
-                //MediaPlayHelper.getInstance().playResMusic(R.raw.ringback, 1.0f, true)
-                SoundPoolHelper.getInstance().playSound(2)
+            if (checkMenu) {
+                if (isVisible(right_menu)) {
+                    hideRightMenu(true)
+                    if (!TextUtils.isEmpty(Constant.SIP_ID)) {
+                        //voice-only call when there is no camera
+                        startCall(Constant.VOICE_CALL)
+                    } else {
+                        showMessage("当前设备可能未办理入住,不能呼叫")
+                    }
+                } else {
+                    showRightMenu()
+                }
             } else {
-                showRightMenu()
+                if (isVisible(right_menu)) {
+                    hideRightMenu(true)
+                }
+                if (!TextUtils.isEmpty(Constant.SIP_ID)) {
+                //voice-only call when there is no camera
+                    startCall(Constant.VOICE_CALL)
+                } else {
+                    showMessage("当前设备可能未办理入住,不能呼叫")
+                }
             }
         } else if (Constant.CALL_STATE == Constant.CALL_CALLING) {
-            //StarRtcHelper.getInstance().hangupAudioCall()
-            //VoiceUtil.handoffAudioCall(Constant.DEVICE_ID, fromId, interactionId)
-            endCall()
-        } else {
-            VoiceUtil.cancelAudioCall(Constant.DEVICE_ID)
-            endCall()
+            //hang up during a call
+            EventBus.getDefault().post(MessageEvent("handoff", Constant.EVENT_SERIAL_EVENT))
+            Constant.CALL_STATE = Constant.CALL_STANDBY
+        } else if (Constant.CALL_STATE == Constant.CALL_OUTGOING) {
+            //cancel the outgoing call
+            EventBus.getDefault().post(MessageEvent("cancel", Constant.EVENT_SERIAL_EVENT))
+            Constant.CALL_STATE = Constant.CALL_STANDBY
+        } else if (Constant.CALL_STATE == Constant.CALL_INCOMING) {
+            //answer the incoming call
+            EventBus.getDefault().post(MessageEvent("accept", Constant.EVENT_SERIAL_EVENT))
+            Constant.CALL_STATE = Constant.CALL_STANDBY
         }
     }
 
@@ -567,18 +736,16 @@ class CallingbedActivity :BaseActivity<CallingbedActivityPresenter, CallingbedMa
             if (isVisible(right_menu)) {
                 hideRightMenu(true)
             }
-            VoiceUtil.startAudioCall(Constant.DEVICE_ID)
-            sendCall()
-            //play ring tone
-            //MediaPlayHelper.getInstance().playResMusic(R.raw.ringback, 1.0f, true)
-            SoundPoolHelper.getInstance().playSound(2)
-        } else if (Constant.CALL_STATE == Constant.CALL_CALLING) {
-            //StarRtcHelper.getInstance().hangupAudioCall()
-            //VoiceUtil.handoffAudioCall(Constant.DEVICE_ID, fromId, interactionId)
-            endCall()
-        } else {
-            VoiceUtil.cancelAudioCall(Constant.DEVICE_ID)
-            endCall()
+            if (!TextUtils.isEmpty(Constant.SIP_ID)) {
+                //voice-only call when there is no camera
+                startCall(Constant.VOICE_CALL)
+            } else {
+                showMessage("当前设备可能未办理入住,不能呼叫")
+            }
+        } else if (Constant.CALL_STATE == Constant.CALL_INCOMING) {
+            //answer the incoming call
+            EventBus.getDefault().post(MessageEvent("accept", Constant.EVENT_SERIAL_EVENT))
+            Constant.CALL_STATE = Constant.CALL_STANDBY
         }
     }
 
@@ -603,40 +770,41 @@ class CallingbedActivity :BaseActivity<CallingbedActivityPresenter, CallingbedMa
         right_menu.setVisibility(View.GONE)
     }
 
-    //start a call
-    private fun sendCall() {
-        Constant.CALL_STATE = Constant.CALL_OUTGOING
-        ll_call_view.visibility = View.VISIBLE
-        tv_call_bed_name.text = Constant.BED_NAME
-        tv_call_state.text = "正在呼叫..."
-        voice_call_timer_view.visibility = View.INVISIBLE
-        countDownTimer.start()
-    }
-
-    //call or conversation ended
-    private fun endCall() {
-        voice_call_timer_view.base = SystemClock.elapsedRealtime()
-        voice_call_timer_view.stop()
-        ll_call_view.visibility = View.GONE
-        Constant.CALL_STATE = Constant.CALL_STANDBY
-        countDownTimer.cancel()
+    //start a call
+    fun startCall(type: Int) {
+        //before calling, check that the webrtc socket and tcp are both connected, otherwise no call can be set up
+        if (SocketManager.getInstance().socketOpen() && Constant.TCP_CONNECTED) {
+            //outgoing-call UI
+            Constant.CALL_TYPE = type
+            Constant.CALL_STATE = Constant.CALL_OUTGOING
+            var fragment = SkyCallFragment()
+            var bundle = Bundle()
+            bundle.putInt("call_state", 0)
+            fragment.arguments = bundle
+            addCallFragment(fragment)
+        } else {
+            showMessage("通话服务或网络未连接,请检查网络稍后再试")
+        }
     }
 
-    //in call
-    private fun inCall() {
-        ll_call_view.visibility = View.VISIBLE
-        tv_call_bed_name.text = Constant.BED_NAME
-        tv_call_state.text = "通话中..."
-        voice_call_timer_view.visibility = View.VISIBLE
-        voice_call_timer_view.base = SystemClock.elapsedRealtime()
-        voice_call_timer_view.start()
-        Constant.CALL_STATE = Constant.CALL_CALLING
-        countDownTimer.cancel()
-    }
 
     @Subscribe(threadMode = ThreadMode.MAIN)
     fun onMoonEvent(messageEvent: MessageEvent) {
+        synchronized(Unit) {
+            handleTcpModel(messageEvent)
+        }
+    }
+
+    fun handleTcpModel(messageEvent: MessageEvent) {
         when (messageEvent.getType()) {
+            //leave the call UI
+            Constant.EVENT_REMOVE_CALL_FRAGMENT -> {
+                if (skyCallFragment != null) {
+                    Constant.CALL_STATE = Constant.CALL_STANDBY
+                    removeCallFragment()
+                }
+            }
+
             //SIP registration status
             Constant.EVENT_SIP_REGISTER_STATUS -> {
                 if (messageEvent.message is String) run {
@@ -645,20 +813,111 @@ class CallingbedActivity :BaseActivity<CallingbedActivityPresenter, CallingbedMa
                     updateStatus(status)
                 }
             }
+
             //network ping status
-            Constant.EVENT_INTERNETPING -> {
-                SipHelper.getInstance().obtainSipInfo()
+            /*Constant.EVENT_INTERNETPING -> {
+                //SipHelper.getInstance().obtainSipInfo()
                 if (NetHelper.NetConn && NetHelper.getInstance().networkType == ConnectivityManager.TYPE_ETHERNET) {
                     view_title_layout_iv_ethernet.setImageResource(R.mipmap.ic_etherneted)
                 } else {
                     view_title_layout_iv_ethernet.setImageResource(R.mipmap.ic_no_ethernet)
                 }
                 view_title_layout_iv_wifi.setImageResource(R.mipmap.ic_wifi_nor)
-            }
+            }*/
+
             //back to the main screen
             Constant.EVENT_BACK_MAIN -> {
                 switchToMainFragment()
             }
+
+            //TCP connection state
+            Constant.EVENT_TCP_STATE -> {
+                updateTcpState()
+            }
+
+            //TCP message handling
+            Constant.EVENT_TCP_MSG -> {
+                if (messageEvent.message is TcpModel) {
+                    val tcpModel = messageEvent.message as TcpModel
+                    Log.d("TCP", "received tcp action: " + tcpModel.action + ", type: " + tcpModel.type)
+                    if (tcpModel.getType() == TcpType.VOICE) {
+                        val interactionVO = Gson().fromJson(tcpModel.data.toString(), InteractionVO::class.java)
+                        if (tcpModel.action == TcpAction.VoiceAction.CALL) {
+                            Constant.fromId = tcpModel.fromId
+                            Constant.interactionId = interactionVO.id
+                            Constant.CALL_STATE = Constant.CALL_INCOMING
+                            //check that the webrtc socket is connected before setting up the call
+                            if (SocketManager.getInstance().socketOpen()) {
+                                //incoming-call UI
+                                var fragment = SkyCallFragment()
+                                var bundle = Bundle()
+                                bundle.putInt("call_state", 1)
+                                bundle.putSerializable("tcp_model", tcpModel)
+                                fragment.arguments = bundle
+                                addCallFragment(fragment)
+                            } else {
+                                showMessage("通话服务还未建立连接,请稍后")
+                                Constant.CALL_STATE = Constant.CALL_STANDBY
+                                VoiceUtil.rejectAudioCall(Constant.DEVICE_ID, Constant.fromId, Constant.interactionId)
+                            }
+                        }
+                    } else if (tcpModel.type == TcpType.SOS) {
+                        if (tcpModel.action == TcpAction.SOSAction.CANCEL) {
+                            //SOS call handled
+                            //SOSHelper.sosStop()
+                        }
+                    } else if (tcpModel.type == TcpType.DEVICE) {
+                        //check app version
+                        if (tcpModel.getAction() == TcpAction.DeviceAction.APP_UPDATE) {
+                            //checkAppVersion()
+                        } else if (tcpModel.action == TcpAction.DeviceAction.RESTART) {
+                            //restart command received: restart the app immediately if idle, otherwise after the call ends
+                            if (Constant.CALL_STATE == Constant.CALL_STANDBY) {
+                                AppUpdateHelper.restartApp(activity)
+                            } else {
+                                Constant.LATER_RESTART = true
+                            }
+                        } else if (tcpModel.action == TcpAction.DeviceAction.SYSTEM_SETTING) {
+                            presenter.loadPartSettings(Constant.PART_ID)
+                        }
+                    } else if (tcpModel.action == TcpAction.DataAction.REFRESH) {
+                        //reload data, e.g. on admission or discharge
+                        initDevice()
+                    }
+
+                    //family visit (video)
+                    if (tcpModel.type == TcpType.VIDEO) {
+                        val interactionVO = Gson().fromJson(tcpModel.data.toString(), InteractionVO::class.java)
+                        if (tcpModel.action == TcpAction.VideoAction.VIDEO_INVITE_JOIN) {
+                            if (Constant.CALL_STATE != Constant.CALL_STANDBY) {
+                                VideoUtil.videoInCalling(Constant.DEVICE_ID, tcpModel.fromId, interactionVO.id)
+                            } else {
+                                //on a visit request, start a video call to the visitor terminal
+                                Constant.interactionId = interactionVO.id
+                                Constant.fromId = tcpModel.fromId
+                                Constant.CALL_STATE = Constant.CALL_OUTGOING
+
+                                //check that the webrtc socket is connected before setting up the call
+                                if (SocketManager.getInstance().socketOpen()) {
+                                    //visit UI
+                                    var fragment = SkyCallFragment()
+                                    var bundle = Bundle()
+                                    bundle.putInt("call_state", 2)
+                                    bundle.putSerializable("tcp_model", tcpModel)
+                                    fragment.arguments = bundle
+                                    addCallFragment(fragment)
+                                } else {
+                                    showMessage("通话服务还未建立连接,请稍后")
+                                    Constant.CALL_STATE = Constant.CALL_STANDBY
+                                    VoiceUtil.rejectAudioCall(Constant.DEVICE_ID, Constant.fromId, Constant.interactionId)
+                                }
+                            }
+                        } else if (tcpModel.action == TcpAction.VideoAction.HANDOFF) {
+                            Constant.CALL_STATE = Constant.CALL_STANDBY
+                        }
+                    }
+                }
+            }
         }
     }
 
@@ -683,10 +942,6 @@ class CallingbedActivity :BaseActivity<CallingbedActivityPresenter, CallingbedMa
         }
     }
 
-    private fun updateDateTime() {
-        view_title_layout_tv_time.setText(TimeHandle.getDateTime("yyyy-MM-dd HH:mm E"));
-    }
-
     private fun updateNetState() {
         if (NetHelper.getInstance().networkType == ConnectivityManager.TYPE_WIFI) {
             view_title_layout_iv_wifi.visibility = View.VISIBLE
@@ -698,13 +953,54 @@ class CallingbedActivity :BaseActivity<CallingbedActivityPresenter, CallingbedMa
             view_title_layout_iv_wifi.visibility = View.GONE
         }
 
-        if (NetHelper.isBTConnected()) {
+        /*if (NetHelper.isBTConnected()) {
             view_title_layout_iv_bt.visibility = View.VISIBLE
             view_title_layout_iv_bt.setImageResource(R.mipmap.ic_bt_success)
         } else {
             view_title_layout_iv_bt.visibility = View.GONE
+        }*/
+
+        /*
+        * Check network health: if tcp has disconnected repeatedly and the IP is also empty, the network is broken, so reboot the device.
+        * Only effective on rk3128 devices.
+         */
+        if (Build.MODEL.equals("rk3128")) {
+            var count = SettingConfig.getNetErrResetCount(this)
+            if (!Constant.TCP_CONNECTED && TextUtils.isEmpty(NetHelper.getInstance().localIP)) {
+                netErrCount++
+            } else {
+                netErrCount = 0
+                if (count > 0) {
+                    count = 0
+                    SettingConfig.setNetErrResetCount(this, count)
+                }
+            }
+
+            if (netErrCount >= 5) {
+                //stop rebooting if still offline after more than 8 reboots
+                if (count < 8) {
+                    count++
+                    SettingConfig.setNetErrResetCount(this, count)
+                    Handler().postDelayed({
+                        AppUpdateHelper.reboot(activity)
+                    }, 5000)
+                } else {
+                    WarningDialogHelper.showDialog(activity)
+                }
+            }
+        }
+    }
+
+    private fun updateTcpState() {
+        if (Constant.TCP_CONNECTED) {
+            view_title_layout_iv_tcp.setImageResource(R.mipmap.ic_tcp_success)
+            if (initialized && SocketManager.getInstance().userState == 0) {
+                //reconnect if the socket has dropped
+                connectRtcWebSocket()
+            }
+        } else {
+            view_title_layout_iv_tcp.setImageResource(R.mipmap.ic_tcp_fail)
         }
-        view_title_layout_iv_day_night.setImageResource(R.mipmap.ic_daylight)
     }
 
     inner class TimeReceiver: BroadcastReceiver() {
@@ -712,8 +1008,35 @@ class CallingbedActivity :BaseActivity<CallingbedActivityPresenter, CallingbedMa
             if (intent.action == Intent.ACTION_TIME_TICK
                     || intent.action == Intent.ACTION_TIME_CHANGED
                     || intent.action == Intent.ACTION_TIMEZONE_CHANGED) {
-                updateDateTime()
-                updateNetState()
+                //only refresh once the device has finished initializing
+                if (initialized) {
+                    updateNetState()
+                    updateSettings(false)
+                    updateTcpState()
+
+                    if (SocketManager.getInstance().userState == 1) {
+                        view_title_layout_tv_point.setBackgroundResource(R.color.green)
+                    } else {
+                        view_title_layout_tv_point.setBackgroundResource(R.color.red_color)
+                    }
+
+                    if (Constant.LATER_RESTART && Constant.CALL_STATE == Constant.CALL_STANDBY) {
+                        AppUpdateHelper.restartApp(activity)
+                    }
+                }
+            } else if (intent.action == ConnectivityManager.CONNECTIVITY_ACTION) {
+                if (initialized) {
+                    updateNetState()
+                    if (NetHelper.getInstance().networkAvailable()) {
+                        if (!Constant.TCP_CONNECTED) {
+                            TcpClient.getInstance().doConnect()
+                        }
+                        if (SocketManager.getInstance().userState == 0) {
+                            //reconnect if the socket has dropped
+                            connectRtcWebSocket()
+                        }
+                    }
+                }
             }
         }
     }

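The `updateSettings(forceSet)` logic above projects the "HH:mm" day/night boundaries from `SettingConfig` onto today's date and compares timestamps, re-applying brightness and volume only when the day/night state flips (or when forced). The decision in isolation, as a self-contained Kotlin sketch with illustrative names:

```kotlin
import java.text.SimpleDateFormat
import java.util.Date
import java.util.Locale

// Mirrors the comparison in updateSettings(): strictly inside
// (dayStart, nightStart) counts as daytime, everything else as night.
fun isDaytime(dayStart: String, nightStart: String, now: Date = Date()): Boolean {
    val day = SimpleDateFormat("yyyy-MM-dd", Locale.US).format(now)
    val fmt = SimpleDateFormat("yyyy-MM-dd HH:mm", Locale.US)
    val start = fmt.parse("$day $dayStart").time
    val end = fmt.parse("$day $nightStart").time
    val t = now.time
    return start < t && t < end
}
```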
+ 143 - 0
callingbed/src/main/java/com/wdkl/app/ncs/callingbed/fragment/BaseCallFragment.kt

@@ -0,0 +1,143 @@
+package com.wdkl.app.ncs.callingbed2.fragment
+
+import android.os.Bundle
+import android.os.CountDownTimer
+import android.support.v4.app.Fragment
+import android.view.LayoutInflater
+import android.view.View
+import android.view.ViewGroup
+import android.widget.TextView
+import com.enation.javashop.utils.base.tool.BaseToolActivity
+import com.wdkl.app.ncs.callingbed.helper.RingPlayHelper
+import com.wdkl.app.ncs.callingbed.settings.SettingConfig
+import com.wdkl.core.voip.VoipEvent
+import com.wdkl.ncs.android.lib.utils.showMessage
+import com.wdkl.ncs.android.middleware.common.Constant
+import com.wdkl.ncs.android.middleware.common.MessageEvent
+import com.wdkl.ncs.android.middleware.tcp.channel.VoiceUtil
+import com.wdkl.ncs.android.middleware.tcp.dto.TcpModel
+import com.wdkl.skywebrtc.SkyEngineKit
+import com.wdkl.skywebrtc.except.NotInitializedException
+import org.greenrobot.eventbus.EventBus
+
+abstract class BaseCallFragment: Fragment() {
+
+    private var layout: View? = null
+
+    protected lateinit var baseActivity: BaseToolActivity
+
+    //call state: 0 = outgoing, 1 = incoming, 2 = visit
+    protected var callState : Int = 0
+    protected var tcpModel: TcpModel? = null
+
+    //timer
+    lateinit var countDownTimer: CountDownTimer
+
+    protected var gEngineKit: SkyEngineKit? = null
+
+
+    override fun onCreate(savedInstanceState: Bundle?) {
+        super.onCreate(savedInstanceState)
+        retainInstance = true
+        callState = arguments.getInt("call_state")
+        if (arguments.getSerializable("tcp_model") != null) {
+            tcpModel = arguments.getSerializable("tcp_model") as TcpModel
+        }
+    }
+
+    override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?, savedInstanceState: Bundle?): View? {
+        if (layout == null) {
+            layout = inflater.inflate(getLayId(), null)
+        }
+
+        /** initialize the host Activity */
+        baseActivity = getActivity() as BaseToolActivity
+
+        return layout
+    }
+
+    override fun onViewCreated(view: View?, savedInstanceState: Bundle?) {
+        super.onViewCreated(view, savedInstanceState)
+
+        try {
+            SkyEngineKit.init(VoipEvent())
+            gEngineKit = SkyEngineKit.Instance()
+        } catch (e: NotInitializedException) {
+            SkyEngineKit.init(VoipEvent())
+            try {
+                gEngineKit = SkyEngineKit.Instance()
+            } catch (ex: NotInitializedException) {
+                ex.printStackTrace()
+                baseActivity.finish()
+            }
+        }
+
+        init()
+        bindEvent()
+    }
+
+    override fun onDestroyView() {
+        super.onDestroyView()
+        destroy()
+    }
+
+    override fun onStart() {
+        EventBus.getDefault().register(this)
+        super.onStart()
+    }
+
+    override fun onStop() {
+        EventBus.getDefault().unregister(this)
+        super.onStop()
+    }
+
+    protected abstract fun getLayId(): Int
+
+    protected abstract fun init()
+
+    protected abstract fun bindEvent()
+
+    protected abstract fun destroy()
+
+    //set up the call-timeout countdown timer
+    protected fun initCountDownTimer(view: TextView) {
+        val overTime = SettingConfig.getSipOverTime(baseActivity) * 1000L
+        countDownTimer = object: CountDownTimer(overTime, 1000) {
+            override fun onTick(millisUntilFinished: Long) {
+                val time = millisUntilFinished / 1000
+                view.text = "倒计时: " + time + "秒"
+            }
+
+            override fun onFinish() {
+                //call timed out: stop the ring tone and return to the main screen
+                RingPlayHelper.stopRingTone()
+                showMessage("无人应答...")
+                Constant.CALL_STATE = Constant.CALL_STANDBY
+                VoiceUtil.cancelAudioCall(Constant.DEVICE_ID)
+                backToMain()
+            }
+        }
+    }
+
+    //start the countdown (guarded: a lateinit property cannot be null-checked, so test isInitialized instead)
+    protected fun startTimer() {
+        if (::countDownTimer.isInitialized) {
+            countDownTimer.start()
+        }
+    }
+
+    //cancel the countdown
+    protected fun cancelTimer() {
+        if (::countDownTimer.isInitialized) {
+            countDownTimer.cancel()
+        }
+    }
+
+    //return to the main screen
+    protected fun backToMain() {
+        EventBus.getDefault().post(MessageEvent("BackCall", Constant.EVENT_REMOVE_CALL_FRAGMENT))
+    }
+
+}
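
A minimal subclass sketch (class name, layout id and view binding are illustrative assumptions, not part of this commit): a concrete call screen only has to supply the four template methods, and the inherited timer helpers can be reused as-is.

    // hypothetical subclass for illustration only
    class DemoCallFragment : BaseCallFragment() {
        override fun getLayId(): Int = R.layout.sky_voice_call_layout

        override fun init() {
            // bind the timeout label, then start the call-timeout countdown
            initCountDownTimer(view!!.findViewById(R.id.sky_voice_call_timeout) as TextView)
            startTimer()
        }

        override fun bindEvent() { /* click listeners go here */ }

        override fun destroy() {
            cancelTimer()
        }
    }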

+ 3 - 2
callingbed/src/main/java/com/wdkl/app/ncs/callingbed/fragment/MainFragment.kt

@@ -11,7 +11,6 @@ import com.wdkl.app.ncs.callingbed.activity.CallingbedActivity
 import com.wdkl.app.ncs.callingbed.databinding.MainViewLayoutBinding
 import com.wdkl.app.ncs.callingbed.launch.CallingbedLaunch
 import com.wdkl.app.ncs.callingbed.net.NettyClient
-import com.wdkl.app.ncs.callingbed.sip.SipHelper
 //import com.wdkl.app.ncs.sip.activity.VoipAudioActivity
 import com.wdkl.ncs.android.lib.base.BaseFragment
 import com.wdkl.ncs.android.lib.utils.*
@@ -94,7 +93,9 @@ class MainFragment: BaseFragment<MainFragmentPresenter, MainViewLayoutBinding>()
         tv_nurse_name.text = customInfo.nurseName
 
         //update the nursing-config items
-        (activity as CallingbedActivity).updateNurseConfig(customInfo.list)
+        if (customInfo.list != null) {
+            (activity as CallingbedActivity).updateNurseConfig(customInfo.list)
+        }
     }
 
     override fun onError(message: String, type: Int) {

+ 41 - 13
callingbed/src/main/java/com/wdkl/app/ncs/callingbed/fragment/QrCodeFragment.kt

@@ -13,6 +13,7 @@ import com.wdkl.ncs.android.lib.utils.debugLog
 import com.wdkl.ncs.android.lib.utils.errorLog
 import com.wdkl.ncs.android.lib.utils.showMessage
 import com.wdkl.ncs.android.lib.vo.filter
+import com.wdkl.ncs.android.middleware.api.UrlManager
 import com.wdkl.ncs.android.middleware.common.Constant
 import com.wdkl.ncs.android.middleware.logic.contract.callingbed.QrCodeFragmentContract
 import com.wdkl.ncs.android.middleware.logic.presenter.callingbed.QrCodeFragmentPresenter
@@ -21,8 +22,7 @@ import kotlinx.android.synthetic.main.view_bed_name.*
 
 class QrCodeFragment : BaseFragment<QrCodeFragmentPresenter, QrcodeViewBinding>(), QrCodeFragmentContract.View {
     val TAG = "QrCodeFragment"
-    val QR_CODE_PATH = "http://m.wdklian.com/care/apk/care.user?type=NCS_DEVICE&no=?"
-    var KEY_ID = ""
+    val QR_CODE_PATH = "http://m.wdklian.com/care/apk/care.user?type=NCS_DEVICE"
 
     override fun getLayId(): Int {
         return R.layout.qrcode_view
@@ -33,43 +33,71 @@ class QrCodeFragment : BaseFragment<QrCodeFragmentPresenter, QrcodeViewBinding>(
     }
 
     override fun init() {
-        tv_bed_name.text = Constant.BED_NAME
+        if (Constant.DEVICE_STATUS == 0) {
+            tv_bed_name.text = "设备未启用"
+            tv_bed_name.setTextColor(resources.getColor(R.color.red_color))
+        } else {
+            tv_bed_name.text = Constant.BED_NAME
+            tv_bed_name.setTextColor(resources.getColor(R.color.main_color))
+        }
 
         Thread{
             //val logoBitmap = BitmapFactory.decodeResource(resources, R.mipmap.erlogo)
-            val code = EcodeHelper().createQRImage(QR_CODE_PATH + KEY_ID,200, null)
+            var builder = StringBuilder()
+            builder.append(QR_CODE_PATH)
+            builder.append("&code=")
+            builder.append(Constant.DEVICE_CODE)
+            builder.append("&mac=")
+            builder.append(Constant.LOCAL_MAC)
+            builder.append("&model=")
+            builder.append(Constant.DEVICE_MODEL)
+            builder.append("&hard_ver=")
+            builder.append(Constant.DEVICE_HARD_VER)
+            builder.append("&soft_ver=")
+            builder.append(Constant.DEVICE_SOFT_VER)
+            builder.append("&device_type=")
+            builder.append(Constant.DEVICE_TYPE)
+            builder.append("&device_name=")
+            builder.append(Constant.DEVICE_NAME)
+            val code = EcodeHelper().createQRImage(builder.toString(),180, null)
             activity.runOnUiThread {
                 view_qr_code?.setImageBitmap(code)
             }
         }.start()
+
         val macAddr = NetHelper.getInstance().macAddress
         val ipAddr = NetHelper.getInstance().localIP
-        tv_device_id.text = "设备ID: " + Constant.DEVICE_ID
-        tv_local_ip.text = "本机IP: " + ipAddr
+        if (Constant.DEVICE_STATUS == 0) {
+            tv_device_id.text = "设备ID: " + Constant.DEVICE_ID + " - 未启用"
+        } else if (Constant.DEVICE_STATUS == 1) {
+            tv_device_id.text = "设备ID: " + Constant.DEVICE_ID + " - 已启用"
+        } else {
+            tv_device_id.text = "设备ID: " + Constant.DEVICE_ID
+        }
+
+        var serverIp = ""
+        if (UrlManager.build().base.length > 14) {
+            serverIp = UrlManager.build().base.substring(7, 14)
+        }
+        tv_local_ip.text = "本机IP: " + ipAddr + " - " + serverIp
         tv_local_mac.text = "本机MAC: " + macAddr
         tv_app_version.text = "版本信息: V" + BuildConfig.VERSION_NAME
+        tv_mcu_version.text = "MCU版本: " + Constant.MCU_VERSION_NUMBER
     }
 
     override fun bindEvent() {
-        //debugLog(TAG,"bindEvent")
     }
 
     override fun destory() {
-        //debugLog(TAG,"destory")
     }
 
     override fun onError(message: String, type: Int) {
-        //getUtils().dismissDialog()
-        //errorLog("error",message)
-        //showMessage(message)
     }
 
     override fun complete(message: String, type: Int) {
-        //getUtils().dismissDialog()
     }
 
     override fun start() {
-        //getUtils().showDialog()
     }
 
     override fun networkMonitor(state: NetState) {
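
For reference, the StringBuilder above yields a QR payload of the following shape (angle-bracket values are placeholders for the corresponding Constant fields):

    http://m.wdklian.com/care/apk/care.user?type=NCS_DEVICE&code=<DEVICE_CODE>&mac=<LOCAL_MAC>&model=<DEVICE_MODEL>&hard_ver=<DEVICE_HARD_VER>&soft_ver=<DEVICE_SOFT_VER>&device_type=<DEVICE_TYPE>&device_name=<DEVICE_NAME>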

+ 613 - 0
callingbed/src/main/java/com/wdkl/app/ncs/callingbed/fragment/SkyCallFragment.kt

@@ -0,0 +1,613 @@
+package com.wdkl.app.ncs.callingbed.fragment
+
+import android.os.Handler
+import android.os.Looper
+import android.os.SystemClock
+import android.text.TextUtils
+import android.util.Log
+import android.view.View
+import android.view.ViewGroup
+import com.google.gson.Gson
+import com.wdkl.app.ncs.callingbed.R
+import com.wdkl.app.ncs.callingbed.helper.RingPlayHelper
+import com.wdkl.ncs.android.lib.utils.AppTool
+import com.wdkl.ncs.android.lib.utils.showMessage
+import com.wdkl.ncs.android.middleware.common.Constant
+import com.wdkl.ncs.android.middleware.common.MessageEvent
+import com.wdkl.ncs.android.middleware.model.vo.InteractionVO
+import com.wdkl.ncs.android.middleware.tcp.channel.VideoUtil
+import com.wdkl.ncs.android.middleware.tcp.channel.VoiceUtil
+import com.wdkl.ncs.android.middleware.tcp.dto.TcpModel
+import com.wdkl.ncs.android.middleware.tcp.enums.TcpAction
+import com.wdkl.ncs.android.middleware.tcp.enums.TcpType
+import com.wdkl.skywebrtc.CallSession
+import com.wdkl.skywebrtc.EnumType
+import kotlinx.android.synthetic.main.sky_voice_call_layout.*
+import org.greenrobot.eventbus.Subscribe
+import org.greenrobot.eventbus.ThreadMode
+import org.webrtc.SurfaceViewRenderer
+import java.util.*
+
+class SkyCallFragment: BaseCallFragment(), CallSession.CallSessionCallback {
+
+    //device id of the incoming caller
+    var fromId: Int = -1
+
+    private var interactionVO: InteractionVO? = null
+
+    private var localSurfaceView: SurfaceViewRenderer? = null
+    private var remoteSurfaceView: SurfaceViewRenderer? = null
+
+    private val handler = Handler(Looper.getMainLooper())
+
+    private var visiting: Boolean = false
+
+    private var audioCall: Boolean = false
+
+    private var callEnded: Boolean = false
+
+    private var outGoing: Boolean = false
+
+    private var callSuccess: Boolean = false
+
+    override fun getLayId(): Int {
+        return R.layout.sky_voice_call_layout
+    }
+
+    override fun init() {
+        //set up the countdown timer
+        initCountDownTimer(sky_voice_call_timeout)
+        //tcp parameters
+        if (tcpModel != null) {
+            fromId = tcpModel!!.fromId
+            interactionVO = Gson().fromJson(tcpModel!!.data.toString(), InteractionVO::class.java)
+        }
+
+        when (callState) {
+            0 -> {
+                //outgoing call
+                outGoing = true
+                startOutgoing()
+                RingPlayHelper.playRingTone(baseActivity, R.raw.ring_back2, true)
+            }
+
+            1 -> {
+                //incoming call
+                outGoing = false
+                showIncomingCall()
+                RingPlayHelper.playRingTone(baseActivity, R.raw.incoming_call, true)
+            }
+
+            2 -> {
+                //family visit
+                outGoing = true
+                visiting = true
+                acceptCall()
+                //visit requests are initiated by the visitor terminal, so 'from' in every visit interaction record is the visitor terminal
+                startVisiting(interactionVO!!.fromSipId)
+            }
+        }
+    }
+
+    override fun bindEvent() {
+        //cancel an outgoing call, or hang up an active one
+        sky_voice_call_hangup.setOnClickListener {
+            RingPlayHelper.stopRingTone()
+            if (Constant.CALL_STATE == Constant.CALL_CALLING) {
+                //end the sip call
+                gEngineKit?.endCall()
+
+                Constant.CALL_STATE = Constant.CALL_STANDBY
+                sky_voice_call_timer.stop()
+                //backToMain()
+            } else {
+                Constant.CALL_STATE = Constant.CALL_STANDBY
+                VoiceUtil.cancelAudioCall(Constant.DEVICE_ID)
+                cancelCall()
+            }
+        }
+
+        //reject an incoming call
+        sky_voice_call_ring_reject.setOnClickListener {
+            RingPlayHelper.stopRingTone()
+            Constant.CALL_STATE = Constant.CALL_STANDBY
+            VoiceUtil.rejectAudioCall(Constant.DEVICE_ID, fromId, interactionVO?.id)
+            backToMain()
+        }
+
+        //answer an incoming call
+        sky_voice_call_ring_pickup_audio.setOnClickListener {
+            acceptCall()
+            RingPlayHelper.stopRingTone()
+            Constant.CALL_STATE = Constant.CALL_INCOMING
+            VoiceUtil.acceptAudioCall(Constant.DEVICE_ID, fromId, interactionVO?.id)
+        }
+    }
+
+    override fun destroy() {
+        cancelTimer()
+        Constant.CALL_STATE = Constant.CALL_STANDBY
+        if (sky_voice_call_timer != null) {
+            sky_voice_call_timer.stop()
+        }
+        RingPlayHelper.stopRingTone()
+    }
+
+    private fun startOutgoing() {
+        callSuccess = false
+        sky_voice_call_hangup.isEnabled = false
+        VoiceUtil.startAudioCall(Constant.DEVICE_ID)
+        Constant.CALL_STATE = Constant.CALL_OUTGOING
+        sky_voice_call_timeout.visibility = View.VISIBLE
+        sky_voice_call_timer.visibility = View.GONE
+        startTimer()
+
+        AppTool.Time.delay(3000) {
+            Log.d("tcp", "call success: $callSuccess")
+            if (!callSuccess) {
+                //call failed
+                showMessage("呼叫失败,服务器无响应或网络故障!")
+                RingPlayHelper.stopRingTone()
+                cancelCall()
+            }
+        }
+    }
+
+    //outgoing-call UI
+    private fun showOutgoingCall() {
+        Constant.CALL_STATE = Constant.CALL_OUTGOING
+        sky_voice_call_calling_text.text = "呼叫成功,等待接听..."
+        sky_voice_call_outgoing.visibility = View.VISIBLE
+        sky_voice_call_incoming.visibility = View.GONE
+        sky_voice_call_timeout.visibility = View.VISIBLE
+        sky_voice_call_timer.visibility = View.GONE
+        startTimer()
+    }
+
+    //incoming-call UI
+    private fun showIncomingCall() {
+        Constant.CALL_STATE = Constant.CALL_INCOMING
+        sky_voice_call_calling_text.text = "有新来电..."
+        sky_voice_call_outgoing.visibility = View.GONE
+        sky_voice_call_incoming.visibility = View.VISIBLE
+        sky_voice_call_timeout.visibility = View.GONE
+        sky_voice_call_timer.visibility = View.GONE
+        cancelTimer()
+    }
+
+    //start answering
+    private fun acceptCall() {
+        sky_voice_call_calling_text.text = "连接中..."
+        sky_voice_call_outgoing.visibility = View.VISIBLE
+        sky_voice_call_incoming.visibility = View.GONE
+        sky_voice_call_timeout.visibility = View.GONE
+        sky_voice_call_timer.visibility = View.GONE
+        cancelTimer()
+    }
+
+    //cancel the call
+    private fun cancelCall() {
+        cancelTimer()
+        Constant.CALL_STATE = Constant.CALL_STANDBY
+        if (sky_voice_call_timer != null) {
+            sky_voice_call_timer.stop()
+        }
+        backToMain()
+    }
+
+    //join an audio call
+    private fun joinAudioCall() {
+        val session = gEngineKit?.getCurrentSession()
+        if (session != null) {
+            Log.e("dds", "audio call session state: " + session.state)
+            if (session.state == EnumType.CallState.Incoming) {
+                session.joinHome(session.roomId)
+                session.toggleSpeaker(true)
+            } else if (session.state == EnumType.CallState.Idle) {
+                callEnd()
+                return
+            }
+
+            Handler().postDelayed({
+                if (session.state == EnumType.CallState.Connected){
+                    //showCalling(audioCall)
+                } else {
+                    gEngineKit?.endCall()
+                    callEnd()
+                }
+            }, 2000)
+        }
+    }
+
+    //join a video call
+    private fun joinVideoCall() {
+        val session = gEngineKit?.getCurrentSession()
+        if (session != null) {
+            Log.e("dds", "video call session state: " + session.state)
+            if (session.state == EnumType.CallState.Incoming) {
+                val surfaceView = gEngineKit!!.currentSession.setupLocalVideo(false)
+                if (surfaceView != null) {
+                    localSurfaceView = surfaceView as SurfaceViewRenderer
+                    localSurfaceView!!.setZOrderMediaOverlay(false)
+                    fullscreen_video_frame.addView(localSurfaceView)
+                }
+
+                session.joinHome(session.roomId)
+                session.toggleSpeaker(true)
+            } else if (session.state == EnumType.CallState.Idle) {
+                callEnd()
+                return
+            }
+
+            Handler().postDelayed({
+                if (session.state == EnumType.CallState.Connected){
+                    //showCalling(audioCall)
+                } else {
+                    gEngineKit?.endCall()
+                    callEnd()
+                }
+            }, 2000)
+        }
+    }
+
+    private fun showCalling(audioOnly: Boolean) {
+        if (audioOnly) {
+            //hide the video views
+            fullscreen_video_frame.visibility = View.GONE
+            pip_video_frame.visibility = View.GONE
+            ll_voice_call.visibility = View.VISIBLE
+        } else {
+            //show the video views
+            fullscreen_video_frame.visibility = View.VISIBLE
+            pip_video_frame.visibility = View.VISIBLE
+            ll_voice_call.visibility = View.GONE
+        }
+
+        //Constant.CALL_STATE = Constant.CALL_CALLING
+        sky_voice_call_calling_text.text = "通话中..."
+        sky_voice_call_timeout.visibility = View.GONE
+        cancelTimer()
+        sky_voice_call_timer.visibility = View.VISIBLE
+        sky_voice_call_timer.base = SystemClock.elapsedRealtime()
+        sky_voice_call_timer.start()
+    }
+
+    //create the call session
+    private fun startCall(targetId: String, audioOnly: Boolean): Boolean {
+        audioCall = audioOnly
+        val room = UUID.randomUUID().toString() + System.currentTimeMillis()
+        val b = gEngineKit!!.startOutCall(baseActivity, room, targetId, audioOnly)
+        if (b) {
+            val session = gEngineKit!!.currentSession
+            if (session == null) {
+                return false
+            } else {
+                Constant.CALL_STATE = Constant.CALL_CALLING
+                session.setSessionCallback(this)
+                session.toggleSpeaker(true)
+
+                //treat the call as failed if still not connected after 3s
+                Handler().postDelayed({
+                    if (session.state == EnumType.CallState.Connected){
+                        //showCalling(audioCall)
+                    } else {
+                        if (!callEnded) {
+                            showMessage("当前网络状态不佳,通话结束")
+                            gEngineKit?.endCall()
+                            callEnd()
+                        }
+                    }
+                }, 3000)
+            }
+        }
+        return b
+    }
+
+    //call ended
+    private fun callEnd() {
+        if (callEnded) {
+            return
+        }
+        callEnded = true
+
+        Log.e("dds", "call end !!!!!!!!!!!!!!!!!!")
+        if (visiting) {
+            VideoUtil.handoffVideoCall(Constant.DEVICE_ID, fromId, interactionVO?.id)
+        } else {
+            VoiceUtil.handoffAudioCall(Constant.DEVICE_ID, fromId, interactionVO?.id)
+        }
+
+        if (sky_voice_call_timer != null) {
+            sky_voice_call_timer.stop()
+        }
+        if (gEngineKit != null && gEngineKit!!.currentSession != null && gEngineKit!!.currentSession.state != EnumType.CallState.Idle) {
+            gEngineKit!!.endCall()
+        }
+        backToMain()
+    }
+
+    //family visit
+    private fun startVisiting(targetId: String) {
+        //fall back to audio-only if the device has no camera
+        if (!startCall(targetId, !Constant.supportCamera)) {
+            //call failed: reset and return to the main screen
+            Constant.CALL_STATE = Constant.CALL_STANDBY
+            VideoUtil.rejectVideoCall(Constant.DEVICE_ID, fromId, interactionVO!!.id)
+            if (sky_voice_call_timer != null) {
+                sky_voice_call_timer.stop()
+            }
+            backToMain()
+        }
+    }
+
+
+    /********************************************************
+     ****************** webrtc call callbacks ***************
+     * note: any UI update here must be posted to the main thread
+     *******************************************************/
+    override fun didChangeState(state: EnumType.CallState?) {
+        Log.e("dds", "didChangeState: " + state)
+        handler.post {
+            if (state == EnumType.CallState.Connected) {
+                //refresh the UI
+                showCalling(audioCall)
+            }
+        }
+    }
+
+    override fun didDisconnected(userId: String?) {
+        handler.post {
+            showMessage("断开连接")
+            callEnd()
+        }
+    }
+
+    override fun didError(error: String?) {
+        handler.post {
+            showMessage("通话错误")
+            callEnd()
+        }
+    }
+
+    //handle the local video view
+    override fun didCreateLocalVideoTrack() {
+        Log.e("dds", "didCreateLocalVideoTrack")
+        handler.post {
+            val session = gEngineKit!!.currentSession
+            if (session != null && !callEnded) {
+                if (localSurfaceView == null) {
+                    val surfaceView = gEngineKit!!.currentSession.setupLocalVideo(true)
+                    Log.e("dds", "didCreateLocalVideoTrack surfaceView: " + surfaceView)
+                    if (surfaceView != null) {
+                        localSurfaceView = surfaceView as SurfaceViewRenderer
+                    } else {
+                        callEnd()
+                    }
+                } else {
+                    localSurfaceView!!.setZOrderMediaOverlay(true)
+                }
+
+                if (localSurfaceView!!.parent != null) {
+                    (localSurfaceView!!.parent as ViewGroup).removeView(localSurfaceView)
+                }
+
+                if (outGoing && remoteSurfaceView == null) {
+                    if (fullscreen_video_frame != null && fullscreen_video_frame.getChildCount() != 0) {
+                        fullscreen_video_frame.removeAllViews()
+                    }
+                    fullscreen_video_frame.addView(localSurfaceView)
+                } else {
+                    if (pip_video_frame != null && pip_video_frame.getChildCount() != 0) {
+                        pip_video_frame.removeAllViews()
+                    }
+                    pip_video_frame.addView(localSurfaceView)
+                }
+            }
+        }
+    }
+
+    //handle the remote video view
+    override fun didReceiveRemoteVideoTrack(userId: String?) {
+        Log.e("dds", "didReceiveRemoteVideoTrack  userId: " + userId)
+        handler.post {
+            val session = gEngineKit!!.currentSession
+            if (session != null && !callEnded) {
+                //local view
+                if (localSurfaceView != null) {
+                    localSurfaceView!!.setZOrderMediaOverlay(true)
+                    if (outGoing) {
+                        if (localSurfaceView!!.parent != null) {
+                            (localSurfaceView!!.parent as ViewGroup).removeView(localSurfaceView)
+                        }
+                        pip_video_frame!!.addView(localSurfaceView)
+                    }
+                }
+
+                //remote view
+                val surfaceView = gEngineKit!!.currentSession.setupRemoteVideo(userId, false)
+                Log.e("dds", "didReceiveRemoteVideoTrack,surfaceView = $surfaceView")
+                if (surfaceView != null) {
+                    remoteSurfaceView = surfaceView as SurfaceViewRenderer
+                    fullscreen_video_frame.removeAllViews()
+                    if (remoteSurfaceView!!.parent != null) {
+                        (remoteSurfaceView!!.parent as ViewGroup).removeView(remoteSurfaceView)
+                    }
+                    fullscreen_video_frame.addView(remoteSurfaceView)
+                }
+            }
+        }
+    }
+
+    override fun didCallEndWithReason(callEndReason: EnumType.CallEndReason?) {
+        handler.post {
+            when (callEndReason) {
+                EnumType.CallEndReason.Busy -> {
+                    showMessage("对方忙线中")
+                }
+                EnumType.CallEndReason.AcceptByOtherClient -> {
+                    showMessage("通话中")
+                }
+                EnumType.CallEndReason.Hangup -> {
+                    showMessage("通话结束")
+                }
+                EnumType.CallEndReason.MediaError -> {
+                    showMessage("媒体错误")
+                }
+                EnumType.CallEndReason.OpenCameraFailure -> {
+                    showMessage("打开摄像头错误")
+                }
+                EnumType.CallEndReason.RemoteHangup -> {
+                    showMessage("对方挂断")
+                }
+                EnumType.CallEndReason.RemoteSignalError -> {
+                    showMessage("对方网络断开")
+                }
+                EnumType.CallEndReason.SignalError -> {
+                    showMessage("连接断开")
+                }
+                EnumType.CallEndReason.Timeout -> {
+                    showMessage("对方未接听")
+                }
+            }
+
+            callEnd()
+        }
+    }
+
+    override fun didChangeMode(isAudioOnly: Boolean) {
+        handler.post {
+            //
+        }
+    }
+
+    override fun didUserLeave(userId: String?) {
+        handler.post {
+            showMessage("通话结束")
+            callEnd()
+        }
+    }
+
+
+
+    @Subscribe(threadMode = ThreadMode.MAIN)
+    fun onMoonEvent(messageEvent: MessageEvent) {
+        when (messageEvent.type) {
+            Constant.EVENT_SIP_CALL_STATUS -> {
+                //received a sip call invitation, join the call
+                Log.e("dds", "EVENT_SIP_CALL_STATUS: " + messageEvent.message)
+                if (messageEvent.message is String) {
+                    val call = messageEvent.message as String
+                    Constant.CALL_STATE = Constant.CALL_CALLING
+                    val session = gEngineKit?.getCurrentSession()
+                    if (session != null) {
+                        session.setSessionCallback(this)
+                    }
+                    if (call.equals("video_call")) {
+                        audioCall = false
+                        Handler().postDelayed({
+                            joinVideoCall()
+                        }, 500)
+                    } else {
+                        audioCall = true
+                        Handler().postDelayed({
+                            joinAudioCall()
+                        }, 300)
+                    }
+                }
+            }
+
+            Constant.EVENT_TCP_MSG -> {
+                if (messageEvent.message is TcpModel) {
+                    val curTcpModel = messageEvent.message as TcpModel
+                    if (curTcpModel.getType() == TcpType.VOICE) {
+                        val curInteractionVO = Gson().fromJson(curTcpModel.data.toString(), InteractionVO::class.java)
+                        if (curTcpModel.getAction() == TcpAction.VoiceAction.ACCEPT) {
+                            //we called out and the other side accepted
+                            RingPlayHelper.stopRingTone()
+                            Constant.interactionId = curInteractionVO.id
+                            fromId = curTcpModel.fromId
+                            acceptCall()
+                            if (TextUtils.isEmpty(curInteractionVO.toSipId) || !startCall(curInteractionVO.toSipId, Constant.CALL_TYPE == Constant.VOICE_CALL)) {
+                                //call failed: reset and return to the main screen
+                                showMessage("通话失败,targetId为空或通话服务异常!")
+                                Constant.CALL_STATE = Constant.CALL_STANDBY
+                                VoiceUtil.rejectAudioCall(Constant.DEVICE_ID, fromId, Constant.interactionId)
+                                if (sky_voice_call_timer != null) {
+                                    sky_voice_call_timer.stop()
+                                }
+                                backToMain()
+                            }
+                        } else if (curTcpModel.getAction() == TcpAction.VoiceAction.REJECT) {
+                            //we called out and the other side rejected
+                            showMessage("对方已拒绝!")
+                            RingPlayHelper.stopRingTone()
+                            cancelCall()
+                        } else if (curTcpModel.getAction() == TcpAction.VoiceAction.CALLING) {
+                            //we called out but the other side is busy on another call
+                            showMessage("对方正在忙线中,暂时无法接听!")
+                            AppTool.Time.delay(1000) {
+                                RingPlayHelper.stopRingTone()
+                                cancelCall()
+                            }
+                        } else if (curTcpModel.getAction() == TcpAction.VoiceAction.SUCCESS) {
+                            //call placed successfully
+                            //tcpModel is null when this device initiates the call; the tcp data only becomes available once the call succeeds
+                            callSuccess = true
+                            sky_voice_call_hangup.isEnabled = true
+                            interactionVO = curInteractionVO
+                            Constant.interactionId = curInteractionVO.id
+                            showOutgoingCall()
+                        } else if (curTcpModel.getAction() == TcpAction.VoiceAction.FAILED) {
+                            //we called out but the target is not online, the device is offline, or some other error occurred
+                            callSuccess = true
+                            showMessage("呼叫失败,找不到设备或对方不在线!")
+                            RingPlayHelper.stopRingTone()
+                            cancelCall()
+                        } else if (curTcpModel.getAction() == TcpAction.VoiceAction.HANDOFF) {
+                            //the other side hung up, whether the call was outgoing or incoming
+                            if (Constant.interactionId == curInteractionVO.id) {
+                                RingPlayHelper.stopRingTone()
+                                cancelCall()
+                            }
+                        } else if (curTcpModel.getAction() == TcpAction.VoiceAction.CANCEL) {
+                            //the caller cancelled while ringing
+                            if (Constant.interactionId == curInteractionVO.id) {
+                                RingPlayHelper.stopRingTone()
+                                cancelCall()
+                            }
+                        }
+                    }
+                }
+            }
+
+            //external call button (serial port) events
+            Constant.EVENT_SERIAL_EVENT -> {
+                if (messageEvent.message is String) {
+                    val serialAction = messageEvent.message as String
+                    if (serialAction.equals("cancel")) {
+                        RingPlayHelper.stopRingTone()
+                        Constant.CALL_STATE = Constant.CALL_STANDBY
+                        VoiceUtil.cancelAudioCall(Constant.DEVICE_ID)
+                        cancelCall()
+                    } else if (serialAction.equals("accept")) {
+                        RingPlayHelper.stopRingTone()
+                        Constant.CALL_STATE = Constant.CALL_INCOMING
+                        VoiceUtil.acceptAudioCall(Constant.DEVICE_ID, fromId, interactionVO?.id)
+                        acceptCall()
+                    } else if (serialAction.equals("handoff")) {
+                        gEngineKit?.endCall()
+                        Constant.CALL_STATE = Constant.CALL_STANDBY
+                        sky_voice_call_timer.stop()
+                        backToMain()
+                    } else if (serialAction.equals("reject")) {
+                        RingPlayHelper.stopRingTone()
+                        Constant.CALL_STATE = Constant.CALL_STANDBY
+                        VoiceUtil.rejectAudioCall(Constant.DEVICE_ID, fromId, interactionVO?.id)
+                        backToMain()
+                    }
+                }
+            }
+        }
+    }
+
+}
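
A launch sketch for this fragment (the helper function and container id are hypothetical); the argument keys match what BaseCallFragment.onCreate() reads, and TcpModel travels as a Serializable just as it is read back there.

    import android.os.Bundle
    import android.support.v4.app.FragmentActivity

    fun launchSkyCall(activity: FragmentActivity, callState: Int, tcp: TcpModel?) {
        val fragment = SkyCallFragment()
        fragment.arguments = Bundle().apply {
            putInt("call_state", callState)          // 0 = outgoing, 1 = incoming, 2 = visit
            if (tcp != null) putSerializable("tcp_model", tcp)
        }
        activity.supportFragmentManager.beginTransaction()
            .add(R.id.call_container, fragment)      // hypothetical container id
            .commitAllowingStateLoss()
    }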

+ 251 - 0
callingbed/src/main/java/com/wdkl/app/ncs/callingbed/helper/AnrFcExceptionUtil.java

@@ -0,0 +1,251 @@
+package com.wdkl.app.ncs.callingbed.helper;
+
+import android.app.AlarmManager;
+import android.app.Application;
+import android.app.PendingIntent;
+import android.content.Context;
+import android.content.Intent;
+import android.os.Environment;
+import android.util.Log;
+
+import com.github.anrwatchdog.ANRError;
+import com.github.anrwatchdog.ANRWatchDog;
+import com.wdkl.ncs.android.component.welcome.activity.WelcomeActivity;
+import com.wdkl.ncs.android.middleware.api.UrlManager;
+import com.wdkl.ncs.android.middleware.common.Constant;
+import com.wdkl.skywebrtc.CallSession;
+import com.wdkl.skywebrtc.EnumType;
+import com.wdkl.skywebrtc.SkyEngineKit;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.io.Writer;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.TimeZone;
+
+import okhttp3.Call;
+import okhttp3.Callback;
+import okhttp3.FormBody;
+import okhttp3.OkHttpClient;
+import okhttp3.Request;
+import okhttp3.Response;
+
+/**
+ * Created by dengzhe on 2018/4/2.
+ * //========================= FC & ANR exception handler =========================//
+ */
+
+public class AnrFcExceptionUtil implements Thread.UncaughtExceptionHandler {
+
+    private static ANRWatchDog mANRWatchDog;
+    private Thread.UncaughtExceptionHandler mDefaultHandler;
+    public static final String TAG = "MyApplication";
+    private static Application application;
+
+    private static AnrFcExceptionUtil mAnrFcExceptionUtil;
+
+    private OkHttpClient okHttpClient;
+    private UrlManager urlManager = UrlManager.Companion.build();
+
+    /**
+     * Stores exception and parameter info
+     */
+    private Map<String, String> paramsMap = new HashMap<>();
+    /**
+     * Timestamp formatter
+     */
+    private SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss");
+
+
+    public static AnrFcExceptionUtil getInstance(Application application) {
+        if (mAnrFcExceptionUtil == null) {
+            mAnrFcExceptionUtil = new AnrFcExceptionUtil(application);
+        }
+        return mAnrFcExceptionUtil;
+    }
+
+    private AnrFcExceptionUtil(Application application) {
+        //grab the system default UncaughtException handler
+        mDefaultHandler = Thread.getDefaultUncaughtExceptionHandler();
+        this.application = application;
+    }
+
+    @Override
+    public void uncaughtException(Thread thread, Throwable ex) {
+        if (!handleException(ex) && mDefaultHandler != null) {
+            //fall back to the system default handler if we did not handle the exception
+            mDefaultHandler.uncaughtException(thread, ex);
+        } else {
+            try {
+                Thread.sleep(2000);
+            } catch (InterruptedException e) {
+                Log.e(TAG, "error : ", e);
+            }
+
+            restartApp();
+        }
+    }
+
+    private void restartApp() {
+        Constant.CALL_STATE = Constant.CALL_STANDBY;
+        CallSession session= SkyEngineKit.Instance().getCurrentSession();
+        if(session!=null&&session.getState()!= EnumType.CallState.Idle){
+            SkyEngineKit.Instance().endCall();
+        }
+
+        //restart the app
+        Intent mStartActivity = new Intent(application.getApplicationContext(), WelcomeActivity.class);
+        mStartActivity.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
+        int mPendingIntentId = 123456;
+        PendingIntent mPendingIntent = PendingIntent.getActivity(application.getApplicationContext(), mPendingIntentId, mStartActivity, PendingIntent.FLAG_CANCEL_CURRENT);
+        AlarmManager mgr = (AlarmManager) application.getApplicationContext().getSystemService(Context.ALARM_SERVICE);
+        mgr.set(AlarmManager.RTC, System.currentTimeMillis() + 1500, mPendingIntent);
+
+        android.os.Process.killProcess(android.os.Process.myPid());
+        System.exit(0);
+    }
+
+    /**
+     * Custom error handling: collecting crash info and sending the error report all happen here.
+     *
+     * @param ex
+     * @return true if the exception was handled; false otherwise.
+     */
+    private boolean handleException(Throwable ex) {
+        if (ex == null) {
+            return false;
+        }
+        //show the exception via a Toast (disabled)
+        /*new Thread() {
+            @Override
+            public void run() {
+                Looper.prepare();
+//                Toast.makeText(application.getApplicationContext(), "很抱歉,程序出现异常,即将重新启动.",
+//                        Toast.LENGTH_SHORT).show();
+                Looper.loop();
+            }
+        }.start();*/
+        saveCrashInfo2File(ex);
+        return true;
+    }
+
+    /**
+     * Save the crash info to a file
+     *
+     * @param ex
+     * @return the file name
+     */
+    private String saveCrashInfo2File(Throwable ex) {
+        StringBuffer sb = new StringBuffer();
+        for (Map.Entry<String, String> entry : paramsMap.entrySet()) {
+            String key = entry.getKey();
+            String value = entry.getValue();
+            sb.append(key + "=" + value + "\n");
+        }
+
+        Writer writer = new StringWriter();
+        PrintWriter printWriter = new PrintWriter(writer);
+        ex.printStackTrace(printWriter);
+        Throwable cause = ex.getCause();
+        while (cause != null) {
+            cause.printStackTrace(printWriter);
+            cause = cause.getCause();
+        }
+        printWriter.close();
+        String result = writer.toString();
+        sb.append(result);
+
+        try {
+            long timestamp = System.currentTimeMillis();
+            format.setTimeZone(TimeZone.getTimeZone("Asia/Shanghai"));
+            String time = format.format(new Date());
+            String fileName = "crash-" + time + "-" + timestamp + ".txt";
+            if (Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
+                String path = Environment.getExternalStorageDirectory().getAbsolutePath() + "/crash/";
+                File dir = new File(path);
+                if (!dir.exists()) {
+                    dir.mkdirs();
+                }
+                FileOutputStream fos = new FileOutputStream(path + fileName);
+                fos.write(sb.toString().getBytes());
+                Log.i(TAG, "saveCrashInfo2File: "+sb.toString());
+                fos.close();
+            }
+
+            //upload the error log
+            uploadingErrorLog(application.getPackageName(), "crash", "crash", "", sb.toString());
+
+            return fileName;
+        } catch (Exception e) {
+            Log.e(TAG, "an error occured while writing file...", e);
+        }
+        return null;
+    }
+
+    private void uploadingErrorLog(String class_name, String err_msg, String exception_name, String method_name, String stack_trace) {
+        if(okHttpClient == null){
+            okHttpClient = new OkHttpClient();
+        }
+
+        FormBody.Builder formBody = new FormBody.Builder();
+        formBody.add("class_name",class_name);
+        formBody.add("method_name",method_name);
+        formBody.add("exception_name",exception_name);
+        formBody.add("err_msg",err_msg);
+        formBody.add("stack_trace",stack_trace);
+
+        Request request  = new Request.Builder()
+                .url(urlManager.getBase() + "device/error_log")
+                .post(formBody.build())
+                .build();
+
+        okHttpClient.newCall(request).enqueue(new Callback() {
+            @Override
+            public void onFailure(Call call, IOException e) {
+                Log.e(TAG,"错误日志上传失败"+e.getMessage());
+            }
+
+            @Override
+            public void onResponse(Call call, Response response) throws IOException {
+                Log.d(TAG,"错误日志上传成功");
+                String data = response.body().string();
+                Log.d(TAG,"错误日志数据 data "+data);
+            }
+        });
+    }
+
+    /**
+     * =================================================== crash handling ===================================================
+     */
+    public void initFCException() {
+        //install this CrashHandler as the process-wide default
+        AnrFcExceptionUtil catchExcep = AnrFcExceptionUtil.getInstance(application);
+        Thread.setDefaultUncaughtExceptionHandler(catchExcep);
+        mANRWatchDog = new ANRWatchDog(8000);
+        mANRWatchDog.setInterruptionListener(new ANRWatchDog.InterruptionListener() {
+            @Override
+            public void onInterrupted(InterruptedException exception) {
+            }
+        }).setIgnoreDebugger(true).setANRListener(new ANRWatchDog.ANRListener() {
+            @Override
+            public void onAppNotResponding(ANRError error) {
+                /*Intent mStartActivity = new Intent(application.getApplicationContext(), Constants.ANR_FC);
+                int mPendingIntentId = 123456;
+                PendingIntent mPendingIntent = PendingIntent.getActivity(application.getApplicationContext(), mPendingIntentId, mStartActivity, PendingIntent.FLAG_CANCEL_CURRENT);
+                AlarmManager mgr = (AlarmManager) application.getApplicationContext().getSystemService(Context.ALARM_SERVICE);
+                mgr.set(AlarmManager.RTC, System.currentTimeMillis() + 1500, mPendingIntent);
+                android.os.Process.killProcess(android.os.Process.myPid());*/
+
+                Log.d("anr", "Anr restart app...");
+                AppUpdateHelper.reboot(application);
+            }
+        }).start();
+
+    }
+}
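
A wiring sketch: the handler is meant to be installed once at process start; the Application subclass name below is a stand-in for the app's real one.

    import android.app.Application

    class App : Application() {
        override fun onCreate() {
            super.onCreate()
            // install the uncaught-exception handler and start the 8s ANR watchdog
            AnrFcExceptionUtil.getInstance(this).initFCException()
        }
    }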

+ 35 - 0
callingbed/src/main/java/com/wdkl/app/ncs/callingbed/helper/AppUpdateHelper.java

@@ -1,6 +1,9 @@
 package com.wdkl.app.ncs.callingbed.helper;
 
+import android.app.AlarmManager;
+import android.app.PendingIntent;
 import android.content.Context;
+import android.content.Intent;
 import android.content.pm.ApplicationInfo;
 import android.content.pm.PackageInfo;
 import android.content.pm.PackageManager;
@@ -9,7 +12,10 @@ import android.os.Build;
 import android.os.Environment;
 import android.util.Log;
 
+import com.wdkl.ncs.android.component.welcome.activity.WelcomeActivity;
+
 import java.io.File;
+import java.io.PrintWriter;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.Method;
 
@@ -90,4 +96,33 @@ public class AppUpdateHelper {
         }
         return false;
     }
+
+
+    public static void reboot(Context context) {
+        try {
+            Intent intent = new Intent(Intent.ACTION_REBOOT);
+            intent.putExtra("nowait", 1);
+            intent.putExtra("interval", 1);
+            intent.putExtra("window", 0);
+            context.sendBroadcast(intent);
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        SerialPortHelper.resetDevice();
+    }
+
+    public static void restartApp(Context context) {
+        //restart the app
+        Intent mStartActivity = new Intent(context.getApplicationContext(), WelcomeActivity.class);
+        mStartActivity.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
+        int mPendingIntentId = 123456;
+        PendingIntent mPendingIntent = PendingIntent.getActivity(context.getApplicationContext(), mPendingIntentId, mStartActivity, PendingIntent.FLAG_CANCEL_CURRENT);
+        AlarmManager mgr = (AlarmManager) context.getApplicationContext().getSystemService(Context.ALARM_SERVICE);
+        mgr.set(AlarmManager.RTC, System.currentTimeMillis() + 1500, mPendingIntent);
+
+        android.os.Process.killProcess(android.os.Process.myPid());
+        System.exit(0);
+    }
+
 }
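
A usage sketch contrasting the two recovery paths (the wrapper function is illustrative): restartApp() schedules WelcomeActivity via AlarmManager and kills the process, while reboot() broadcasts ACTION_REBOOT and resets the MCU over the serial port.

    import android.content.Context

    fun recover(context: Context, fatal: Boolean) {
        if (fatal) {
            AppUpdateHelper.reboot(context)      // device-level reboot
        } else {
            AppUpdateHelper.restartApp(context)  // app-level restart
        }
    }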

+ 16 - 8
callingbed/src/main/java/com/wdkl/app/ncs/callingbed/helper/NetHelper.java

@@ -315,14 +315,6 @@ public class NetHelper {
         return result;
     }
 
-    public int getNetworkType() {
-        if (connManager != null && connManager.getActiveNetworkInfo() != null) {
-            return connManager.getActiveNetworkInfo().getType();
-        }
-
-        return -1;
-    }
-
     /**
      * 得到MAC
      *
@@ -476,4 +468,20 @@ public class NetHelper {
 
         return false;
     }
+
+    public int getNetworkType() {
+        if (connManager != null && connManager.getActiveNetworkInfo() != null) {
+            return connManager.getActiveNetworkInfo().getType();
+        }
+
+        return -1;
+    }
+
+    public boolean networkAvailable() {
+        if (connManager != null && connManager.getActiveNetworkInfo() != null) {
+            return connManager.getActiveNetworkInfo().isConnected();
+        }
+
+        return false;
+    }
 }

+ 24 - 0
callingbed/src/main/java/com/wdkl/app/ncs/callingbed/helper/RingPlayHelper.java

@@ -0,0 +1,24 @@
+package com.wdkl.app.ncs.callingbed.helper;
+
+import android.content.Context;
+import android.media.AudioManager;
+
+import com.wdkl.core.voip.AsyncPlayer;
+
+public class RingPlayHelper {
+
+    private static AsyncPlayer ringPlayer;
+
+    public static void playRingTone(Context context, int res, boolean loop) {
+        if (ringPlayer == null) {
+            ringPlayer = new AsyncPlayer(null);
+        }
+        ringPlayer.play(context, res, loop, AudioManager.STREAM_MUSIC);
+    }
+
+    public static void stopRingTone() {
+        if (ringPlayer != null) {
+            ringPlayer.stop();
+        }
+    }
+}
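
A usage sketch mirroring how SkyCallFragment drives the tones: loop the ringback while dialing, and always stop it once the call settles.

    import android.content.Context

    fun startDialing(context: Context) {
        RingPlayHelper.playRingTone(context, R.raw.ring_back2, true)  // loop until answered
    }

    fun onCallSettled() {
        RingPlayHelper.stopRingTone()
    }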

+ 81 - 0
callingbed/src/main/java/com/wdkl/app/ncs/callingbed/helper/ScreenManagerUtil.kt

@@ -0,0 +1,81 @@
+package com.wdkl.app.ncs.callingbed.helper
+
+import android.content.Context
+import android.content.Intent
+import android.net.Uri
+import android.os.Build
+import android.provider.Settings
+import android.util.Log
+
+class ScreenManagerUtil {
+
+
+    /**
+     * @return 0--255
+     */
+    fun getScreenBrightness(context: Context): Int {
+        var screenBrightness = 150
+
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+            if (!Settings.System.canWrite(context)) {
+                val intent = Intent(Settings.ACTION_MANAGE_WRITE_SETTINGS)
+                intent.data = Uri.parse("package:" + context.packageName)
+                intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK)
+                context.startActivity(intent)
+            } else {
+                //permission granted, do the actual read
+                try {
+                    screenBrightness = Settings.System.getInt(context.contentResolver,
+                            Settings.System.SCREEN_BRIGHTNESS)
+                } catch (e: Settings.SettingNotFoundException) {
+                    e.printStackTrace()
+                }
+
+            }
+        } else {
+            try {
+                screenBrightness = Settings.System.getInt(context.contentResolver,
+                        Settings.System.SCREEN_BRIGHTNESS)
+            } catch (e: Settings.SettingNotFoundException) {
+                e.printStackTrace()
+            }
+
+        }
+
+        return screenBrightness
+    }
+
+
+    /**
+     * Persist the screen brightness value and make it take effect
+     *
+     * @param paramInt 0-255
+     */
+    fun setScreenBrightness(context: Context, paramInt: Int) {
+
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+            if (!Settings.System.canWrite(context)) {
+                val intent = Intent(Settings.ACTION_MANAGE_WRITE_SETTINGS)
+                intent.data = Uri.parse("package:" + context.packageName)
+                intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK)
+                context.startActivity(intent)
+            } else {
+                //permission granted, do the actual write
+                Settings.System.putInt(context.contentResolver,
+                        Settings.System.SCREEN_BRIGHTNESS, paramInt)
+                val uri = Settings.System
+                        .getUriFor("screen_brightness")
+                Log.w("当前亮度", "当前亮度======" + getScreenBrightness(context))
+                context.contentResolver.notifyChange(uri, null)
+            }
+        } else {
+            Settings.System.putInt(context.contentResolver,
+                    Settings.System.SCREEN_BRIGHTNESS, paramInt)
+            val uri = Settings.System
+                    .getUriFor("screen_brightness")
+            Log.w("当前亮度", "当前亮度======" + getScreenBrightness(context))
+            context.contentResolver.notifyChange(uri, null)
+        }
+
+    }
+}
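
A sketch tying this helper to SettingConfig's day/night values; the percent-to-255 mapping is an assumption, since SettingConfig stores brightness as a 0-100 value while the system setting expects 0-255.

    import android.content.Context

    fun applyConfiguredBrightness(context: Context, daytime: Boolean) {
        val percent = if (daytime)
            SettingConfig.getExtensionDaytimeBrightness(context)
        else
            SettingConfig.getExtensionNightBrightness(context)
        // assumed mapping: percent (0-100) -> system brightness (0-255)
        ScreenManagerUtil().setScreenBrightness(context, percent * 255 / 100)
    }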

+ 12 - 0
callingbed/src/main/java/com/wdkl/app/ncs/callingbed/helper/SerialPortHelper.java

@@ -14,4 +14,16 @@ public class SerialPortHelper {
             SerialPortUtil.getInstance().sendCommand(SerialPortUtil.MIC, "1", "F");
         }
     }
+
+    /**
+     * Set the bathroom emergency button light: 0 = off, 1 = on, 2 = blink
+     */
+    public static void setSosLight(String state) {
+        SerialPortUtil.getInstance().sendCommand(SerialPortUtil.ULED, state, "F");
+    }
+
+    //reset the device
+    public static void resetDevice() {
+        SerialPortUtil.getInstance().sendCommand(SerialPortUtil.NET_STATUS, "1", "F");
+    }
 }
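
A usage sketch for the SOS light states documented above (the wrapper is illustrative):

    fun onBathroomAlarm(active: Boolean) {
        // "2" = blink while the alarm is active, "0" = off once it is handled
        SerialPortHelper.setSosLight(if (active) "2" else "0")
    }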

+ 17 - 0
callingbed/src/main/java/com/wdkl/app/ncs/callingbed/helper/Utils.java

@@ -0,0 +1,17 @@
+package com.wdkl.app.ncs.callingbed.helper;
+
+import android.hardware.Camera;
+
+import com.wdkl.ncs.android.middleware.common.Constant;
+
+public class Utils {
+
+    public static void checkCameraSupport() {
+        int num = Camera.getNumberOfCameras();
+        if (num > 0) {
+            Constant.supportCamera = true;
+        } else {
+            Constant.supportCamera = false;
+        }
+    }
+}
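
A startup sketch: probe the camera once, then let call code consult the flag, as SkyCallFragment.startVisiting() does when it falls back to audio-only (the wrapper is illustrative).

    fun visitMustBeAudioOnly(): Boolean {
        Utils.checkCameraSupport()      // refreshes Constant.supportCamera
        return !Constant.supportCamera  // no camera -> audio-only visits
    }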

+ 184 - 0
callingbed/src/main/java/com/wdkl/app/ncs/callingbed/helper/VoiceManagerUtil.java

@@ -0,0 +1,184 @@
+package com.wdkl.app.ncs.callingbed.helper;
+
+import android.content.Context;
+import android.media.AudioManager;
+
+/**
+ * Class name: VoiceManagerUtil <br>
+ * Description: volume control utility <br>
+ * Author: Waderson Shll (TEL:15675117662)<br>
+ * Created: 2018-03-15 <br>
+ * Note: this class may be created and called freely as needed; do not modify it without notifying the author!<br>
+ */
+public class VoiceManagerUtil {
+    /**
+     * Get the maximum alarm-stream volume
+     *
+     * @param context
+     */
+    public static int getAlarmMax(Context context) {
+        AudioManager mAudioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+        return mAudioManager.getStreamMaxVolume(AudioManager.STREAM_ALARM);
+    }
+
+    /**
+     * Get the current alarm-stream volume
+     *
+     * @param context
+     */
+    public static int getAlarmNow(Context context) {
+        AudioManager mAudioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+        return mAudioManager.getStreamVolume(AudioManager.STREAM_ALARM);
+    }
+
+    /**
+     * Get the maximum media (music) volume
+     *
+     * @param context
+     */
+    public static int getMusicMax(Context context) {
+        AudioManager mAudioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+        return mAudioManager.getStreamMaxVolume(AudioManager.STREAM_MUSIC);
+    }
+
+    /**
+     * Get the current media (music) volume
+     *
+     * @param context
+     */
+    public static int getMusicNow(Context context) {
+        AudioManager mAudioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+        return mAudioManager.getStreamVolume(AudioManager.STREAM_MUSIC);
+    }
+
+    /**
+     * Get the maximum ring volume
+     *
+     * @param context
+     */
+    public static int getRingMax(Context context) {
+        AudioManager mAudioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+        return mAudioManager.getStreamMaxVolume(AudioManager.STREAM_RING);
+    }
+
+    /**
+     * Get the current ring volume
+     *
+     * @param context
+     */
+    public static int getRingNow(Context context) {
+        AudioManager mAudioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+        return mAudioManager.getStreamVolume(AudioManager.STREAM_RING);
+    }
+
+    /**
+     * Get the maximum system volume
+     *
+     * @param context
+     */
+    public static int getSystemMax(Context context) {
+        AudioManager mAudioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+        return mAudioManager.getStreamMaxVolume(AudioManager.STREAM_SYSTEM);
+    }
+
+    /**
+     * Get the current system volume
+     *
+     * @param context
+     */
+    public static int getSystemNow(Context context) {
+        AudioManager mAudioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+        return mAudioManager.getStreamVolume(AudioManager.STREAM_SYSTEM);
+    }
+
+    /**
+     * Get the maximum voice-call volume
+     *
+     * @param context
+     */
+    public static int getCallMax(Context context) {
+        AudioManager mAudioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+        return mAudioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL);
+    }
+
+    /**
+     * Get the current voice-call volume
+     *
+     * @param context
+     */
+    public static int getCallNow(Context context) {
+        AudioManager mAudioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+        return mAudioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL);
+    }
+
+    /**
+     * Set the alarm-stream volume
+     *
+     * @param context
+     * @param percent (percentage, must be between 0 and 100)
+     */
+    public static void setAlarmVoice(Context context, int percent) {
+        float vPercent=((float)percent)/100f;
+        vPercent = vPercent < 0 ? 0 : vPercent;
+        vPercent = vPercent > 1 ? 1 : vPercent;
+        AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+        audioManager.setStreamVolume(AudioManager.STREAM_ALARM, (int) (getAlarmMax(context) * vPercent), 0);
+    }
+
+    /**
+     * Set the media (music) volume
+     *
+     * @param context
+     * @param percent (percentage, must be between 0 and 100)
+     */
+    public static void setMusicVoice(Context context, int percent) {
+        float vPercent=((float)percent)/100f;
+        vPercent = vPercent < 0 ? 0 : vPercent;
+        vPercent = vPercent > 1 ? 1 : vPercent;
+        AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+        audioManager.setStreamVolume(AudioManager.STREAM_MUSIC, (int) (getMusicMax(context) * vPercent), 0);
+    }
+
+    /**
+     * Set the ring volume
+     *
+     * @param context
+     * @param percent (percentage, must be between 0 and 100)
+     */
+    public static void setRingVoice(Context context, int percent) {
+        float vPercent=((float)percent)/100f;
+        vPercent = vPercent < 0 ? 0 : vPercent;
+        vPercent = vPercent > 1 ? 1 : vPercent;
+        AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+        audioManager.setStreamVolume(AudioManager.STREAM_RING, (int) (getRingMax(context) * vPercent), 0);
+    }
+
+    /**
+     * Set the system volume
+     *
+     * @param context
+     * @param percent (percentage, must be between 0 and 100)
+     */
+    public static void setSystemVoice(Context context, int percent) {
+        float vPercent=((float)percent)/100f;
+        vPercent = vPercent < 0 ? 0 : vPercent;
+        vPercent = vPercent > 1 ? 1 : vPercent;
+        AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+        audioManager.setStreamVolume(AudioManager.STREAM_SYSTEM, (int) (getSystemMax(context) * vPercent), 0);
+    }
+
+    /**
+     * Set the voice-call volume
+     *
+     * @param context
+     * @param percent (percentage, must be between 0 and 100)
+     */
+    public static void setCallVoice(Context context, int percent) {
+        float vPercent=((float)percent)/100f;
+        vPercent = vPercent < 0 ? 0 : vPercent;
+        vPercent = vPercent > 1 ? 1 : vPercent;
+        AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+        audioManager.setStreamVolume(AudioManager.STREAM_VOICE_CALL, (int) (getCallMax(context) * vPercent), 0);
+    }
+
+}
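
A usage sketch with the defaults that SettingConfig (below) ships for night mode; every setter clamps the percent to 0-100 before scaling it against the stream maximum.

    import android.content.Context

    fun applyNightVolumes(context: Context) {
        VoiceManagerUtil.setCallVoice(context, 100)   // extension_call_volume default
        VoiceManagerUtil.setSystemVoice(context, 70)  // extension_night_system_volume default
    }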

+ 51 - 0
callingbed/src/main/java/com/wdkl/app/ncs/callingbed/helper/WarningDialogHelper.java

@@ -0,0 +1,51 @@
+package com.wdkl.app.ncs.callingbed.helper;
+
+import android.app.Activity;
+import android.app.AlertDialog;
+import android.view.Gravity;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.Window;
+import android.view.WindowManager;
+import android.widget.Button;
+
+import com.wdkl.app.ncs.callingbed.R;
+
+
+public class WarningDialogHelper {
+    private static AlertDialog dialog;
+
+    public static void showDialog(final Activity activity) {
+        if (dialog != null && dialog.isShowing()) {
+            return;
+        }
+
+        View contentView = LayoutInflater.from(activity).inflate(R.layout.warning_dialog_lay, null);
+        AlertDialog.Builder builder = new AlertDialog.Builder(activity);
+        builder.setView(contentView);
+        Button button = contentView.findViewById(R.id.cancel_button);
+        button.setOnClickListener(new View.OnClickListener() {
+            @Override
+            public void onClick(View v) {
+                AppUpdateHelper.reboot(activity);
+            }
+        });
+
+        dialog = builder.create();
+        //dialog.setCanceledOnTouchOutside(false);
+        //dialog.setCancelable(false);
+        dialog.show();
+
+        //set the dialog size and position
+        try {
+            Window window = dialog.getWindow();
+            WindowManager.LayoutParams lp = window.getAttributes();
+            lp.width = 600;
+            lp.height = 240;
+            lp.gravity = Gravity.CENTER;
+            window.setAttributes(lp);
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+}

+ 354 - 0
callingbed/src/main/java/com/wdkl/app/ncs/callingbed/settings/SettingConfig.java

@@ -0,0 +1,354 @@
+package com.wdkl.app.ncs.callingbed.settings;
+
+import android.content.Context;
+import android.content.SharedPreferences;
+
+
+public class SettingConfig {
+
+
+    private static final String SP_NAME = "SP_BED_FUNCTION";
+
+    //day/night schedule: start of daytime
+    private static final String KEY_SP_INITIAL_DAY_TIME = "KEY_SP_INITIAL_DAY_TIME";
+    private static final String initial_day_time = "07:00";
+    private static final String KEY_SP_INITIAL_DAY_TIME_LOCATION = "KEY_SP_INITIAL_DAY_TIME_LOCATION";
+    private static final String initial_day_time_location = "12";
+
+
+    // Day/night schedule: daytime end time
+    private static final String KEY_SP_END_OF_DAY = "KEY_SP_END_OF_DAY";
+    private static final String end_of_day = "19:00";
+    // Day/night schedule: daytime end time position
+    private static final String KEY_SP_END_OF_DAY_LOCATION = "KEY_SP_END_OF_DAY_LOCATION";
+    private static final String end_of_day_location = "38";
+
+
+    // Extension daytime screen brightness
+    private static final String KEY_SP_EXTENSION_DAYTIME_BRIGHTNESS = "KEY_SP_EXTENSION_DAYTIME_BRIGHTNESS";
+    private static final int extension_daytime_brightness = 80;
+    // Extension night screen brightness
+    private static final String KEY_SP_EXTENSION_NIGHT_BRIGHTNESS = "KEY_SP_EXTENSION_NIGHT_BRIGHTNESS";
+    private static final int extension_night_brightness = 50;
+
+    // Extension daytime LED brightness
+    private static final String KEY_SP_EXTENSION_DAYTIME_LED_BRIGHTNESS = "KEY_SP_EXTENSION_DAYTIME_LED_BRIGHTNESS";
+    private static final int extension_daytime_led_brightness = 80;
+    // Extension night LED brightness
+    private static final String KEY_SP_EXTENSION_NIGHT_LED_BRIGHTNESS = "KEY_SP_EXTENSION_NIGHT_LED_BRIGHTNESS";
+    private static final int extension_night_led_brightness = 50;
+
+    // Extension daytime system volume
+    private static final String KEY_SP_EXTENSION_DAYTIME_SYSTEM_VOLUME = "KEY_SP_EXTENSION_DAYTIME_SYSTEM_VOLUME";
+    private static final int extension_daytime_system_volume = 100;
+    // Extension night system volume
+    private static final String KEY_SP_EXTENSION_NIGHT_SYSTEM_VOLUME = "KEY_SP_EXTENSION_NIGHT_SYSTEM_VOLUME";
+    private static final int extension_night_system_volume = 70;
+
+    // Extension handset recording (mic) volume
+    private static final String KEY_SP_THE_EXTENSION_HANDLR_RECORDS_THE_VOLUME = "KEY_SP_THE_EXTENSION_HANDLR_RECORDS_THE_VOLUME";
+    private static final int the_extension_handle_records_the_volume = 50;
+
+    // Extension call volume
+    private static final String KEY_SP_EXTENSION_CALL_VOLUME = "KEY_SP_EXTENSION_CALL_VOLUME";
+    private static final int extension_call_volume = 100;
+
+    // Call timeout (seconds)
+    private static final String KEY_SP_SIP_OVERTIME = "KEY_SP_SIP_OVERTIME";
+    private static final int sip_over_time = 30;
+
+    // Screen-off timeout (seconds)
+    private static final String KEY_SP_SLEEP_TIME = "KEY_SP_SLEEP_TIME";
+    private static final int sleep_time = 30;
+
+    // Reboot count after network errors
+    private static final String KEY_SP_NET_ERR_RESET_COUNT = "KEY_SP_NET_ERR_RESET_COUNT";
+
+    /**
+     * Get the extension's daytime screen brightness
+     *
+     * @return
+     */
+    public static int getExtensionDaytimeBrightness(Context context) {
+        return getSP(context).getInt(KEY_SP_EXTENSION_DAYTIME_BRIGHTNESS, extension_daytime_brightness);
+    }
+
+    /**
+     * Set the extension's daytime screen brightness
+     *
+     * @param value
+     */
+    public static void setExtensionDaytimeBrightness(Context context, int value) {
+        getEditor(context).putInt(KEY_SP_EXTENSION_DAYTIME_BRIGHTNESS, value).apply();
+    }
+
+    /**
+     * Get the extension's night screen brightness
+     *
+     * @return
+     */
+    public static int getExtensionNightBrightness(Context context) {
+        return getSP(context).getInt(KEY_SP_EXTENSION_NIGHT_BRIGHTNESS, extension_night_brightness);
+    }
+
+    /**
+     * Set the extension's night screen brightness
+     *
+     * @param value
+     */
+    public static void setExtensionNightBrightness(Context context, int value) {
+        getEditor(context).putInt(KEY_SP_EXTENSION_NIGHT_BRIGHTNESS, value).apply();
+    }
+
+    /**
+     * Get the extension's daytime LED brightness
+     *
+     * @return
+     */
+    public static int getExtensionDaytimeLEDBrightness(Context context) {
+        return getSP(context).getInt(KEY_SP_EXTENSION_DAYTIME_LED_BRIGHTNESS, extension_daytime_led_brightness);
+    }
+
+    /**
+     * Set the extension's daytime LED brightness
+     *
+     * @param value
+     */
+    public static void setExtensionDaytimeLEDBrightness(Context context, int value) {
+        getEditor(context).putInt(KEY_SP_EXTENSION_DAYTIME_LED_BRIGHTNESS, value).apply();
+    }
+
+    /**
+     * Get the extension's night LED brightness
+     *
+     * @return
+     */
+    public static int getExtensionNightLEDBrightness(Context context) {
+        return getSP(context).getInt(KEY_SP_EXTENSION_NIGHT_LED_BRIGHTNESS, extension_night_led_brightness);
+    }
+
+    /**
+     * Set the extension's night LED brightness
+     *
+     * @param value
+     */
+    public static void setExtensionNightLEDBrightness(Context context, int value) {
+        getEditor(context).putInt(KEY_SP_EXTENSION_NIGHT_LED_BRIGHTNESS, value).apply();
+    }
+
+
+    /**
+     * Get the extension's daytime system volume
+     *
+     * @return
+     */
+    public static int getExtensionDaytimeSystemVolume(Context context) {
+        return getSP(context).getInt(KEY_SP_EXTENSION_DAYTIME_SYSTEM_VOLUME, extension_daytime_system_volume);
+    }
+
+    /**
+     * Set the extension's daytime system volume
+     *
+     * @param value
+     */
+    public static void setExtensionDaytimeSystemVolume(Context context, int value) {
+        getEditor(context).putInt(KEY_SP_EXTENSION_DAYTIME_SYSTEM_VOLUME, value).apply();
+    }
+
+    /**
+     * Get the extension's night system volume
+     *
+     * @return
+     */
+    public static int getExtensionNightSystemVolume(Context context) {
+        return getSP(context).getInt(KEY_SP_EXTENSION_NIGHT_SYSTEM_VOLUME, extension_night_system_volume);
+    }
+
+    /**
+     * Set the extension's night system volume
+     *
+     * @param value
+     */
+    public static void setExtensionNightSystemVolume(Context context, int value) {
+        getEditor(context).putInt(KEY_SP_EXTENSION_NIGHT_SYSTEM_VOLUME, value).apply();
+    }
+
+
+
+    /**
+     * Get the extension handset recording volume
+     *
+     * @return
+     */
+    public static int getTheExtensionHandleRecordsTheVolume(Context context) {
+        return getSP(context).getInt(KEY_SP_THE_EXTENSION_HANDLR_RECORDS_THE_VOLUME, the_extension_handle_records_the_volume);
+    }
+
+    /**
+     * Set the extension handset recording volume
+     *
+     * @param value
+     */
+    public static void setTheExtensionHandleRecordsTheVolume(Context context, int value) {
+        getEditor(context).putInt(KEY_SP_THE_EXTENSION_HANDLR_RECORDS_THE_VOLUME, value).apply();
+    }
+
+
+    /**
+     * Get the extension call volume
+     *
+     * @return
+     */
+    public static int getExtensionCallVolume(Context context) {
+        return getSP(context).getInt(KEY_SP_EXTENSION_CALL_VOLUME, extension_call_volume);
+    }
+
+    /**
+     * Set the extension call volume
+     *
+     * @param value
+     */
+    public static void setExtensionCallVolume(Context context, int value) {
+        getEditor(context).putInt(KEY_SP_EXTENSION_CALL_VOLUME, value).apply();
+    }
+
+
+    /**
+     * Get the daytime start time of the day/night schedule
+     *
+     * @return
+     */
+    public static String getInitialDayTime(Context context) {
+        return getSP(context).getString(KEY_SP_INITIAL_DAY_TIME, initial_day_time);
+    }
+
+    /**
+     * Set the daytime start time of the day/night schedule
+     *
+     * @param value
+     */
+    public static void setInitialDayTime(Context context, String value) {
+        getEditor(context).putString(KEY_SP_INITIAL_DAY_TIME, value).apply();
+    }
+
+    /**
+     * Get the stored position of the daytime start time
+     *
+     * @return
+     */
+    public static String getInitialDayTimeLocation(Context context) {
+        return getSP(context).getString(KEY_SP_INITIAL_DAY_TIME_LOCATION, initial_day_time_location);
+    }
+
+    /**
+     * Set the stored position of the daytime start time
+     *
+     * @param value
+     */
+    public static void setInitialDayTimeLocation(Context context, String value) {
+        getEditor(context).putString(KEY_SP_INITIAL_DAY_TIME_LOCATION, value).apply();
+    }
+
+
+
+    /**
+     * Set the daytime end time of the day/night schedule
+     *
+     * @param value
+     */
+    public static void setEndOfDay(Context context, String value) {
+        getEditor(context).putString(KEY_SP_END_OF_DAY, value).apply();
+    }
+
+    /**
+     * Get the daytime end time of the day/night schedule
+     *
+     * @return
+     */
+    public static String getEndOfDay(Context context) {
+        return getSP(context).getString(KEY_SP_END_OF_DAY, end_of_day);
+    }
+
+    /**
+     * Set the stored position of the daytime end time
+     *
+     * @param value
+     */
+    public static void setEndOfDayLocation(Context context, String value) {
+        getEditor(context).putString(KEY_SP_END_OF_DAY_LOCATION, value).apply();
+    }
+
+    /**
+     * Get the stored position of the daytime end time
+     *
+     * @return
+     */
+    public static String getEndOfDayLocation(Context context) {
+        return getSP(context).getString(KEY_SP_END_OF_DAY_LOCATION, end_of_day_location);
+    }
+
+    /**
+     * Set the call timeout (seconds)
+     *
+     * @param value
+     */
+    public static void setSipOverTime(Context context, int value) {
+        getEditor(context).putInt(KEY_SP_SIP_OVERTIME, value).apply();
+    }
+
+    /**
+     * Get the call timeout (seconds)
+     *
+     * @return
+     */
+    public static int getSipOverTime(Context context) {
+        return getSP(context).getInt(KEY_SP_SIP_OVERTIME, sip_over_time);
+    }
+
+    /**
+     * Set the screen-off timeout
+     *
+     * @param value
+     */
+    public static void setSleepTime(Context context, int value) {
+        getEditor(context).putInt(KEY_SP_SLEEP_TIME, value).apply();
+    }
+
+    /**
+     * Get the screen-off timeout
+     *
+     * @return
+     */
+    public static int getSleepTime(Context context) {
+        return getSP(context).getInt(KEY_SP_SLEEP_TIME, sleep_time);
+    }
+
+    /**
+     * Get the reboot count caused by network errors
+     *
+     * @return
+     */
+    public static int getNetErrResetCount(Context context) {
+        return getSP(context).getInt(KEY_SP_NET_ERR_RESET_COUNT, 0);
+    }
+
+    /**
+     * Set the reboot count caused by network errors
+     *
+     * @param value
+     */
+    public static void setNetErrResetCount(Context context, int value) {
+        getEditor(context).putInt(KEY_SP_NET_ERR_RESET_COUNT, value).apply();
+    }
+
+
+    private static SharedPreferences getSP(Context context) {
+        return context.getSharedPreferences(SP_NAME, Context.MODE_PRIVATE);
+    }
+
+    private static SharedPreferences.Editor getEditor(Context context) {
+        return getSP(context).edit();
+    }
+
+
+}
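SettingConfig only persists values; classifying "now" as day or night is left to callers. A sketch under the assumption that the stored times are zero-padded HH:mm strings (so plain string comparison orders them correctly), reusing TimeHandle.getDateTime from this commit:

import android.content.Context
import com.wdkl.app.ncs.callingbed.settings.SettingConfig
import com.wdkl.ncs.android.lib.utils.TimeHandle

// Zero-padded "HH:mm" strings sort lexicographically in time order, so a
// simple range check classifies the current time as day or night.
fun isDaytime(context: Context): Boolean {
    val now = TimeHandle.getDateTime("HH:mm")
    return now >= SettingConfig.getInitialDayTime(context) && now < SettingConfig.getEndOfDay(context)
}

// Pick the screen brightness matching the current day/night window.
fun currentBrightness(context: Context): Int =
        if (isDaytime(context)) SettingConfig.getExtensionDaytimeBrightness(context)
        else SettingConfig.getExtensionNightBrightness(context)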

callingbed/src/main/java/com/wdkl/app/ncs/callingbed/sip/SipHelper.java → callingbed/src/main/java/com/wdkl/app/ncs/callingbed/sip/SipHelper.java.bak


+ 7 - 0
callingbed/src/main/res/anim/slide_down_in.xml

@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="utf-8"?>
+<set xmlns:android="http://schemas.android.com/apk/res/android">
+    <translate
+        android:fromYDelta="100%p"
+        android:toYDelta="0%p"
+        android:duration="300"/>
+</set>

+ 7 - 0
callingbed/src/main/res/anim/slide_left_in.xml

@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="utf-8"?>
+<set xmlns:android="http://schemas.android.com/apk/res/android">
+    <translate
+        android:fromXDelta="-100%p"
+        android:toXDelta="0%p"
+        android:duration="300"/>
+</set>

+ 7 - 0
callingbed/src/main/res/anim/slide_right_out.xml

@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="utf-8"?>
+<set xmlns:android="http://schemas.android.com/apk/res/android">
+    <translate
+        android:fromXDelta="0%p"
+        android:toXDelta="100%p"
+        android:duration="300"/>
+</set>

+ 7 - 0
callingbed/src/main/res/anim/slide_up_out.xml

@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="utf-8"?>
+<set xmlns:android="http://schemas.android.com/apk/res/android">
+    <translate
+        android:fromYDelta="0%p"
+        android:toYDelta="-100%p"
+        android:duration="300"/>
+</set>
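The four animations pair up as enter/exit sets for fragment transitions. A sketch of wiring them into a transaction; the fragment and container id are placeholders, since this commit does not show the actual call site:

import android.support.v4.app.Fragment
import android.support.v4.app.FragmentActivity
import com.wdkl.app.ncs.callingbed.R

// Placeholder wiring: the new fragment slides in from the left while the old
// one slides out to the right; popping the back stack uses the vertical pair.
fun showWithSlide(activity: FragmentActivity, fragment: Fragment, containerId: Int) {
    activity.supportFragmentManager.beginTransaction()
            .setCustomAnimations(R.anim.slide_left_in, R.anim.slide_right_out,
                    R.anim.slide_down_in, R.anim.slide_up_out)
            .replace(containerId, fragment)
            .commitAllowingStateLoss()
}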

BIN
callingbed/src/main/res/drawable/ic_answer_normal.png


BIN
callingbed/src/main/res/drawable/ic_answer_press.png


BIN
callingbed/src/main/res/drawable/ic_hangup_normal.png


BIN
callingbed/src/main/res/drawable/ic_hangup_press.png


BIN
callingbed/src/main/res/drawable/ic_nurse.png


+ 5 - 0
callingbed/src/main/res/drawable/selector_call_answer.xml

@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<selector xmlns:android="http://schemas.android.com/apk/res/android">
+    <item android:drawable="@drawable/ic_answer_press" android:state_pressed="true"/>
+    <item android:drawable="@drawable/ic_answer_normal"/>
+</selector>

+ 5 - 0
callingbed/src/main/res/drawable/selector_call_hangup.xml

@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<selector xmlns:android="http://schemas.android.com/apk/res/android">
+    <item android:drawable="@drawable/ic_hangup_press" android:state_pressed="true"/>
+    <item android:drawable="@drawable/ic_hangup_normal"/>
+</selector>

+ 6 - 0
callingbed/src/main/res/layout/callingbed_main_lay.xml

@@ -37,6 +37,12 @@
             android:layout_toRightOf="@id/rl_left_view"
             android:layout_below="@id/activity_calling_bed_layout_title" />
 
+        <!-- Call screen container -->
+        <FrameLayout
+            android:id="@+id/call_frame"
+            android:layout_width="match_parent"
+            android:layout_height="match_parent" />
+
         <!-- Call status display -->
         <LinearLayout
             android:id="@+id/ll_call_view"

+ 13 - 4
callingbed/src/main/res/layout/qrcode_view.xml

@@ -21,7 +21,6 @@
                 android:id="@+id/view_qr_code"
                 android:layout_width="180dp"
                 android:layout_height="180dp"
-                android:layout_marginTop="10dp"
                 android:layout_gravity="center_horizontal"/>
 
             <TextView
@@ -37,7 +36,7 @@
                 android:id="@+id/tv_local_ip"
                 android:layout_width="wrap_content"
                 android:layout_height="wrap_content"
-                android:layout_marginTop="10dp"
+                android:layout_marginTop="4dp"
                 android:layout_marginLeft="40dp"
                 android:text="本机IP:"
                 android:textColor="@color/black"
@@ -47,7 +46,7 @@
                 android:id="@+id/tv_local_mac"
                 android:layout_width="wrap_content"
                 android:layout_height="wrap_content"
-                android:layout_marginTop="10dp"
+                android:layout_marginTop="4dp"
                 android:layout_marginLeft="40dp"
                 android:text="本机MAC:"
                 android:textColor="@color/black"
@@ -57,11 +56,21 @@
                 android:id="@+id/tv_app_version"
                 android:layout_width="wrap_content"
                 android:layout_height="wrap_content"
-                android:layout_marginTop="10dp"
+                android:layout_marginTop="4dp"
                 android:layout_marginLeft="40dp"
                 android:text="版本信息:"
                 android:textColor="@color/black"
                 android:textSize="20sp" />
+
+            <TextView
+                android:id="@+id/tv_mcu_version"
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:layout_marginTop="4dp"
+                android:layout_marginLeft="40dp"
+                android:text="MCU:"
+                android:textColor="@color/black"
+                android:textSize="20sp" />
         </LinearLayout>
 
         <LinearLayout

+ 134 - 0
callingbed/src/main/res/layout/sky_voice_call_layout.xml

@@ -0,0 +1,134 @@
+<?xml version="1.0" encoding="utf-8"?>
+<layout>
+    <FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        android:background="@android:color/background_dark">
+        <!-- Full-screen video surface -->
+        <FrameLayout
+            android:id="@+id/fullscreen_video_frame"
+            android:layout_width="match_parent"
+            android:layout_height="match_parent"
+            android:layout_gravity="center" />
+
+        <!-- Picture-in-picture video surface -->
+        <FrameLayout
+            android:id="@+id/pip_video_frame"
+            android:layout_width="140dp"
+            android:layout_height="180dp"
+            android:layout_gravity="top|end"
+            android:layout_marginHorizontal="10dp"
+            android:layout_marginTop="10dp" />
+
+        <RelativeLayout
+            android:layout_width="match_parent"
+            android:layout_height="match_parent">
+
+            <!-- Voice-call layout -->
+            <LinearLayout
+                android:id="@+id/ll_voice_call"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:gravity="center_horizontal"
+                android:orientation="vertical">
+
+                <ImageView
+                    android:id="@+id/sky_voice_call_head_img"
+                    android:layout_width="100dp"
+                    android:layout_height="100dp"
+                    android:layout_marginTop="40dp"
+                    android:scaleType="centerInside"
+                    android:src="@drawable/ic_nurse" />
+
+                <TextView
+                    android:id="@+id/sky_voice_call_calling_text"
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:layout_marginTop="20dp"
+                    android:gravity="center"
+                    android:text="正在呼叫..."
+                    android:textColor="#9E9E9F"
+                    android:textSize="32sp" />
+
+                <TextView
+                    android:id="@+id/sky_voice_call_timeout"
+                    android:layout_width="wrap_content"
+                    android:layout_height="wrap_content"
+                    android:layout_marginTop="20dp"
+                    android:gravity="center"
+                    android:text="倒计时: 00"
+                    android:textColor="#9E9E9F"
+                    android:textSize="24sp" />
+            </LinearLayout>
+
+            <!-- Outgoing-call controls -->
+            <LinearLayout
+                android:id="@+id/sky_voice_call_outgoing"
+                android:layout_width="wrap_content"
+                android:layout_height="wrap_content"
+                android:layout_alignParentBottom="true"
+                android:layout_centerHorizontal="true"
+                android:layout_marginBottom="40dp"
+                android:gravity="center_horizontal"
+                android:orientation="vertical"
+                android:visibility="visible">
+
+                <Chronometer
+                    android:id="@+id/sky_voice_call_timer"
+                    android:layout_width="match_parent"
+                    android:layout_height="wrap_content"
+                    android:gravity="center"
+                    android:text="00:00"
+                    android:textColor="@color/white"
+                    android:textSize="24sp" />
+
+                <ImageView
+                    android:id="@+id/sky_voice_call_hangup"
+                    android:layout_width="100dp"
+                    android:layout_height="100dp"
+                    android:layout_marginTop="20dp"
+                    android:src="@drawable/selector_call_hangup" />
+            </LinearLayout>
+
+            <!-- Incoming-call controls -->
+            <LinearLayout
+                android:id="@+id/sky_voice_call_incoming"
+                android:layout_width="match_parent"
+                android:layout_height="wrap_content"
+                android:layout_alignParentBottom="true"
+                android:layout_centerHorizontal="true"
+                android:layout_marginBottom="40dp"
+                android:gravity="bottom"
+                android:orientation="horizontal"
+                android:visibility="gone">
+
+                <View
+                    android:layout_width="0dp"
+                    android:layout_height="1dp"
+                    android:layout_weight="2" />
+
+                <ImageView
+                    android:id="@+id/sky_voice_call_ring_reject"
+                    android:layout_width="100dp"
+                    android:layout_height="100dp"
+                    android:src="@drawable/selector_call_hangup" />
+
+                <View
+                    android:layout_width="0dp"
+                    android:layout_height="1dp"
+                    android:layout_weight="1" />
+
+                <ImageView
+                    android:id="@+id/sky_voice_call_ring_pickup_audio"
+                    android:layout_width="100dp"
+                    android:layout_height="100dp"
+                    android:src="@drawable/selector_call_answer" />
+
+                <View
+                    android:layout_width="0dp"
+                    android:layout_height="1dp"
+                    android:layout_weight="2" />
+            </LinearLayout>
+        </RelativeLayout>
+    </FrameLayout>
+</layout>
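The layout keeps both control rows in the view tree: sky_voice_call_outgoing starts visible and sky_voice_call_incoming starts gone, so call-state changes reduce to visibility toggles. A sketch of the flip on an incoming call (plain findViewById here; the generated data-binding class would work equally well):

import android.view.View
import com.wdkl.app.ncs.callingbed.R

// On an incoming call: swap the hang-up row for the reject/answer row.
fun showIncomingControls(root: View) {
    root.findViewById<View>(R.id.sky_voice_call_outgoing).visibility = View.GONE
    root.findViewById<View>(R.id.sky_voice_call_incoming).visibility = View.VISIBLE
}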

+ 4 - 4
callingbed/src/main/res/layout/view_title_layout.xml

@@ -27,14 +27,14 @@
         android:textSize="@dimen/font_size_20" />
 
     <!-- Date and time -->
-    <TextView
-        android:id="@+id/view_title_layout_tv_time"
+    <TextClock
         android:layout_width="wrap_content"
         android:layout_height="wrap_content"
         android:layout_centerHorizontal="true"
         android:layout_centerVertical="true"
-        android:layout_marginRight="5dp"
-        android:text="--"
+        android:format12Hour="yyyy-MM-dd HH:mm:ss EEEE"
+        android:format24Hour="yyyy-MM-dd HH:mm:ss EEEE"
+        android:timeZone="GMT+8"
         android:textColor="@color/main_color"
         android:textSize="@dimen/font_size_20" />
 

+ 27 - 0
callingbed/src/main/res/layout/warning_dialog_lay.xml

@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout
+    xmlns:android="http://schemas.android.com/apk/res/android"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent"
+    android:gravity="center"
+    android:orientation="vertical">
+
+    <TextView
+        android:id="@+id/warnig_text"
+        android:layout_width="match_parent"
+        android:layout_height="wrap_content"
+        android:layout_marginTop="20dp"
+        android:padding="8dp"
+        android:text="设备网络出现异常,请检查网络或尝试断电重启!"
+        android:textColor="@color/red_color"
+        android:textSize="32sp"
+        android:gravity="center" />
+
+    <Button
+        android:id="@+id/cancel_button"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:layout_marginTop="20dp"
+        android:padding="8dp"
+        android:text="确定" />
+</LinearLayout>

BIN
callingbed/src/main/res/raw/incoming_call.mp3


BIN
callingbed/src/main/res/raw/ring_back2.wav


+ 6 - 0
common/build.gradle

@@ -38,6 +38,12 @@ android {
 //        main.jni.srcDirs = []
 //        main.jniLibs.srcDir 'src/main/libs'
     }
+
+    compileOptions {
+        sourceCompatibility JavaVersion.VERSION_1_8
+        targetCompatibility JavaVersion.VERSION_1_8
+    }
+
 }
 
 dependencies {

+ 16 - 3
common/src/main/code/com/wdkl/ncs/android/lib/base/BaseActivity.kt

@@ -57,6 +57,13 @@ abstract class BaseActivity<PresenterType : BaseContract.BasePresenter, DataBind
      */
     protected val disposableManager by lazy { DisposableManager() }
 
+    private val FULL_SCREEN_FLAG = (
+            View.SYSTEM_UI_FLAG_LAYOUT_STABLE
+                    or View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
+                    or View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
+                    or View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY
+                    or View.SYSTEM_UI_FLAG_FULLSCREEN)
+
     /**
      * @author  LDD
      * @From   com.wdkl.ncs.android.lib.base.BaseActivity
@@ -71,10 +78,12 @@ abstract class BaseActivity<PresenterType : BaseContract.BasePresenter, DataBind
         }
         /** Superclass initialization */
         super.onCreate(savedInstanceState)
-        // Full-screen display
+        window.addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN or
+                WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON or
+                WindowManager.LayoutParams.FLAG_SHOW_WHEN_LOCKED or
+                WindowManager.LayoutParams.FLAG_TURN_SCREEN_ON)
+
         requestWindowFeature(Window.FEATURE_NO_TITLE)
-                window.setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
-                WindowManager.LayoutParams.FLAG_FULLSCREEN)
 
         /** Run lifecycle callbacks */
         lifeCycleDo(LIFE_CYCLE_CREATE)
@@ -84,6 +93,10 @@ abstract class BaseActivity<PresenterType : BaseContract.BasePresenter, DataBind
         mViewBinding = DataBindingUtil.bind(rootView)
         /** Attach the root view to the Activity */
         setContentView(rootView)
+
+        // Work around the visible jump when windows pop up over the full-screen UI
+        window.decorView.systemUiVisibility = FULL_SCREEN_FLAG
+
         /** Run the abstract hook that initializes Dagger */
         bindDagger()
         /** Bind the Presenter to the View */

+ 5 - 0
common/src/main/code/com/wdkl/ncs/android/lib/utils/AppTool.kt

@@ -268,6 +268,11 @@ object AppTool {
         fun systemMobileSetting(activity: AppCompatActivity) {
             activity.startActivity(Intent(Settings.ACTION_DATA_ROAMING_SETTINGS))
         }
+
+        @JvmStatic
+        fun startNewActivity(activity: AppCompatActivity, intent: Intent) {
+            activity.startActivity(intent)
+        }
     }
 
     /**

+ 23 - 2
common/src/main/code/com/wdkl/ncs/android/lib/utils/ConnectionObserver.kt

@@ -1,6 +1,7 @@
 package com.wdkl.ncs.android.lib.utils
 
 import android.content.Context
+import android.net.ConnectivityManager
 import com.wdkl.ncs.android.lib.base.BaseApplication
 import com.enation.javashop.net.engine.plugin.connection.ConnectionQuality
 import com.enation.javashop.net.engine.plugin.exception.ExceptionHandle
@@ -96,8 +97,14 @@ abstract class BaseObserver<T>(private val context: Context) : Observer<T> {
 
     override fun onSubscribe(disposable: Disposable) {
         if (VoiNetTool.getAPNType(this.context) == VoiNetTool.netType.noneNet) {
-            disposable.dispose()
-            this.onNoneNet()
+            // Wired Ethernet is reported as noneNet by getAPNType, so check for Ethernet explicitly
+            if (checkEthernet(this.context)) {
+                this.onStart()
+                this.attachSubscribe(disposable)
+            } else {
+                disposable.dispose()
+                this.onNoneNet()
+            }
         } else {
             this.onStart()
             this.attachSubscribe(disposable)
@@ -105,6 +112,20 @@ abstract class BaseObserver<T>(private val context: Context) : Observer<T> {
 
     }
 
+    fun checkEthernet(context: Context): Boolean {
+        val connMgr = context
+                .getSystemService(Context.CONNECTIVITY_SERVICE) as ConnectivityManager
+        return connMgr.activeNetworkInfo?.type == ConnectivityManager.TYPE_ETHERNET
+    }
+
     override fun onError(e: Throwable) {
         if (e is HttpException) {
             var errorJSon = e.response().errorBody()!!.getJsonString()

+ 20 - 0
common/src/main/code/com/wdkl/ncs/android/lib/utils/TimeHandle.kt

@@ -1,5 +1,6 @@
 package com.wdkl.ncs.android.lib.utils
 
+import java.text.ParseException
 import java.text.SimpleDateFormat
 import java.util.*
 
@@ -65,6 +66,7 @@ object TimeHandle {
     fun getDateTime(format: String): String {
         val date = Date(System.currentTimeMillis())
         val sdf = SimpleDateFormat(format)
+        sdf.timeZone = TimeZone.getTimeZone("GMT+8")
         return sdf.format(date)
     }
 
@@ -78,4 +80,22 @@ object TimeHandle {
         }
         return "null"
     }
+
+    /**
+     * Convert a formatted time string to a timestamp in milliseconds (GMT+8; returns 0 on parse failure)
+     */
+    fun dateToStamp(s: String, pattern: String): Long {
+        val simpleDateFormat = SimpleDateFormat(pattern)
+        var date: Date? = null
+        try {
+            simpleDateFormat.timeZone = TimeZone.getTimeZone("GMT+8")
+            date = simpleDateFormat.parse(s)
+
+            return date!!.time
+        } catch (e: ParseException) {
+            e.printStackTrace()
+        }
+
+        return 0
+    }
 }

+ 57 - 0
common/src/main/code/com/wdkl/ncs/android/lib/widget/MenuDialog.kt

@@ -0,0 +1,57 @@
+package com.wdkl.ncs.android.lib.widget
+
+import android.app.Dialog
+import android.content.Context
+import android.graphics.Color
+import android.view.LayoutInflater
+import android.view.ViewGroup
+import android.widget.TextView
+import com.enation.javashop.utils.base.tool.ScreenTool
+import com.wdkl.ncs.android.lib.R
+import com.wdkl.ncs.android.lib.utils.reLayout
+
+class MenuDialog {
+
+    private val context : Context
+
+    companion object {
+
+        fun build(context: Context):MenuDialog{
+            return MenuDialog(context)
+        }
+
+    }
+
+    private constructor(context: Context) {
+        this.context = context
+    }
+
+    fun config(voiceCall :(()->Unit)? = null, videoCall :(()->Unit)? = null) : Dialog {
+        val contentView = LayoutInflater.from(context).inflate(R.layout.menu_dialog_lay, null)
+        val dialog = Dialog(context, R.style.Dialog)
+        dialog.setContentView(contentView)
+        contentView.reLayout<ViewGroup.LayoutParams> {
+            params ->
+            params.width = ScreenTool.getScreenWidth(context).toInt()
+            params.height = ScreenTool.getScreenHeight(context).toInt()
+        }
+        val voiceTv = contentView.findViewById<TextView>(R.id.menu_dialog_voice)
+        val videoTv = contentView.findViewById<TextView>(R.id.menu_dialog_video)
+        val cancelTv = contentView.findViewById<TextView>(R.id.menu_dialog_cancel)
+
+        voiceTv.setOnClickListener {
+            dialog.dismiss()
+            voiceCall?.invoke()
+        }
+        videoTv.setOnClickListener {
+            dialog.dismiss()
+            videoCall?.invoke()
+        }
+        cancelTv.setOnClickListener {
+            dialog.dismiss()
+        }
+        dialog.setCanceledOnTouchOutside(false)
+        dialog.setCancelable(false)
+        return dialog
+    }
+}
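Usage sketch: build() constructs the wrapper, config() wires the optional callbacks and returns the Dialog, and the caller shows it. The callback bodies here are placeholders:

import android.content.Context
import com.wdkl.ncs.android.lib.widget.MenuDialog

fun showCallMenu(context: Context) {
    MenuDialog.build(context)
            .config(
                    voiceCall = { /* placeholder: start an audio-only call */ },
                    videoCall = { /* placeholder: start a video call */ })
            .show()  // config() returns a plain android.app.Dialog
}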

+ 108 - 0
common/src/main/res/layout/menu_dialog_lay.xml

@@ -0,0 +1,108 @@
+<?xml version="1.0" encoding="utf-8"?>
+<android.support.constraint.ConstraintLayout
+    xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:app="http://schemas.android.com/apk/res-auto"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent"
+    xmlns:tools="http://schemas.android.com/tools"
+    android:background="#00000000"
+    android:gravity="center">
+
+
+    <View
+        android:id="@+id/vcode_dialog_bg"
+        android:layout_width="0dp"
+        android:layout_height="0dp"
+        app:layout_constraintVertical_bias="0.4"
+        android:background="@drawable/radis"
+        app:layout_constraintBottom_toBottomOf="parent"
+        app:layout_constraintDimensionRatio="h,1.7:1"
+        app:layout_constraintLeft_toLeftOf="parent"
+        app:layout_constraintRight_toRightOf="parent"
+        app:layout_constraintTop_toTopOf="parent"
+        app:layout_constraintWidth_percent="0.8"
+        />
+
+    <TextView
+        android:id="@+id/menu_dialog_cancel"
+        android:layout_width="0dp"
+        android:layout_height="0dp"
+        android:layout_marginBottom="76dp"
+        android:autoSizeMaxTextSize="40sp"
+        android:autoSizeMinTextSize="10sp"
+        android:autoSizeStepGranularity="2sp"
+        android:autoSizeTextType="uniform"
+        android:background="#ff878a8a"
+        android:ellipsize="end"
+        android:gravity="center"
+        android:lines="1"
+        android:padding="7dp"
+        android:text="取消"
+        android:textColor="#ff000000"
+        android:visibility="visible"
+        app:autoSizeMaxTextSize="40sp"
+        app:autoSizeMinTextSize="10sp"
+        app:autoSizeStepGranularity="2sp"
+        app:autoSizeTextType="uniform"
+        app:layout_constraintBottom_toBottomOf="@id/vcode_dialog_bg"
+        app:layout_constraintDimensionRatio="h,6:1"
+        app:layout_constraintLeft_toLeftOf="@id/vcode_dialog_bg"
+        app:layout_constraintRight_toRightOf="@id/vcode_dialog_bg"
+        app:layout_constraintWidth_percent="0.4"
+        tools:ignore="MissingPrefix" />
+
+    <TextView
+        android:id="@+id/menu_dialog_voice"
+        android:layout_width="0dp"
+        android:layout_height="0dp"
+        android:layout_marginBottom="308dp"
+        android:autoSizeMaxTextSize="40sp"
+        android:autoSizeMinTextSize="10sp"
+        android:autoSizeStepGranularity="2sp"
+        android:autoSizeTextType="uniform"
+        android:background="#fff15353"
+        android:ellipsize="end"
+        android:gravity="center"
+        android:lines="1"
+        android:padding="7dp"
+        android:text="语音通话"
+        android:textColor="#ff000000"
+        app:autoSizeMaxTextSize="40sp"
+        app:autoSizeMinTextSize="10sp"
+        app:autoSizeStepGranularity="2sp"
+        app:autoSizeTextType="uniform"
+        app:layout_constraintBottom_toBottomOf="@id/vcode_dialog_bg"
+        app:layout_constraintDimensionRatio="h,6:1"
+        app:layout_constraintLeft_toLeftOf="@id/vcode_dialog_bg"
+        app:layout_constraintRight_toRightOf="@id/vcode_dialog_bg"
+        app:layout_constraintWidth_percent="0.4"
+        tools:ignore="MissingPrefix" />
+
+    <TextView
+        android:id="@+id/menu_dialog_video"
+        android:layout_width="0dp"
+        android:layout_height="0dp"
+        android:layout_marginBottom="192dp"
+        android:autoSizeMaxTextSize="40sp"
+        android:autoSizeMinTextSize="10sp"
+        android:autoSizeStepGranularity="2sp"
+        android:autoSizeTextType="uniform"
+        android:background="#fff15353"
+        android:ellipsize="end"
+        android:gravity="center"
+        android:lines="1"
+        android:padding="7dp"
+        android:text="视频通话"
+        android:textColor="#ff000000"
+        android:visibility="visible"
+        app:autoSizeMaxTextSize="40sp"
+        app:autoSizeMinTextSize="10sp"
+        app:autoSizeStepGranularity="2sp"
+        app:autoSizeTextType="uniform"
+        app:layout_constraintBottom_toBottomOf="@id/vcode_dialog_bg"
+        app:layout_constraintDimensionRatio="h,6:1"
+        app:layout_constraintLeft_toLeftOf="@id/vcode_dialog_bg"
+        app:layout_constraintRight_toRightOf="@id/vcode_dialog_bg"
+        app:layout_constraintWidth_percent="0.4"
+        tools:ignore="MissingPrefix" />
+</android.support.constraint.ConstraintLayout>

+ 5 - 0
extra/build.gradle

@@ -55,6 +55,11 @@ android {
             }
         }
     }
+
+    compileOptions {
+        sourceCompatibility JavaVersion.VERSION_1_8
+        targetCompatibility JavaVersion.VERSION_1_8
+    }
 }
 
 dependencies {

+ 1 - 1
gradle.properties

@@ -19,6 +19,6 @@ systemProp.http.proxyPort=80
 org.gradle.daemon=true
 org.gradle.parallel=true
 org.gradle.configureondemand=true
-android.enableAapt2=false
+#android.enableAapt2=false
 #android.useAndroidX=true
 #android.enableJetifier=true

+ 5 - 0
hello/build.gradle

@@ -55,6 +55,11 @@ android {
             }
         }
     }
+
+    compileOptions {
+        sourceCompatibility JavaVersion.VERSION_1_8
+        targetCompatibility JavaVersion.VERSION_1_8
+    }
 }
 
 dependencies {

+ 5 - 0
home/build.gradle

@@ -59,6 +59,11 @@ android {
             }
         }
     }
+
+    compileOptions {
+        sourceCompatibility JavaVersion.VERSION_1_8
+        targetCompatibility JavaVersion.VERSION_1_8
+    }
 }
 
 dependencies {

+ 1 - 0
libwebrtc/.gitignore

@@ -0,0 +1 @@
+/build

+ 48 - 0
libwebrtc/build.gradle

@@ -0,0 +1,48 @@
+apply plugin: 'com.android.library'
+
+android {
+    compileSdkVersion target_sdk_version
+    buildToolsVersion build_tools_version
+
+    defaultConfig {
+        minSdkVersion min_sdk_version
+        targetSdkVersion target_sdk_version
+        versionCode app_version_code
+        versionName app_version
+
+        ndk {
+            abiFilters 'armeabi-v7a','arm64-v8a'
+        }
+    }
+
+    buildTypes {
+        release {
+            minifyEnabled false
+            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
+        }
+    }
+
+    sourceSets.main {
+        jniLibs.srcDirs = ['libs']
+        java.srcDirs = [
+                "src/main/java/sdk/android/api",
+                "src/main/java/sdk/android/src/java",
+                "src/main/java/rtc_base/java/src",
+                "src/main/java/modules/audio_device/android/java/src",
+        ]
+    }
+
+    compileOptions {
+        sourceCompatibility JavaVersion.VERSION_1_8
+        targetCompatibility JavaVersion.VERSION_1_8
+    }
+
+    lintOptions {
+        checkReleaseBuilds false
+        abortOnError false
+    }
+}
+
+dependencies {
+    implementation fileTree(dir: 'libs', include: ['*.jar'])
+}

BIN
libwebrtc/libs/arm64-v8a/libjingle_peerconnection_so.so


BIN
libwebrtc/libs/armeabi-v7a/libjingle_peerconnection_so.so


+ 21 - 0
libwebrtc/proguard-rules.pro

@@ -0,0 +1,21 @@
+# Add project specific ProGuard rules here.
+# You can control the set of applied configuration files using the
+# proguardFiles setting in build.gradle.
+#
+# For more details, see
+#   http://developer.android.com/guide/developing/tools/proguard.html
+
+# If your project uses WebView with JS, uncomment the following
+# and specify the fully qualified class name to the JavaScript interface
+# class:
+#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
+#   public *;
+#}
+
+# Uncomment this to preserve the line number information for
+# debugging stack traces.
+#-keepattributes SourceFile,LineNumberTable
+
+# If you keep the line number information, uncomment this to
+# hide the original source file name.
+#-renamesourcefileattribute SourceFile

+ 2 - 0
libwebrtc/src/main/AndroidManifest.xml

@@ -0,0 +1,2 @@
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="org.webrtc" />

+ 51 - 0
libwebrtc/src/main/java/modules/audio_device/android/java/src/org/webrtc/voiceengine/BuildInfo.java

@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.voiceengine;
+
+import android.os.Build;
+
+public final class BuildInfo {
+  public static String getDevice() {
+    return Build.DEVICE;
+  }
+
+  public static String getDeviceModel() {
+    return Build.MODEL;
+  }
+
+  public static String getProduct() {
+    return Build.PRODUCT;
+  }
+
+  public static String getBrand() {
+    return Build.BRAND;
+  }
+
+  public static String getDeviceManufacturer() {
+    return Build.MANUFACTURER;
+  }
+
+  public static String getAndroidBuildId() {
+    return Build.ID;
+  }
+
+  public static String getBuildType() {
+    return Build.TYPE;
+  }
+
+  public static String getBuildRelease() {
+    return Build.VERSION.RELEASE;
+  }
+
+  public static int getSdkVersion() {
+    return Build.VERSION.SDK_INT;
+  }
+}

+ 324 - 0
libwebrtc/src/main/java/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java

@@ -0,0 +1,324 @@
+/*
+ *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.voiceengine;
+
+import android.media.audiofx.AcousticEchoCanceler;
+import android.media.audiofx.AudioEffect;
+import android.media.audiofx.AudioEffect.Descriptor;
+import android.media.audiofx.NoiseSuppressor;
+import android.os.Build;
+
+import org.webrtc.Logging;
+
+import java.util.List;
+import java.util.UUID;
+
+// This class wraps control of two platform effects: AcousticEchoCanceler
+// (AEC) and NoiseSuppressor (NS).
+// Calling enable() will activate all effects that are
+// supported by the device if the corresponding |shouldEnableXXX| member is set.
+public class WebRtcAudioEffects {
+  private static final boolean DEBUG = false;
+
+  private static final String TAG = "WebRtcAudioEffects";
+
+  // UUIDs for Software Audio Effects that we want to avoid using.
+  // The implementor field will be set to "The Android Open Source Project".
+  private static final UUID AOSP_ACOUSTIC_ECHO_CANCELER =
+      UUID.fromString("bb392ec0-8d4d-11e0-a896-0002a5d5c51b");
+  private static final UUID AOSP_NOISE_SUPPRESSOR =
+      UUID.fromString("c06c8400-8e06-11e0-9cb6-0002a5d5c51b");
+
+  // Contains the available effect descriptors returned from the
+  // AudioEffect.queryEffects() call. This result is cached to avoid doing the
+  // slow OS call multiple times.
+  private static Descriptor[] cachedEffects;
+
+  // Contains the audio effect objects. Created in enable() and destroyed
+  // in release().
+  private AcousticEchoCanceler aec;
+  private NoiseSuppressor ns;
+
+  // Affects the final state given to the setEnabled() method on each effect.
+  // The default state is set to "disabled" but each effect can also be enabled
+  // by calling setAEC() and setNS().
+  // To enable an effect, both the shouldEnableXXX member and the static
+  // canUseXXX() must be true.
+  private boolean shouldEnableAec;
+  private boolean shouldEnableNs;
+
+  // Checks if the device implements Acoustic Echo Cancellation (AEC).
+  // Returns true if the device implements AEC, false otherwise.
+  public static boolean isAcousticEchoCancelerSupported() {
+    // Note: we're using isAcousticEchoCancelerEffectAvailable() instead of
+    // AcousticEchoCanceler.isAvailable() to avoid the expensive getEffects()
+    // OS API call.
+    return isAcousticEchoCancelerEffectAvailable();
+  }
+
+  // Checks if the device implements Noise Suppression (NS).
+  // Returns true if the device implements NS, false otherwise.
+  public static boolean isNoiseSuppressorSupported() {
+    // Note: we're using isNoiseSuppressorEffectAvailable() instead of
+    // NoiseSuppressor.isAvailable() to avoid the expensive getEffects()
+    // OS API call.
+    return isNoiseSuppressorEffectAvailable();
+  }
+
+  // Returns true if the device is blacklisted for HW AEC usage.
+  public static boolean isAcousticEchoCancelerBlacklisted() {
+    List<String> blackListedModels = WebRtcAudioUtils.getBlackListedModelsForAecUsage();
+    boolean isBlacklisted = blackListedModels.contains(Build.MODEL);
+    if (isBlacklisted) {
+      Logging.w(TAG, Build.MODEL + " is blacklisted for HW AEC usage!");
+    }
+    return isBlacklisted;
+  }
+
+  // Returns true if the device is blacklisted for HW NS usage.
+  public static boolean isNoiseSuppressorBlacklisted() {
+    List<String> blackListedModels = WebRtcAudioUtils.getBlackListedModelsForNsUsage();
+    boolean isBlacklisted = blackListedModels.contains(Build.MODEL);
+    if (isBlacklisted) {
+      Logging.w(TAG, Build.MODEL + " is blacklisted for HW NS usage!");
+    }
+    return isBlacklisted;
+  }
+
+  // Returns true if the platform AEC should be excluded based on its UUID.
+  // AudioEffect.queryEffects() can throw IllegalStateException.
+  private static boolean isAcousticEchoCancelerExcludedByUUID() {
+    if (Build.VERSION.SDK_INT < 18)
+      return false;
+    for (Descriptor d : getAvailableEffects()) {
+      if (d.type.equals(AudioEffect.EFFECT_TYPE_AEC)
+          && d.uuid.equals(AOSP_ACOUSTIC_ECHO_CANCELER)) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  // Returns true if the platform NS should be excluded based on its UUID.
+  // AudioEffect.queryEffects() can throw IllegalStateException.
+  private static boolean isNoiseSuppressorExcludedByUUID() {
+    if (Build.VERSION.SDK_INT < 18)
+      return false;
+    for (Descriptor d : getAvailableEffects()) {
+      if (d.type.equals(AudioEffect.EFFECT_TYPE_NS) && d.uuid.equals(AOSP_NOISE_SUPPRESSOR)) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  // Returns true if the device supports Acoustic Echo Cancellation (AEC).
+  private static boolean isAcousticEchoCancelerEffectAvailable() {
+    if (Build.VERSION.SDK_INT < 18)
+      return false;
+    return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_AEC);
+  }
+
+  // Returns true if the device supports Noise Suppression (NS).
+  private static boolean isNoiseSuppressorEffectAvailable() {
+    if (Build.VERSION.SDK_INT < 18)
+      return false;
+    return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_NS);
+  }
+
+  // Returns true if all conditions for supporting the HW AEC are fulfilled.
+  // It will not be possible to enable the HW AEC if this method returns false.
+  public static boolean canUseAcousticEchoCanceler() {
+    boolean canUseAcousticEchoCanceler = isAcousticEchoCancelerSupported()
+        && !WebRtcAudioUtils.useWebRtcBasedAcousticEchoCanceler()
+        && !isAcousticEchoCancelerBlacklisted() && !isAcousticEchoCancelerExcludedByUUID();
+    Logging.d(TAG, "canUseAcousticEchoCanceler: " + canUseAcousticEchoCanceler);
+    return canUseAcousticEchoCanceler;
+  }
+
+  // Returns true if all conditions for supporting the HW NS are fulfilled.
+  // It will not be possible to enable the HW NS if this method returns false.
+  public static boolean canUseNoiseSuppressor() {
+    boolean canUseNoiseSuppressor = isNoiseSuppressorSupported()
+        && !WebRtcAudioUtils.useWebRtcBasedNoiseSuppressor() && !isNoiseSuppressorBlacklisted()
+        && !isNoiseSuppressorExcludedByUUID();
+    Logging.d(TAG, "canUseNoiseSuppressor: " + canUseNoiseSuppressor);
+    return canUseNoiseSuppressor;
+  }
+
+  public static WebRtcAudioEffects create() {
+    return new WebRtcAudioEffects();
+  }
+
+  private WebRtcAudioEffects() {
+    Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
+  }
+
+  // Call this method to enable or disable the platform AEC. It modifies
+  // |shouldEnableAec| which is used in enable() where the actual state
+  // of the AEC effect is modified. Returns true if HW AEC is supported and
+  // false otherwise.
+  public boolean setAEC(boolean enable) {
+    Logging.d(TAG, "setAEC(" + enable + ")");
+    if (!canUseAcousticEchoCanceler()) {
+      Logging.w(TAG, "Platform AEC is not supported");
+      shouldEnableAec = false;
+      return false;
+    }
+    if (aec != null && (enable != shouldEnableAec)) {
+      Logging.e(TAG, "Platform AEC state can't be modified while recording");
+      return false;
+    }
+    shouldEnableAec = enable;
+    return true;
+  }
+
+  // Call this method to enable or disable the platform NS. It modifies
+  // |shouldEnableNs| which is used in enable() where the actual state
+  // of the NS effect is modified. Returns true if HW NS is supported and
+  // false otherwise.
+  public boolean setNS(boolean enable) {
+    Logging.d(TAG, "setNS(" + enable + ")");
+    if (!canUseNoiseSuppressor()) {
+      Logging.w(TAG, "Platform NS is not supported");
+      shouldEnableNs = false;
+      return false;
+    }
+    if (ns != null && (enable != shouldEnableNs)) {
+      Logging.e(TAG, "Platform NS state can't be modified while recording");
+      return false;
+    }
+    shouldEnableNs = enable;
+    return true;
+  }
+
+  public void enable(int audioSession) {
+    Logging.d(TAG, "enable(audioSession=" + audioSession + ")");
+    assertTrue(aec == null);
+    assertTrue(ns == null);
+
+    if (DEBUG) {
+      // Add logging of supported effects but filter out "VoIP effects", i.e.,
+      // AEC, AGC and NS. Avoid calling AudioEffect.queryEffects() unless the
+      // DEBUG flag is set since we have seen crashes in this API.
+      for (Descriptor d : AudioEffect.queryEffects()) {
+        if (effectTypeIsVoIP(d.type)) {
+          Logging.d(TAG, "name: " + d.name + ", "
+                  + "mode: " + d.connectMode + ", "
+                  + "implementor: " + d.implementor + ", "
+                  + "UUID: " + d.uuid);
+        }
+      }
+    }
+
+    if (isAcousticEchoCancelerSupported()) {
+      // Create an AcousticEchoCanceler and attach it to the AudioRecord on
+      // the specified audio session.
+      aec = AcousticEchoCanceler.create(audioSession);
+      if (aec != null) {
+        boolean enabled = aec.getEnabled();
+        boolean enable = shouldEnableAec && canUseAcousticEchoCanceler();
+        if (aec.setEnabled(enable) != AudioEffect.SUCCESS) {
+          Logging.e(TAG, "Failed to set the AcousticEchoCanceler state");
+        }
+        Logging.d(TAG, "AcousticEchoCanceler: was " + (enabled ? "enabled" : "disabled")
+                + ", enable: " + enable + ", is now: "
+                + (aec.getEnabled() ? "enabled" : "disabled"));
+      } else {
+        Logging.e(TAG, "Failed to create the AcousticEchoCanceler instance");
+      }
+    }
+
+    if (isNoiseSuppressorSupported()) {
+      // Create a NoiseSuppressor and attach it to the AudioRecord on the
+      // specified audio session.
+      ns = NoiseSuppressor.create(audioSession);
+      if (ns != null) {
+        boolean enabled = ns.getEnabled();
+        boolean enable = shouldEnableNs && canUseNoiseSuppressor();
+        if (ns.setEnabled(enable) != AudioEffect.SUCCESS) {
+          Logging.e(TAG, "Failed to set the NoiseSuppressor state");
+        }
+        Logging.d(TAG, "NoiseSuppressor: was " + (enabled ? "enabled" : "disabled") + ", enable: "
+                + enable + ", is now: " + (ns.getEnabled() ? "enabled" : "disabled"));
+      } else {
+        Logging.e(TAG, "Failed to create the NoiseSuppressor instance");
+      }
+    }
+  }
+
+  // Releases all native audio effect resources. It is a good practice to
+  // release the effect engine when not in use as control can be returned
+  // to other applications or the native resources released.
+  public void release() {
+    Logging.d(TAG, "release");
+    if (aec != null) {
+      aec.release();
+      aec = null;
+    }
+    if (ns != null) {
+      ns.release();
+      ns = null;
+    }
+  }
+
+  // Returns true for effect types in |type| that are of "VoIP" types:
+  // Acoustic Echo Canceler (AEC) or Automatic Gain Control (AGC) or
+  // Noise Suppressor (NS). Note that, an extra check for support is needed
+  // in each comparison since some devices includes effects in the
+  // AudioEffect.Descriptor array that are actually not available on the device.
+  // As an example: Samsung Galaxy S6 includes an AGC in the descriptor but
+  // AutomaticGainControl.isAvailable() returns false.
+  private boolean effectTypeIsVoIP(UUID type) {
+    if (Build.VERSION.SDK_INT < 18)
+      return false;
+
+    return (AudioEffect.EFFECT_TYPE_AEC.equals(type) && isAcousticEchoCancelerSupported())
+        || (AudioEffect.EFFECT_TYPE_NS.equals(type) && isNoiseSuppressorSupported());
+  }
+
+  // Helper method which throws an exception when an assertion has failed.
+  private static void assertTrue(boolean condition) {
+    if (!condition) {
+      throw new AssertionError("Expected condition to be true");
+    }
+  }
+
+  // Returns the cached copy of the audio effects array, if available, or
+  // queries the operating system for the list of effects.
+  private static Descriptor[] getAvailableEffects() {
+    if (cachedEffects != null) {
+      return cachedEffects;
+    }
+    // The caching is best effort only - if this method is called from several
+    // threads in parallel, they may end up doing the underlying OS call
+    // multiple times. It's normally only called on one thread so there's no
+    // real need to optimize for the multiple threads case.
+    cachedEffects = AudioEffect.queryEffects();
+    return cachedEffects;
+  }
+
+  // Returns true if an effect of the specified type is available. Functionally
+  // equivalent to (NoiseSuppressor|AutomaticGainControl|...).isAvailable(), but
+  // faster as it avoids the expensive OS call to enumerate effects.
+  private static boolean isEffectTypeAvailable(UUID effectType) {
+    Descriptor[] effects = getAvailableEffects();
+    if (effects == null) {
+      return false;
+    }
+    for (Descriptor d : effects) {
+      if (d.type.equals(effectType)) {
+        return true;
+      }
+    }
+    return false;
+  }
+}
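Reconstructed from the comments above, the intended call order is create() -> setAEC()/setNS() -> enable(sessionId) -> release(). A sketch only; the session id would come from the recording side (e.g. AudioRecord.getAudioSessionId()), and this is not WebRTC's internal wiring:

import org.webrtc.voiceengine.WebRtcAudioEffects

// Request the hardware effects before recording starts on the session.
fun setUpEffects(audioSessionId: Int): WebRtcAudioEffects {
    val effects = WebRtcAudioEffects.create()
    effects.setAEC(true)            // honored only if canUseAcousticEchoCanceler()
    effects.setNS(true)             // honored only if canUseNoiseSuppressor()
    effects.enable(audioSessionId)  // attaches AEC/NS to the audio session
    return effects                  // call release() when recording stops
}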

+ 378 - 0
libwebrtc/src/main/java/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java

@@ -0,0 +1,378 @@
+/*
+ *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.voiceengine;
+
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioRecord;
+import android.media.AudioTrack;
+import android.os.Build;
+
+import org.webrtc.ContextUtils;
+import org.webrtc.Logging;
+
+import java.util.Timer;
+import java.util.TimerTask;
+
+// WebRtcAudioManager handles tasks that use android.media.AudioManager.
+// At construction, storeAudioParameters() is called and it retrieves
+// fundamental audio parameters like native sample rate and number of channels.
+// The result is then provided to the caller by nativeCacheAudioParameters().
+// It is also possible to call init() to set up the audio environment for best
+// possible "VoIP performance". All settings done in init() are reverted by
+// dispose(). This class can also be used without calling init() if the user
+// prefers to set up the audio environment separately. However, it is
+// recommended to always use AudioManager.MODE_IN_COMMUNICATION.
+public class WebRtcAudioManager {
+  private static final boolean DEBUG = false;
+
+  private static final String TAG = "WebRtcAudioManager";
+
+  // TODO(bugs.webrtc.org/8914): disabled by default until AAudio support has
+  // been completed. Goal is to always return false on Android O MR1 and higher.
+  private static final boolean blacklistDeviceForAAudioUsage = true;
+
+  // Use mono as default for both audio directions.
+  private static boolean useStereoOutput;
+  private static boolean useStereoInput;
+
+  private static boolean blacklistDeviceForOpenSLESUsage;
+  private static boolean blacklistDeviceForOpenSLESUsageIsOverridden;
+
+  // Call this method to override the default list of blacklisted devices
+  // specified in WebRtcAudioUtils.BLACKLISTED_OPEN_SL_ES_MODELS.
+  // Allows an app to take control over which devices to exclude from using
+  // the OpenSL ES audio output path
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized void setBlacklistDeviceForOpenSLESUsage(boolean enable) {
+    blacklistDeviceForOpenSLESUsageIsOverridden = true;
+    blacklistDeviceForOpenSLESUsage = enable;
+  }
+
+  // Call these methods to override the default mono audio modes for the specified direction(s)
+  // (input and/or output).
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized void setStereoOutput(boolean enable) {
+    Logging.w(TAG, "Overriding default output behavior: setStereoOutput(" + enable + ')');
+    useStereoOutput = enable;
+  }
+
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized void setStereoInput(boolean enable) {
+    Logging.w(TAG, "Overriding default input behavior: setStereoInput(" + enable + ')');
+    useStereoInput = enable;
+  }
+
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized boolean getStereoOutput() {
+    return useStereoOutput;
+  }
+
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized boolean getStereoInput() {
+    return useStereoInput;
+  }
+
+  // Default audio data format is PCM 16 bit per sample.
+  // Guaranteed to be supported by all devices.
+  private static final int BITS_PER_SAMPLE = 16;
+
+  private static final int DEFAULT_FRAME_PER_BUFFER = 256;
+
+  // Private utility class that periodically checks and logs the volume level
+  // of the audio stream that is currently controlled by the volume control.
+  // A timer triggers logs once every 30 seconds and the timer's associated
+  // thread is named "WebRtcVolumeLevelLoggerThread".
+  private static class VolumeLogger {
+    private static final String THREAD_NAME = "WebRtcVolumeLevelLoggerThread";
+    private static final int TIMER_PERIOD_IN_SECONDS = 30;
+
+    private final AudioManager audioManager;
+    private Timer timer;
+
+    public VolumeLogger(AudioManager audioManager) {
+      this.audioManager = audioManager;
+    }
+
+    public void start() {
+      timer = new Timer(THREAD_NAME);
+      timer.schedule(new LogVolumeTask(audioManager.getStreamMaxVolume(AudioManager.STREAM_RING),
+                         audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL)),
+          0, TIMER_PERIOD_IN_SECONDS * 1000);
+    }
+
+    private class LogVolumeTask extends TimerTask {
+      private final int maxRingVolume;
+      private final int maxVoiceCallVolume;
+
+      LogVolumeTask(int maxRingVolume, int maxVoiceCallVolume) {
+        this.maxRingVolume = maxRingVolume;
+        this.maxVoiceCallVolume = maxVoiceCallVolume;
+      }
+
+      @Override
+      public void run() {
+        final int mode = audioManager.getMode();
+        if (mode == AudioManager.MODE_RINGTONE) {
+          Logging.d(TAG, "STREAM_RING stream volume: "
+                  + audioManager.getStreamVolume(AudioManager.STREAM_RING) + " (max="
+                  + maxRingVolume + ")");
+        } else if (mode == AudioManager.MODE_IN_COMMUNICATION) {
+          Logging.d(TAG, "VOICE_CALL stream volume: "
+                  + audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL) + " (max="
+                  + maxVoiceCallVolume + ")");
+        }
+      }
+    }
+
+    private void stop() {
+      if (timer != null) {
+        timer.cancel();
+        timer = null;
+      }
+    }
+  }
+
+  private final long nativeAudioManager;
+  private final AudioManager audioManager;
+
+  private boolean initialized;
+  private int nativeSampleRate;
+  private int nativeChannels;
+
+  private boolean hardwareAEC;
+  private boolean hardwareAGC;
+  private boolean hardwareNS;
+  private boolean lowLatencyOutput;
+  private boolean lowLatencyInput;
+  private boolean proAudio;
+  private boolean aAudio;
+  private int sampleRate;
+  private int outputChannels;
+  private int inputChannels;
+  private int outputBufferSize;
+  private int inputBufferSize;
+
+  private final VolumeLogger volumeLogger;
+
+  WebRtcAudioManager(long nativeAudioManager) {
+    Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
+    this.nativeAudioManager = nativeAudioManager;
+    audioManager =
+        (AudioManager) ContextUtils.getApplicationContext().getSystemService(Context.AUDIO_SERVICE);
+    if (DEBUG) {
+      WebRtcAudioUtils.logDeviceInfo(TAG);
+    }
+    volumeLogger = new VolumeLogger(audioManager);
+    storeAudioParameters();
+    nativeCacheAudioParameters(sampleRate, outputChannels, inputChannels, hardwareAEC, hardwareAGC,
+        hardwareNS, lowLatencyOutput, lowLatencyInput, proAudio, aAudio, outputBufferSize,
+        inputBufferSize, nativeAudioManager);
+    WebRtcAudioUtils.logAudioState(TAG);
+  }
+
+  private boolean init() {
+    Logging.d(TAG, "init" + WebRtcAudioUtils.getThreadInfo());
+    if (initialized) {
+      return true;
+    }
+    Logging.d(TAG, "audio mode is: "
+        + WebRtcAudioUtils.modeToString(audioManager.getMode()));
+    initialized = true;
+    volumeLogger.start();
+    return true;
+  }
+
+  private void dispose() {
+    Logging.d(TAG, "dispose" + WebRtcAudioUtils.getThreadInfo());
+    if (!initialized) {
+      return;
+    }
+    volumeLogger.stop();
+  }
+
+  private boolean isCommunicationModeEnabled() {
+    return (audioManager.getMode() == AudioManager.MODE_IN_COMMUNICATION);
+  }
+
+  private boolean isDeviceBlacklistedForOpenSLESUsage() {
+    boolean blacklisted = blacklistDeviceForOpenSLESUsageIsOverridden
+        ? blacklistDeviceForOpenSLESUsage
+        : WebRtcAudioUtils.deviceIsBlacklistedForOpenSLESUsage();
+    if (blacklisted) {
+      Logging.d(TAG, Build.MODEL + " is blacklisted for OpenSL ES usage!");
+    }
+    return blacklisted;
+  }
+
+  private void storeAudioParameters() {
+    outputChannels = getStereoOutput() ? 2 : 1;
+    inputChannels = getStereoInput() ? 2 : 1;
+    sampleRate = getNativeOutputSampleRate();
+    hardwareAEC = isAcousticEchoCancelerSupported();
+    // TODO(henrika): use of hardware AGC is no longer supported. Currently
+    // hardcoded to false. To be removed.
+    hardwareAGC = false;
+    hardwareNS = isNoiseSuppressorSupported();
+    lowLatencyOutput = isLowLatencyOutputSupported();
+    lowLatencyInput = isLowLatencyInputSupported();
+    proAudio = isProAudioSupported();
+    aAudio = isAAudioSupported();
+    outputBufferSize = lowLatencyOutput ? getLowLatencyOutputFramesPerBuffer()
+                                        : getMinOutputFrameSize(sampleRate, outputChannels);
+    inputBufferSize = lowLatencyInput ? getLowLatencyInputFramesPerBuffer()
+                                      : getMinInputFrameSize(sampleRate, inputChannels);
+  }
+
+  // Gets the current earpiece state.
+  private boolean hasEarpiece() {
+    return ContextUtils.getApplicationContext().getPackageManager().hasSystemFeature(
+        PackageManager.FEATURE_TELEPHONY);
+  }
+
+  // Returns true if low-latency audio output is supported.
+  private boolean isLowLatencyOutputSupported() {
+    return ContextUtils.getApplicationContext().getPackageManager().hasSystemFeature(
+        PackageManager.FEATURE_AUDIO_LOW_LATENCY);
+  }
+
+  // Returns true if low-latency audio input is supported.
+  // TODO(henrika): remove the hardcoded false return value when OpenSL ES
+  // input performance has been evaluated and tested more.
+  public boolean isLowLatencyInputSupported() {
+    // TODO(henrika): investigate if some sort of device list is needed here
+    // as well. The NDK doc states that: "As of API level 21, lower latency
+    // audio input is supported on select devices. To take advantage of this
+    // feature, first confirm that lower latency output is available".
+    return Build.VERSION.SDK_INT >= 21 && isLowLatencyOutputSupported();
+  }
+
+  // Returns true if the device has professional audio level of functionality
+  // and therefore supports the lowest possible round-trip latency.
+  private boolean isProAudioSupported() {
+    return Build.VERSION.SDK_INT >= 23
+        && ContextUtils.getApplicationContext().getPackageManager().hasSystemFeature(
+               PackageManager.FEATURE_AUDIO_PRO);
+  }
+
+  // AAudio is supported on Android Oreo MR1 (API 27) and higher.
+  // TODO(bugs.webrtc.org/8914): currently disabled by default.
+  private boolean isAAudioSupported() {
+    if (blacklistDeviceForAAudioUsage) {
+      Logging.w(TAG, "AAudio support is currently disabled on all devices!");
+    }
+    return !blacklistDeviceForAAudioUsage && Build.VERSION.SDK_INT >= 27;
+  }
+
+  // Returns the native output sample rate for this device's output stream.
+  private int getNativeOutputSampleRate() {
+    // Override this if we're running on an old emulator image which only
+    // supports 8 kHz and doesn't support PROPERTY_OUTPUT_SAMPLE_RATE.
+    if (WebRtcAudioUtils.runningOnEmulator()) {
+      Logging.d(TAG, "Running emulator, overriding sample rate to 8 kHz.");
+      return 8000;
+    }
+    // Default can be overridden by WebRtcAudioUtils.setDefaultSampleRateHz().
+    // If so, use that value and return here.
+    if (WebRtcAudioUtils.isDefaultSampleRateOverridden()) {
+      Logging.d(TAG, "Default sample rate is overriden to "
+              + WebRtcAudioUtils.getDefaultSampleRateHz() + " Hz");
+      return WebRtcAudioUtils.getDefaultSampleRateHz();
+    }
+    // No overrides available. Deliver best possible estimate based on default
+    // Android AudioManager APIs.
+    final int sampleRateHz = getSampleRateForApiLevel();
+    Logging.d(TAG, "Sample rate is set to " + sampleRateHz + " Hz");
+    return sampleRateHz;
+  }
+
+  private int getSampleRateForApiLevel() {
+    if (Build.VERSION.SDK_INT < 17) {
+      return WebRtcAudioUtils.getDefaultSampleRateHz();
+    }
+    String sampleRateString = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
+    return (sampleRateString == null) ? WebRtcAudioUtils.getDefaultSampleRateHz()
+                                      : Integer.parseInt(sampleRateString);
+  }
+
+  // Returns the native output buffer size for low-latency output streams.
+  private int getLowLatencyOutputFramesPerBuffer() {
+    assertTrue(isLowLatencyOutputSupported());
+    if (Build.VERSION.SDK_INT < 17) {
+      return DEFAULT_FRAME_PER_BUFFER;
+    }
+    String framesPerBuffer =
+        audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
+    return framesPerBuffer == null ? DEFAULT_FRAME_PER_BUFFER : Integer.parseInt(framesPerBuffer);
+  }
+
+  // Returns true if the device supports an audio effect (AEC or NS).
+  // Four conditions must be fulfilled if functions are to return true:
+  // 1) the platform must support the built-in (HW) effect,
+  // 2) explicit use (override) of a WebRTC based version must not be set,
+  // 3) the device must not be blacklisted for use of the effect, and
+  // 4) the UUID of the effect must be approved (some UUIDs can be excluded).
+  private static boolean isAcousticEchoCancelerSupported() {
+    return WebRtcAudioEffects.canUseAcousticEchoCanceler();
+  }
+  private static boolean isNoiseSuppressorSupported() {
+    return WebRtcAudioEffects.canUseNoiseSuppressor();
+  }
+
+  // Returns the minimum output buffer size for Java based audio (AudioTrack).
+  // This size can also be used for OpenSL ES implementations on devices that
+  // lack support for low-latency output.
+  private static int getMinOutputFrameSize(int sampleRateInHz, int numChannels) {
+    final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8);
+    final int channelConfig =
+        (numChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO);
+    return AudioTrack.getMinBufferSize(
+               sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT)
+        / bytesPerFrame;
+  }
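+
+  // Worked example (illustrative numbers only): for 48000 Hz stereo PCM16,
+  // bytesPerFrame = 2 * (16 / 8) = 4 bytes; if AudioTrack.getMinBufferSize()
+  // were to return 15360 bytes, the method would report 15360 / 4 = 3840
+  // frames.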
+
+  // Returns the native input buffer size for input streams.
+  private int getLowLatencyInputFramesPerBuffer() {
+    assertTrue(isLowLatencyInputSupported());
+    return getLowLatencyOutputFramesPerBuffer();
+  }
+
+  // Returns the minimum input buffer size for Java based audio (AudioRecord).
+  // This size can also be used for OpenSL ES implementations on devices that
+  // lack support for low-latency input.
+  private static int getMinInputFrameSize(int sampleRateInHz, int numChannels) {
+    final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8);
+    final int channelConfig =
+        (numChannels == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO);
+    return AudioRecord.getMinBufferSize(
+               sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT)
+        / bytesPerFrame;
+  }
+
+  // Helper method which throws an exception when an assertion has failed.
+  private static void assertTrue(boolean condition) {
+    if (!condition) {
+      throw new AssertionError("Expected condition to be true");
+    }
+  }
+
+  private native void nativeCacheAudioParameters(int sampleRate, int outputChannels,
+      int inputChannels, boolean hardwareAEC, boolean hardwareAGC, boolean hardwareNS,
+      boolean lowLatencyOutput, boolean lowLatencyInput, boolean proAudio, boolean aAudio,
+      int outputBufferSize, int inputBufferSize, long nativeAudioManager);
+}

+ 409 - 0
libwebrtc/src/main/java/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java

@@ -0,0 +1,409 @@
+/*
+ *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.voiceengine;
+
+import android.media.AudioFormat;
+import android.media.AudioRecord;
+import android.media.MediaRecorder.AudioSource;
+import android.os.Build;
+import android.os.Process;
+
+import java.lang.System;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.concurrent.TimeUnit;
+import org.webrtc.Logging;
+import org.webrtc.ThreadUtils;
+
+public class WebRtcAudioRecord {
+  private static final boolean DEBUG = false;
+
+  private static final String TAG = "WebRtcAudioRecord";
+
+  // Default audio data format is PCM 16 bit per sample.
+  // Guaranteed to be supported by all devices.
+  private static final int BITS_PER_SAMPLE = 16;
+
+  // Requested size of each recorded buffer provided to the client.
+  private static final int CALLBACK_BUFFER_SIZE_MS = 10;
+
+  // Average number of callbacks per second.
+  private static final int BUFFERS_PER_SECOND = 1000 / CALLBACK_BUFFER_SIZE_MS;
+
+  // We ask for a native buffer size of BUFFER_SIZE_FACTOR * (minimum required
+  // buffer size). The extra space is allocated to guard against glitches under
+  // high load.
+  private static final int BUFFER_SIZE_FACTOR = 2;
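+
+  // Worked example (illustrative numbers only): at 16000 Hz mono PCM16, each
+  // 10 ms callback carries 16000 / BUFFERS_PER_SECOND = 160 frames, i.e.
+  // 160 * 2 = 320 bytes, while the AudioRecord buffer itself is allocated at
+  // BUFFER_SIZE_FACTOR times the minimum size reported by the platform.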
+
+  // The AudioRecordJavaThread is allowed to wait for successful call to join()
+  // but the wait times out afther this amount of time.
+  private static final long AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS = 2000;
+
+  private static final int DEFAULT_AUDIO_SOURCE = getDefaultAudioSource();
+  private static int audioSource = DEFAULT_AUDIO_SOURCE;
+
+  private final long nativeAudioRecord;
+
+  private WebRtcAudioEffects effects;
+
+  private ByteBuffer byteBuffer;
+
+  private AudioRecord audioRecord;
+  private AudioRecordThread audioThread;
+
+  private static volatile boolean microphoneMute;
+  private byte[] emptyBytes;
+
+  // Audio recording error handler functions.
+  public enum AudioRecordStartErrorCode {
+    AUDIO_RECORD_START_EXCEPTION,
+    AUDIO_RECORD_START_STATE_MISMATCH,
+  }
+
+  public static interface WebRtcAudioRecordErrorCallback {
+    void onWebRtcAudioRecordInitError(String errorMessage);
+    void onWebRtcAudioRecordStartError(AudioRecordStartErrorCode errorCode, String errorMessage);
+    void onWebRtcAudioRecordError(String errorMessage);
+  }
+
+  private static WebRtcAudioRecordErrorCallback errorCallback;
+
+  public static void setErrorCallback(WebRtcAudioRecordErrorCallback errorCallback) {
+    Logging.d(TAG, "Set error callback");
+    WebRtcAudioRecord.errorCallback = errorCallback;
+  }
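+
+  // Example (hypothetical client code; Log is android.util.Log): forward
+  // recording errors to the app's own logging for later diagnosis.
+  //
+  //   WebRtcAudioRecord.setErrorCallback(new WebRtcAudioRecordErrorCallback() {
+  //     @Override public void onWebRtcAudioRecordInitError(String msg) { Log.e("App", msg); }
+  //     @Override public void onWebRtcAudioRecordStartError(
+  //         AudioRecordStartErrorCode code, String msg) { Log.e("App", code + ": " + msg); }
+  //     @Override public void onWebRtcAudioRecordError(String msg) { Log.e("App", msg); }
+  //   });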
+
+  /**
+   * Contains audio sample information. Object is passed using {@link
+   * WebRtcAudioRecord.WebRtcAudioRecordSamplesReadyCallback}
+   */
+  public static class AudioSamples {
+    /** See {@link AudioRecord#getAudioFormat()} */
+    private final int audioFormat;
+    /** See {@link AudioRecord#getChannelCount()} */
+    private final int channelCount;
+    /** See {@link AudioRecord#getSampleRate()} */
+    private final int sampleRate;
+
+    private final byte[] data;
+
+    private AudioSamples(AudioRecord audioRecord, byte[] data) {
+      this.audioFormat = audioRecord.getAudioFormat();
+      this.channelCount = audioRecord.getChannelCount();
+      this.sampleRate = audioRecord.getSampleRate();
+      this.data = data;
+    }
+
+    public int getAudioFormat() {
+      return audioFormat;
+    }
+
+    public int getChannelCount() {
+      return channelCount;
+    }
+
+    public int getSampleRate() {
+      return sampleRate;
+    }
+
+    public byte[] getData() {
+      return data;
+    }
+  }
+
+  /** Called when new audio samples are ready. This should only be set for debug purposes */
+  public static interface WebRtcAudioRecordSamplesReadyCallback {
+    void onWebRtcAudioRecordSamplesReady(AudioSamples samples);
+  }
+
+  private static WebRtcAudioRecordSamplesReadyCallback audioSamplesReadyCallback;
+
+  public static void setOnAudioSamplesReady(WebRtcAudioRecordSamplesReadyCallback callback) {
+    audioSamplesReadyCallback = callback;
+  }
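+
+  // Example (hypothetical debug-only client code): inspect raw microphone
+  // data as it arrives. The callback runs on the high-priority recording
+  // thread, so it must return quickly.
+  //
+  //   WebRtcAudioRecord.setOnAudioSamplesReady(samples ->
+  //       Log.d("App", "got " + samples.getData().length + " bytes @ "
+  //           + samples.getSampleRate() + " Hz"));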
+
+  /**
+   * Audio thread which keeps calling AudioRecord.read() waiting for audio
+   * to be recorded. Feeds recorded data to the native counterpart as a
+   * periodic sequence of callbacks using DataIsRecorded().
+   * This thread uses a Process.THREAD_PRIORITY_URGENT_AUDIO priority.
+   */
+  private class AudioRecordThread extends Thread {
+    private volatile boolean keepAlive = true;
+
+    public AudioRecordThread(String name) {
+      super(name);
+    }
+
+    // TODO(titovartem) make correct fix during webrtc:9175
+    @SuppressWarnings("ByteBufferBackingArray")
+    @Override
+    public void run() {
+      Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);
+      Logging.d(TAG, "AudioRecordThread" + WebRtcAudioUtils.getThreadInfo());
+      assertTrue(audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING);
+
+      long lastTime = System.nanoTime();
+      while (keepAlive) {
+        int bytesRead = audioRecord.read(byteBuffer, byteBuffer.capacity());
+        if (bytesRead == byteBuffer.capacity()) {
+          if (microphoneMute) {
+            byteBuffer.clear();
+            byteBuffer.put(emptyBytes);
+          }
+          // It's possible we've been shut down during the read, and stopRecording() tried and
+          // failed to join this thread. To be a bit safer, try to avoid calling any native methods
+          // in case they've been unregistered after stopRecording() returned.
+          if (keepAlive) {
+            nativeDataIsRecorded(bytesRead, nativeAudioRecord);
+          }
+          if (audioSamplesReadyCallback != null) {
+            // Copy the entire byte buffer array. Assume that the start of the byteBuffer is
+            // at index 0.
+            byte[] data = Arrays.copyOf(byteBuffer.array(), byteBuffer.capacity());
+            audioSamplesReadyCallback.onWebRtcAudioRecordSamplesReady(
+                new AudioSamples(audioRecord, data));
+          }
+        } else {
+          String errorMessage = "AudioRecord.read failed: " + bytesRead;
+          Logging.e(TAG, errorMessage);
+          if (bytesRead == AudioRecord.ERROR_INVALID_OPERATION) {
+            keepAlive = false;
+            reportWebRtcAudioRecordError(errorMessage);
+          }
+        }
+        if (DEBUG) {
+          long nowTime = System.nanoTime();
+          long durationInMs = TimeUnit.NANOSECONDS.toMillis((nowTime - lastTime));
+          lastTime = nowTime;
+          Logging.d(TAG, "bytesRead[" + durationInMs + "] " + bytesRead);
+        }
+      }
+
+      try {
+        if (audioRecord != null) {
+          audioRecord.stop();
+        }
+      } catch (IllegalStateException e) {
+        Logging.e(TAG, "AudioRecord.stop failed: " + e.getMessage());
+      }
+    }
+
+    // Stops the inner thread loop and also calls AudioRecord.stop().
+    // Does not block the calling thread.
+    public void stopThread() {
+      Logging.d(TAG, "stopThread");
+      keepAlive = false;
+    }
+  }
+
+  WebRtcAudioRecord(long nativeAudioRecord) {
+    Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
+    this.nativeAudioRecord = nativeAudioRecord;
+    if (DEBUG) {
+      WebRtcAudioUtils.logDeviceInfo(TAG);
+    }
+    effects = WebRtcAudioEffects.create();
+  }
+
+  private boolean enableBuiltInAEC(boolean enable) {
+    Logging.d(TAG, "enableBuiltInAEC(" + enable + ')');
+    if (effects == null) {
+      Logging.e(TAG, "Built-in AEC is not supported on this platform");
+      return false;
+    }
+    return effects.setAEC(enable);
+  }
+
+  private boolean enableBuiltInNS(boolean enable) {
+    Logging.d(TAG, "enableBuiltInNS(" + enable + ')');
+    if (effects == null) {
+      Logging.e(TAG, "Built-in NS is not supported on this platform");
+      return false;
+    }
+    return effects.setNS(enable);
+  }
+
+  private int initRecording(int sampleRate, int channels) {
+    Logging.d(TAG, "initRecording(sampleRate=" + sampleRate + ", channels=" + channels + ")");
+    if (audioRecord != null) {
+      reportWebRtcAudioRecordInitError("InitRecording called twice without StopRecording.");
+      return -1;
+    }
+    final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8);
+    final int framesPerBuffer = sampleRate / BUFFERS_PER_SECOND;
+    byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * framesPerBuffer);
+    Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity());
+    emptyBytes = new byte[byteBuffer.capacity()];
+    // Rather than passing the ByteBuffer with every callback (requiring
+    // the potentially expensive GetDirectBufferAddress) we simply have the
+    // native class cache the address of the memory once.
+    nativeCacheDirectBufferAddress(byteBuffer, nativeAudioRecord);
+
+    // Get the minimum buffer size required for the successful creation of
+    // an AudioRecord object, in byte units.
+    // Note that this size doesn't guarantee a smooth recording under load.
+    final int channelConfig = channelCountToConfiguration(channels);
+    int minBufferSize =
+        AudioRecord.getMinBufferSize(sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
+    if (minBufferSize == AudioRecord.ERROR || minBufferSize == AudioRecord.ERROR_BAD_VALUE) {
+      reportWebRtcAudioRecordInitError("AudioRecord.getMinBufferSize failed: " + minBufferSize);
+      return -1;
+    }
+    Logging.d(TAG, "AudioRecord.getMinBufferSize: " + minBufferSize);
+
+    // Use a larger buffer size than the minimum required when creating the
+    // AudioRecord instance to ensure smooth recording under load. It has been
+    // verified that it does not increase the actual recording latency.
+    int bufferSizeInBytes = Math.max(BUFFER_SIZE_FACTOR * minBufferSize, byteBuffer.capacity());
+    Logging.d(TAG, "bufferSizeInBytes: " + bufferSizeInBytes);
+    try {
+      audioRecord = new AudioRecord(audioSource, sampleRate, channelConfig,
+          AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes);
+    } catch (IllegalArgumentException e) {
+      reportWebRtcAudioRecordInitError("AudioRecord ctor error: " + e.getMessage());
+      releaseAudioResources();
+      return -1;
+    }
+    if (audioRecord == null || audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
+      reportWebRtcAudioRecordInitError("Failed to create a new AudioRecord instance");
+      releaseAudioResources();
+      return -1;
+    }
+    if (effects != null) {
+      effects.enable(audioRecord.getAudioSessionId());
+    }
+    logMainParameters();
+    logMainParametersExtended();
+    return framesPerBuffer;
+  }
+
+  private boolean startRecording() {
+    Logging.d(TAG, "startRecording");
+    assertTrue(audioRecord != null);
+    assertTrue(audioThread == null);
+    try {
+      audioRecord.startRecording();
+    } catch (IllegalStateException e) {
+      reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_EXCEPTION,
+          "AudioRecord.startRecording failed: " + e.getMessage());
+      return false;
+    }
+    if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
+      reportWebRtcAudioRecordStartError(
+          AudioRecordStartErrorCode.AUDIO_RECORD_START_STATE_MISMATCH,
+          "AudioRecord.startRecording failed - incorrect state :"
+          + audioRecord.getRecordingState());
+      return false;
+    }
+    audioThread = new AudioRecordThread("AudioRecordJavaThread");
+    audioThread.start();
+    return true;
+  }
+
+  private boolean stopRecording() {
+    Logging.d(TAG, "stopRecording");
+    assertTrue(audioThread != null);
+    audioThread.stopThread();
+    if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) {
+      Logging.e(TAG, "Join of AudioRecordJavaThread timed out");
+      WebRtcAudioUtils.logAudioState(TAG);
+    }
+    audioThread = null;
+    if (effects != null) {
+      effects.release();
+    }
+    releaseAudioResources();
+    return true;
+  }
+
+  private void logMainParameters() {
+    Logging.d(TAG, "AudioRecord: "
+            + "session ID: " + audioRecord.getAudioSessionId() + ", "
+            + "channels: " + audioRecord.getChannelCount() + ", "
+            + "sample rate: " + audioRecord.getSampleRate());
+  }
+
+  private void logMainParametersExtended() {
+    if (Build.VERSION.SDK_INT >= 23) {
+      Logging.d(TAG, "AudioRecord: "
+              // The frame count of the native AudioRecord buffer.
+              + "buffer size in frames: " + audioRecord.getBufferSizeInFrames());
+    }
+  }
+
+  // Helper method which throws an exception when an assertion has failed.
+  private static void assertTrue(boolean condition) {
+    if (!condition) {
+      throw new AssertionError("Expected condition to be true");
+    }
+  }
+
+  private int channelCountToConfiguration(int channels) {
+    return (channels == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO);
+  }
+
+  private native void nativeCacheDirectBufferAddress(ByteBuffer byteBuffer, long nativeAudioRecord);
+
+  private native void nativeDataIsRecorded(int bytes, long nativeAudioRecord);
+
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized void setAudioSource(int source) {
+    Logging.w(TAG, "Audio source is changed from: " + audioSource
+            + " to " + source);
+    audioSource = source;
+  }
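+
+  // Example (hypothetical client code): switch to the unprocessed microphone
+  // source on devices where VOICE_COMMUNICATION applies unwanted platform
+  // processing; must be called before recording is initialized.
+  //
+  //   WebRtcAudioRecord.setAudioSource(AudioSource.MIC);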
+
+  private static int getDefaultAudioSource() {
+    return AudioSource.VOICE_COMMUNICATION;
+  }
+
+  // Sets all recorded samples to zero if |mute| is true, i.e., ensures that
+  // the microphone is muted.
+  public static void setMicrophoneMute(boolean mute) {
+    Logging.w(TAG, "setMicrophoneMute(" + mute + ")");
+    microphoneMute = mute;
+  }
+
+  // Releases the native AudioRecord resources.
+  private void releaseAudioResources() {
+    Logging.d(TAG, "releaseAudioResources");
+    if (audioRecord != null) {
+      audioRecord.release();
+      audioRecord = null;
+    }
+  }
+
+  private void reportWebRtcAudioRecordInitError(String errorMessage) {
+    Logging.e(TAG, "Init recording error: " + errorMessage);
+    WebRtcAudioUtils.logAudioState(TAG);
+    if (errorCallback != null) {
+      errorCallback.onWebRtcAudioRecordInitError(errorMessage);
+    }
+  }
+
+  private void reportWebRtcAudioRecordStartError(
+      AudioRecordStartErrorCode errorCode, String errorMessage) {
+    Logging.e(TAG, "Start recording error: " + errorCode + ". " + errorMessage);
+    WebRtcAudioUtils.logAudioState(TAG);
+    if (errorCallback != null) {
+      errorCallback.onWebRtcAudioRecordStartError(errorCode, errorMessage);
+    }
+  }
+
+  private void reportWebRtcAudioRecordError(String errorMessage) {
+    Logging.e(TAG, "Run-time recording error: " + errorMessage);
+    WebRtcAudioUtils.logAudioState(TAG);
+    if (errorCallback != null) {
+      errorCallback.onWebRtcAudioRecordError(errorMessage);
+    }
+  }
+}

+ 524 - 0
libwebrtc/src/main/java/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java

@@ -0,0 +1,524 @@
+/*
+ *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.voiceengine;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.media.AudioAttributes;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioTrack;
+import android.os.Build;
+import android.os.Process;
+
+import org.webrtc.ContextUtils;
+import org.webrtc.Logging;
+import org.webrtc.ThreadUtils;
+
+import java.nio.ByteBuffer;
+
+public class WebRtcAudioTrack {
+  private static final boolean DEBUG = false;
+
+  private static final String TAG = "WebRtcAudioTrack";
+
+  // Default audio data format is PCM 16 bit per sample.
+  // Guaranteed to be supported by all devices.
+  private static final int BITS_PER_SAMPLE = 16;
+
+  // Requested size of each recorded buffer provided to the client.
+  private static final int CALLBACK_BUFFER_SIZE_MS = 10;
+
+  // Average number of callbacks per second.
+  private static final int BUFFERS_PER_SECOND = 1000 / CALLBACK_BUFFER_SIZE_MS;
+
+  // The AudioTrackThread is allowed to wait for successful call to join()
+  // but the wait times out afther this amount of time.
+  private static final long AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS = 2000;
+
+  // By default, WebRTC creates audio tracks with a usage attribute
+  // corresponding to voice communications, such as telephony or VoIP.
+  private static final int DEFAULT_USAGE = getDefaultUsageAttribute();
+  private static int usageAttribute = DEFAULT_USAGE;
+
+  // This method overrides the default usage attribute and allows the user
+  // to set it to something other than AudioAttributes.USAGE_VOICE_COMMUNICATION.
+  // NOTE: calling this method will most likely break existing VoIP tuning.
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized void setAudioTrackUsageAttribute(int usage) {
+    Logging.w(TAG, "Default usage attribute is changed from: "
+        + DEFAULT_USAGE + " to " + usage);
+    usageAttribute = usage;
+  }
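+
+  // Example (hypothetical client code): route playout as media rather than
+  // voice communication; note the warning above about breaking VoIP tuning.
+  //
+  //   WebRtcAudioTrack.setAudioTrackUsageAttribute(AudioAttributes.USAGE_MEDIA);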
+
+  private static int getDefaultUsageAttribute() {
+    if (Build.VERSION.SDK_INT >= 21) {
+      return AudioAttributes.USAGE_VOICE_COMMUNICATION;
+    } else {
+      // Not used on SDKs lower than 21.
+      return 0;
+    }
+  }
+
+  private final long nativeAudioTrack;
+  private final AudioManager audioManager;
+  private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
+
+  private ByteBuffer byteBuffer;
+
+  private AudioTrack audioTrack;
+  private AudioTrackThread audioThread;
+
+  // Samples to be played are replaced by zeros if |speakerMute| is set to true.
+  // Can be used to ensure that the speaker is fully muted.
+  private static volatile boolean speakerMute;
+  private byte[] emptyBytes;
+
+  // Audio playout/track error handler functions.
+  public enum AudioTrackStartErrorCode {
+    AUDIO_TRACK_START_EXCEPTION,
+    AUDIO_TRACK_START_STATE_MISMATCH,
+  }
+
+  @Deprecated
+  public static interface WebRtcAudioTrackErrorCallback {
+    void onWebRtcAudioTrackInitError(String errorMessage);
+    void onWebRtcAudioTrackStartError(String errorMessage);
+    void onWebRtcAudioTrackError(String errorMessage);
+  }
+
+  // TODO(henrika): upgrade all clients to use this new interface instead.
+  public static interface ErrorCallback {
+    void onWebRtcAudioTrackInitError(String errorMessage);
+    void onWebRtcAudioTrackStartError(AudioTrackStartErrorCode errorCode, String errorMessage);
+    void onWebRtcAudioTrackError(String errorMessage);
+  }
+
+  private static WebRtcAudioTrackErrorCallback errorCallbackOld;
+  private static ErrorCallback errorCallback;
+
+  @Deprecated
+  public static void setErrorCallback(WebRtcAudioTrackErrorCallback errorCallback) {
+    Logging.d(TAG, "Set error callback (deprecated");
+    WebRtcAudioTrack.errorCallbackOld = errorCallback;
+  }
+
+  public static void setErrorCallback(ErrorCallback errorCallback) {
+    Logging.d(TAG, "Set extended error callback");
+    WebRtcAudioTrack.errorCallback = errorCallback;
+  }
+
+  /**
+   * Audio thread which keeps calling AudioTrack.write() to stream audio.
+   * Data is periodically acquired from the native WebRTC layer using the
+   * nativeGetPlayoutData callback function.
+   * This thread uses a Process.THREAD_PRIORITY_URGENT_AUDIO priority.
+   */
+  private class AudioTrackThread extends Thread {
+    private volatile boolean keepAlive = true;
+
+    public AudioTrackThread(String name) {
+      super(name);
+    }
+
+    @Override
+    public void run() {
+      Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);
+      Logging.d(TAG, "AudioTrackThread" + WebRtcAudioUtils.getThreadInfo());
+      assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING);
+
+      // Fixed size in bytes of each 10ms block of audio data that we ask for
+      // using callbacks to the native WebRTC client.
+      final int sizeInBytes = byteBuffer.capacity();
+
+      while (keepAlive) {
+        // Get 10ms of PCM data from the native WebRTC client. Audio data is
+        // written into the common ByteBuffer using the address that was
+        // cached at construction.
+        nativeGetPlayoutData(sizeInBytes, nativeAudioTrack);
+        // Write data until all data has been written to the audio sink.
+        // Upon return, the buffer position will have been advanced to reflect
+        // the amount of data that was successfully written to the AudioTrack.
+        assertTrue(sizeInBytes <= byteBuffer.remaining());
+        if (speakerMute) {
+          byteBuffer.clear();
+          byteBuffer.put(emptyBytes);
+          byteBuffer.position(0);
+        }
+        int bytesWritten = writeBytes(audioTrack, byteBuffer, sizeInBytes);
+        if (bytesWritten != sizeInBytes) {
+          Logging.e(TAG, "AudioTrack.write played invalid number of bytes: " + bytesWritten);
+          // If a write() returns a negative value, an error has occurred.
+          // Stop playing and report an error in this case.
+          if (bytesWritten < 0) {
+            keepAlive = false;
+            reportWebRtcAudioTrackError("AudioTrack.write failed: " + bytesWritten);
+          }
+        }
+        // The byte buffer must be rewound since byteBuffer.position() is
+        // increased at each call to AudioTrack.write(). If we don't do this,
+        // the next call to AudioTrack.write() will fail.
+        byteBuffer.rewind();
+
+        // TODO(henrika): it is possible to create a delay estimate here by
+        // counting number of written frames and subtracting the result from
+        // audioTrack.getPlaybackHeadPosition().
+      }
+
+      // Stops playing the audio data. Since the instance was created in
+      // MODE_STREAM mode, audio will stop playing after the last buffer that
+      // was written has been played.
+      if (audioTrack != null) {
+        Logging.d(TAG, "Calling AudioTrack.stop...");
+        try {
+          audioTrack.stop();
+          Logging.d(TAG, "AudioTrack.stop is done.");
+        } catch (IllegalStateException e) {
+          Logging.e(TAG, "AudioTrack.stop failed: " + e.getMessage());
+        }
+      }
+    }
+
+    private int writeBytes(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
+      if (Build.VERSION.SDK_INT >= 21) {
+        return audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
+      } else {
+        return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes);
+      }
+    }
+
+    // Stops the inner thread loop which results in calling AudioTrack.stop().
+    // Does not block the calling thread.
+    public void stopThread() {
+      Logging.d(TAG, "stopThread");
+      keepAlive = false;
+    }
+  }
+
+  WebRtcAudioTrack(long nativeAudioTrack) {
+    threadChecker.checkIsOnValidThread();
+    Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
+    this.nativeAudioTrack = nativeAudioTrack;
+    audioManager =
+        (AudioManager) ContextUtils.getApplicationContext().getSystemService(Context.AUDIO_SERVICE);
+    if (DEBUG) {
+      WebRtcAudioUtils.logDeviceInfo(TAG);
+    }
+  }
+
+  private boolean initPlayout(int sampleRate, int channels) {
+    threadChecker.checkIsOnValidThread();
+    Logging.d(TAG, "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels + ")");
+    final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8);
+    byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND));
+    Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity());
+    emptyBytes = new byte[byteBuffer.capacity()];
+    // Rather than passing the ByteBuffer with every callback (requiring
+    // the potentially expensive GetDirectBufferAddress) we simply have the
+    // native class cache the address of the memory once.
+    nativeCacheDirectBufferAddress(byteBuffer, nativeAudioTrack);
+
+    // Get the minimum buffer size required for the successful creation of an
+    // AudioTrack object to be created in the MODE_STREAM mode.
+    // Note that this size doesn't guarantee a smooth playback under load.
+    // TODO(henrika): should we extend the buffer size to avoid glitches?
+    final int channelConfig = channelCountToConfiguration(channels);
+    final int minBufferSizeInBytes =
+        AudioTrack.getMinBufferSize(sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
+    Logging.d(TAG, "AudioTrack.getMinBufferSize: " + minBufferSizeInBytes);
+    // For the streaming mode, data must be written to the audio sink in
+    // chunks of size (given by byteBuffer.capacity()) less than or equal
+    // to the total buffer size |minBufferSizeInBytes|. But, we have seen
+    // reports of "getMinBufferSize(): error querying hardware". Hence, it
+    // can happen that |minBufferSizeInBytes| contains an invalid value.
+    if (minBufferSizeInBytes < byteBuffer.capacity()) {
+      reportWebRtcAudioTrackInitError("AudioTrack.getMinBufferSize returns an invalid value.");
+      return false;
+    }
+
+    // Ensure that the previous audio session was stopped correctly before trying
+    // to create a new AudioTrack.
+    if (audioTrack != null) {
+      reportWebRtcAudioTrackInitError("Conflict with existing AudioTrack.");
+      return false;
+    }
+    try {
+      // Create an AudioTrack object and initialize its associated audio buffer.
+      // The size of this buffer determines how long an AudioTrack can play
+      // before running out of data.
+      if (Build.VERSION.SDK_INT >= 21) {
+        // If we are on API level 21 or higher, it is possible to use a special AudioTrack
+        // constructor that uses AudioAttributes and AudioFormat as input. It allows us to
+        // supersede the notion of stream types for defining the behavior of audio playback,
+        // and to allow certain platforms or routing policies to use this information for more
+        // refined volume or routing decisions.
+        audioTrack = createAudioTrackOnLollipopOrHigher(
+            sampleRate, channelConfig, minBufferSizeInBytes);
+      } else {
+        // Use default constructor for API levels below 21.
+        audioTrack =
+            createAudioTrackOnLowerThanLollipop(sampleRate, channelConfig, minBufferSizeInBytes);
+      }
+    } catch (IllegalArgumentException e) {
+      reportWebRtcAudioTrackInitError(e.getMessage());
+      releaseAudioResources();
+      return false;
+    }
+
+    // It can happen that an AudioTrack is created but it was not successfully
+    // initialized upon creation. Seems to be the case e.g. when the maximum
+    // number of globally available audio tracks is exceeded.
+    if (audioTrack == null || audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
+      reportWebRtcAudioTrackInitError("Initialization of audio track failed.");
+      releaseAudioResources();
+      return false;
+    }
+    logMainParameters();
+    logMainParametersExtended();
+    return true;
+  }
+
+  private boolean startPlayout() {
+    threadChecker.checkIsOnValidThread();
+    Logging.d(TAG, "startPlayout");
+    assertTrue(audioTrack != null);
+    assertTrue(audioThread == null);
+
+    // Starts playing an audio track.
+    try {
+      audioTrack.play();
+    } catch (IllegalStateException e) {
+      reportWebRtcAudioTrackStartError(AudioTrackStartErrorCode.AUDIO_TRACK_START_EXCEPTION,
+          "AudioTrack.play failed: " + e.getMessage());
+      releaseAudioResources();
+      return false;
+    }
+    if (audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
+      reportWebRtcAudioTrackStartError(
+          AudioTrackStartErrorCode.AUDIO_TRACK_START_STATE_MISMATCH,
+          "AudioTrack.play failed - incorrect state :"
+          + audioTrack.getPlayState());
+      releaseAudioResources();
+      return false;
+    }
+
+    // Create and start new high-priority thread which calls AudioTrack.write()
+    // and where we also call the native nativeGetPlayoutData() callback to
+    // request decoded audio from WebRTC.
+    audioThread = new AudioTrackThread("AudioTrackJavaThread");
+    audioThread.start();
+    return true;
+  }
+
+  private boolean stopPlayout() {
+    threadChecker.checkIsOnValidThread();
+    Logging.d(TAG, "stopPlayout");
+    assertTrue(audioThread != null);
+    logUnderrunCount();
+    audioThread.stopThread();
+
+    Logging.d(TAG, "Stopping the AudioTrackThread...");
+    audioThread.interrupt();
+    if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS)) {
+      Logging.e(TAG, "Join of AudioTrackThread timed out.");
+      WebRtcAudioUtils.logAudioState(TAG);
+    }
+    Logging.d(TAG, "AudioTrackThread has now been stopped.");
+    audioThread = null;
+    releaseAudioResources();
+    return true;
+  }
+
+  // Get max possible volume index for a phone call audio stream.
+  private int getStreamMaxVolume() {
+    threadChecker.checkIsOnValidThread();
+    Logging.d(TAG, "getStreamMaxVolume");
+    assertTrue(audioManager != null);
+    return audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL);
+  }
+
+  // Set current volume level for a phone call audio stream.
+  private boolean setStreamVolume(int volume) {
+    threadChecker.checkIsOnValidThread();
+    Logging.d(TAG, "setStreamVolume(" + volume + ")");
+    assertTrue(audioManager != null);
+    if (isVolumeFixed()) {
+      Logging.e(TAG, "The device implements a fixed volume policy.");
+      return false;
+    }
+    audioManager.setStreamVolume(AudioManager.STREAM_VOICE_CALL, volume, 0);
+    return true;
+  }
+
+  private boolean isVolumeFixed() {
+    if (Build.VERSION.SDK_INT < 21)
+      return false;
+    return audioManager.isVolumeFixed();
+  }
+
+  /** Get current volume level for a phone call audio stream. */
+  private int getStreamVolume() {
+    threadChecker.checkIsOnValidThread();
+    Logging.d(TAG, "getStreamVolume");
+    assertTrue(audioManager != null);
+    return audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL);
+  }
+
+  private void logMainParameters() {
+    Logging.d(TAG, "AudioTrack: "
+            + "session ID: " + audioTrack.getAudioSessionId() + ", "
+            + "channels: " + audioTrack.getChannelCount() + ", "
+            + "sample rate: " + audioTrack.getSampleRate() + ", "
+            // Gain (>=1.0) expressed as linear multiplier on sample values.
+            + "max gain: " + AudioTrack.getMaxVolume());
+  }
+
+  // Creates an AudioTrack instance using AudioAttributes and AudioFormat as input.
+  // It allows certain platforms or routing policies to use this information for more
+  // refined volume or routing decisions.
+  @TargetApi(21)
+  private static AudioTrack createAudioTrackOnLollipopOrHigher(
+      int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
+    Logging.d(TAG, "createAudioTrackOnLollipopOrHigher");
+    // TODO(henrika): use setPerformanceMode(int) with PERFORMANCE_MODE_LOW_LATENCY to control
+    // performance when Android O is supported. Add some logging in the mean time.
+    final int nativeOutputSampleRate =
+        AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_VOICE_CALL);
+    Logging.d(TAG, "nativeOutputSampleRate: " + nativeOutputSampleRate);
+    if (sampleRateInHz != nativeOutputSampleRate) {
+      Logging.w(TAG, "Unable to use fast mode since requested sample rate is not native");
+    }
+    if (usageAttribute != DEFAULT_USAGE) {
+      Logging.w(TAG, "A non default usage attribute is used: " + usageAttribute);
+    }
+    // Create an audio track where the audio usage is for VoIP and the content type is speech.
+    return new AudioTrack(
+        new AudioAttributes.Builder()
+            .setUsage(usageAttribute)
+            .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
+        .build(),
+        new AudioFormat.Builder()
+          .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
+          .setSampleRate(sampleRateInHz)
+          .setChannelMask(channelConfig)
+          .build(),
+        bufferSizeInBytes,
+        AudioTrack.MODE_STREAM,
+        AudioManager.AUDIO_SESSION_ID_GENERATE);
+  }
+
+  @SuppressWarnings("deprecation") // Deprecated in API level 25.
+  private static AudioTrack createAudioTrackOnLowerThanLollipop(
+      int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
+    return new AudioTrack(AudioManager.STREAM_VOICE_CALL, sampleRateInHz, channelConfig,
+        AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes, AudioTrack.MODE_STREAM);
+  }
+
+  private void logBufferSizeInFrames() {
+    if (Build.VERSION.SDK_INT >= 23) {
+      Logging.d(TAG, "AudioTrack: "
+              // The effective size of the AudioTrack buffer that the app writes to.
+              + "buffer size in frames: " + audioTrack.getBufferSizeInFrames());
+    }
+  }
+
+  private void logBufferCapacityInFrames() {
+    if (Build.VERSION.SDK_INT >= 24) {
+      Logging.d(TAG,
+          "AudioTrack: "
+              // Maximum size of the AudioTrack buffer in frames.
+              + "buffer capacity in frames: " + audioTrack.getBufferCapacityInFrames());
+    }
+  }
+
+  private void logMainParametersExtended() {
+    logBufferSizeInFrames();
+    logBufferCapacityInFrames();
+  }
+
+  // Prints the number of underrun occurrences in the application-level write
+  // buffer since the AudioTrack was created. An underrun occurs if the app does
+  // not write audio data quickly enough, causing the buffer to underflow and a
+  // potential audio glitch.
+  // TODO(henrika): keep track of this value in the field and possibly add new
+  // UMA stat if needed.
+  private void logUnderrunCount() {
+    if (Build.VERSION.SDK_INT >= 24) {
+      Logging.d(TAG, "underrun count: " + audioTrack.getUnderrunCount());
+    }
+  }
+
+  // Helper method which throws an exception when an assertion has failed.
+  private static void assertTrue(boolean condition) {
+    if (!condition) {
+      throw new AssertionError("Expected condition to be true");
+    }
+  }
+
+  private int channelCountToConfiguration(int channels) {
+    return (channels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO);
+  }
+
+  private native void nativeCacheDirectBufferAddress(ByteBuffer byteBuffer, long nativeAudioTrack);
+
+  private native void nativeGetPlayoutData(int bytes, long nativeAudioTrack);
+
+  // Sets all samples to be played out to zero if |mute| is true, i.e.,
+  // ensures that the speaker is muted.
+  public static void setSpeakerMute(boolean mute) {
+    Logging.w(TAG, "setSpeakerMute(" + mute + ")");
+    speakerMute = mute;
+  }
+
+  // Releases the native AudioTrack resources.
+  private void releaseAudioResources() {
+    Logging.d(TAG, "releaseAudioResources");
+    if (audioTrack != null) {
+      audioTrack.release();
+      audioTrack = null;
+    }
+  }
+
+  private void reportWebRtcAudioTrackInitError(String errorMessage) {
+    Logging.e(TAG, "Init playout error: " + errorMessage);
+    WebRtcAudioUtils.logAudioState(TAG);
+    if (errorCallbackOld != null) {
+      errorCallbackOld.onWebRtcAudioTrackInitError(errorMessage);
+    }
+    if (errorCallback != null) {
+      errorCallback.onWebRtcAudioTrackInitError(errorMessage);
+    }
+  }
+
+  private void reportWebRtcAudioTrackStartError(
+      AudioTrackStartErrorCode errorCode, String errorMessage) {
+    Logging.e(TAG, "Start playout error: "  + errorCode + ". " + errorMessage);
+    WebRtcAudioUtils.logAudioState(TAG);
+    if (errorCallbackOld != null) {
+      errorCallbackOld.onWebRtcAudioTrackStartError(errorMessage);
+    }
+    if (errorCallback != null) {
+      errorCallback.onWebRtcAudioTrackStartError(errorCode, errorMessage);
+    }
+  }
+
+  private void reportWebRtcAudioTrackError(String errorMessage) {
+    Logging.e(TAG, "Run-time playback error: " + errorMessage);
+    WebRtcAudioUtils.logAudioState(TAG);
+    if (errorCallbackOld != null) {
+      errorCallbackOld.onWebRtcAudioTrackError(errorMessage);
+    }
+    if (errorCallback != null) {
+      errorCallback.onWebRtcAudioTrackError(errorMessage);
+    }
+  }
+}

+ 388 - 0
libwebrtc/src/main/java/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java

@@ -0,0 +1,388 @@
+/*
+ *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.voiceengine;
+
+import static android.media.AudioManager.MODE_IN_CALL;
+import static android.media.AudioManager.MODE_IN_COMMUNICATION;
+import static android.media.AudioManager.MODE_NORMAL;
+import static android.media.AudioManager.MODE_RINGTONE;
+
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.media.AudioDeviceInfo;
+import android.media.AudioManager;
+import android.media.AudioRecordingConfiguration;
+import android.media.MediaRecorder.AudioSource;
+import android.os.Build;
+import android.os.Process;
+import java.lang.Thread;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+import org.webrtc.ContextUtils;
+import org.webrtc.Logging;
+
+public final class WebRtcAudioUtils {
+  private static final String TAG = "WebRtcAudioUtils";
+
+  // List of devices where we have seen issues (e.g. bad audio quality) using
+  // the low latency output mode in combination with OpenSL ES.
+  // The device name is given by Build.MODEL.
+  private static final String[] BLACKLISTED_OPEN_SL_ES_MODELS = new String[] {
+      // It is recommended to maintain a list of blacklisted models outside
+      // this package and instead call
+      // WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(true)
+      // from the client for devices where OpenSL ES shall be disabled.
+  };
+
+  // List of devices where it has been verified that the built-in AEC effect
+  // is bad and where it makes sense to avoid using it and rely on the
+  // native WebRTC version instead. The device name is given by Build.MODEL.
+  private static final String[] BLACKLISTED_AEC_MODELS = new String[] {
+      // It is recommended to maintain a list of blacklisted models outside
+      // this package and instead call setWebRtcBasedAcousticEchoCanceler(true)
+      // from the client for devices where the built-in AEC shall be disabled.
+  };
+  private static final String[] BLACKLISTED_NS_MODELS = new String[] {
+    // It is recommended to maintain a list of blacklisted models outside
+    // this package and instead call setWebRtcBasedNoiseSuppressor(true)
+    // from the client for devices where the built-in NS shall be disabled.
+  };
+
+  // Use 16kHz as the default sample rate. A higher sample rate might prevent
+  // us from supporting communication mode on some older (e.g. ICS) devices.
+  private static final int DEFAULT_SAMPLE_RATE_HZ = 16000;
+  private static int defaultSampleRateHz = DEFAULT_SAMPLE_RATE_HZ;
+  // Set to true if setDefaultSampleRateHz() has been called.
+  private static boolean isDefaultSampleRateOverridden;
+
+  // By default, utilize hardware based audio effects for AEC and NS when
+  // available.
+  private static boolean useWebRtcBasedAcousticEchoCanceler;
+  private static boolean useWebRtcBasedNoiseSuppressor;
+
+  // Call these methods if any hardware based effect shall be replaced by a
+  // software based version provided by the WebRTC stack instead.
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized void setWebRtcBasedAcousticEchoCanceler(boolean enable) {
+    useWebRtcBasedAcousticEchoCanceler = enable;
+  }
+
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized void setWebRtcBasedNoiseSuppressor(boolean enable) {
+    useWebRtcBasedNoiseSuppressor = enable;
+  }
+
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized void setWebRtcBasedAutomaticGainControl(boolean enable) {
+    // TODO(henrika): deprecated; remove when no longer used by any client.
+    Logging.w(TAG, "setWebRtcBasedAutomaticGainControl() is deprecated");
+  }
+
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized boolean useWebRtcBasedAcousticEchoCanceler() {
+    if (useWebRtcBasedAcousticEchoCanceler) {
+      Logging.w(TAG, "Overriding default behavior; now using WebRTC AEC!");
+    }
+    return useWebRtcBasedAcousticEchoCanceler;
+  }
+
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized boolean useWebRtcBasedNoiseSuppressor() {
+    if (useWebRtcBasedNoiseSuppressor) {
+      Logging.w(TAG, "Overriding default behavior; now using WebRTC NS!");
+    }
+    return useWebRtcBasedNoiseSuppressor;
+  }
+
+  // TODO(henrika): deprecated; remove when no longer used by any client.
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized boolean useWebRtcBasedAutomaticGainControl() {
+    // Always return true here to avoid trying to use any built-in AGC.
+    return true;
+  }
+
+  // Returns true if the device supports an audio effect (AEC or NS).
+  // Four conditions must be fulfilled if functions are to return true:
+  // 1) the platform must support the built-in (HW) effect,
+  // 2) explicit use (override) of a WebRTC based version must not be set,
+  // 3) the device must not be blacklisted for use of the effect, and
+  // 4) the UUID of the effect must be approved (some UUIDs can be excluded).
+  public static boolean isAcousticEchoCancelerSupported() {
+    return WebRtcAudioEffects.canUseAcousticEchoCanceler();
+  }
+  public static boolean isNoiseSuppressorSupported() {
+    return WebRtcAudioEffects.canUseNoiseSuppressor();
+  }
+  // TODO(henrika): deprecated; remove when no longer used by any client.
+  public static boolean isAutomaticGainControlSupported() {
+    // Always return false here to avoid trying to use any built-in AGC.
+    return false;
+  }
+
+  // Call this method if the default handling of querying the native sample
+  // rate shall be overridden. Can be useful on some devices where the
+  // available Android APIs are known to return invalid results.
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized void setDefaultSampleRateHz(int sampleRateHz) {
+    isDefaultSampleRateOverridden = true;
+    defaultSampleRateHz = sampleRateHz;
+  }
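+
+  // Example (hypothetical client code): force 48 kHz on a device whose
+  // PROPERTY_OUTPUT_SAMPLE_RATE query is known to return an invalid value.
+  //
+  //   WebRtcAudioUtils.setDefaultSampleRateHz(48000);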
+
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized boolean isDefaultSampleRateOverridden() {
+    return isDefaultSampleRateOverridden;
+  }
+
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized int getDefaultSampleRateHz() {
+    return defaultSampleRateHz;
+  }
+
+  public static List<String> getBlackListedModelsForAecUsage() {
+    return Arrays.asList(WebRtcAudioUtils.BLACKLISTED_AEC_MODELS);
+  }
+
+  public static List<String> getBlackListedModelsForNsUsage() {
+    return Arrays.asList(WebRtcAudioUtils.BLACKLISTED_NS_MODELS);
+  }
+
+  // Helper method for building a string of thread information.
+  public static String getThreadInfo() {
+    return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId()
+        + "]";
+  }
+
+  // Returns true if we're running on emulator.
+  public static boolean runningOnEmulator() {
+    return Build.HARDWARE.equals("goldfish") && Build.BRAND.startsWith("generic_");
+  }
+
+  // Returns true if the device is blacklisted for OpenSL ES usage.
+  public static boolean deviceIsBlacklistedForOpenSLESUsage() {
+    List<String> blackListedModels = Arrays.asList(BLACKLISTED_OPEN_SL_ES_MODELS);
+    return blackListedModels.contains(Build.MODEL);
+  }
+
+  // Information about the current build, taken from system properties.
+  static void logDeviceInfo(String tag) {
+    Logging.d(tag, "Android SDK: " + Build.VERSION.SDK_INT + ", "
+            + "Release: " + Build.VERSION.RELEASE + ", "
+            + "Brand: " + Build.BRAND + ", "
+            + "Device: " + Build.DEVICE + ", "
+            + "Id: " + Build.ID + ", "
+            + "Hardware: " + Build.HARDWARE + ", "
+            + "Manufacturer: " + Build.MANUFACTURER + ", "
+            + "Model: " + Build.MODEL + ", "
+            + "Product: " + Build.PRODUCT);
+  }
+
+  // Logs information about the current audio state. The idea is to call this
+  // method when errors are detected to log under what conditions the error
+  // occurred. Hopefully it will provide clues to what might be the root cause.
+  static void logAudioState(String tag) {
+    logDeviceInfo(tag);
+    final Context context = ContextUtils.getApplicationContext();
+    final AudioManager audioManager =
+        (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+    logAudioStateBasic(tag, audioManager);
+    logAudioStateVolume(tag, audioManager);
+    logAudioDeviceInfo(tag, audioManager);
+  }
+
+  // Reports basic audio statistics.
+  private static void logAudioStateBasic(String tag, AudioManager audioManager) {
+    Logging.d(tag, "Audio State: "
+            + "audio mode: " + modeToString(audioManager.getMode()) + ", "
+            + "has mic: " + hasMicrophone() + ", "
+            + "mic muted: " + audioManager.isMicrophoneMute() + ", "
+            + "music active: " + audioManager.isMusicActive() + ", "
+            + "speakerphone: " + audioManager.isSpeakerphoneOn() + ", "
+            + "BT SCO: " + audioManager.isBluetoothScoOn());
+  }
+
+  private static boolean isVolumeFixed(AudioManager audioManager) {
+    if (Build.VERSION.SDK_INT < 21) {
+      return false;
+    }
+    return audioManager.isVolumeFixed();
+  }
+
+  // Adds volume information for all possible stream types.
+  private static void logAudioStateVolume(String tag, AudioManager audioManager) {
+    final int[] streams = {
+        AudioManager.STREAM_VOICE_CALL,
+        AudioManager.STREAM_MUSIC,
+        AudioManager.STREAM_RING,
+        AudioManager.STREAM_ALARM,
+        AudioManager.STREAM_NOTIFICATION,
+        AudioManager.STREAM_SYSTEM
+    };
+    Logging.d(tag, "Audio State: ");
+    // Some devices may not have volume controls and might use a fixed volume.
+    boolean fixedVolume = isVolumeFixed(audioManager);
+    Logging.d(tag, "  fixed volume=" + fixedVolume);
+    if (!fixedVolume) {
+      for (int stream : streams) {
+        StringBuilder info = new StringBuilder();
+        info.append("  " + streamTypeToString(stream) + ": ");
+        info.append("volume=").append(audioManager.getStreamVolume(stream));
+        info.append(", max=").append(audioManager.getStreamMaxVolume(stream));
+        logIsStreamMute(tag, audioManager, stream, info);
+        Logging.d(tag, info.toString());
+      }
+    }
+  }
+
+  private static void logIsStreamMute(
+      String tag, AudioManager audioManager, int stream, StringBuilder info) {
+    if (Build.VERSION.SDK_INT >= 23) {
+      info.append(", muted=").append(audioManager.isStreamMute(stream));
+    }
+  }
+
+  private static void logAudioDeviceInfo(String tag, AudioManager audioManager) {
+    if (Build.VERSION.SDK_INT < 23) {
+      return;
+    }
+    final AudioDeviceInfo[] devices =
+        audioManager.getDevices(AudioManager.GET_DEVICES_ALL);
+    if (devices.length == 0) {
+      return;
+    }
+    Logging.d(tag, "Audio Devices: ");
+    for (AudioDeviceInfo device : devices) {
+      StringBuilder info = new StringBuilder();
+      info.append("  ").append(deviceTypeToString(device.getType()));
+      info.append(device.isSource() ? "(in): " : "(out): ");
+      // An empty array indicates that the device supports arbitrary channel counts.
+      if (device.getChannelCounts().length > 0) {
+        info.append("channels=").append(Arrays.toString(device.getChannelCounts()));
+        info.append(", ");
+      }
+      if (device.getEncodings().length > 0) {
+        // Examples: ENCODING_PCM_16BIT = 2, ENCODING_PCM_FLOAT = 4.
+        info.append("encodings=").append(Arrays.toString(device.getEncodings()));
+        info.append(", ");
+      }
+      if (device.getSampleRates().length > 0) {
+        info.append("sample rates=").append(Arrays.toString(device.getSampleRates()));
+        info.append(", ");
+      }
+      info.append("id=").append(device.getId());
+      Logging.d(tag, info.toString());
+    }
+  }
+
+  // Converts media.AudioManager modes into local string representation.
+  static String modeToString(int mode) {
+    switch (mode) {
+      case MODE_IN_CALL:
+        return "MODE_IN_CALL";
+      case MODE_IN_COMMUNICATION:
+        return "MODE_IN_COMMUNICATION";
+      case MODE_NORMAL:
+        return "MODE_NORMAL";
+      case MODE_RINGTONE:
+        return "MODE_RINGTONE";
+      default:
+        return "MODE_INVALID";
+    }
+  }
+
+  private static String streamTypeToString(int stream) {
+    switch(stream) {
+      case AudioManager.STREAM_VOICE_CALL:
+        return "STREAM_VOICE_CALL";
+      case AudioManager.STREAM_MUSIC:
+        return "STREAM_MUSIC";
+      case AudioManager.STREAM_RING:
+        return "STREAM_RING";
+      case AudioManager.STREAM_ALARM:
+        return "STREAM_ALARM";
+      case AudioManager.STREAM_NOTIFICATION:
+        return "STREAM_NOTIFICATION";
+      case AudioManager.STREAM_SYSTEM:
+        return "STREAM_SYSTEM";
+      default:
+        return "STREAM_INVALID";
+    }
+  }
+
+  // Converts AudioDeviceInfo types to local string representation.
+  private static String deviceTypeToString(int type) {
+    switch (type) {
+      case AudioDeviceInfo.TYPE_UNKNOWN:
+        return "TYPE_UNKNOWN";
+      case AudioDeviceInfo.TYPE_BUILTIN_EARPIECE:
+        return "TYPE_BUILTIN_EARPIECE";
+      case AudioDeviceInfo.TYPE_BUILTIN_SPEAKER:
+        return "TYPE_BUILTIN_SPEAKER";
+      case AudioDeviceInfo.TYPE_WIRED_HEADSET:
+        return "TYPE_WIRED_HEADSET";
+      case AudioDeviceInfo.TYPE_WIRED_HEADPHONES:
+        return "TYPE_WIRED_HEADPHONES";
+      case AudioDeviceInfo.TYPE_LINE_ANALOG:
+        return "TYPE_LINE_ANALOG";
+      case AudioDeviceInfo.TYPE_LINE_DIGITAL:
+        return "TYPE_LINE_DIGITAL";
+      case AudioDeviceInfo.TYPE_BLUETOOTH_SCO:
+        return "TYPE_BLUETOOTH_SCO";
+      case AudioDeviceInfo.TYPE_BLUETOOTH_A2DP:
+        return "TYPE_BLUETOOTH_A2DP";
+      case AudioDeviceInfo.TYPE_HDMI:
+        return "TYPE_HDMI";
+      case AudioDeviceInfo.TYPE_HDMI_ARC:
+        return "TYPE_HDMI_ARC";
+      case AudioDeviceInfo.TYPE_USB_DEVICE:
+        return "TYPE_USB_DEVICE";
+      case AudioDeviceInfo.TYPE_USB_ACCESSORY:
+        return "TYPE_USB_ACCESSORY";
+      case AudioDeviceInfo.TYPE_DOCK:
+        return "TYPE_DOCK";
+      case AudioDeviceInfo.TYPE_FM:
+        return "TYPE_FM";
+      case AudioDeviceInfo.TYPE_BUILTIN_MIC:
+        return "TYPE_BUILTIN_MIC";
+      case AudioDeviceInfo.TYPE_FM_TUNER:
+        return "TYPE_FM_TUNER";
+      case AudioDeviceInfo.TYPE_TV_TUNER:
+        return "TYPE_TV_TUNER";
+      case AudioDeviceInfo.TYPE_TELEPHONY:
+        return "TYPE_TELEPHONY";
+      case AudioDeviceInfo.TYPE_AUX_LINE:
+        return "TYPE_AUX_LINE";
+      case AudioDeviceInfo.TYPE_IP:
+        return "TYPE_IP";
+      case AudioDeviceInfo.TYPE_BUS:
+        return "TYPE_BUS";
+      case AudioDeviceInfo.TYPE_USB_HEADSET:
+        return "TYPE_USB_HEADSET";
+      default:
+        return "TYPE_UNKNOWN";
+    }
+  }
+
+  // Returns true if the device can record audio via a microphone.
+  private static boolean hasMicrophone() {
+    return ContextUtils.getApplicationContext().getPackageManager().hasSystemFeature(
+        PackageManager.FEATURE_MICROPHONE);
+  }
+}
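
Usage sketch (illustrative only, not part of this commit): combining the queries above to decide between a hardware effect and the WebRTC software fallback, assuming the org.webrtc classes from this diff are on the classpath.

    // Prefer the built-in (HW) effect only when supported and not overridden.
    static boolean shouldUseHardwareAec() {
      return WebRtcAudioUtils.isAcousticEchoCancelerSupported()
          && !WebRtcAudioUtils.useWebRtcBasedAcousticEchoCanceler();
    }

    static boolean shouldUseHardwareNs() {
      return WebRtcAudioUtils.isNoiseSuppressorSupported()
          && !WebRtcAudioUtils.useWebRtcBasedNoiseSuppressor();
    }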

+ 46 - 0
libwebrtc/src/main/java/rtc_base/java/src/org/webrtc/ContextUtils.java

@@ -0,0 +1,46 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import org.webrtc.Logging;
+
+/**
+ * Class for storing the application context and retrieving it in a static context. Similar to
+ * org.chromium.base.ContextUtils.
+ */
+public class ContextUtils {
+  private static final String TAG = "ContextUtils";
+  private static Context applicationContext;
+
+  /**
+   * Stores the application context that will be returned by getApplicationContext. This is called
+   * by PeerConnectionFactory.initialize. The application context must be set before creating
+   * a PeerConnectionFactory and must not be modified while it is alive.
+   */
+  public static void initialize(Context applicationContext) {
+    if (applicationContext == null) {
+      throw new IllegalArgumentException(
+          "Application context cannot be null for ContextUtils.initialize.");
+    }
+    ContextUtils.applicationContext = applicationContext;
+  }
+
+  /**
+   * Returns the stored application context.
+   *
+   * @deprecated crbug.com/webrtc/8937
+   */
+  @Deprecated
+  public static Context getApplicationContext() {
+    return applicationContext;
+  }
+}
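
A sketch of the initialize() contract (non-null, set once, before any component reads it). In practice PeerConnectionFactory.initialize performs this call; doing it directly in an Application subclass is shown only for illustration, and the MyApp name is hypothetical.

    public class MyApp extends android.app.Application {
      @Override
      public void onCreate() {
        super.onCreate();
        // Must be set before creating a PeerConnectionFactory.
        ContextUtils.initialize(getApplicationContext());
      }
    }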

+ 22 - 0
libwebrtc/src/main/java/rtc_base/java/src/org/webrtc/Loggable.java

@@ -0,0 +1,22 @@
+/*
+ *  Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.Logging.Severity;
+
+/**
+ * Java interface for WebRTC logging. The default implementation uses webrtc.Logging.
+ *
+ * When injected, the Loggable will receive logging from both Java and native.
+ */
+public interface Loggable {
+  public void onLogMessage(String message, Severity severity, String tag);
+}
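
A minimal sketch of an injectable Loggable that forwards both Java and native messages to Logcat (the LogcatLoggable name is hypothetical):

    class LogcatLoggable implements Loggable {
      @Override
      public void onLogMessage(String message, Logging.Severity severity, String tag) {
        // Map WebRTC severity onto an Android log priority.
        int priority = (severity == Logging.Severity.LS_ERROR)
            ? android.util.Log.ERROR : android.util.Log.INFO;
        android.util.Log.println(priority, tag, message);
      }
    }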

+ 199 - 0
libwebrtc/src/main/java/rtc_base/java/src/org/webrtc/Logging.java

@@ -0,0 +1,199 @@
+/*
+ *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.util.EnumSet;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+/**
+ * Java wrapper for WebRTC logging. Logging defaults to java.util.logging.Logger, but a custom
+ * logger implementing the Loggable interface can be injected along with a Severity. All subsequent
+ * log messages will then be redirected to the injected Loggable, except those with a severity lower
+ * than the specified severity, which will be discarded.
+ *
+ * It is also possible to switch to native logging (rtc::LogMessage) if one of the following static
+ * functions are called from the app:
+ * - Logging.enableLogThreads
+ * - Logging.enableLogTimeStamps
+ * - Logging.enableLogToDebugOutput
+ *
+ * The priority goes:
+ * 1. Injected loggable
+ * 2. Native logging
+ * 3. Fallback logging.
+ * Only one method will be used at a time.
+ *
+ * Injecting a Loggable or using any of the enable... methods requires that the native library is
+ * loaded, using PeerConnectionFactory.initialize.
+ */
+public class Logging {
+  private static final Logger fallbackLogger = createFallbackLogger();
+  private static volatile boolean loggingEnabled;
+  private static Loggable loggable;
+  private static Severity loggableSeverity;
+
+  private static Logger createFallbackLogger() {
+    final Logger fallbackLogger = Logger.getLogger("org.webrtc.Logging");
+    fallbackLogger.setLevel(Level.ALL);
+    return fallbackLogger;
+  }
+
+  static void injectLoggable(Loggable injectedLoggable, Severity severity) {
+    if (injectedLoggable != null) {
+      loggable = injectedLoggable;
+      loggableSeverity = severity;
+    }
+  }
+
+  static void deleteInjectedLoggable() {
+    loggable = null;
+  }
+
+  // TODO(solenberg): Remove once dependent projects updated.
+  @Deprecated
+  public enum TraceLevel {
+    TRACE_NONE(0x0000),
+    TRACE_STATEINFO(0x0001),
+    TRACE_WARNING(0x0002),
+    TRACE_ERROR(0x0004),
+    TRACE_CRITICAL(0x0008),
+    TRACE_APICALL(0x0010),
+    TRACE_DEFAULT(0x00ff),
+    TRACE_MODULECALL(0x0020),
+    TRACE_MEMORY(0x0100),
+    TRACE_TIMER(0x0200),
+    TRACE_STREAM(0x0400),
+    TRACE_DEBUG(0x0800),
+    TRACE_INFO(0x1000),
+    TRACE_TERSEINFO(0x2000),
+    TRACE_ALL(0xffff);
+
+    public final int level;
+    TraceLevel(int level) {
+      this.level = level;
+    }
+  }
+
+  // Keep in sync with webrtc/rtc_base/logging.h:LoggingSeverity.
+  public enum Severity { LS_VERBOSE, LS_INFO, LS_WARNING, LS_ERROR, LS_NONE }
+
+  public static void enableLogThreads() {
+    nativeEnableLogThreads();
+  }
+
+  public static void enableLogTimeStamps() {
+    nativeEnableLogTimeStamps();
+  }
+
+  // TODO(solenberg): Remove once dependent projects updated.
+  @Deprecated
+  public static void enableTracing(String path, EnumSet<TraceLevel> levels) {}
+
+  // Enable diagnostic logging for messages of |severity| to the platform debug
+  // output. On Android, the output will be directed to Logcat.
+  // Note: this function starts collecting the output of the RTC_LOG() macros.
+  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+  @SuppressWarnings("NoSynchronizedMethodCheck")
+  public static synchronized void enableLogToDebugOutput(Severity severity) {
+    if (loggable != null) {
+      throw new IllegalStateException(
+          "Logging to native debug output not supported while Loggable is injected. "
+          + "Delete the Loggable before calling this method.");
+    }
+    nativeEnableLogToDebugOutput(severity.ordinal());
+    loggingEnabled = true;
+  }
+
+  public static void log(Severity severity, String tag, String message) {
+    if (tag == null || message == null) {
+      throw new IllegalArgumentException("Logging tag or message may not be null.");
+    }
+    if (loggable != null) {
+      // Filter log messages below loggableSeverity.
+      if (severity.ordinal() < loggableSeverity.ordinal()) {
+        return;
+      }
+      loggable.onLogMessage(message, severity, tag);
+      return;
+    }
+
+    // Try native logging if no loggable is injected.
+    if (loggingEnabled) {
+      nativeLog(severity.ordinal(), tag, message);
+      return;
+    }
+
+    // Fallback to system log.
+    Level level;
+    switch (severity) {
+      case LS_ERROR:
+        level = Level.SEVERE;
+        break;
+      case LS_WARNING:
+        level = Level.WARNING;
+        break;
+      case LS_INFO:
+        level = Level.INFO;
+        break;
+      default:
+        level = Level.FINE;
+        break;
+    }
+    fallbackLogger.log(level, tag + ": " + message);
+  }
+
+  public static void d(String tag, String message) {
+    log(Severity.LS_INFO, tag, message);
+  }
+
+  public static void e(String tag, String message) {
+    log(Severity.LS_ERROR, tag, message);
+  }
+
+  public static void w(String tag, String message) {
+    log(Severity.LS_WARNING, tag, message);
+  }
+
+  public static void e(String tag, String message, Throwable e) {
+    log(Severity.LS_ERROR, tag, message);
+    log(Severity.LS_ERROR, tag, e.toString());
+    log(Severity.LS_ERROR, tag, getStackTraceString(e));
+  }
+
+  public static void w(String tag, String message, Throwable e) {
+    log(Severity.LS_WARNING, tag, message);
+    log(Severity.LS_WARNING, tag, e.toString());
+    log(Severity.LS_WARNING, tag, getStackTraceString(e));
+  }
+
+  public static void v(String tag, String message) {
+    log(Severity.LS_VERBOSE, tag, message);
+  }
+
+  private static String getStackTraceString(Throwable e) {
+    if (e == null) {
+      return "";
+    }
+
+    StringWriter sw = new StringWriter();
+    PrintWriter pw = new PrintWriter(sw);
+    e.printStackTrace(pw);
+    return sw.toString();
+  }
+
+  private static native void nativeEnableLogToDebugOutput(int nativeSeverity);
+  private static native void nativeEnableLogThreads();
+  private static native void nativeEnableLogTimeStamps();
+  private static native void nativeLog(int severity, String tag, String message);
+}
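
A sketch of the fallback-vs-native switch described in the class comment; enableLogToDebugOutput requires the native library to be loaded first (e.g. via PeerConnectionFactory.initialize).

    // Before this call, Logging.d/w/e go to the java.util.logging fallback.
    Logging.enableLogToDebugOutput(Logging.Severity.LS_INFO);
    // From here on, messages are routed through rtc::LogMessage to Logcat.
    Logging.w("Demo", "native logging active");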

+ 2 - 0
libwebrtc/src/main/java/rtc_base/java/src/org/webrtc/OWNERS

@@ -0,0 +1,2 @@
+magjed@webrtc.org
+sakal@webrtc.org

+ 45 - 0
libwebrtc/src/main/java/rtc_base/java/src/org/webrtc/Size.java

@@ -0,0 +1,45 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Class for representing size of an object. Very similar to android.util.Size but available on all
+ * devices.
+ */
+public class Size {
+  public int width;
+  public int height;
+
+  public Size(int width, int height) {
+    this.width = width;
+    this.height = height;
+  }
+
+  @Override
+  public String toString() {
+    return width + "x" + height;
+  }
+
+  @Override
+  public boolean equals(Object other) {
+    if (!(other instanceof Size)) {
+      return false;
+    }
+    final Size otherSize = (Size) other;
+    return width == otherSize.width && height == otherSize.height;
+  }
+
+  @Override
+  public int hashCode() {
+    // Use prime close to 2^16 to avoid collisions for normal values less than 2^16.
+    return 1 + 65537 * width + height;
+  }
+}
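
Because Size overrides equals() and hashCode(), it can key hash collections; a one-line sanity sketch:

    java.util.Set<Size> seen = new java.util.HashSet<>();
    seen.add(new Size(1280, 720));
    boolean duplicate = seen.contains(new Size(1280, 720)); // true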

+ 214 - 0
libwebrtc/src/main/java/rtc_base/java/src/org/webrtc/ThreadUtils.java

@@ -0,0 +1,214 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.os.Handler;
+import android.os.Looper;
+import android.os.SystemClock;
+
+import java.util.concurrent.Callable;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+public class ThreadUtils {
+    /**
+     * Utility class to be used for checking that a method is called on the correct thread.
+     */
+    public static class ThreadChecker {
+        private Thread thread = Thread.currentThread();
+
+        public void checkIsOnValidThread() {
+            if (thread == null) {
+                thread = Thread.currentThread();
+            }
+            if (Thread.currentThread() != thread) {
+                throw new IllegalStateException("Wrong thread");
+            }
+        }
+
+        public void detachThread() {
+            thread = null;
+        }
+    }
+
+    /**
+     * Throws exception if called from other than main thread.
+     */
+    public static void checkIsOnMainThread() {
+        if (Thread.currentThread() != Looper.getMainLooper().getThread()) {
+            throw new IllegalStateException("Not on main thread!");
+        }
+    }
+
+    /**
+     * Utility interface to be used with executeUninterruptibly() to wait for blocking operations
+     * to complete without getting interrupted.
+     */
+    public interface BlockingOperation {
+        void run() throws InterruptedException;
+    }
+
+    /**
+     * Utility method to make sure a blocking operation is executed to completion without getting
+     * interrupted. This should be used in cases where the operation is waiting for some critical
+     * work, e.g. cleanup, that must complete before returning. If the thread is interrupted during
+     * the blocking operation, this function will re-run the operation until completion, and only then
+     * re-interrupt the thread.
+     */
+    public static void executeUninterruptibly(BlockingOperation operation) {
+        boolean wasInterrupted = false;
+        while (true) {
+            try {
+                operation.run();
+                break;
+            } catch (InterruptedException e) {
+                // Someone is asking us to return early at our convenience. We can't cancel this operation,
+                // but we should preserve the information and pass it along.
+                wasInterrupted = true;
+            }
+        }
+        // Pass interruption information along.
+        if (wasInterrupted) {
+            Thread.currentThread().interrupt();
+        }
+    }
+
+    public static boolean joinUninterruptibly(final Thread thread, long timeoutMs) {
+        final long startTimeMs = SystemClock.elapsedRealtime();
+        long timeRemainingMs = timeoutMs;
+        boolean wasInterrupted = false;
+        while (timeRemainingMs > 0) {
+            try {
+                thread.join(timeRemainingMs);
+                break;
+            } catch (InterruptedException e) {
+                // Someone is asking us to return early at our convenience. We can't cancel this operation,
+                // but we should preserve the information and pass it along.
+                wasInterrupted = true;
+                final long elapsedTimeMs = SystemClock.elapsedRealtime() - startTimeMs;
+                timeRemainingMs = timeoutMs - elapsedTimeMs;
+            }
+        }
+        // Pass interruption information along.
+        if (wasInterrupted) {
+            Thread.currentThread().interrupt();
+        }
+        return !thread.isAlive();
+    }
+
+    public static void joinUninterruptibly(final Thread thread) {
+        executeUninterruptibly(new BlockingOperation() {
+            @Override
+            public void run() throws InterruptedException {
+                thread.join();
+            }
+        });
+    }
+
+    public static void awaitUninterruptibly(final CountDownLatch latch) {
+        executeUninterruptibly(new BlockingOperation() {
+            @Override
+            public void run() throws InterruptedException {
+                latch.await();
+            }
+        });
+    }
+
+    public static boolean awaitUninterruptibly(CountDownLatch barrier, long timeoutMs) {
+        final long startTimeMs = SystemClock.elapsedRealtime();
+        long timeRemainingMs = timeoutMs;
+        boolean wasInterrupted = false;
+        boolean result = false;
+        do {
+            try {
+                result = barrier.await(timeRemainingMs, TimeUnit.MILLISECONDS);
+                break;
+            } catch (InterruptedException e) {
+                // Someone is asking us to return early at our convenience. We can't cancel this operation,
+                // but we should preserve the information and pass it along.
+                wasInterrupted = true;
+                final long elapsedTimeMs = SystemClock.elapsedRealtime() - startTimeMs;
+                timeRemainingMs = timeoutMs - elapsedTimeMs;
+            }
+        } while (timeRemainingMs > 0);
+        // Pass interruption information along.
+        if (wasInterrupted) {
+            Thread.currentThread().interrupt();
+        }
+        return result;
+    }
+
+    /**
+     * Post |callable| to |handler| and wait for the result.
+     */
+    public static <V> V invokeAtFrontUninterruptibly(
+            final Handler handler, final Callable<V> callable) {
+        if (handler.getLooper().getThread() == Thread.currentThread()) {
+            try {
+                return callable.call();
+            } catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+        }
+        // Place-holder classes that are assignable inside nested class.
+        class CaughtException {
+            Exception e;
+        }
+        class Result {
+            public V value;
+        }
+        final Result result = new Result();
+        final CaughtException caughtException = new CaughtException();
+        final CountDownLatch barrier = new CountDownLatch(1);
+        handler.post(new Runnable() {
+            @Override
+            public void run() {
+                try {
+                    result.value = callable.call();
+                } catch (Exception e) {
+                    caughtException.e = e;
+                }
+                barrier.countDown();
+            }
+        });
+        awaitUninterruptibly(barrier);
+        // Re-throw any runtime exception caught inside the other thread. Since this is an invoke, add
+        // stack trace for the waiting thread as well.
+        if (caughtException.e != null) {
+            final RuntimeException runtimeException = new RuntimeException(caughtException.e);
+            runtimeException.setStackTrace(
+                    concatStackTraces(caughtException.e.getStackTrace(), runtimeException.getStackTrace()));
+            throw runtimeException;
+        }
+        return result.value;
+    }
+
+    /**
+     * Post |runner| to |handler|, at the front, and wait for completion.
+     */
+    public static void invokeAtFrontUninterruptibly(final Handler handler, final Runnable runner) {
+        invokeAtFrontUninterruptibly(handler, new Callable<Void>() {
+            @Override
+            public Void call() {
+                runner.run();
+                return null;
+            }
+        });
+    }
+
+    static StackTraceElement[] concatStackTraces(
+            StackTraceElement[] inner, StackTraceElement[] outer) {
+        final StackTraceElement[] combined = new StackTraceElement[inner.length + outer.length];
+        System.arraycopy(inner, 0, combined, 0, inner.length);
+        System.arraycopy(outer, 0, combined, inner.length, outer.length);
+        return combined;
+    }
+}
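
A caller-side sketch of invokeAtFrontUninterruptibly(): the Callable runs on the handler's thread while the caller blocks for the result (the "worker" thread name is illustrative).

    android.os.HandlerThread worker = new android.os.HandlerThread("worker");
    worker.start();
    android.os.Handler handler = new android.os.Handler(worker.getLooper());
    Integer result = ThreadUtils.invokeAtFrontUninterruptibly(
        handler, new java.util.concurrent.Callable<Integer>() {
          @Override
          public Integer call() {
            return 42; // runs on "worker"; exceptions are re-thrown to the caller
          }
        });
    worker.quitSafely();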

+ 21 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/AudioDecoderFactoryFactory.java

@@ -0,0 +1,21 @@
+/*
+ *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Implementations of this interface can create a native {@code webrtc::AudioDecoderFactory}.
+ */
+public interface AudioDecoderFactoryFactory {
+  /**
+   * Returns a pointer to a {@code webrtc::AudioDecoderFactory}. The caller takes ownership.
+   */
+  long createNativeAudioDecoderFactory();
+}

+ 21 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/AudioEncoderFactoryFactory.java

@@ -0,0 +1,21 @@
+/*
+ *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Implementations of this interface can create a native {@code webrtc::AudioEncoderFactory}.
+ */
+public interface AudioEncoderFactoryFactory {
+  /**
+   * Returns a pointer to a {@code webrtc::AudioEncoderFactory}. The caller takes ownership.
+   */
+  long createNativeAudioEncoderFactory();
+}

+ 20 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/AudioProcessingFactory.java

@@ -0,0 +1,20 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Factory for creating webrtc::AudioProcessing instances. */
+public interface AudioProcessingFactory {
+  /**
+   * Dynamically allocates a webrtc::AudioProcessing instance and returns a pointer to it.
+   * The caller takes ownership of the object.
+   */
+  public long createNative();
+}

+ 26 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/AudioSource.java

@@ -0,0 +1,26 @@
+/*
+ *  Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Java wrapper for a C++ AudioSourceInterface.  Used as the source for one or
+ * more {@code AudioTrack} objects.
+ */
+public class AudioSource extends MediaSource {
+  public AudioSource(long nativeSource) {
+    super(nativeSource);
+  }
+
+  /** Returns a pointer to webrtc::AudioSourceInterface. */
+  long getNativeAudioSource() {
+    return getNativeMediaSource();
+  }
+}

+ 32 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/AudioTrack.java

@@ -0,0 +1,32 @@
+/*
+ *  Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ AudioTrackInterface */
+public class AudioTrack extends MediaStreamTrack {
+  public AudioTrack(long nativeTrack) {
+    super(nativeTrack);
+  }
+
+  /** Sets the volume for the underlying MediaSource. Volume is a gain value in the range
+   *  0 to 10.
+   */
+  public void setVolume(double volume) {
+    nativeSetVolume(getNativeAudioTrack(), volume);
+  }
+
+  /** Returns a pointer to webrtc::AudioTrackInterface. */
+  long getNativeAudioTrack() {
+    return getNativeMediaStreamTrack();
+  }
+
+  private static native void nativeSetVolume(long track, double volume);
+}

+ 23 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/BuiltinAudioDecoderFactoryFactory.java

@@ -0,0 +1,23 @@
+/*
+ *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Creates a native {@code webrtc::AudioDecoderFactory} with the builtin audio decoders.
+ */
+public class BuiltinAudioDecoderFactoryFactory implements AudioDecoderFactoryFactory {
+  @Override
+  public long createNativeAudioDecoderFactory() {
+    return nativeCreateBuiltinAudioDecoderFactory();
+  }
+
+  private static native long nativeCreateBuiltinAudioDecoderFactory();
+}

+ 23 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/BuiltinAudioEncoderFactoryFactory.java

@@ -0,0 +1,23 @@
+/*
+ *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * This class creates a native {@code webrtc::AudioEncoderFactory} with the builtin audio encoders.
+ */
+public class BuiltinAudioEncoderFactoryFactory implements AudioEncoderFactoryFactory {
+  @Override
+  public long createNativeAudioEncoderFactory() {
+    return nativeCreateBuiltinAudioEncoderFactory();
+  }
+
+  private static native long nativeCreateBuiltinAudioEncoderFactory();
+}
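
The two factory-factories are consumed when building a PeerConnectionFactory; a hedged wiring sketch, assuming the Builder setters from the same SDK drop:

    PeerConnectionFactory factory = PeerConnectionFactory.builder()
        .setAudioDecoderFactoryFactory(new BuiltinAudioDecoderFactoryFactory())
        .setAudioEncoderFactoryFactory(new BuiltinAudioEncoderFactoryFactory())
        .createPeerConnectionFactory();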

+ 41 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/CallSessionFileRotatingLogSink.java

@@ -0,0 +1,41 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public class CallSessionFileRotatingLogSink {
+  private long nativeSink;
+
+  public static byte[] getLogData(String dirPath) {
+    if (dirPath == null) {
+      throw new IllegalArgumentException("dirPath may not be null.");
+    }
+    return nativeGetLogData(dirPath);
+  }
+
+  public CallSessionFileRotatingLogSink(
+      String dirPath, int maxFileSize, Logging.Severity severity) {
+    if (dirPath == null) {
+      throw new IllegalArgumentException("dirPath may not be null.");
+    }
+    nativeSink = nativeAddSink(dirPath, maxFileSize, severity.ordinal());
+  }
+
+  public void dispose() {
+    if (nativeSink != 0) {
+      nativeDeleteSink(nativeSink);
+      nativeSink = 0;
+    }
+  }
+
+  private static native long nativeAddSink(String dirPath, int maxFileSize, int severity);
+  private static native void nativeDeleteSink(long sink);
+  private static native byte[] nativeGetLogData(String dirPath);
+}
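
Lifecycle sketch for the sink (the logDir value is a placeholder; any writable directory works):

    String logDir = context.getFilesDir().getAbsolutePath();
    CallSessionFileRotatingLogSink sink = new CallSessionFileRotatingLogSink(
        logDir, /* maxFileSize= */ 10 * 1024 * 1024, Logging.Severity.LS_INFO);
    // ... run the call session ...
    sink.dispose();
    byte[] logs = CallSessionFileRotatingLogSink.getLogData(logDir);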

+ 35 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/Camera1Capturer.java

@@ -0,0 +1,35 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+
+
+public class Camera1Capturer extends CameraCapturer {
+  private final boolean captureToTexture;
+
+  public Camera1Capturer(
+      String cameraName, CameraEventsHandler eventsHandler, boolean captureToTexture) {
+    super(cameraName, eventsHandler, new Camera1Enumerator(captureToTexture));
+
+    this.captureToTexture = captureToTexture;
+  }
+
+  @Override
+  protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
+      CameraSession.Events events, Context applicationContext,
+      SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
+      int framerate) {
+    Camera1Session.create(createSessionCallback, events, captureToTexture, applicationContext,
+        surfaceTextureHelper, Camera1Enumerator.getCameraIndex(cameraName), width, height,
+        framerate);
+  }
+}

+ 186 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/Camera1Enumerator.java

@@ -0,0 +1,186 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.os.SystemClock;
+
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+import java.util.ArrayList;
+import java.util.List;
+
+@SuppressWarnings("deprecation")
+public class Camera1Enumerator implements CameraEnumerator {
+    private final static String TAG = "Camera1Enumerator";
+    // Each entry contains the supported formats for corresponding camera index. The formats for all
+    // cameras are enumerated on the first call to getSupportedFormats(), and cached for future
+    // reference.
+    private static List<List<CaptureFormat>> cachedSupportedFormats;
+
+    private final boolean captureToTexture;
+
+    public Camera1Enumerator() {
+        this(true /* captureToTexture */);
+    }
+
+    public Camera1Enumerator(boolean captureToTexture) {
+        this.captureToTexture = captureToTexture;
+    }
+
+    // Returns device names that can be used to create a new VideoCapturerAndroid.
+    @Override
+    public String[] getDeviceNames() {
+        ArrayList<String> namesList = new ArrayList<>();
+        for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
+            String name = getDeviceName(i);
+            if (name != null) {
+                namesList.add(name);
+                Logging.d(TAG, "Index: " + i + ". " + name);
+            } else {
+                Logging.e(TAG, "Index: " + i + ". Failed to query camera name.");
+            }
+        }
+        String[] namesArray = new String[namesList.size()];
+        return namesList.toArray(namesArray);
+    }
+
+    @Override
+    public boolean isFrontFacing(String deviceName) {
+        android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
+        return info != null && info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT;
+    }
+
+    @Override
+    public boolean isBackFacing(String deviceName) {
+        android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
+        return info != null && info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK;
+    }
+
+    @Override
+    public List<CaptureFormat> getSupportedFormats(String deviceName) {
+        return getSupportedFormats(getCameraIndex(deviceName));
+    }
+
+    @Override
+    public CameraVideoCapturer createCapturer(
+            String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
+        return new Camera1Capturer(deviceName, eventsHandler, captureToTexture);
+    }
+
+    private static android.hardware.Camera.CameraInfo getCameraInfo(int index) {
+        android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
+        try {
+            android.hardware.Camera.getCameraInfo(index, info);
+        } catch (Exception e) {
+            Logging.e(TAG, "getCameraInfo failed on index " + index, e);
+            return null;
+        }
+        return info;
+    }
+
+    static synchronized List<CaptureFormat> getSupportedFormats(int cameraId) {
+        if (cachedSupportedFormats == null) {
+            cachedSupportedFormats = new ArrayList<List<CaptureFormat>>();
+            for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
+                cachedSupportedFormats.add(enumerateFormats(i));
+            }
+        }
+        return cachedSupportedFormats.get(cameraId);
+    }
+
+    private static List<CaptureFormat> enumerateFormats(int cameraId) {
+        Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
+        final long startTimeMs = SystemClock.elapsedRealtime();
+        final android.hardware.Camera.Parameters parameters;
+        android.hardware.Camera camera = null;
+        try {
+            Logging.d(TAG, "Opening camera with index " + cameraId);
+            camera = android.hardware.Camera.open(cameraId);
+            parameters = camera.getParameters();
+        } catch (RuntimeException e) {
+            Logging.e(TAG, "Open camera failed on camera index " + cameraId, e);
+            return new ArrayList<CaptureFormat>();
+        } finally {
+            if (camera != null) {
+                camera.release();
+            }
+        }
+
+        final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
+        try {
+            int minFps = 0;
+            int maxFps = 0;
+            final List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
+            if (listFpsRange != null) {
+                // getSupportedPreviewFpsRange() returns a sorted list. Take the fps range
+                // corresponding to the highest fps.
+                final int[] range = listFpsRange.get(listFpsRange.size() - 1);
+                minFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
+                maxFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
+            }
+            for (android.hardware.Camera.Size size : parameters.getSupportedPreviewSizes()) {
+                formatList.add(new CaptureFormat(size.width, size.height, minFps, maxFps));
+            }
+        } catch (Exception e) {
+            Logging.e(TAG, "getSupportedFormats() failed on camera index " + cameraId, e);
+        }
+
+        final long endTimeMs = SystemClock.elapsedRealtime();
+        Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+                + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
+        return formatList;
+    }
+
+    // Convert from android.hardware.Camera.Size to Size.
+    static List<Size> convertSizes(List<android.hardware.Camera.Size> cameraSizes) {
+        final List<Size> sizes = new ArrayList<Size>();
+        for (android.hardware.Camera.Size size : cameraSizes) {
+            sizes.add(new Size(size.width, size.height));
+        }
+        return sizes;
+    }
+
+    // Convert from int[2] to CaptureFormat.FramerateRange.
+    static List<CaptureFormat.FramerateRange> convertFramerates(List<int[]> arrayRanges) {
+        final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
+        for (int[] range : arrayRanges) {
+            ranges.add(new CaptureFormat.FramerateRange(
+                    range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
+                    range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]));
+        }
+        return ranges;
+    }
+
+    // Returns the camera index for camera with name |deviceName|, or throws IllegalArgumentException
+    // if no such camera can be found.
+    static int getCameraIndex(String deviceName) {
+        Logging.d(TAG, "getCameraIndex: " + deviceName);
+        for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
+            if (deviceName.equals(getDeviceName(i))) {
+                return i;
+            }
+        }
+        throw new IllegalArgumentException("No such camera: " + deviceName);
+    }
+
+    // Returns the name of the camera with camera index. Returns null if the
+    // camera can not be used.
+    static String getDeviceName(int index) {
+        android.hardware.Camera.CameraInfo info = getCameraInfo(index);
+        if (info == null) {
+            return null;
+        }
+
+        String facing =
+                (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
+        return "Camera " + index + ", Facing " + facing + ", Orientation " + info.orientation;
+    }
+}
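
Typical enumeration flow against the interface above (eventsHandler left null for brevity):

    CameraEnumerator enumerator = new Camera1Enumerator(/* captureToTexture= */ true);
    CameraVideoCapturer capturer = null;
    for (String name : enumerator.getDeviceNames()) {
      if (enumerator.isFrontFacing(name)) {
        capturer = enumerator.createCapturer(name, /* eventsHandler= */ null);
        break;
      }
    }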

+ 37 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/Camera2Capturer.java

@@ -0,0 +1,37 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.hardware.camera2.CameraManager;
+
+@TargetApi(21)
+public class Camera2Capturer extends CameraCapturer {
+  private final Context context;
+  private final CameraManager cameraManager;
+
+  public Camera2Capturer(Context context, String cameraName, CameraEventsHandler eventsHandler) {
+    super(cameraName, eventsHandler, new Camera2Enumerator(context));
+
+    this.context = context;
+    cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+  }
+
+  @Override
+  protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
+      CameraSession.Events events, Context applicationContext,
+      SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
+      int framerate) {
+    Camera2Session.create(createSessionCallback, events, applicationContext, cameraManager,
+        surfaceTextureHelper, cameraName, width, height, framerate);
+  }
+}

+ 245 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/Camera2Enumerator.java

@@ -0,0 +1,245 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.graphics.Rect;
+import android.graphics.SurfaceTexture;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.os.Build;
+import android.os.SystemClock;
+import android.util.AndroidException;
+import android.util.Range;
+
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+@TargetApi(21)
+public class Camera2Enumerator implements CameraEnumerator {
+  private final static String TAG = "Camera2Enumerator";
+  private final static double NANO_SECONDS_PER_SECOND = 1.0e9;
+
+  // Each entry contains the supported formats for a given camera index. The formats are enumerated
+  // lazily in getSupportedFormats(), and cached for future reference.
+  private static final Map<String, List<CaptureFormat>> cachedSupportedFormats =
+      new HashMap<String, List<CaptureFormat>>();
+
+  final Context context;
+  final CameraManager cameraManager;
+
+  public Camera2Enumerator(Context context) {
+    this.context = context;
+    this.cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+  }
+
+  @Override
+  public String[] getDeviceNames() {
+    try {
+      return cameraManager.getCameraIdList();
+      // On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
+      // catch statement with an Exception from a newer API, even if the code is never executed.
+      // https://code.google.com/p/android/issues/detail?id=209129
+    } catch (/* CameraAccessException */ AndroidException e) {
+      Logging.e(TAG, "Camera access exception: " + e);
+      return new String[] {};
+    }
+  }
+
+  @Override
+  public boolean isFrontFacing(String deviceName) {
+    CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
+
+    return characteristics != null
+        && characteristics.get(CameraCharacteristics.LENS_FACING)
+        == CameraMetadata.LENS_FACING_FRONT;
+  }
+
+  @Override
+  public boolean isBackFacing(String deviceName) {
+    CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);
+
+    return characteristics != null
+        && characteristics.get(CameraCharacteristics.LENS_FACING)
+        == CameraMetadata.LENS_FACING_BACK;
+  }
+
+  @Override
+  public List<CaptureFormat> getSupportedFormats(String deviceName) {
+    return getSupportedFormats(context, deviceName);
+  }
+
+  @Override
+  public CameraVideoCapturer createCapturer(
+      String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
+    return new Camera2Capturer(context, deviceName, eventsHandler);
+  }
+
+  private CameraCharacteristics getCameraCharacteristics(String deviceName) {
+    try {
+      return cameraManager.getCameraCharacteristics(deviceName);
+      // On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
+      // catch statement with an Exception from a newer API, even if the code is never executed.
+      // https://code.google.com/p/android/issues/detail?id=209129
+    } catch (/* CameraAccessException */ AndroidException e) {
+      Logging.e(TAG, "Camera access exception: " + e);
+      return null;
+    }
+  }
+
+  /**
+   * Checks if API is supported and all cameras have better than legacy support.
+   */
+  public static boolean isSupported(Context context) {
+    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
+      return false;
+    }
+
+    CameraManager cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+    try {
+      String[] cameraIds = cameraManager.getCameraIdList();
+      for (String id : cameraIds) {
+        CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(id);
+        if (characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)
+            == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
+          return false;
+        }
+      }
+    } catch (AndroidException e) {
+      Logging.e(TAG, "Camera access exception: " + e);
+      return false;
+    }
+    return true;
+  }
+
+  static int getFpsUnitFactor(Range<Integer>[] fpsRanges) {
+    if (fpsRanges.length == 0) {
+      return 1000;
+    }
+    return fpsRanges[0].getUpper() < 1000 ? 1000 : 1;
+  }
+
+  static List<Size> getSupportedSizes(CameraCharacteristics cameraCharacteristics) {
+    final StreamConfigurationMap streamMap =
+        cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+    final int supportLevel =
+        cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
+
+    final android.util.Size[] nativeSizes = streamMap.getOutputSizes(SurfaceTexture.class);
+    final List<Size> sizes = convertSizes(nativeSizes);
+
+    // Video may be stretched pre LMR1 on legacy implementations.
+    // Filter out formats that have different aspect ratio than the sensor array.
+    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1
+        && supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
+      final Rect activeArraySize =
+          cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+      final ArrayList<Size> filteredSizes = new ArrayList<Size>();
+
+      for (Size size : sizes) {
+        if (activeArraySize.width() * size.height == activeArraySize.height() * size.width) {
+          filteredSizes.add(size);
+        }
+      }
+
+      return filteredSizes;
+    } else {
+      return sizes;
+    }
+  }
+
+  static List<CaptureFormat> getSupportedFormats(Context context, String cameraId) {
+    return getSupportedFormats(
+        (CameraManager) context.getSystemService(Context.CAMERA_SERVICE), cameraId);
+  }
+
+  static List<CaptureFormat> getSupportedFormats(CameraManager cameraManager, String cameraId) {
+    synchronized (cachedSupportedFormats) {
+      if (cachedSupportedFormats.containsKey(cameraId)) {
+        return cachedSupportedFormats.get(cameraId);
+      }
+
+      Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
+      final long startTimeMs = SystemClock.elapsedRealtime();
+
+      final CameraCharacteristics cameraCharacteristics;
+      try {
+        cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
+      } catch (Exception ex) {
+        Logging.e(TAG, "getCameraCharacteristics(): " + ex);
+        return new ArrayList<CaptureFormat>();
+      }
+
+      final StreamConfigurationMap streamMap =
+          cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+
+      Range<Integer>[] fpsRanges =
+          cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
+      List<CaptureFormat.FramerateRange> framerateRanges =
+          convertFramerates(fpsRanges, getFpsUnitFactor(fpsRanges));
+      List<Size> sizes = getSupportedSizes(cameraCharacteristics);
+
+      int defaultMaxFps = 0;
+      for (CaptureFormat.FramerateRange framerateRange : framerateRanges) {
+        defaultMaxFps = Math.max(defaultMaxFps, framerateRange.max);
+      }
+
+      final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
+      for (Size size : sizes) {
+        long minFrameDurationNs = 0;
+        try {
+          minFrameDurationNs = streamMap.getOutputMinFrameDuration(
+              SurfaceTexture.class, new android.util.Size(size.width, size.height));
+        } catch (Exception e) {
+          // getOutputMinFrameDuration() is not supported on all devices. Ignore silently.
+        }
+        final int maxFps = (minFrameDurationNs == 0)
+            ? defaultMaxFps
+            : (int) Math.round(NANO_SECONDS_PER_SECOND / minFrameDurationNs) * 1000;
+        formatList.add(new CaptureFormat(size.width, size.height, 0, maxFps));
+        Logging.d(TAG, "Format: " + size.width + "x" + size.height + "@" + maxFps);
+      }
+
+      cachedSupportedFormats.put(cameraId, formatList);
+      final long endTimeMs = SystemClock.elapsedRealtime();
+      Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+              + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
+      return formatList;
+    }
+  }
+
+  // Convert from android.util.Size to Size.
+  private static List<Size> convertSizes(android.util.Size[] cameraSizes) {
+    final List<Size> sizes = new ArrayList<Size>();
+    for (android.util.Size size : cameraSizes) {
+      sizes.add(new Size(size.getWidth(), size.getHeight()));
+    }
+    return sizes;
+  }
+
+  // Convert from android.util.Range<Integer> to CaptureFormat.FramerateRange.
+  static List<CaptureFormat.FramerateRange> convertFramerates(
+      Range<Integer>[] arrayRanges, int unitFactor) {
+    final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
+    for (Range<Integer> range : arrayRanges) {
+      ranges.add(new CaptureFormat.FramerateRange(
+          range.getLower() * unitFactor, range.getUpper() * unitFactor));
+    }
+    return ranges;
+  }
+}
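
isSupported() gates Camera2 on better-than-legacy hardware support; a selection sketch that falls back to the Camera1 path:

    CameraEnumerator enumerator = Camera2Enumerator.isSupported(context)
        ? new Camera2Enumerator(context)
        : new Camera1Enumerator(/* captureToTexture= */ true);
    for (String name : enumerator.getDeviceNames()) {
      Logging.d("CameraSetup", name + ": " + enumerator.getSupportedFormats(name));
    }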

+ 206 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/CameraEnumerationAndroid.java

@@ -0,0 +1,206 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static java.lang.Math.abs;
+
+import android.graphics.ImageFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+
+@SuppressWarnings("deprecation")
+public class CameraEnumerationAndroid {
+  private final static String TAG = "CameraEnumerationAndroid";
+
+  static final ArrayList<Size> COMMON_RESOLUTIONS = new ArrayList<Size>(Arrays.asList(
+      // 0, Unknown resolution
+      new Size(160, 120), // 1, QQVGA
+      new Size(240, 160), // 2, HQVGA
+      new Size(320, 240), // 3, QVGA
+      new Size(400, 240), // 4, WQVGA
+      new Size(480, 320), // 5, HVGA
+      new Size(640, 360), // 6, nHD
+      new Size(640, 480), // 7, VGA
+      new Size(768, 480), // 8, WVGA
+      new Size(854, 480), // 9, FWVGA
+      new Size(800, 600), // 10, SVGA
+      new Size(960, 540), // 11, qHD
+      new Size(960, 640), // 12, DVGA
+      new Size(1024, 576), // 13, WSVGA
+      new Size(1024, 600), // 14, WVSGA
+      new Size(1280, 720), // 15, HD
+      new Size(1280, 1024), // 16, SXGA
+      new Size(1920, 1080), // 17, Full HD
+      new Size(1920, 1440), // 18, Full HD 4:3
+      new Size(2560, 1440), // 19, QHD
+      new Size(3840, 2160) // 20, UHD
+      ));
+
+  public static class CaptureFormat {
+    // Class to represent a framerate range. The framerate varies because of lighting conditions.
+    // The values are multiplied by 1000, so 1000 represents one frame per second.
+    public static class FramerateRange {
+      public int min;
+      public int max;
+
+      public FramerateRange(int min, int max) {
+        this.min = min;
+        this.max = max;
+      }
+
+      @Override
+      public String toString() {
+        return "[" + (min / 1000.0f) + ":" + (max / 1000.0f) + "]";
+      }
+
+      @Override
+      public boolean equals(Object other) {
+        if (!(other instanceof FramerateRange)) {
+          return false;
+        }
+        final FramerateRange otherFramerate = (FramerateRange) other;
+        return min == otherFramerate.min && max == otherFramerate.max;
+      }
+
+      @Override
+      public int hashCode() {
+        // Use prime close to 2^16 to avoid collisions for normal values less than 2^16.
+        return 1 + 65537 * min + max;
+      }
+    }
+
+    public final int width;
+    public final int height;
+    public final FramerateRange framerate;
+
+    // TODO(hbos): If VideoCapturer.startCapture is updated to support other image formats then this
+    // needs to be updated and VideoCapturer.getSupportedFormats needs to return CaptureFormats of
+    // all imageFormats.
+    public final int imageFormat = ImageFormat.NV21;
+
+    public CaptureFormat(int width, int height, int minFramerate, int maxFramerate) {
+      this.width = width;
+      this.height = height;
+      this.framerate = new FramerateRange(minFramerate, maxFramerate);
+    }
+
+    public CaptureFormat(int width, int height, FramerateRange framerate) {
+      this.width = width;
+      this.height = height;
+      this.framerate = framerate;
+    }
+
+    // Calculates the frame size of this capture format.
+    public int frameSize() {
+      return frameSize(width, height, imageFormat);
+    }
+
+    // Calculates the frame size of the specified image format. Currently only
+    // supporting ImageFormat.NV21.
+    // The size is width * height * number of bytes per pixel.
+    // http://developer.android.com/reference/android/hardware/Camera.html#addCallbackBuffer(byte[])
+    public static int frameSize(int width, int height, int imageFormat) {
+      if (imageFormat != ImageFormat.NV21) {
+        throw new UnsupportedOperationException("Don't know how to calculate "
+            + "the frame size of non-NV21 image formats.");
+      }
+      return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
+    }
+
+    @Override
+    public String toString() {
+      return width + "x" + height + "@" + framerate;
+    }
+
+    @Override
+    public boolean equals(Object other) {
+      if (!(other instanceof CaptureFormat)) {
+        return false;
+      }
+      final CaptureFormat otherFormat = (CaptureFormat) other;
+      return width == otherFormat.width && height == otherFormat.height
+          && framerate.equals(otherFormat.framerate);
+    }
+
+    @Override
+    public int hashCode() {
+      return 1 + (width * 65497 + height) * 251 + framerate.hashCode();
+    }
+  }
+
+  // Helper class for finding the closest supported format for the two functions below. It creates a
+  // comparator based on the difference to some requested parameters, where the element with the
+  // minimum difference is the element that is closest to the requested parameters.
+  private static abstract class ClosestComparator<T> implements Comparator<T> {
+    // Difference between supported and requested parameter.
+    abstract int diff(T supportedParameter);
+
+    @Override
+    public int compare(T t1, T t2) {
+      return diff(t1) - diff(t2);
+    }
+  }
+
+  // Prefer an fps range with an upper bound close to |framerate|. Also prefer an fps range with a
+  // low lower bound, to allow the framerate to fluctuate based on lighting conditions.
+  public static CaptureFormat.FramerateRange getClosestSupportedFramerateRange(
+      List<CaptureFormat.FramerateRange> supportedFramerates, final int requestedFps) {
+    return Collections.min(
+        supportedFramerates, new ClosestComparator<CaptureFormat.FramerateRange>() {
+          // Progressive penalty if the upper bound is further away than |MAX_FPS_DIFF_THRESHOLD|
+          // from requested.
+          private static final int MAX_FPS_DIFF_THRESHOLD = 5000;
+          private static final int MAX_FPS_LOW_DIFF_WEIGHT = 1;
+          private static final int MAX_FPS_HIGH_DIFF_WEIGHT = 3;
+
+          // Progressive penalty if the lower bound is bigger than |MIN_FPS_THRESHOLD|.
+          private static final int MIN_FPS_THRESHOLD = 8000;
+          private static final int MIN_FPS_LOW_VALUE_WEIGHT = 1;
+          private static final int MIN_FPS_HIGH_VALUE_WEIGHT = 4;
+
+          // Use one weight for small |value| less than |threshold|, and another weight above.
+          private int progressivePenalty(int value, int threshold, int lowWeight, int highWeight) {
+            return (value < threshold) ? value * lowWeight
+                                       : threshold * lowWeight + (value - threshold) * highWeight;
+          }
+
+          @Override
+          int diff(CaptureFormat.FramerateRange range) {
+            final int minFpsError = progressivePenalty(
+                range.min, MIN_FPS_THRESHOLD, MIN_FPS_LOW_VALUE_WEIGHT, MIN_FPS_HIGH_VALUE_WEIGHT);
+            final int maxFpsError = progressivePenalty(Math.abs(requestedFps * 1000 - range.max),
+                MAX_FPS_DIFF_THRESHOLD, MAX_FPS_LOW_DIFF_WEIGHT, MAX_FPS_HIGH_DIFF_WEIGHT);
+            return minFpsError + maxFpsError;
+          }
+        });
+  }
+
+  public static Size getClosestSupportedSize(
+      List<Size> supportedSizes, final int requestedWidth, final int requestedHeight) {
+    return Collections.min(supportedSizes, new ClosestComparator<Size>() {
+      @Override
+      int diff(Size size) {
+        return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
+      }
+    });
+  }
+
+  // Helper method for camera classes.
+  static void reportCameraResolution(Histogram histogram, Size resolution) {
+    int index = COMMON_RESOLUTIONS.indexOf(resolution);
+    // 0 is reserved for unknown resolution, so add 1.
+    // indexOf returns -1 for unknown resolutions so it becomes 0 automatically.
+    histogram.addSample(index + 1);
+  }
+}
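
A sketch of how the two matching helpers above are typically combined (org.webrtc and java.util imports assumed; 'formats' is assumed to come from CameraEnumerator.getSupportedFormats(), defined in the next file):

    // Pick the advertised size and fps range closest to a 1280x720@30 request.
    List<Size> sizes = new ArrayList<>();
    List<CaptureFormat.FramerateRange> fpsRanges = new ArrayList<>();
    for (CaptureFormat format : formats) {
      sizes.add(new Size(format.width, format.height));
      fpsRanges.add(format.framerate);
    }
    Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, 1280, 720);
    CaptureFormat.FramerateRange bestFps =
        CameraEnumerationAndroid.getClosestSupportedFramerateRange(
            fpsRanges, /* requestedFps= */ 30);  // plain fps; scaled by 1000 internally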

+ 25 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/CameraEnumerator.java

@@ -0,0 +1,25 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+import java.util.List;
+
+public interface CameraEnumerator {
+  public String[] getDeviceNames();
+  public boolean isFrontFacing(String deviceName);
+  public boolean isBackFacing(String deviceName);
+  public List<CaptureFormat> getSupportedFormats(String deviceName);
+
+  public CameraVideoCapturer createCapturer(
+      String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler);
+}
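
A minimal usage sketch for this interface, assuming the Camera2Enumerator implementation added earlier in this commit and an available Android Context named appContext:

    CameraEnumerator enumerator = new Camera2Enumerator(appContext);
    CameraVideoCapturer capturer = null;
    for (String deviceName : enumerator.getDeviceNames()) {
      if (enumerator.isFrontFacing(deviceName)) {
        capturer = enumerator.createCapturer(deviceName, /* eventsHandler= */ null);
        break;
      }
    }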

+ 167 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/CameraVideoCapturer.java

@@ -0,0 +1,167 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaRecorder;
+
+
+/**
+ * Base interface for camera1 and camera2 implementations. Extends VideoCapturer with a
+ * switchCamera() function. Also provides subinterfaces for handling camera events, and a helper
+ * class for detecting camera freezes.
+ */
+public interface CameraVideoCapturer extends VideoCapturer {
+  /**
+   * Camera events handler - can be used to get notified about camera events. The callbacks are
+   * executed from an arbitrary thread.
+   */
+  public interface CameraEventsHandler {
+    // Camera error handler - invoked when camera can not be opened
+    // or any camera exception happens on camera thread.
+    void onCameraError(String errorDescription);
+
+    // Called when camera is disconnected.
+    void onCameraDisconnected();
+
+    // Invoked when camera stops receiving frames.
+    void onCameraFreezed(String errorDescription);
+
+    // Callback invoked when camera is opening.
+    void onCameraOpening(String cameraName);
+
+    // Callback invoked when first camera frame is available after camera is started.
+    void onFirstFrameAvailable();
+
+    // Callback invoked when camera is closed.
+    void onCameraClosed();
+  }
+
+  /**
+   * Camera switch handler - one of these functions is invoked with the result of switchCamera().
+   * The callback may be called on an arbitrary thread.
+   */
+  public interface CameraSwitchHandler {
+    // Invoked on success. |isFrontCamera| is true if the new camera is front facing.
+    void onCameraSwitchDone(boolean isFrontCamera);
+
+    // Invoked on failure, e.g. camera is stopped or only one camera available.
+    void onCameraSwitchError(String errorDescription);
+  }
+
+  /**
+   * Switch camera to the next valid camera id. This can only be called while the camera is running.
+   * This function can be called from any thread.
+   */
+  void switchCamera(CameraSwitchHandler switchEventsHandler);
+
+  /**
+   * MediaRecorder add/remove handler - one of these functions is invoked with the result of
+   * addMediaRecorderToCamera() or removeMediaRecorderFromCamera() calls.
+   * The callback may be called on an arbitrary thread.
+   */
+  @Deprecated
+  public interface MediaRecorderHandler {
+    // Invoked on success.
+    void onMediaRecorderSuccess();
+
+    // Invoked on failure, e.g. camera is stopped or any exception happens.
+    void onMediaRecorderError(String errorDescription);
+  }
+
+  /**
+   * Add MediaRecorder to camera pipeline. This can only be called while the camera is running.
+   * Once MediaRecorder is added to the camera pipeline, camera switch is not allowed.
+   * This function can be called from any thread.
+   */
+  @Deprecated
+  default void addMediaRecorderToCamera(
+      MediaRecorder mediaRecorder, MediaRecorderHandler resultHandler) {
+    throw new UnsupportedOperationException("Deprecated and not implemented.");
+  }
+
+  /**
+   * Remove MediaRecorder from camera pipeline. This can only be called while the camera is running.
+   * This function can be called from any thread.
+   */
+  @Deprecated
+  default void removeMediaRecorderFromCamera(MediaRecorderHandler resultHandler) {
+    throw new UnsupportedOperationException("Deprecated and not implemented.");
+  }
+
+  /**
+   * Helper class to log framerate and detect if the camera freezes. It will run periodic callbacks
+   * on the SurfaceTextureHelper thread passed in the ctor, and should only be operated from that
+   * thread.
+   */
+  public static class CameraStatistics {
+    private final static String TAG = "CameraStatistics";
+    private final static int CAMERA_OBSERVER_PERIOD_MS = 2000;
+    private final static int CAMERA_FREEZE_REPORT_TIMEOUT_MS = 4000;
+
+    private final SurfaceTextureHelper surfaceTextureHelper;
+    private final CameraEventsHandler eventsHandler;
+    private int frameCount;
+    private int freezePeriodCount;
+    // Camera observer - monitors camera framerate. Observer is executed on camera thread.
+    private final Runnable cameraObserver = new Runnable() {
+      @Override
+      public void run() {
+        final int cameraFps = Math.round(frameCount * 1000.0f / CAMERA_OBSERVER_PERIOD_MS);
+        Logging.d(TAG, "Camera fps: " + cameraFps + ".");
+        if (frameCount == 0) {
+          ++freezePeriodCount;
+          if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount >= CAMERA_FREEZE_REPORT_TIMEOUT_MS
+              && eventsHandler != null) {
+            Logging.e(TAG, "Camera freezed.");
+            if (surfaceTextureHelper.isTextureInUse()) {
+              // This can only happen if we are capturing to textures.
+              eventsHandler.onCameraFreezed("Camera failure. Client must return video buffers.");
+            } else {
+              eventsHandler.onCameraFreezed("Camera failure.");
+            }
+            return;
+          }
+        } else {
+          freezePeriodCount = 0;
+        }
+        frameCount = 0;
+        surfaceTextureHelper.getHandler().postDelayed(this, CAMERA_OBSERVER_PERIOD_MS);
+      }
+    };
+
+    public CameraStatistics(
+        SurfaceTextureHelper surfaceTextureHelper, CameraEventsHandler eventsHandler) {
+      if (surfaceTextureHelper == null) {
+        throw new IllegalArgumentException("SurfaceTextureHelper is null");
+      }
+      this.surfaceTextureHelper = surfaceTextureHelper;
+      this.eventsHandler = eventsHandler;
+      this.frameCount = 0;
+      this.freezePeriodCount = 0;
+      surfaceTextureHelper.getHandler().postDelayed(cameraObserver, CAMERA_OBSERVER_PERIOD_MS);
+    }
+
+    private void checkThread() {
+      if (Thread.currentThread() != surfaceTextureHelper.getHandler().getLooper().getThread()) {
+        throw new IllegalStateException("Wrong thread");
+      }
+    }
+
+    public void addFrame() {
+      checkThread();
+      ++frameCount;
+    }
+
+    public void release() {
+      surfaceTextureHelper.getHandler().removeCallbacks(cameraObserver);
+    }
+  }
+}
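
A sketch of switchCamera() with its asynchronous handler; remember the callback may run on an arbitrary thread ('capturer' is assumed from the enumerator sketch above, and the "CallDemo" tag is illustrative):

    capturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() {
      @Override
      public void onCameraSwitchDone(boolean isFrontCamera) {
        Logging.d("CallDemo", "Switched to " + (isFrontCamera ? "front" : "back") + " camera");
      }
      @Override
      public void onCameraSwitchError(String errorDescription) {
        Logging.e("CallDemo", "Camera switch failed: " + errorDescription);
      }
    });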

+ 27 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/CapturerObserver.java

@@ -0,0 +1,27 @@
+/*
+ *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Interface for observing a capturer. Passed to {@link VideoCapturer#initialize}. Provided by
+ * {@link VideoSource#getCapturerObserver}.
+ *
+ * All callbacks must be executed on a single thread.
+ */
+public interface CapturerObserver {
+  /** Notifies whether the capturer has been started successfully or not. */
+  void onCapturerStarted(boolean success);
+  /** Notify that the capturer has been stopped. */
+  void onCapturerStopped();
+
+  /** Delivers a captured frame. */
+  void onFrameCaptured(VideoFrame frame);
+}
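
Because all three callbacks arrive on a single thread, a pass-through wrapper needs no locking. A sketch, where the delegate is assumed to be the observer obtained from VideoSource.getCapturerObserver():

    class FrameCountingObserver implements CapturerObserver {
      private final CapturerObserver delegate;
      private int frameCount;  // safe without synchronization: callbacks share one thread

      FrameCountingObserver(CapturerObserver delegate) { this.delegate = delegate; }

      @Override public void onCapturerStarted(boolean success) { delegate.onCapturerStarted(success); }
      @Override public void onCapturerStopped() { delegate.onCapturerStopped(); }
      @Override public void onFrameCaptured(VideoFrame frame) {
        frameCount++;
        delegate.onFrameCaptured(frame);
      }
    }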

+ 145 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/CryptoOptions.java

@@ -0,0 +1,145 @@
+/*
+ *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * CryptoOptions defines advanced cryptographic settings for native WebRTC.
+ * These settings must be passed into RTCConfiguration. WebRTC is secure by
+ * default and you should not need to set any of these options unless you are
+ * specifically looking for an additional crypto feature such as AES_GCM
+ * support. This class is the Java binding of native api/crypto/cryptooptions.h
+ */
+public final class CryptoOptions {
+  /**
+   * SRTP Related Peer Connection Options.
+   */
+  public final class Srtp {
+    /**
+     * Enable GCM crypto suites from RFC 7714 for SRTP. GCM will only be used
+     * if both sides enable it.
+     */
+    private final boolean enableGcmCryptoSuites;
+    /**
+     * If set to true, the (potentially insecure) crypto cipher
+     * SRTP_AES128_CM_SHA1_32 will be included in the list of supported ciphers
+     * during negotiation. It will only be used if both peers support it and no
+     * other ciphers get preferred.
+     */
+    private final boolean enableAes128Sha1_32CryptoCipher;
+    /**
+     * If set to true, encrypted RTP header extensions as defined in RFC 6904
+     * will be negotiated. They will only be used if both peers support them.
+     */
+    private final boolean enableEncryptedRtpHeaderExtensions;
+
+    private Srtp(boolean enableGcmCryptoSuites, boolean enableAes128Sha1_32CryptoCipher,
+        boolean enableEncryptedRtpHeaderExtensions) {
+      this.enableGcmCryptoSuites = enableGcmCryptoSuites;
+      this.enableAes128Sha1_32CryptoCipher = enableAes128Sha1_32CryptoCipher;
+      this.enableEncryptedRtpHeaderExtensions = enableEncryptedRtpHeaderExtensions;
+    }
+
+    @CalledByNative("Srtp")
+    public boolean getEnableGcmCryptoSuites() {
+      return enableGcmCryptoSuites;
+    }
+
+    @CalledByNative("Srtp")
+    public boolean getEnableAes128Sha1_32CryptoCipher() {
+      return enableAes128Sha1_32CryptoCipher;
+    }
+
+    @CalledByNative("Srtp")
+    public boolean getEnableEncryptedRtpHeaderExtensions() {
+      return enableEncryptedRtpHeaderExtensions;
+    }
+  }
+
+  /**
+   * Options to be used when the FrameEncryptor / FrameDecryptor APIs are used.
+   */
+  public final class SFrame {
+    /**
+     * If set, all RtpSenders must have a FrameEncryptor attached to them before
+     * they are allowed to send packets. All RtpReceivers must have a
+     * FrameDecryptor attached to them before they are able to receive packets.
+     */
+    private final boolean requireFrameEncryption;
+
+    private SFrame(boolean requireFrameEncryption) {
+      this.requireFrameEncryption = requireFrameEncryption;
+    }
+
+    @CalledByNative("SFrame")
+    public boolean getRequireFrameEncryption() {
+      return requireFrameEncryption;
+    }
+  }
+
+  private final Srtp srtp;
+  private final SFrame sframe;
+
+  private CryptoOptions(boolean enableGcmCryptoSuites, boolean enableAes128Sha1_32CryptoCipher,
+      boolean enableEncryptedRtpHeaderExtensions, boolean requireFrameEncryption) {
+    this.srtp = new Srtp(
+        enableGcmCryptoSuites, enableAes128Sha1_32CryptoCipher, enableEncryptedRtpHeaderExtensions);
+    this.sframe = new SFrame(requireFrameEncryption);
+  }
+
+  public static Builder builder() {
+    return new Builder();
+  }
+
+  @CalledByNative
+  public Srtp getSrtp() {
+    return srtp;
+  }
+
+  @CalledByNative
+  public SFrame getSFrame() {
+    return sframe;
+  }
+
+  public static class Builder {
+    private boolean enableGcmCryptoSuites;
+    private boolean enableAes128Sha1_32CryptoCipher;
+    private boolean enableEncryptedRtpHeaderExtensions;
+    private boolean requireFrameEncryption;
+
+    private Builder() {}
+
+    public Builder setEnableGcmCryptoSuites(boolean enableGcmCryptoSuites) {
+      this.enableGcmCryptoSuites = enableGcmCryptoSuites;
+      return this;
+    }
+
+    public Builder setEnableAes128Sha1_32CryptoCipher(boolean enableAes128Sha1_32CryptoCipher) {
+      this.enableAes128Sha1_32CryptoCipher = enableAes128Sha1_32CryptoCipher;
+      return this;
+    }
+
+    public Builder setEnableEncryptedRtpHeaderExtensions(
+        boolean enableEncryptedRtpHeaderExtensions) {
+      this.enableEncryptedRtpHeaderExtensions = enableEncryptedRtpHeaderExtensions;
+      return this;
+    }
+
+    public Builder setRequireFrameEncryption(boolean requireFrameEncryption) {
+      this.requireFrameEncryption = requireFrameEncryption;
+      return this;
+    }
+
+    public CryptoOptions createCryptoOptions() {
+      return new CryptoOptions(enableGcmCryptoSuites, enableAes128Sha1_32CryptoCipher,
+          enableEncryptedRtpHeaderExtensions, requireFrameEncryption);
+    }
+  }
+}
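
The Builder is the only public way to construct CryptoOptions. A sketch enabling the GCM suites mentioned in the class comment; assigning the result to an RTCConfiguration is assumed per that comment ('rtcConfig' is hypothetical):

    CryptoOptions cryptoOptions = CryptoOptions.builder()
        .setEnableGcmCryptoSuites(true)
        .setRequireFrameEncryption(false)
        .createCryptoOptions();
    // rtcConfig.cryptoOptions = cryptoOptions;  // 'rtcConfig' assumed: a PeerConnection.RTCConfiguration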

+ 195 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/DataChannel.java

@@ -0,0 +1,195 @@
+/*
+ *  Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+
+/** Java wrapper for a C++ DataChannelInterface. */
+public class DataChannel {
+  /** Java wrapper for WebIDL RTCDataChannel. */
+  public static class Init {
+    public boolean ordered = true;
+    // Optional unsigned short in WebIDL, -1 means unspecified.
+    public int maxRetransmitTimeMs = -1;
+    // Optional unsigned short in WebIDL, -1 means unspecified.
+    public int maxRetransmits = -1;
+    public String protocol = "";
+    public boolean negotiated;
+    // Optional unsigned short in WebIDL, -1 means unspecified.
+    public int id = -1;
+
+    @CalledByNative("Init")
+    boolean getOrdered() {
+      return ordered;
+    }
+
+    @CalledByNative("Init")
+    int getMaxRetransmitTimeMs() {
+      return maxRetransmitTimeMs;
+    }
+
+    @CalledByNative("Init")
+    int getMaxRetransmits() {
+      return maxRetransmits;
+    }
+
+    @CalledByNative("Init")
+    String getProtocol() {
+      return protocol;
+    }
+
+    @CalledByNative("Init")
+    boolean getNegotiated() {
+      return negotiated;
+    }
+
+    @CalledByNative("Init")
+    int getId() {
+      return id;
+    }
+  }
+
+  /** Java version of C++ DataBuffer.  The atom of data in a DataChannel. */
+  public static class Buffer {
+    /** The underlying data. */
+    public final ByteBuffer data;
+
+    /**
+     * Indicates whether |data| contains UTF-8 text or "binary data"
+     * (i.e. anything else).
+     */
+    public final boolean binary;
+
+    @CalledByNative("Buffer")
+    public Buffer(ByteBuffer data, boolean binary) {
+      this.data = data;
+      this.binary = binary;
+    }
+  }
+
+  /** Java version of C++ DataChannelObserver. */
+  public interface Observer {
+    /** The data channel's bufferedAmount has changed. */
+    @CalledByNative("Observer") public void onBufferedAmountChange(long previousAmount);
+    /** The data channel state has changed. */
+    @CalledByNative("Observer") public void onStateChange();
+    /**
+     * A data buffer was successfully received.  NOTE: |buffer.data| will be
+     * freed once this function returns so callers who want to use the data
+     * asynchronously must make sure to copy it first.
+     */
+    @CalledByNative("Observer") public void onMessage(Buffer buffer);
+  }
+
+  /** Keep in sync with DataChannelInterface::DataState. */
+  public enum State {
+    CONNECTING,
+    OPEN,
+    CLOSING,
+    CLOSED;
+
+    @CalledByNative("State")
+    static State fromNativeIndex(int nativeIndex) {
+      return values()[nativeIndex];
+    }
+  }
+
+  private long nativeDataChannel;
+  private long nativeObserver;
+
+  @CalledByNative
+  public DataChannel(long nativeDataChannel) {
+    this.nativeDataChannel = nativeDataChannel;
+  }
+
+  /** Register |observer|, replacing any previously-registered observer. */
+  public void registerObserver(Observer observer) {
+    checkDataChannelExists();
+    if (nativeObserver != 0) {
+      nativeUnregisterObserver(nativeObserver);
+    }
+    nativeObserver = nativeRegisterObserver(observer);
+  }
+
+  /** Unregister the (only) observer. */
+  public void unregisterObserver() {
+    checkDataChannelExists();
+    nativeUnregisterObserver(nativeObserver);
+  }
+
+  public String label() {
+    checkDataChannelExists();
+    return nativeLabel();
+  }
+
+  public int id() {
+    checkDataChannelExists();
+    return nativeId();
+  }
+
+  public State state() {
+    checkDataChannelExists();
+    return nativeState();
+  }
+
+  /**
+   * Return the number of bytes of application data (UTF-8 text and binary data)
+   * that have been queued using SendBuffer but have not yet been transmitted
+   * to the network.
+   */
+  public long bufferedAmount() {
+    checkDataChannelExists();
+    return nativeBufferedAmount();
+  }
+
+  /** Close the channel. */
+  public void close() {
+    checkDataChannelExists();
+    nativeClose();
+  }
+
+  /** Send |data| to the remote peer; return success. */
+  public boolean send(Buffer buffer) {
+    checkDataChannelExists();
+    // TODO(fischman): this could be cleverer about avoiding copies if the
+    // ByteBuffer is direct and/or is backed by an array.
+    byte[] data = new byte[buffer.data.remaining()];
+    buffer.data.get(data);
+    return nativeSend(data, buffer.binary);
+  }
+
+  /** Dispose of native resources attached to this channel. */
+  public void dispose() {
+    checkDataChannelExists();
+    JniCommon.nativeReleaseRef(nativeDataChannel);
+    nativeDataChannel = 0;
+  }
+
+  @CalledByNative
+  long getNativeDataChannel() {
+    return nativeDataChannel;
+  }
+
+  private void checkDataChannelExists() {
+    if (nativeDataChannel == 0) {
+      throw new IllegalStateException("DataChannel has been disposed.");
+    }
+  }
+
+  private native long nativeRegisterObserver(Observer observer);
+  private native void nativeUnregisterObserver(long observer);
+  private native String nativeLabel();
+  private native int nativeId();
+  private native State nativeState();
+  private native long nativeBufferedAmount();
+  private native void nativeClose();
+  private native boolean nativeSend(byte[] data, boolean binary);
+};
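
A sketch of observer registration and sending; 'channel' is assumed to come from PeerConnection.createDataChannel() or an onDataChannel() callback, and must be effectively final here. Note the copy requirement from the Observer docs above:

    channel.registerObserver(new DataChannel.Observer() {
      @Override public void onBufferedAmountChange(long previousAmount) {}
      @Override public void onStateChange() { Logging.d("DcDemo", "state: " + channel.state()); }
      @Override public void onMessage(DataChannel.Buffer buffer) {
        // |buffer.data| is freed after this returns; copy before using it asynchronously.
        byte[] copy = new byte[buffer.data.remaining()];
        buffer.data.get(copy);
      }
    });
    ByteBuffer payload = ByteBuffer.wrap("hello".getBytes(java.nio.charset.StandardCharsets.UTF_8));
    channel.send(new DataChannel.Buffer(payload, /* binary= */ false));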

+ 68 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/DefaultVideoDecoderFactory.java

@@ -0,0 +1,68 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.Arrays;
+import java.util.LinkedHashSet;
+
+/**
+ * Helper class that combines HW and SW decoders.
+ */
+public class DefaultVideoDecoderFactory implements VideoDecoderFactory {
+  private final VideoDecoderFactory hardwareVideoDecoderFactory;
+  private final VideoDecoderFactory softwareVideoDecoderFactory = new SoftwareVideoDecoderFactory();
+  private final VideoDecoderFactory platformSoftwareVideoDecoderFactory;
+
+  /**
+   * Create decoder factory using default hardware decoder factory.
+   */
+  public DefaultVideoDecoderFactory(EglBase.Context eglContext) {
+    this.hardwareVideoDecoderFactory = new HardwareVideoDecoderFactory(eglContext);
+    this.platformSoftwareVideoDecoderFactory = new PlatformSoftwareVideoDecoderFactory(eglContext);
+  }
+
+  /**
+   * Create decoder factory using explicit hardware decoder factory.
+   */
+  DefaultVideoDecoderFactory(VideoDecoderFactory hardwareVideoDecoderFactory) {
+    this.hardwareVideoDecoderFactory = hardwareVideoDecoderFactory;
+    this.platformSoftwareVideoDecoderFactory = null;
+  }
+
+  @Override
+  public VideoDecoder createDecoder(VideoCodecInfo codecType) {
+    VideoDecoder softwareDecoder = softwareVideoDecoderFactory.createDecoder(codecType);
+    final VideoDecoder hardwareDecoder = hardwareVideoDecoderFactory.createDecoder(codecType);
+    if (softwareDecoder == null && platformSoftwareVideoDecoderFactory != null) {
+      softwareDecoder = platformSoftwareVideoDecoderFactory.createDecoder(codecType);
+    }
+    if (hardwareDecoder != null && softwareDecoder != null) {
+      // Both hardware and software supported, wrap it in a software fallback
+      return new VideoDecoderFallback(
+          /* fallback= */ softwareDecoder, /* primary= */ hardwareDecoder);
+    }
+    return hardwareDecoder != null ? hardwareDecoder : softwareDecoder;
+  }
+
+  @Override
+  public VideoCodecInfo[] getSupportedCodecs() {
+    LinkedHashSet<VideoCodecInfo> supportedCodecInfos = new LinkedHashSet<VideoCodecInfo>();
+
+    supportedCodecInfos.addAll(Arrays.asList(softwareVideoDecoderFactory.getSupportedCodecs()));
+    supportedCodecInfos.addAll(Arrays.asList(hardwareVideoDecoderFactory.getSupportedCodecs()));
+    if (platformSoftwareVideoDecoderFactory != null) {
+      supportedCodecInfos.addAll(
+          Arrays.asList(platformSoftwareVideoDecoderFactory.getSupportedCodecs()));
+    }
+
+    return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]);
+  }
+}
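
Typical construction, assuming a live EglBase instance ('eglBase', see EglBase.create() later in this commit) and the public VideoCodecInfo.name field:

    VideoDecoderFactory decoderFactory =
        new DefaultVideoDecoderFactory(eglBase.getEglBaseContext());
    for (VideoCodecInfo codec : decoderFactory.getSupportedCodecs()) {
      Logging.d("CodecDemo", "decoder available: " + codec.name);
    }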

+ 66 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/DefaultVideoEncoderFactory.java

@@ -0,0 +1,66 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+
+import android.util.Log;
+
+import java.util.Arrays;
+import java.util.LinkedHashSet;
+
+/**
+ * Helper class that combines HW and SW encoders.
+ */
+public class DefaultVideoEncoderFactory implements VideoEncoderFactory {
+    private final VideoEncoderFactory hardwareVideoEncoderFactory;
+    private final VideoEncoderFactory softwareVideoEncoderFactory = new SoftwareVideoEncoderFactory();
+
+    /**
+     * Create encoder factory using default hardware encoder factory.
+     */
+    public DefaultVideoEncoderFactory(
+            EglBase.Context eglContext, boolean enableIntelVp8Encoder, boolean enableH264HighProfile) {
+        this.hardwareVideoEncoderFactory = new HardwareVideoEncoderFactory(eglContext, enableIntelVp8Encoder, enableH264HighProfile);
+    }
+
+    /**
+     * Create encoder factory using explicit hardware encoder factory.
+     */
+    DefaultVideoEncoderFactory(VideoEncoderFactory hardwareVideoEncoderFactory) {
+        this.hardwareVideoEncoderFactory = hardwareVideoEncoderFactory;
+    }
+
+
+    @Override
+    public VideoEncoder createEncoder(VideoCodecInfo info) {
+        final VideoEncoder softwareEncoder = softwareVideoEncoderFactory.createEncoder(info);
+        final VideoEncoder hardwareEncoder = hardwareVideoEncoderFactory.createEncoder(info);
+
+
+        if (hardwareEncoder != null && softwareEncoder != null) {
+
+            // Both hardware and software supported, wrap it in a software fallback
+            return new VideoEncoderFallback(
+                    /* fallback= */softwareEncoder, /* primary= */ hardwareEncoder);
+        }
+        return hardwareEncoder != null ? hardwareEncoder : softwareEncoder;
+    }
+
+    @Override
+    public VideoCodecInfo[] getSupportedCodecs() {
+        LinkedHashSet<VideoCodecInfo> supportedCodecInfos = new LinkedHashSet<>();
+
+        supportedCodecInfos.addAll(Arrays.asList(softwareVideoEncoderFactory.getSupportedCodecs()));
+        supportedCodecInfos.addAll(Arrays.asList(hardwareVideoEncoderFactory.getSupportedCodecs()));
+
+        return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]);
+    }
+}
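
A sketch wiring both default factories into a PeerConnectionFactory; the builder methods are assumed from the PeerConnectionFactory API elsewhere in this commit, and 'eglBase' is again an assumed live EglBase:

    VideoEncoderFactory encoderFactory = new DefaultVideoEncoderFactory(
        eglBase.getEglBaseContext(),
        /* enableIntelVp8Encoder= */ true,
        /* enableH264HighProfile= */ true);
    PeerConnectionFactory factory = PeerConnectionFactory.builder()
        .setVideoEncoderFactory(encoderFactory)
        .setVideoDecoderFactory(new DefaultVideoDecoderFactory(eglBase.getEglBaseContext()))
        .createPeerConnectionFactory();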

+ 96 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/DtmfSender.java

@@ -0,0 +1,96 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ DtmfSenderInterface. */
+public class DtmfSender {
+  private long nativeDtmfSender;
+
+  public DtmfSender(long nativeDtmfSender) {
+    this.nativeDtmfSender = nativeDtmfSender;
+  }
+
+  /**
+   * @return true if this DtmfSender is capable of sending DTMF. Otherwise false.
+   */
+  public boolean canInsertDtmf() {
+    checkDtmfSenderExists();
+    return nativeCanInsertDtmf(nativeDtmfSender);
+  }
+
+  /**
+   * Queues a task that sends the provided DTMF tones.
+   * <p>
+   * If insertDtmf is called on the same object while an existing task for this
+   * object to generate DTMF is still running, the previous task is canceled.
+   *
+   * @param tones        This parameter is treated as a series of characters. The characters 0
+   *                     through 9, A through D, #, and * generate the associated DTMF tones. The
+   *                     characters a to d are equivalent to A to D. The character ',' indicates a
+   *                     delay of 2 seconds before processing the next character in the tones
+   *                     parameter. Unrecognized characters are ignored.
+   * @param duration     Indicates the duration in ms to use for each character passed in the tones
+   *                     parameter. The duration cannot be more than 6000 or less than 70.
+   * @param interToneGap Indicates the gap between tones in ms. Must be at least 50 ms but should be
+   *                     as short as possible.
+   * @return             true on success and false on failure.
+   */
+  public boolean insertDtmf(String tones, int duration, int interToneGap) {
+    checkDtmfSenderExists();
+    return nativeInsertDtmf(nativeDtmfSender, tones, duration, interToneGap);
+  }
+
+  /**
+   * @return The tones remaining to be played out
+   */
+  public String tones() {
+    checkDtmfSenderExists();
+    return nativeTones(nativeDtmfSender);
+  }
+
+  /**
+   * @return The current tone duration value in ms. This value will be the value last set via the
+   *         insertDtmf() method, or the default value of 100 ms if insertDtmf() was never called.
+   */
+  public int duration() {
+    checkDtmfSenderExists();
+    return nativeDuration(nativeDtmfSender);
+  }
+
+  /**
+   * @return The current value of the between-tone gap in ms. This value will be the value last set
+   *         via the insertDtmf() method, or the default value of 50 ms if insertDtmf() was never
+   *         called.
+   */
+  public int interToneGap() {
+    checkDtmfSenderExists();
+    return nativeInterToneGap(nativeDtmfSender);
+  }
+
+  public void dispose() {
+    checkDtmfSenderExists();
+    JniCommon.nativeReleaseRef(nativeDtmfSender);
+    nativeDtmfSender = 0;
+  }
+
+  private void checkDtmfSenderExists() {
+    if (nativeDtmfSender == 0) {
+      throw new IllegalStateException("DtmfSender has been disposed.");
+    }
+  }
+
+  private static native boolean nativeCanInsertDtmf(long dtmfSender);
+  private static native boolean nativeInsertDtmf(
+      long dtmfSender, String tones, int duration, int interToneGap);
+  private static native String nativeTones(long dtmfSender);
+  private static native int nativeDuration(long dtmfSender);
+  private static native int nativeInterToneGap(long dtmfSender);
+};
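
A sketch of tone insertion within the documented limits (duration 70-6000 ms, inter-tone gap >= 50 ms); 'sender' is assumed to come from RtpSender.dtmf() on an audio sender:

    if (sender.canInsertDtmf()) {
      sender.insertDtmf("1234#", /* duration= */ 100, /* interToneGap= */ 70);
    }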

+ 202 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/EglBase.java

@@ -0,0 +1,202 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.SurfaceTexture;
+
+import android.view.Surface;
+import javax.microedition.khronos.egl.EGL10;
+
+/**
+ * Holds EGL state and utility methods for handling an EGL 1.0 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+public interface EglBase {
+  // EGL wrapper for an actual EGLContext.
+  public interface Context {
+    public final static long NO_CONTEXT = 0;
+
+    /**
+     * Returns an EGL context that can be used by native code. Returns NO_CONTEXT if the method is
+     * unsupported.
+     *
+     * @note This is currently only supported for EGL 1.4 and not for EGL 1.0.
+     */
+    long getNativeEglContext();
+  }
+
+  // According to the documentation, EGL can be used from multiple threads at the same time if each
+  // thread has its own EGLContext, but in practice it deadlocks on some devices when doing this.
+  // Therefore, synchronize on this global lock before calling dangerous EGL functions that might
+  // deadlock. See https://bugs.chromium.org/p/webrtc/issues/detail?id=5702 for more info.
+  public static final Object lock = new Object();
+
+  // These constants are taken from EGL14.EGL_OPENGL_ES2_BIT and EGL14.EGL_CONTEXT_CLIENT_VERSION.
+  // https://android.googlesource.com/platform/frameworks/base/+/master/opengl/java/android/opengl/EGL14.java
+  // This is similar to how GlSurfaceView does:
+  // http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/opengl/GLSurfaceView.java#760
+  public static final int EGL_OPENGL_ES2_BIT = 4;
+  // Android-specific extension.
+  public static final int EGL_RECORDABLE_ANDROID = 0x3142;
+
+  // clang-format off
+  public static final int[] CONFIG_PLAIN = {
+    EGL10.EGL_RED_SIZE, 8,
+    EGL10.EGL_GREEN_SIZE, 8,
+    EGL10.EGL_BLUE_SIZE, 8,
+    EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+    EGL10.EGL_NONE
+  };
+  public static final int[] CONFIG_RGBA = {
+    EGL10.EGL_RED_SIZE, 8,
+    EGL10.EGL_GREEN_SIZE, 8,
+    EGL10.EGL_BLUE_SIZE, 8,
+    EGL10.EGL_ALPHA_SIZE, 8,
+    EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+    EGL10.EGL_NONE
+  };
+  public static final int[] CONFIG_PIXEL_BUFFER = {
+    EGL10.EGL_RED_SIZE, 8,
+    EGL10.EGL_GREEN_SIZE, 8,
+    EGL10.EGL_BLUE_SIZE, 8,
+    EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+    EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
+    EGL10.EGL_NONE
+  };
+  public static final int[] CONFIG_PIXEL_RGBA_BUFFER = {
+    EGL10.EGL_RED_SIZE, 8,
+    EGL10.EGL_GREEN_SIZE, 8,
+    EGL10.EGL_BLUE_SIZE, 8,
+    EGL10.EGL_ALPHA_SIZE, 8,
+    EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+    EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
+    EGL10.EGL_NONE
+  };
+  public static final int[] CONFIG_RECORDABLE = {
+    EGL10.EGL_RED_SIZE, 8,
+    EGL10.EGL_GREEN_SIZE, 8,
+    EGL10.EGL_BLUE_SIZE, 8,
+    EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+    EGL_RECORDABLE_ANDROID, 1,
+    EGL10.EGL_NONE
+  };
+  // clang-format on
+
+  /**
+   * Create a new context with the specified config attributes, sharing data with |sharedContext|.
+   * If |sharedContext| is null, a root context is created. This function will try to create an EGL
+   * 1.4 context if possible, and an EGL 1.0 context otherwise.
+   */
+  public static EglBase create(Context sharedContext, int[] configAttributes) {
+    if (sharedContext == null) {
+      return EglBase14Impl.isEGL14Supported() ? createEgl14(configAttributes)
+                                              : createEgl10(configAttributes);
+    } else if (sharedContext instanceof EglBase14.Context) {
+      return createEgl14((EglBase14.Context) sharedContext, configAttributes);
+    } else if (sharedContext instanceof EglBase10.Context) {
+      return createEgl10((EglBase10.Context) sharedContext, configAttributes);
+    }
+    throw new IllegalArgumentException("Unrecognized Context");
+  }
+
+  /**
+   * Helper function for creating a plain root context. This function will try to create an EGL 1.4
+   * context if possible, and an EGL 1.0 context otherwise.
+   */
+  public static EglBase create() {
+    return create(null /* sharedContext */, CONFIG_PLAIN);
+  }
+
+  /**
+   * Helper function for creating a plain context, sharing data with |sharedContext|. This function
+   * will try to create an EGL 1.4 context if possible, and an EGL 1.0 context otherwise.
+   */
+  public static EglBase create(Context sharedContext) {
+    return create(sharedContext, CONFIG_PLAIN);
+  }
+
+  /** Explicitly create a root EGL 1.0 context with the specified config attributes. */
+  public static EglBase10 createEgl10(int[] configAttributes) {
+    return new EglBase10Impl(/* sharedContext= */ null, configAttributes);
+  }
+
+  /**
+   * Explicitly create a root EGL 1.0 context with the specified config attributes and shared
+   * context.
+   */
+  public static EglBase10 createEgl10(EglBase10.Context sharedContext, int[] configAttributes) {
+    return new EglBase10Impl(
+        sharedContext == null ? null : sharedContext.getRawContext(), configAttributes);
+  }
+
+  /**
+   * Explicitly create a root EGL 1.0 context with the specified config attributes
+   * and shared context.
+   */
+  public static EglBase10 createEgl10(
+      javax.microedition.khronos.egl.EGLContext sharedContext, int[] configAttributes) {
+    return new EglBase10Impl(sharedContext, configAttributes);
+  }
+
+  /** Explicitly create a root EGL 1.4 context with the specified config attributes. */
+  public static EglBase14 createEgl14(int[] configAttributes) {
+    return new EglBase14Impl(/* sharedContext= */ null, configAttributes);
+  }
+
+  /**
+   * Explicitly create a root EGL 1.4 context with the specified config attributes and shared
+   * context.
+   */
+  public static EglBase14 createEgl14(EglBase14.Context sharedContext, int[] configAttributes) {
+    return new EglBase14Impl(
+        sharedContext == null ? null : sharedContext.getRawContext(), configAttributes);
+  }
+
+  /**
+   * Explicitly create a root EGL 1.4 context with the specified config attributes
+   * and shared context.
+   */
+  public static EglBase14 createEgl14(
+      android.opengl.EGLContext sharedContext, int[] configAttributes) {
+    return new EglBase14Impl(sharedContext, configAttributes);
+  }
+
+  void createSurface(Surface surface);
+
+  // Create EGLSurface from the Android SurfaceTexture.
+  void createSurface(SurfaceTexture surfaceTexture);
+
+  // Create dummy 1x1 pixel buffer surface so the context can be made current.
+  void createDummyPbufferSurface();
+
+  void createPbufferSurface(int width, int height);
+
+  Context getEglBaseContext();
+
+  boolean hasSurface();
+
+  int surfaceWidth();
+
+  int surfaceHeight();
+
+  void releaseSurface();
+
+  void release();
+
+  void makeCurrent();
+
+  // Detach the current EGL context, so that it can be made current on another thread.
+  void detachCurrent();
+
+  void swapBuffers();
+
+  void swapBuffers(long presentationTimeStampNs);
+}
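
A sketch of off-screen use of the interface above: create a root context, back it with a 1x1 pbuffer so it can be made current, then release everything:

    EglBase eglBase = EglBase.create();  // EGL 1.4 if supported, else EGL 1.0
    eglBase.createDummyPbufferSurface();
    eglBase.makeCurrent();
    // ... GLES20 calls can run here, off-screen ...
    eglBase.releaseSurface();
    eglBase.release();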

+ 20 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/EglBase10.java

@@ -0,0 +1,20 @@
+/*
+ *  Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import javax.microedition.khronos.egl.EGLContext;
+
+/** EGL 1.0 implementation of EglBase. */
+public interface EglBase10 extends EglBase {
+  interface Context extends EglBase.Context {
+    EGLContext getRawContext();
+  }
+}

+ 20 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/EglBase14.java

@@ -0,0 +1,20 @@
+/*
+ *  Copyright 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.opengl.EGLContext;
+
+/** EGL 1.4 implementation of EglBase. */
+public interface EglBase14 extends EglBase {
+  interface Context extends EglBase.Context {
+    EGLContext getRawContext();
+  }
+}

+ 753 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/EglRenderer.java

@@ -0,0 +1,753 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.Bitmap;
+import android.graphics.Matrix;
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES20;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.Looper;
+import android.os.Message;
+import android.view.Surface;
+
+import java.nio.ByteBuffer;
+import java.text.DecimalFormat;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Implements VideoSink by displaying the video stream on an EGL Surface. This class is intended to
+ * be used as a helper class for rendering on SurfaceViews and TextureViews.
+ */
+public class EglRenderer implements VideoSink {
+  private static final String TAG = "EglRenderer";
+  private static final long LOG_INTERVAL_SEC = 4;
+
+  public interface FrameListener { void onFrame(Bitmap frame); }
+
+  private static class FrameListenerAndParams {
+    public final FrameListener listener;
+    public final float scale;
+    public final RendererCommon.GlDrawer drawer;
+    public final boolean applyFpsReduction;
+
+    public FrameListenerAndParams(FrameListener listener, float scale,
+        RendererCommon.GlDrawer drawer, boolean applyFpsReduction) {
+      this.listener = listener;
+      this.scale = scale;
+      this.drawer = drawer;
+      this.applyFpsReduction = applyFpsReduction;
+    }
+  }
+
+  private class EglSurfaceCreation implements Runnable {
+    private Object surface;
+
+    // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+    @SuppressWarnings("NoSynchronizedMethodCheck")
+    public synchronized void setSurface(Object surface) {
+      this.surface = surface;
+    }
+
+    @Override
+    // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
+    @SuppressWarnings("NoSynchronizedMethodCheck")
+    public synchronized void run() {
+      if (surface != null && eglBase != null && !eglBase.hasSurface()) {
+        if (surface instanceof Surface) {
+          eglBase.createSurface((Surface) surface);
+        } else if (surface instanceof SurfaceTexture) {
+          eglBase.createSurface((SurfaceTexture) surface);
+        } else {
+          throw new IllegalStateException("Invalid surface: " + surface);
+        }
+        eglBase.makeCurrent();
+        // Necessary for YUV frames with odd width.
+        GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
+      }
+    }
+  }
+
+  /**
+   * Handler that triggers a callback when an uncaught exception happens when handling a message.
+   */
+  private static class HandlerWithExceptionCallback extends Handler {
+    private final Runnable exceptionCallback;
+
+    public HandlerWithExceptionCallback(Looper looper, Runnable exceptionCallback) {
+      super(looper);
+      this.exceptionCallback = exceptionCallback;
+    }
+
+    @Override
+    public void dispatchMessage(Message msg) {
+      try {
+        super.dispatchMessage(msg);
+      } catch (Exception e) {
+        Logging.e(TAG, "Exception on EglRenderer thread", e);
+        exceptionCallback.run();
+        throw e;
+      }
+    }
+  }
+
+  protected final String name;
+
+  // |renderThreadHandler| is a handler for communicating with |renderThread|, and is synchronized
+  // on |handlerLock|.
+  private final Object handlerLock = new Object();
+  private Handler renderThreadHandler;
+
+  private final ArrayList<FrameListenerAndParams> frameListeners = new ArrayList<>();
+
+  // Variables for fps reduction.
+  private final Object fpsReductionLock = new Object();
+  // Time for when next frame should be rendered.
+  private long nextFrameTimeNs;
+  // Minimum duration between frames when fps reduction is active, or -1 if video is completely
+  // paused.
+  private long minRenderPeriodNs;
+
+  // EGL and GL resources for drawing YUV/OES textures. After initialization, these are only accessed
+  // from the render thread.
+  private EglBase eglBase;
+  private final VideoFrameDrawer frameDrawer;
+  private RendererCommon.GlDrawer drawer;
+  private boolean usePresentationTimeStamp;
+  private final Matrix drawMatrix = new Matrix();
+
+  // Pending frame to render. Serves as a queue with size 1. Synchronized on |frameLock|.
+  private final Object frameLock = new Object();
+  private VideoFrame pendingFrame;
+
+  // These variables are synchronized on |layoutLock|.
+  private final Object layoutLock = new Object();
+  private float layoutAspectRatio;
+  // If true, mirrors the video stream horizontally.
+  private boolean mirrorHorizontally;
+  // If true, mirrors the video stream vertically.
+  private boolean mirrorVertically;
+
+  // These variables are synchronized on |statisticsLock|.
+  private final Object statisticsLock = new Object();
+  // Total number of video frames received in renderFrame() call.
+  private int framesReceived;
+  // Number of video frames dropped by renderFrame() because previous frame has not been rendered
+  // yet.
+  private int framesDropped;
+  // Number of rendered video frames.
+  private int framesRendered;
+  // Start time for counting these statistics, or 0 if we haven't started measuring yet.
+  private long statisticsStartTimeNs;
+  // Time in ns spent in renderFrameOnRenderThread() function.
+  private long renderTimeNs;
+  // Time in ns spent by the render thread in the swapBuffers() function.
+  private long renderSwapBufferTimeNs;
+
+  // Used for bitmap capturing.
+  private final GlTextureFrameBuffer bitmapTextureFramebuffer =
+      new GlTextureFrameBuffer(GLES20.GL_RGBA);
+
+  private final Runnable logStatisticsRunnable = new Runnable() {
+    @Override
+    public void run() {
+      logStatistics();
+      synchronized (handlerLock) {
+        if (renderThreadHandler != null) {
+          renderThreadHandler.removeCallbacks(logStatisticsRunnable);
+          renderThreadHandler.postDelayed(
+              logStatisticsRunnable, TimeUnit.SECONDS.toMillis(LOG_INTERVAL_SEC));
+        }
+      }
+    }
+  };
+
+  private final EglSurfaceCreation eglSurfaceCreationRunnable = new EglSurfaceCreation();
+
+  /**
+   * Standard constructor. The name will be used for the render thread name and included when
+   * logging. In order to render something, you must first call init() and createEglSurface.
+   */
+  public EglRenderer(String name) {
+    this(name, new VideoFrameDrawer());
+  }
+
+  public EglRenderer(String name, VideoFrameDrawer videoFrameDrawer) {
+    this.name = name;
+    this.frameDrawer = videoFrameDrawer;
+  }
+
+  /**
+   * Initialize this class, sharing resources with |sharedContext|. The custom |drawer| will be used
+   * for drawing frames on the EGLSurface. This class is responsible for calling release() on
+   * |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
+   * init()/release() cycle. If usePresentationTimeStamp is true, eglPresentationTimeANDROID will be
+   * set with the frame timestamps, which specifies desired presentation time and might be useful
+   * for e.g. syncing audio and video.
+   */
+  public void init(final EglBase.Context sharedContext, final int[] configAttributes,
+      RendererCommon.GlDrawer drawer, boolean usePresentationTimeStamp) {
+    synchronized (handlerLock) {
+      if (renderThreadHandler != null) {
+        throw new IllegalStateException(name + "Already initialized");
+      }
+      logD("Initializing EglRenderer");
+      this.drawer = drawer;
+      this.usePresentationTimeStamp = usePresentationTimeStamp;
+
+      final HandlerThread renderThread = new HandlerThread(name + "EglRenderer");
+      renderThread.start();
+      renderThreadHandler =
+          new HandlerWithExceptionCallback(renderThread.getLooper(), new Runnable() {
+            @Override
+            public void run() {
+              synchronized (handlerLock) {
+                renderThreadHandler = null;
+              }
+            }
+          });
+      // Create EGL context on the newly created render thread. It should be possible to create the
+      // context on this thread and make it current on the render thread, but this causes failure on
+      // some Marvel based JB devices. https://bugs.chromium.org/p/webrtc/issues/detail?id=6350.
+      ThreadUtils.invokeAtFrontUninterruptibly(renderThreadHandler, () -> {
+        // If sharedContext is null, then texture frames are disabled. This is typically for old
+        // devices that might not be fully spec compliant, so force EGL 1.0 since EGL 1.4 has
+        // caused trouble on some weird devices.
+        if (sharedContext == null) {
+          logD("EglBase10.create context");
+          eglBase = EglBase.createEgl10(configAttributes);
+        } else {
+          logD("EglBase.create shared context");
+          eglBase = EglBase.create(sharedContext, configAttributes);
+        }
+      });
+      renderThreadHandler.post(eglSurfaceCreationRunnable);
+      final long currentTimeNs = System.nanoTime();
+      resetStatistics(currentTimeNs);
+      renderThreadHandler.postDelayed(
+          logStatisticsRunnable, TimeUnit.SECONDS.toMillis(LOG_INTERVAL_SEC));
+    }
+  }
+
+  /**
+   * Same as above with usePresentationTimeStamp set to false.
+   *
+   * @see #init(EglBase.Context, int[], RendererCommon.GlDrawer, boolean)
+   */
+  public void init(final EglBase.Context sharedContext, final int[] configAttributes,
+      RendererCommon.GlDrawer drawer) {
+    init(sharedContext, configAttributes, drawer, /* usePresentationTimeStamp= */ false);
+  }
+
+  public void createEglSurface(Surface surface) {
+    createEglSurfaceInternal(surface);
+  }
+
+  public void createEglSurface(SurfaceTexture surfaceTexture) {
+    createEglSurfaceInternal(surfaceTexture);
+  }
+
+  private void createEglSurfaceInternal(Object surface) {
+    eglSurfaceCreationRunnable.setSurface(surface);
+    postToRenderThread(eglSurfaceCreationRunnable);
+  }
+
+  /**
+   * Block until any pending frame is returned and all GL resources released, even if an interrupt
+   * occurs. If an interrupt occurs during release(), the interrupt flag will be set. This function
+   * should be called before the Activity is destroyed, while the EGLContext is still valid. If you
+   * don't call this function, the GL resources might leak.
+   */
+  public void release() {
+    logD("Releasing.");
+    final CountDownLatch eglCleanupBarrier = new CountDownLatch(1);
+    synchronized (handlerLock) {
+      if (renderThreadHandler == null) {
+        logD("Already released");
+        return;
+      }
+      renderThreadHandler.removeCallbacks(logStatisticsRunnable);
+      // Release EGL and GL resources on render thread.
+      renderThreadHandler.postAtFrontOfQueue(() -> {
+        // Detach current shader program.
+        GLES20.glUseProgram(/* program= */ 0);
+        if (drawer != null) {
+          drawer.release();
+          drawer = null;
+        }
+        frameDrawer.release();
+        bitmapTextureFramebuffer.release();
+        if (eglBase != null) {
+          logD("eglBase detach and release.");
+          eglBase.detachCurrent();
+          eglBase.release();
+          eglBase = null;
+        }
+        frameListeners.clear();
+        eglCleanupBarrier.countDown();
+      });
+      final Looper renderLooper = renderThreadHandler.getLooper();
+      // TODO(magjed): Replace this post() with renderLooper.quitSafely() when API support >= 18.
+      renderThreadHandler.post(() -> {
+        logD("Quitting render thread.");
+        renderLooper.quit();
+      });
+      // Don't accept any more frames or messages to the render thread.
+      renderThreadHandler = null;
+    }
+    // Make sure the EGL/GL cleanup posted above is executed.
+    ThreadUtils.awaitUninterruptibly(eglCleanupBarrier);
+    synchronized (frameLock) {
+      if (pendingFrame != null) {
+        pendingFrame.release();
+        pendingFrame = null;
+      }
+    }
+    logD("Releasing done.");
+  }
+
+  /**
+   * Reset the statistics logged in logStatistics().
+   */
+  private void resetStatistics(long currentTimeNs) {
+    synchronized (statisticsLock) {
+      statisticsStartTimeNs = currentTimeNs;
+      framesReceived = 0;
+      framesDropped = 0;
+      framesRendered = 0;
+      renderTimeNs = 0;
+      renderSwapBufferTimeNs = 0;
+    }
+  }
+
+  public void printStackTrace() {
+    synchronized (handlerLock) {
+      final Thread renderThread =
+          (renderThreadHandler == null) ? null : renderThreadHandler.getLooper().getThread();
+      if (renderThread != null) {
+        final StackTraceElement[] renderStackTrace = renderThread.getStackTrace();
+        if (renderStackTrace.length > 0) {
+          logW("EglRenderer stack trace:");
+          for (StackTraceElement traceElem : renderStackTrace) {
+            logW(traceElem.toString());
+          }
+        }
+      }
+    }
+  }
+
+  /**
+   * Set if the video stream should be mirrored horizontally or not.
+   */
+  public void setMirror(final boolean mirror) {
+    logD("setMirrorHorizontally: " + mirror);
+    synchronized (layoutLock) {
+      this.mirrorHorizontally = mirror;
+    }
+  }
+
+  /**
+   * Set if the video stream should be mirrored vertically or not.
+   */
+  public void setMirrorVertically(final boolean mirrorVertically) {
+    logD("setMirrorVertically: " + mirrorVertically);
+    synchronized (layoutLock) {
+      this.mirrorVertically = mirrorVertically;
+    }
+  }
+
+  /**
+   * Set layout aspect ratio. This is used to crop frames when rendering to avoid stretched video.
+   * Set this to 0 to disable cropping.
+   */
+  public void setLayoutAspectRatio(float layoutAspectRatio) {
+    logD("setLayoutAspectRatio: " + layoutAspectRatio);
+    synchronized (layoutLock) {
+      this.layoutAspectRatio = layoutAspectRatio;
+    }
+  }
+
+  /**
+   * Limit render framerate.
+   *
+   * @param fps Limit render framerate to this value, or use Float.POSITIVE_INFINITY to disable fps
+   *            reduction.
+   */
+  public void setFpsReduction(float fps) {
+    logD("setFpsReduction: " + fps);
+    synchronized (fpsReductionLock) {
+      final long previousRenderPeriodNs = minRenderPeriodNs;
+      if (fps <= 0) {
+        minRenderPeriodNs = Long.MAX_VALUE;
+      } else {
+        minRenderPeriodNs = (long) (TimeUnit.SECONDS.toNanos(1) / fps);
+      }
+      if (minRenderPeriodNs != previousRenderPeriodNs) {
+        // Fps reduction changed - reset frame time.
+        nextFrameTimeNs = System.nanoTime();
+      }
+    }
+  }
+
+  public void disableFpsReduction() {
+    setFpsReduction(Float.POSITIVE_INFINITY /* fps */);
+  }
+
+  public void pauseVideo() {
+    setFpsReduction(0 /* fps */);
+  }
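For reference, the reduction above translates fps into a minimum render period of 10^9 / fps nanoseconds, and pauseVideo() maps to fps = 0, which the render loop treats as "paused". A brief illustrative sketch:

  renderer.setFpsReduction(15f);    // render at most ~15 fps, dropping extra frames
  renderer.pauseVideo();            // same as setFpsReduction(0): all frames dropped
  renderer.disableFpsReduction();   // render every received frame again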
+
+  /**
+   * Register a callback to be invoked when a new video frame has been received. This version uses
+   * the drawer of the EglRenderer that was passed in init.
+   *
+   * @param listener The callback to be invoked. The callback will be invoked on the render thread.
+   *                 It should be lightweight and must not call removeFrameListener.
+   * @param scale    The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
+   *                 required.
+   */
+  public void addFrameListener(final FrameListener listener, final float scale) {
+    addFrameListener(listener, scale, null, false /* applyFpsReduction */);
+  }
+
+  /**
+   * Register a callback to be invoked when a new video frame has been received.
+   *
+   * @param listener The callback to be invoked. The callback will be invoked on the render thread.
+   *                 It should be lightweight and must not call removeFrameListener.
+   * @param scale    The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
+   *                 required.
+   * @param drawer   Custom drawer to use for this frame listener or null to use the default one.
+   */
+  public void addFrameListener(
+      final FrameListener listener, final float scale, final RendererCommon.GlDrawer drawerParam) {
+    addFrameListener(listener, scale, drawerParam, false /* applyFpsReduction */);
+  }
+
+  /**
+   * Register a callback to be invoked when a new video frame has been received.
+   *
+   * @param listener The callback to be invoked. The callback will be invoked on the render thread.
+   *                 It should be lightweight and must not call removeFrameListener.
+   * @param scale    The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
+   *                 required.
+   * @param drawer   Custom drawer to use for this frame listener or null to use the default one.
+   * @param applyFpsReduction This callback will not be called for frames that have been dropped by
+   *                          FPS reduction.
+   */
+  public void addFrameListener(final FrameListener listener, final float scale,
+        final RendererCommon.GlDrawer drawerParam, final boolean applyFpsReduction) {
+    postToRenderThread(() -> {
+      final RendererCommon.GlDrawer listenerDrawer = drawerParam == null ? drawer : drawerParam;
+      frameListeners.add(
+          new FrameListenerAndParams(listener, scale, listenerDrawer, applyFpsReduction));
+    });
+  }
+
+  /**
+   * Remove any pending callback that was added with addFrameListener. If the callback is not in
+   * the queue, nothing happens. It is guaranteed that the callback won't be called after this
+   * method returns.
+   *
+   * @param listener The callback to remove.
+   */
+  public void removeFrameListener(final FrameListener listener) {
+    final CountDownLatch latch = new CountDownLatch(1);
+    synchronized (handlerLock) {
+      if (renderThreadHandler == null) {
+        return;
+      }
+      if (Thread.currentThread() == renderThreadHandler.getLooper().getThread()) {
+        throw new RuntimeException("removeFrameListener must not be called on the render thread.");
+      }
+      postToRenderThread(() -> {
+        latch.countDown();
+        final Iterator<FrameListenerAndParams> iter = frameListeners.iterator();
+        while (iter.hasNext()) {
+          if (iter.next().listener == listener) {
+            iter.remove();
+          }
+        }
+      });
+    }
+    ThreadUtils.awaitUninterruptibly(latch);
+  }
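Note that notifyCallbacks() below removes each listener after it fires, so a frame listener is effectively one-shot unless it is re-added. A hedged sketch of grabbing a single half-resolution snapshot (saveBitmapAsync is a hypothetical helper):

  renderer.addFrameListener(bitmap -> {
    if (bitmap != null) {
      // Runs on the render thread: hand the bitmap off instead of doing heavy work here.
      saveBitmapAsync(bitmap);
    }
  }, 0.5f /* scale */);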
+
+  // VideoSink interface.
+  @Override
+  public void onFrame(VideoFrame frame) {
+    synchronized (statisticsLock) {
+      ++framesReceived;
+    }
+    final boolean dropOldFrame;
+    synchronized (handlerLock) {
+      if (renderThreadHandler == null) {
+        logD("Dropping frame - Not initialized or already released.");
+        return;
+      }
+      synchronized (frameLock) {
+        dropOldFrame = (pendingFrame != null);
+        if (dropOldFrame) {
+          pendingFrame.release();
+        }
+        pendingFrame = frame;
+        pendingFrame.retain();
+        renderThreadHandler.post(this::renderFrameOnRenderThread);
+      }
+    }
+    if (dropOldFrame) {
+      synchronized (statisticsLock) {
+        ++framesDropped;
+      }
+    }
+  }
+
+  /**
+   * Release EGL surface. This function will block until the EGL surface is released.
+   */
+  public void releaseEglSurface(final Runnable completionCallback) {
+    // Ensure that the render thread is no longer touching the Surface before returning from this
+    // function.
+    eglSurfaceCreationRunnable.setSurface(null /* surface */);
+    synchronized (handlerLock) {
+      if (renderThreadHandler != null) {
+        renderThreadHandler.removeCallbacks(eglSurfaceCreationRunnable);
+        renderThreadHandler.postAtFrontOfQueue(() -> {
+          if (eglBase != null) {
+            eglBase.detachCurrent();
+            eglBase.releaseSurface();
+          }
+          completionCallback.run();
+        });
+        return;
+      }
+    }
+    completionCallback.run();
+  }
+
+  /**
+   * Private helper function to post tasks safely.
+   */
+  private void postToRenderThread(Runnable runnable) {
+    synchronized (handlerLock) {
+      if (renderThreadHandler != null) {
+        renderThreadHandler.post(runnable);
+      }
+    }
+  }
+
+  private void clearSurfaceOnRenderThread(float r, float g, float b, float a) {
+    if (eglBase != null && eglBase.hasSurface()) {
+      logD("clearSurface");
+      GLES20.glClearColor(r, g, b, a);
+      GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+      eglBase.swapBuffers();
+    }
+  }
+
+  /**
+   * Post a task to clear the surface to a transparent uniform color.
+   */
+  public void clearImage() {
+    clearImage(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
+  }
+
+  /**
+   * Post a task to clear the surface to a specific color.
+   */
+  public void clearImage(final float r, final float g, final float b, final float a) {
+    synchronized (handlerLock) {
+      if (renderThreadHandler == null) {
+        return;
+      }
+      renderThreadHandler.postAtFrontOfQueue(() -> clearSurfaceOnRenderThread(r, g, b, a));
+    }
+  }
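A common illustrative use (reusing the renderer and videoTrack names from the earlier sketch) is blanking the surface once the video sink is detached:

  videoTrack.removeSink(renderer);
  renderer.clearImage();                 // transparent
  renderer.clearImage(0f, 0f, 0f, 1f);   // or opaque black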
+
+  /**
+   * Renders and releases |pendingFrame|.
+   */
+  private void renderFrameOnRenderThread() {
+    // Fetch and render |pendingFrame|.
+    final VideoFrame frame;
+    synchronized (frameLock) {
+      if (pendingFrame == null) {
+        return;
+      }
+      frame = pendingFrame;
+      pendingFrame = null;
+    }
+    if (eglBase == null || !eglBase.hasSurface()) {
+      logD("Dropping frame - No surface");
+      frame.release();
+      return;
+    }
+    // Check if fps reduction is active.
+    final boolean shouldRenderFrame;
+    synchronized (fpsReductionLock) {
+      if (minRenderPeriodNs == Long.MAX_VALUE) {
+        // Rendering is paused.
+        shouldRenderFrame = false;
+      } else if (minRenderPeriodNs <= 0) {
+        // FPS reduction is disabled.
+        shouldRenderFrame = true;
+      } else {
+        final long currentTimeNs = System.nanoTime();
+        if (currentTimeNs < nextFrameTimeNs) {
+          logD("Skipping frame rendering - fps reduction is active.");
+          shouldRenderFrame = false;
+        } else {
+          nextFrameTimeNs += minRenderPeriodNs;
+          // The time for the next frame should always be in the future.
+          nextFrameTimeNs = Math.max(nextFrameTimeNs, currentTimeNs);
+          shouldRenderFrame = true;
+        }
+      }
+    }
+
+    final long startTimeNs = System.nanoTime();
+
+    final float frameAspectRatio = frame.getRotatedWidth() / (float) frame.getRotatedHeight();
+    final float drawnAspectRatio;
+    synchronized (layoutLock) {
+      drawnAspectRatio = layoutAspectRatio != 0f ? layoutAspectRatio : frameAspectRatio;
+    }
+
+    final float scaleX;
+    final float scaleY;
+
+    if (frameAspectRatio > drawnAspectRatio) {
+      scaleX = drawnAspectRatio / frameAspectRatio;
+      scaleY = 1f;
+    } else {
+      scaleX = 1f;
+      scaleY = frameAspectRatio / drawnAspectRatio;
+    }
+
+    drawMatrix.reset();
+    drawMatrix.preTranslate(0.5f, 0.5f);
+    drawMatrix.preScale(mirrorHorizontally ? -1f : 1f, mirrorVertically ? -1f : 1f);
+    drawMatrix.preScale(scaleX, scaleY);
+    drawMatrix.preTranslate(-0.5f, -0.5f);
+
+    if (shouldRenderFrame) {
+      GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
+      GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+      frameDrawer.drawFrame(frame, drawer, drawMatrix, 0 /* viewportX */, 0 /* viewportY */,
+          eglBase.surfaceWidth(), eglBase.surfaceHeight());
+
+      final long swapBuffersStartTimeNs = System.nanoTime();
+      if (usePresentationTimeStamp) {
+        eglBase.swapBuffers(frame.getTimestampNs());
+      } else {
+        eglBase.swapBuffers();
+      }
+
+      final long currentTimeNs = System.nanoTime();
+      synchronized (statisticsLock) {
+        ++framesRendered;
+        renderTimeNs += (currentTimeNs - startTimeNs);
+        renderSwapBufferTimeNs += (currentTimeNs - swapBuffersStartTimeNs);
+      }
+    }
+
+    notifyCallbacks(frame, shouldRenderFrame);
+    frame.release();
+  }
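As a worked example of the cropping above: a 1280x720 frame has an aspect ratio of about 1.78; rendered into a 4:3 layout (ratio about 1.33), frameAspectRatio > drawnAspectRatio, so scaleX = 1.33 / 1.78 = 0.75 and scaleY = 1. The frame is cropped horizontally to fill the layout without stretching.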
+
+  private void notifyCallbacks(VideoFrame frame, boolean wasRendered) {
+    if (frameListeners.isEmpty())
+      return;
+
+    drawMatrix.reset();
+    drawMatrix.preTranslate(0.5f, 0.5f);
+    drawMatrix.preScale(mirrorHorizontally ? -1f : 1f, mirrorVertically ? -1f : 1f);
+    drawMatrix.preScale(1f, -1f); // We want the output to be upside down for Bitmap.
+    drawMatrix.preTranslate(-0.5f, -0.5f);
+
+    Iterator<FrameListenerAndParams> it = frameListeners.iterator();
+    while (it.hasNext()) {
+      FrameListenerAndParams listenerAndParams = it.next();
+      if (!wasRendered && listenerAndParams.applyFpsReduction) {
+        continue;
+      }
+      it.remove();
+
+      final int scaledWidth = (int) (listenerAndParams.scale * frame.getRotatedWidth());
+      final int scaledHeight = (int) (listenerAndParams.scale * frame.getRotatedHeight());
+
+      if (scaledWidth == 0 || scaledHeight == 0) {
+        listenerAndParams.listener.onFrame(null);
+        continue;
+      }
+
+      bitmapTextureFramebuffer.setSize(scaledWidth, scaledHeight);
+
+      GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, bitmapTextureFramebuffer.getFrameBufferId());
+      GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
+          GLES20.GL_TEXTURE_2D, bitmapTextureFramebuffer.getTextureId(), 0);
+
+      GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
+      GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+      frameDrawer.drawFrame(frame, listenerAndParams.drawer, drawMatrix, 0 /* viewportX */,
+          0 /* viewportY */, scaledWidth, scaledHeight);
+
+      final ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(scaledWidth * scaledHeight * 4);
+      GLES20.glViewport(0, 0, scaledWidth, scaledHeight);
+      GLES20.glReadPixels(
+          0, 0, scaledWidth, scaledHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, bitmapBuffer);
+
+      GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+      GlUtil.checkNoGLES2Error("EglRenderer.notifyCallbacks");
+
+      final Bitmap bitmap = Bitmap.createBitmap(scaledWidth, scaledHeight, Bitmap.Config.ARGB_8888);
+      bitmap.copyPixelsFromBuffer(bitmapBuffer);
+      listenerAndParams.listener.onFrame(bitmap);
+    }
+  }
+
+  private String averageTimeAsString(long sumTimeNs, int count) {
+    return (count <= 0) ? "NA" : TimeUnit.NANOSECONDS.toMicros(sumTimeNs / count) + " us";
+  }
+
+  private void logStatistics() {
+    final DecimalFormat fpsFormat = new DecimalFormat("#.0");
+    final long currentTimeNs = System.nanoTime();
+    synchronized (statisticsLock) {
+      final long elapsedTimeNs = currentTimeNs - statisticsStartTimeNs;
+      if (elapsedTimeNs <= 0) {
+        return;
+      }
+      final float renderFps = framesRendered * TimeUnit.SECONDS.toNanos(1) / (float) elapsedTimeNs;
+      logD("Duration: " + TimeUnit.NANOSECONDS.toMillis(elapsedTimeNs) + " ms."
+          + " Frames received: " + framesReceived + "."
+          + " Dropped: " + framesDropped + "."
+          + " Rendered: " + framesRendered + "."
+          + " Render fps: " + fpsFormat.format(renderFps) + "."
+          + " Average render time: " + averageTimeAsString(renderTimeNs, framesRendered) + "."
+          + " Average swapBuffer time: "
+          + averageTimeAsString(renderSwapBufferTimeNs, framesRendered) + ".");
+      resetStatistics(currentTimeNs);
+    }
+  }
+
+  private void logD(String string) {
+    Logging.d(TAG, name + string);
+  }
+
+  private void logW(String string) {
+    Logging.w(TAG, name + string);
+  }
+}

+ 139 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/EncodedImage.java

@@ -0,0 +1,139 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * An encoded frame from a video stream. Used as an input for decoders and as an output for
+ * encoders.
+ */
+public class EncodedImage {
+  // Must be kept in sync with common_types.h FrameType.
+  public enum FrameType {
+    EmptyFrame(0),
+    VideoFrameKey(3),
+    VideoFrameDelta(4);
+
+    private final int nativeIndex;
+
+    private FrameType(int nativeIndex) {
+      this.nativeIndex = nativeIndex;
+    }
+
+    public int getNative() {
+      return nativeIndex;
+    }
+
+    @CalledByNative("FrameType")
+    static FrameType fromNativeIndex(int nativeIndex) {
+      for (FrameType type : FrameType.values()) {
+        if (type.getNative() == nativeIndex) {
+          return type;
+        }
+      }
+      throw new IllegalArgumentException("Unknown native frame type: " + nativeIndex);
+    }
+  }
+
+  public final ByteBuffer buffer;
+  public final int encodedWidth;
+  public final int encodedHeight;
+  public final long captureTimeMs; // Deprecated
+  public final long captureTimeNs;
+  public final FrameType frameType;
+  public final int rotation;
+  public final boolean completeFrame;
+  public final Integer qp;
+
+  @CalledByNative
+  private EncodedImage(ByteBuffer buffer, int encodedWidth, int encodedHeight, long captureTimeNs,
+      FrameType frameType, int rotation, boolean completeFrame, Integer qp) {
+    this.buffer = buffer;
+    this.encodedWidth = encodedWidth;
+    this.encodedHeight = encodedHeight;
+    this.captureTimeMs = TimeUnit.NANOSECONDS.toMillis(captureTimeNs);
+    this.captureTimeNs = captureTimeNs;
+    this.frameType = frameType;
+    this.rotation = rotation;
+    this.completeFrame = completeFrame;
+    this.qp = qp;
+  }
+
+  public static Builder builder() {
+    return new Builder();
+  }
+
+  public static class Builder {
+    private ByteBuffer buffer;
+    private int encodedWidth;
+    private int encodedHeight;
+    private long captureTimeNs;
+    private EncodedImage.FrameType frameType;
+    private int rotation;
+    private boolean completeFrame;
+    private Integer qp;
+
+    private Builder() {}
+
+    public Builder setBuffer(ByteBuffer buffer) {
+      this.buffer = buffer;
+      return this;
+    }
+
+    public Builder setEncodedWidth(int encodedWidth) {
+      this.encodedWidth = encodedWidth;
+      return this;
+    }
+
+    public Builder setEncodedHeight(int encodedHeight) {
+      this.encodedHeight = encodedHeight;
+      return this;
+    }
+
+    @Deprecated
+    public Builder setCaptureTimeMs(long captureTimeMs) {
+      this.captureTimeNs = TimeUnit.MILLISECONDS.toNanos(captureTimeMs);
+      return this;
+    }
+
+    public Builder setCaptureTimeNs(long captureTimeNs) {
+      this.captureTimeNs = captureTimeNs;
+      return this;
+    }
+
+    public Builder setFrameType(EncodedImage.FrameType frameType) {
+      this.frameType = frameType;
+      return this;
+    }
+
+    public Builder setRotation(int rotation) {
+      this.rotation = rotation;
+      return this;
+    }
+
+    public Builder setCompleteFrame(boolean completeFrame) {
+      this.completeFrame = completeFrame;
+      return this;
+    }
+
+    public Builder setQp(Integer qp) {
+      this.qp = qp;
+      return this;
+    }
+
+    public EncodedImage createEncodedImage() {
+      return new EncodedImage(buffer, encodedWidth, encodedHeight, captureTimeNs, frameType,
+          rotation, completeFrame, qp);
+    }
+  }
+}
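A hedged sketch of the builder (encodedData and timestampNs are illustrative placeholders for an encoder's output):

  EncodedImage image = EncodedImage.builder()
      .setBuffer(encodedData)
      .setEncodedWidth(1280)
      .setEncodedHeight(720)
      .setCaptureTimeNs(timestampNs)
      .setFrameType(EncodedImage.FrameType.VideoFrameKey)
      .setRotation(0)
      .setCompleteFrame(true)
      .setQp(null)  // QP unknown
      .createEncodedImage();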

+ 22 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/FecControllerFactoryFactoryInterface.java

@@ -0,0 +1,22 @@
+/*
+ *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Factory for creating webrtc::FecControllerFactory instances.
+ */
+public interface FecControllerFactoryFactoryInterface {
+  /**
+   * Dynamically allocates a webrtc::FecControllerFactory instance and returns a pointer to it.
+   * The caller takes ownership of the object.
+   */
+  public long createNative();
+}

+ 201 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/FileVideoCapturer.java

@@ -0,0 +1,201 @@
+/*
+ *  Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.os.SystemClock;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
+import java.nio.charset.Charset;
+import java.util.Timer;
+import java.util.TimerTask;
+import java.util.concurrent.TimeUnit;
+
+public class FileVideoCapturer implements VideoCapturer {
+  private interface VideoReader {
+    VideoFrame getNextFrame();
+    void close();
+  }
+
+  /**
+   * Read video data from file for the .y4m container.
+   */
+  @SuppressWarnings("StringSplitter")
+  private static class VideoReaderY4M implements VideoReader {
+    private static final String TAG = "VideoReaderY4M";
+    private static final String Y4M_FRAME_DELIMETER = "FRAME";
+    private static final int FRAME_DELIMETER_LENGTH = Y4M_FRAME_DELIMETER.length() + 1;
+
+    private final int frameWidth;
+    private final int frameHeight;
+    // First char after header
+    private final long videoStart;
+    private final RandomAccessFile mediaFile;
+    private final FileChannel mediaFileChannel;
+
+    public VideoReaderY4M(String file) throws IOException {
+      mediaFile = new RandomAccessFile(file, "r");
+      mediaFileChannel = mediaFile.getChannel();
+      StringBuilder builder = new StringBuilder();
+      for (;;) {
+        int c = mediaFile.read();
+        if (c == -1) {
+          // End of file reached.
+          throw new RuntimeException("Found end of file before end of header for file: " + file);
+        }
+        if (c == '\n') {
+          // End of header found.
+          break;
+        }
+        builder.append((char) c);
+      }
+      videoStart = mediaFileChannel.position();
+      String header = builder.toString();
+      String[] headerTokens = header.split("[ ]");
+      int w = 0;
+      int h = 0;
+      String colorSpace = "";
+      for (String tok : headerTokens) {
+        char c = tok.charAt(0);
+        switch (c) {
+          case 'W':
+            w = Integer.parseInt(tok.substring(1));
+            break;
+          case 'H':
+            h = Integer.parseInt(tok.substring(1));
+            break;
+          case 'C':
+            colorSpace = tok.substring(1);
+            break;
+        }
+      }
+      Logging.d(TAG, "Color space: " + colorSpace);
+      if (!colorSpace.equals("420") && !colorSpace.equals("420mpeg2")) {
+        throw new IllegalArgumentException(
+            "Does not support any other color space than I420 or I420mpeg2");
+      }
+      if ((w % 2) == 1 || (h % 2) == 1) {
+        throw new IllegalArgumentException("Does not support odd width or height");
+      }
+      frameWidth = w;
+      frameHeight = h;
+      Logging.d(TAG, "frame dim: (" + w + ", " + h + ")");
+    }
+
+    @Override
+    public VideoFrame getNextFrame() {
+      final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
+      final JavaI420Buffer buffer = JavaI420Buffer.allocate(frameWidth, frameHeight);
+      final ByteBuffer dataY = buffer.getDataY();
+      final ByteBuffer dataU = buffer.getDataU();
+      final ByteBuffer dataV = buffer.getDataV();
+      final int chromaHeight = (frameHeight + 1) / 2;
+      final int sizeY = frameHeight * buffer.getStrideY();
+      final int sizeU = chromaHeight * buffer.getStrideU();
+      final int sizeV = chromaHeight * buffer.getStrideV();
+
+      try {
+        ByteBuffer frameDelim = ByteBuffer.allocate(FRAME_DELIMETER_LENGTH);
+        if (mediaFileChannel.read(frameDelim) < FRAME_DELIMETER_LENGTH) {
+          // We reached the end of the file; loop back to the start of the video data.
+          mediaFileChannel.position(videoStart);
+          if (mediaFileChannel.read(frameDelim) < FRAME_DELIMETER_LENGTH) {
+            throw new RuntimeException("Error looping video");
+          }
+        }
+        String frameDelimStr = new String(frameDelim.array(), Charset.forName("US-ASCII"));
+        if (!frameDelimStr.equals(Y4M_FRAME_DELIMETER + "\n")) {
+          throw new RuntimeException(
+              "Frames should be delimited by FRAME plus newline, found delimter was: '"
+              + frameDelimStr + "'");
+        }
+
+        mediaFileChannel.read(dataY);
+        mediaFileChannel.read(dataU);
+        mediaFileChannel.read(dataV);
+      } catch (IOException e) {
+        throw new RuntimeException(e);
+      }
+
+      return new VideoFrame(buffer, 0 /* rotation */, captureTimeNs);
+    }
+
+    @Override
+    public void close() {
+      try {
+        // Closing a file also closes the channel.
+        mediaFile.close();
+      } catch (IOException e) {
+        Logging.e(TAG, "Problem closing file", e);
+      }
+    }
+  }
+
+  private final static String TAG = "FileVideoCapturer";
+  private final VideoReader videoReader;
+  private CapturerObserver capturerObserver;
+  private final Timer timer = new Timer();
+
+  private final TimerTask tickTask = new TimerTask() {
+    @Override
+    public void run() {
+      tick();
+    }
+  };
+
+  public FileVideoCapturer(String inputFile) throws IOException {
+    try {
+      videoReader = new VideoReaderY4M(inputFile);
+    } catch (IOException e) {
+      Logging.d(TAG, "Could not open video file: " + inputFile);
+      throw e;
+    }
+  }
+
+  public void tick() {
+    VideoFrame videoFrame = videoReader.getNextFrame();
+    capturerObserver.onFrameCaptured(videoFrame);
+    videoFrame.release();
+  }
+
+  @Override
+  public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
+      CapturerObserver capturerObserver) {
+    this.capturerObserver = capturerObserver;
+  }
+
+  @Override
+  public void startCapture(int width, int height, int framerate) {
+    timer.schedule(tickTask, 0, 1000 / framerate);
+  }
+
+  @Override
+  public void stopCapture() throws InterruptedException {
+    timer.cancel();
+  }
+
+  @Override
+  public void changeCaptureFormat(int width, int height, int framerate) {
+    // Empty on purpose
+  }
+
+  @Override
+  public void dispose() {
+    videoReader.close();
+  }
+
+  @Override
+  public boolean isScreencast() {
+    return false;
+  }
+}
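An illustrative sketch of driving the capturer (the file path, surfaceTextureHelper, appContext and observer names are placeholders). A valid input is a Y4M file whose header looks like "YUV4MPEG2 W640 H480 F30:1 C420"; per the parser above, only the 420/420mpeg2 color spaces and even dimensions are accepted.

  FileVideoCapturer capturer = new FileVideoCapturer("/sdcard/sample_640x480.y4m");
  capturer.initialize(surfaceTextureHelper, appContext, observer);
  capturer.startCapture(640, 480, 30 /* fps */);
  // ...
  capturer.stopCapture();
  capturer.dispose();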

+ 26 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/FrameDecryptor.java

@@ -0,0 +1,26 @@
+/*
+ *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * The FrameDecryptor interface allows Java API users to provide a
+ * pointer to their native implementation of the FrameDecryptorInterface.
+ * FrameDecryptors are extremely performance sensitive as they must process all
+ * incoming video and audio frames. For this reason they should always be
+ * backed by a native implementation.
+ * @note Not ready for production use.
+ */
+public interface FrameDecryptor {
+  /**
+   * @return A FrameDecryptorInterface pointer.
+   */
+  long getNativeFrameDecryptor();
+}

+ 26 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/FrameEncryptor.java

@@ -0,0 +1,26 @@
+/*
+ *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * The FrameEncryptor interface allows Java API users to provide a pointer to
+ * their native implementation of the FrameEncryptorInterface.
+ * FrameEncryptors are extremely performance sensitive as they must process all
+ * outgoing video and audio frames. For this reason they should always be
+ * backed by a native implementation.
+ * @note Not ready for production use.
+ */
+public interface FrameEncryptor {
+  /**
+   * @return A FrameEncryptorInterface pointer.
+   */
+  long getNativeFrameEncryptor();
+}

+ 0 - 0
libwebrtc/src/main/java/sdk/android/api/org/webrtc/GlRectDrawer.java


Some files were not shown because too many files changed in this diff