diff --git a/.hgtags b/.hgtags
index 01dc7543d74e..296a936df8de 100644
--- a/.hgtags
+++ b/.hgtags
@@ -274,3 +274,6 @@ b8084fc54e158a2adcdac710ea0c3a7c9559309e FIREFOX_NIGHTLY_119_END
 e891ad8158b3106dceb2199ed2c74c0c2f05fa88 FIREFOX_BETA_120_BASE
 e891ad8158b3106dceb2199ed2c74c0c2f05fa88 FIREFOX_BETA_120_BASE
 0000000000000000000000000000000000000000 FIREFOX_BETA_120_BASE
+0000000000000000000000000000000000000000 FIREFOX_BETA_120_BASE
+ffe93e4e0835b70448d485c82e3900bc4f47f205 FIREFOX_BETA_120_BASE
+fc01df84fa6d91b9dabb4b608eb947cef3230871 FIREFOX_NIGHTLY_120_END
diff --git a/CLOBBER b/CLOBBER
index 76c565a589d6..4b5cff82ef2a 100644
--- a/CLOBBER
+++ b/CLOBBER
@@ -22,4 +22,4 @@
 # changes to stick? As of bug 928195, this shouldn't be necessary! Please
 # don't change CLOBBER for WebIDL changes any more.
 
-Modified build files in third_party/libwebrtc - Bug 1855330 - pt4 - BUILD.gn fixes for updated libwebrtc/third_party. r?ng!
+Modified build files in third_party/libwebrtc - Bug 1857862 - Vendor libwebrtc from b38d9d2b6f
diff --git a/browser/config/version.txt b/browser/config/version.txt
index f5d18d2f71bc..ff40f6958029 100644
--- a/browser/config/version.txt
+++ b/browser/config/version.txt
@@ -1 +1 @@
-120.0a1
+121.0a1
diff --git a/browser/config/version_display.txt b/browser/config/version_display.txt
index f5d18d2f71bc..ff40f6958029 100644
--- a/browser/config/version_display.txt
+++ b/browser/config/version_display.txt
@@ -1 +1 @@
-120.0a1
+121.0a1
diff --git a/config/milestone.txt b/config/milestone.txt
index df7d85faf041..d7d8d3dc3ff4 100644
--- a/config/milestone.txt
+++ b/config/milestone.txt
@@ -10,4 +10,4 @@
 # hardcoded milestones in the tree from these two files.
 #--------------------------------------------------------
 
-120.0a1
+121.0a1
diff --git a/dom/media/systemservices/video_engine/desktop_capture_impl.cc b/dom/media/systemservices/video_engine/desktop_capture_impl.cc
index 3f03789fa394..532605b813d4 100644
--- a/dom/media/systemservices/video_engine/desktop_capture_impl.cc
+++ b/dom/media/systemservices/video_engine/desktop_capture_impl.cc
@@ -361,8 +361,15 @@ static DesktopCaptureOptions CreateDesktopCaptureOptions() {
   }
   options.set_allow_cropping_window_capturer(true);
 #  if defined(RTC_ENABLE_WIN_WGC)
-  if (mozilla::StaticPrefs::media_webrtc_capture_allow_wgc()) {
-    options.set_allow_wgc_capturer(true);
+  if (mozilla::StaticPrefs::media_webrtc_capture_screen_allow_wgc()) {
+    options.set_allow_wgc_screen_capturer(true);
+    options.set_allow_wgc_zero_hertz(
+        mozilla::StaticPrefs::media_webrtc_capture_wgc_allow_zero_hertz());
+  }
+  if (mozilla::StaticPrefs::media_webrtc_capture_window_allow_wgc()) {
+    options.set_allow_wgc_window_capturer(true);
+    options.set_allow_wgc_zero_hertz(
+        mozilla::StaticPrefs::media_webrtc_capture_wgc_allow_zero_hertz());
   }
 #  endif
 #endif
diff --git a/dom/media/webrtc/jsapi/RTCEncodedAudioFrame.cpp b/dom/media/webrtc/jsapi/RTCEncodedAudioFrame.cpp
index 3a3395934425..43f7a1f13b22 100644
--- a/dom/media/webrtc/jsapi/RTCEncodedAudioFrame.cpp
+++ b/dom/media/webrtc/jsapi/RTCEncodedAudioFrame.cpp
@@ -60,7 +60,9 @@ RTCEncodedAudioFrame::RTCEncodedAudioFrame(
     Unused << mMetadata.mContributingSources.Value().AppendElement(csrc,
                                                                    fallible);
   }
-  mMetadata.mSequenceNumber.Construct(audioFrame.GetHeader().sequenceNumber);
+  if (const auto optionalSeqNum = audioFrame.SequenceNumber()) {
+    mMetadata.mSequenceNumber.Construct(*optionalSeqNum);
+  }
 }
 
 // Base class needs this, but can't do it itself because of an assertion in
diff --git a/dom/media/webrtc/third_party_build/default_config_env b/dom/media/webrtc/third_party_build/default_config_env
index 8cf50174c0a0..b3cbad8dd0cf 100644
--- a/dom/media/webrtc/third_party_build/default_config_env
+++ b/dom/media/webrtc/third_party_build/default_config_env
@@ -5,41 +5,41 @@ export MOZ_LIBWEBRTC_SRC=$STATE_DIR/moz-libwebrtc
 
 # The previous fast-forward bug number is used for some error messaging.
-export MOZ_PRIOR_FASTFORWARD_BUG="1847074"
+export MOZ_PRIOR_FASTFORWARD_BUG="1851693"
 
 # Fast-forwarding each Chromium version of libwebrtc should be done
 # under a separate bugzilla bug. This bug number is used when crafting
 # the commit summary as each upstream commit is vendored into the
 # mercurial repository. The bug used for the v106 fast-forward was
 # 1800920.
-export MOZ_FASTFORWARD_BUG="1851693"
+export MOZ_FASTFORWARD_BUG="1857862"
 
 # MOZ_NEXT_LIBWEBRTC_MILESTONE and MOZ_NEXT_FIREFOX_REL_TARGET are
 # not used during fast-forward processing, but facilitate generating this
 # default config. To generate an default config for the next update, run
 # bash dom/media/webrtc/third_party_build/update_default_config_env.sh
-export MOZ_NEXT_LIBWEBRTC_MILESTONE=116
-export MOZ_NEXT_FIREFOX_REL_TARGET=120
+export MOZ_NEXT_LIBWEBRTC_MILESTONE=117
+export MOZ_NEXT_FIREFOX_REL_TARGET=121
 
 # For Chromium release branches, see:
 # https://chromiumdash.appspot.com/branches
 
-# Chromium's v115 release branch was 5790. This is used to pre-stack
+# Chromium's v116 release branch was 5845. This is used to pre-stack
 # the previous release branch's commits onto the appropriate base commit
 # (the first common commit between trunk and the release branch).
-export MOZ_PRIOR_UPSTREAM_BRANCH_HEAD_NUM="5790"
+export MOZ_PRIOR_UPSTREAM_BRANCH_HEAD_NUM="5845"
 
-# New target release branch for v116 is branch-heads/5845. This is used
+# New target release branch for v117 is branch-heads/5938. This is used
 # to calculate the next upstream commit.
-export MOZ_TARGET_UPSTREAM_BRANCH_HEAD="branch-heads/5845"
+export MOZ_TARGET_UPSTREAM_BRANCH_HEAD="branch-heads/5938"
 
 # For local development 'mozpatches' is fine for a branch name, but when
 # pushing the patch stack to github, it should be named something like
-# 'moz-mods-chr116-for-rel120'.
+# 'moz-mods-chr117-for-rel121'.
 export MOZ_LIBWEBRTC_BRANCH="mozpatches"
 
 # After elm has been merged to mozilla-central, the patch stack in
 # moz-libwebrtc should be pushed to github. The script
 # push_official_branch.sh uses this branch name when pushing to the
 # public repo.
-export MOZ_LIBWEBRTC_OFFICIAL_BRANCH="moz-mods-chr116-for-rel120"
+export MOZ_LIBWEBRTC_OFFICIAL_BRANCH="moz-mods-chr117-for-rel121"
diff --git a/dom/media/webrtc/third_party_build/vendor-libwebrtc.py b/dom/media/webrtc/third_party_build/vendor-libwebrtc.py
index 918e423e4c11..093486177866 100644
--- a/dom/media/webrtc/third_party_build/vendor-libwebrtc.py
+++ b/dom/media/webrtc/third_party_build/vendor-libwebrtc.py
@@ -116,7 +116,6 @@ def get_included_path_overrides():
         "sdk/android/api/org/webrtc/DataChannel.java",
         "sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java",
         "sdk/android/api/org/webrtc/audio/AudioDeviceModule.java",
-        "sdk/android/api/org/webrtc/audio/LegacyAudioDeviceModule.java",
         "sdk/android/api/org/webrtc/SessionDescription.java",
         "sdk/android/api/org/webrtc/GlUtil.java",
         "sdk/android/api/org/webrtc/VideoSource.java",
diff --git a/dom/media/webrtc/third_party_build/verify_vendoring.sh b/dom/media/webrtc/third_party_build/verify_vendoring.sh
index 452f12d259fc..54f3260e477e 100644
--- a/dom/media/webrtc/third_party_build/verify_vendoring.sh
+++ b/dom/media/webrtc/third_party_build/verify_vendoring.sh
@@ -61,11 +61,7 @@ Typically this is due to changes made in mercurial to files residing under
 third_party/libwebrtc that have not been reflected in moz-libwebrtc git
 repo's patch-stack.
 
-Frequently using 'hg log' on the files listed with 'hg status' will give
-information on commits that need to be added to the patch-stack. After
-identifying a commit or commit range from mercurial that should be added
-to the git patch-stack, the following commands should help remedy the
-situation:
+The following commands should help remedy the situation:
   ./mach python $SCRIPT_DIR/extract-for-git.py $COMMIT_AFTER_PATCHSTACK_UPDATE::tip
   mv mailbox.patch $MOZ_LIBWEBRTC_SRC
   (cd $MOZ_LIBWEBRTC_SRC && \\
diff --git a/mobile/android/geckoview/build.gradle b/mobile/android/geckoview/build.gradle
index 519a85395abf..7550be98bb73 100644
--- a/mobile/android/geckoview/build.gradle
+++ b/mobile/android/geckoview/build.gradle
@@ -109,7 +109,6 @@ android {
                     srcDir "${topsrcdir}/third_party/libwebrtc/sdk/android/api"
                     srcDir "${topsrcdir}/third_party/libwebrtc/sdk/android/src"
                     srcDir "${topsrcdir}/third_party/libwebrtc/rtc_base/java"
-                    srcDir "${topsrcdir}/third_party/libwebrtc/modules/audio_device/android/java"
                 }
 
                 srcDir "${topobjdir}/mobile/android/geckoview/src/main/java"
diff --git a/modules/libpref/init/StaticPrefList.yaml b/modules/libpref/init/StaticPrefList.yaml
index 47713922f26a..b60c704dca34 100644
--- a/modules/libpref/init/StaticPrefList.yaml
+++ b/modules/libpref/init/StaticPrefList.yaml
@@ -10812,7 +10812,17 @@
   value: true
   mirror: always
 
-- name: media.webrtc.capture.allow-wgc
+- name: media.webrtc.capture.screen.allow-wgc
+  type: RelaxedAtomicBool
+  value: false
+  mirror: always
+
+- name: media.webrtc.capture.window.allow-wgc
+  type: RelaxedAtomicBool
+  value: false
+  mirror: always
+
+- name: media.webrtc.capture.wgc.allow-zero-hertz
   type: RelaxedAtomicBool
   value: false
   mirror: always
diff --git a/services/sync/modules/constants.sys.mjs b/services/sync/modules/constants.sys.mjs
index c72d458b4521..7d895d0c5c71 100644
--- a/services/sync/modules/constants.sys.mjs
+++ b/services/sync/modules/constants.sys.mjs
@@ -4,7 +4,7 @@
 
 // Don't manually modify this line, as it is automatically replaced on merge day
 // by the gecko_migration.py script.
-export const WEAVE_VERSION = "1.122.0";
+export const WEAVE_VERSION = "1.123.0";
 
 // Sync Server API version that the client supports.
export const SYNC_API_VERSION = "1.5"; diff --git a/third_party/libwebrtc/.gn b/third_party/libwebrtc/.gn index d5cf5c031c1c..3208b6bb4e07 100644 --- a/third_party/libwebrtc/.gn +++ b/third_party/libwebrtc/.gn @@ -76,7 +76,7 @@ default_args = { # Chromium fix resolves the problem. fuchsia_sdk_readelf_exec = "//third_party/llvm-build/Release+Asserts/bin/llvm-readelf" - fuchsia_target_api_level = 9 + fuchsia_target_api_level = 11 use_cxx17 = true } diff --git a/third_party/libwebrtc/BUILD.gn b/third_party/libwebrtc/BUILD.gn index 72bfb3393c30..a7771d29c027 100644 --- a/third_party/libwebrtc/BUILD.gn +++ b/third_party/libwebrtc/BUILD.gn @@ -64,12 +64,12 @@ if (!build_with_chromium && !build_with_mozilla) { "modules/video_capture:video_capture_internal_impl", "modules/video_coding:video_codec_perf_tests", "net/dcsctp:dcsctp_unittests", - "pc:peer_connection_mediachannel_split_unittests", "pc:peerconnection_unittests", "pc:rtc_pc_unittests", "pc:slow_peer_connection_unittests", "pc:svc_tests", "rtc_tools:rtp_generator", + "rtc_tools:video_encoder", "rtc_tools:video_replay", "stats:rtc_stats_unittests", "system_wrappers:system_wrappers_unittests", @@ -138,6 +138,14 @@ config("common_inherited_config") { cflags = [] ldflags = [] + if (rtc_jni_generator_legacy_symbols) { + defines += [ "RTC_JNI_GENERATOR_LEGACY_SYMBOLS" ] + } + + if (rtc_objc_prefix != "") { + defines += [ "RTC_OBJC_TYPE_PREFIX=${rtc_objc_prefix}" ] + } + if (rtc_dlog_always_on) { defines += [ "DLOG_ALWAYS_ON" ] } diff --git a/third_party/libwebrtc/DEPS b/third_party/libwebrtc/DEPS index 315a7838ad73..6c1c21187fbc 100644 --- a/third_party/libwebrtc/DEPS +++ b/third_party/libwebrtc/DEPS @@ -10,7 +10,7 @@ vars = { # chromium waterfalls. More info at: crbug.com/570091. 'checkout_configuration': 'default', 'checkout_instrumented_libraries': 'checkout_linux and checkout_configuration == "default"', - 'chromium_revision': '8603a0cee25d1cc4d701ef742b3df7c775440524', + 'chromium_revision': '6b95b8aa08c9d0a1529f49dcdb2b17afe36a0ca5', # Fetch the prebuilt binaries for llvm-cov and llvm-profdata. Needed to # process the raw profiles produced by instrumented targets (built with @@ -25,7 +25,7 @@ vars = { # By default, download the fuchsia sdk from the public sdk directory. 'fuchsia_sdk_cipd_prefix': 'fuchsia/sdk/core/', - 'fuchsia_version': 'version:13.20230615.1.1', + 'fuchsia_version': 'version:13.20230714.0.1', # By default, download the fuchsia images from the fuchsia GCS bucket. 'fuchsia_images_bucket': 'fuchsia', 'checkout_fuchsia': False, @@ -35,8 +35,12 @@ vars = { 'checkout_fuchsia_boot_images': "terminal.qemu-x64", 'checkout_fuchsia_product_bundles': '"{checkout_fuchsia_boot_images}" != ""', + # Fetch configuration files required for the 'use_remoteexec' gn arg + 'download_remoteexec_cfg': False, + # RBE instance to use for running remote builds + 'rbe_instance': 'projects/rbe-webrtc-developer/instances/default_instance', # reclient CIPD package version - 'reclient_version': 're_client_version:0.108.0.7cdbbe9-gomaip', + 'reclient_version': 're_client_version:0.110.0.43ec6b1-gomaip', # ninja CIPD package version # https://chrome-infra-packages.appspot.com/p/infra/3pp/tools/ninja @@ -46,30 +50,30 @@ vars = { deps = { # TODO(kjellander): Move this to be Android-only. 
'src/base': - 'https://chromium.googlesource.com/chromium/src/base@ca4474373784d15364b5d190970e5bdfa1544c2a', + 'https://chromium.googlesource.com/chromium/src/base@dcfe245ca1e349e0f71279dc975e02b6676ce0be', 'src/build': - 'https://chromium.googlesource.com/chromium/src/build@6c0e0e0c84aa581f9bfa042e511dc9aaffa8fd82', + 'https://chromium.googlesource.com/chromium/src/build@b74cdc4550c2d21b53fd73bb5f5cec13466454a2', 'src/buildtools': - 'https://chromium.googlesource.com/chromium/src/buildtools@3739a3619309af3b788379ad0936ca00b981616e', + 'https://chromium.googlesource.com/chromium/src/buildtools@16be42a9ff1f7e4a3e53b93b3adc181fa7ff9161', # Gradle 6.6.1. Used for testing Android Studio project generation for WebRTC. 'src/examples/androidtests/third_party/gradle': { 'url': 'https://chromium.googlesource.com/external/github.com/gradle/gradle.git@f2d1fb54a951d8b11d25748e4711bec8d128d7e3', 'condition': 'checkout_android', }, 'src/ios': { - 'url': 'https://chromium.googlesource.com/chromium/src/ios@0df9bead2936138bd3853fdf826b29470cfa517e', + 'url': 'https://chromium.googlesource.com/chromium/src/ios@51a637843f008fc981cf7f6401cbddc5a59aa59c', 'condition': 'checkout_ios', }, 'src/testing': - 'https://chromium.googlesource.com/chromium/src/testing@f3b8f1d8c1d7ca49f9a77b8e669c357572f4447c', + 'https://chromium.googlesource.com/chromium/src/testing@b946312a8903933cd21deb8e3a8dae4275391dda', 'src/third_party': - 'https://chromium.googlesource.com/chromium/src/third_party@770155421d251b9541301084d0db46812540c251', + 'https://chromium.googlesource.com/chromium/src/third_party@93b5f4c4085cdef6b3d742153ab934aa0c7c46df', 'src/buildtools/linux64': { 'packages': [ { 'package': 'gn/gn/linux-${{arch}}', - 'version': 'git_revision:4bd1a77e67958fb7f6739bd4542641646f264e5d', + 'version': 'git_revision:fae280eabe5d31accc53100137459ece19a7a295', } ], 'dep_type': 'cipd', @@ -79,7 +83,7 @@ deps = { 'packages': [ { 'package': 'gn/gn/mac-${{arch}}', - 'version': 'git_revision:4bd1a77e67958fb7f6739bd4542641646f264e5d', + 'version': 'git_revision:fae280eabe5d31accc53100137459ece19a7a295', } ], 'dep_type': 'cipd', @@ -89,7 +93,7 @@ deps = { 'packages': [ { 'package': 'gn/gn/windows-amd64', - 'version': 'git_revision:4bd1a77e67958fb7f6739bd4542641646f264e5d', + 'version': 'git_revision:fae280eabe5d31accc53100137459ece19a7a295', } ], 'dep_type': 'cipd', @@ -111,11 +115,11 @@ deps = { 'src/third_party/clang-format/script': 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/clang/tools/clang-format.git@f97059df7f8b205064625cdb5f97b56668a125ef', 'src/buildtools/third_party/libc++/trunk': - 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxx.git@055b2e17ae4f0e2c025ad0c7508b01787df17758', + 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxx.git@84fb809dd6dae36d556dc0bb702c6cc2ce9d4b80', 'src/buildtools/third_party/libc++abi/trunk': - 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxxabi.git@c2a35d1b2cf4b6ca85f5235c76ad9b1aff97e801', + 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxxabi.git@d4760c0af99ccc9bce077960d5ddde4d66146c05', 'src/buildtools/third_party/libunwind/trunk': - 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libunwind.git@b5a43ecdac82a248f8a700a68c722b4d98708377', + 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libunwind.git@6c0013015be8a2be9de4b1e54cdc9d576b1d0729', 'src/third_party/ninja': { 'packages': [ @@ -131,7 +135,7 
@@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_system_sdk/public', - 'version': 'RGY8Vyf8jjszRIJRFxZj7beXSUEHTQM90MtYejUvdMgC', + 'version': '4QeolYaSKWBtVTgzJU4tHUfzA9OJTDM8YUcD426IctwC', }, ], 'condition': 'checkout_android', @@ -162,7 +166,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_build_tools/bundletool', - 'version': 'LbB0aRQ3VKjRJZmc_PD0VTZ1O34_zD92rh71aOEOEcEC', + 'version': '2PJKytTLILAjCO3G7sCO27FO48XB9qrRTHp420zr5G0C', }, ], 'condition': 'checkout_android', @@ -170,11 +174,11 @@ deps = { }, 'src/third_party/boringssl/src': - 'https://boringssl.googlesource.com/boringssl.git@ae88f198a49d77993e9c44b017d0e69c810dc668', + 'https://boringssl.googlesource.com/boringssl.git@b98ce18c5b3f0c28bd64b27b6494f176404da4e4', 'src/third_party/breakpad/breakpad': 'https://chromium.googlesource.com/breakpad/breakpad.git@8988364bcddd9b194b0bf931c10bc125987330ed', 'src/third_party/catapult': - 'https://chromium.googlesource.com/catapult.git@89fad9023d62d7031789a904b2aa6bd1d4d0a3e2', + 'https://chromium.googlesource.com/catapult.git@84e3795d98335b636d0be626b1833026a5c6d60b', 'src/third_party/ced/src': { 'url': 'https://chromium.googlesource.com/external/github.com/google/compact_enc_det.git@ba412eaaacd3186085babcd901679a48863c7dd5', }, @@ -187,11 +191,11 @@ deps = { 'src/third_party/crc32c/src': 'https://chromium.googlesource.com/external/github.com/google/crc32c.git@fa5ade41ee480003d9c5af6f43567ba22e4e17e6', 'src/third_party/depot_tools': - 'https://chromium.googlesource.com/chromium/tools/depot_tools.git@3ffad8166e1c233624dcac4e5a12a59944f1231a', + 'https://chromium.googlesource.com/chromium/tools/depot_tools.git@54e86436a88ed946752b818769fb74ba9938603b', 'src/third_party/ffmpeg': 'https://chromium.googlesource.com/chromium/third_party/ffmpeg.git@881c5c3f6412020c37e97e178e0f5da9ddd2ae90', 'src/third_party/flatbuffers/src': - 'https://chromium.googlesource.com/external/github.com/google/flatbuffers.git@13fc75cb6b7b44793f3f5b4ba025ff403d012c9f', + 'https://chromium.googlesource.com/external/github.com/google/flatbuffers.git@28861d1d7d5ec6ce34d4bbdc10bec4aace341167', 'src/third_party/grpc/src': { 'url': 'https://chromium.googlesource.com/external/github.com/grpc/grpc.git@822dab21d9995c5cf942476b35ca12a1aa9d2737', }, @@ -201,7 +205,7 @@ deps = { 'condition': 'checkout_linux', }, 'src/third_party/freetype/src': - 'https://chromium.googlesource.com/chromium/src/third_party/freetype2.git@5c00a46805d6423fc45b4ba2c0f2e22dd0450d73', + 'https://chromium.googlesource.com/chromium/src/third_party/freetype2.git@9e3c5d7e183c1a8d5ed8868d7d28ef18d3ec9ec8', 'src/third_party/harfbuzz-ng/src': 'https://chromium.googlesource.com/external/github.com/harfbuzz/harfbuzz.git@db700b5670d9475cc8ed4880cc9447b232c5e432', 'src/third_party/google_benchmark/src': { @@ -229,7 +233,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/jdk', - 'version': '2Of9Pe_OdO4xoAATuiLDiMVNebKTNO3WrwJGqil4RosC', + 'version': 'IivIDwNBf73mf7UwCOBceRUuDdtizMCgSOQDfUGHArsC', }, ], 'condition': 'host_os == "linux" and checkout_android', @@ -258,7 +262,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/kotlin_stdlib', - 'version': 'z4_AYYz2Tw5GKikuiDLTuxxf0NJVGLkC3CVcyiIpc-gC', + 'version': 'Z1gsqhL967kFQecxKrRwXHbl-vwQjpv0l7PMUZ0EVO8C', }, ], 'condition': 'checkout_android', @@ -269,7 +273,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/kotlinc', - 'version': 'QSwE30iq_KLKxImEnQEwDMQM_cU10eZSAwNobs8BEsoC', + 'version': 
'Rr02Gf2EkaeSs3EhSUHhPqDHSd1AzimrM6cRYUJCPjQC', }, ], 'condition': 'checkout_android', @@ -279,23 +283,23 @@ deps = { 'src/third_party/libFuzzer/src': 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/compiler-rt/lib/fuzzer.git@26cc39e59b2bf5cbc20486296248a842c536878d', 'src/third_party/libjpeg_turbo': - 'https://chromium.googlesource.com/chromium/deps/libjpeg_turbo.git@aa4075f116e4312537d0d3e9dbd5e31096539f94', + 'https://chromium.googlesource.com/chromium/deps/libjpeg_turbo.git@30bdb85e302ecfc52593636b2f44af438e05e784', 'src/third_party/libsrtp': 'https://chromium.googlesource.com/chromium/deps/libsrtp.git@5b7c744eb8310250ccc534f3f86a2015b3887a0a', 'src/third_party/dav1d/libdav1d': 'https://chromium.googlesource.com/external/github.com/videolan/dav1d.git@f8ae94eca0f53502a2cddd29a263c1edea4822a0', 'src/third_party/libaom/source/libaom': - 'https://aomedia.googlesource.com/aom.git@233000f66e9ff0bb09226a2f222a029bb4c89de6', + 'https://aomedia.googlesource.com/aom.git@af2b02ce0529f150723ace91f4918b180c651bf2', 'src/third_party/libunwindstack': { 'url': 'https://chromium.googlesource.com/chromium/src/third_party/libunwindstack.git@4dbfa0e8c844c8e243b297bc185e54a99ff94f9e', 'condition': 'checkout_android', }, 'src/third_party/perfetto': - 'https://android.googlesource.com/platform/external/perfetto.git@0ba4c2cd12264c4d33787fb700b93c67ee9fbc11', + 'https://android.googlesource.com/platform/external/perfetto.git@ab16995d92ae1354633b3025682bc34c29411c20', 'src/third_party/libvpx/source/libvpx': - 'https://chromium.googlesource.com/webm/libvpx.git@2245df50a6d360d33fccd51479c48f2210ed607a', + 'https://chromium.googlesource.com/webm/libvpx.git@9ad950a9c4f8906fcdabb3402db1a17a9d99ee4c', 'src/third_party/libyuv': - 'https://chromium.googlesource.com/libyuv/libyuv.git@552571e8b24b2619c39ec176e6cb8e75d3e7fdd3', + 'https://chromium.googlesource.com/libyuv/libyuv.git@04821d1e7d60845525e8db55c7bcd41ef5be9406', 'src/third_party/lss': { 'url': 'https://chromium.googlesource.com/linux-syscall-support.git@ce877209e11aa69dcfffbd53ef90ea1d07136521', 'condition': 'checkout_android or checkout_linux', @@ -316,7 +320,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/r8', - 'version': 'vw5kLlW3-suSlCKSO9OQpFWpR8oDnvQ8k1RgKNUapQYC', + 'version': 'O1BBWiBTIeNUcraX8STMtQXVaCleu6SJJjWCcnfhPLkC', }, ], 'condition': 'checkout_android', @@ -329,18 +333,18 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/r8', - 'version': 'PwglNZFRNPkBBXdnY9NfrZFk2ULWDTRxhV9rl2kvkpUC', + 'version': 'vw5kLlW3-suSlCKSO9OQpFWpR8oDnvQ8k1RgKNUapQYC', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, 'src/third_party/requests/src': { - 'url': 'https://chromium.googlesource.com/external/github.com/kennethreitz/requests.git@refs/tags/v2.23.0', + 'url': 'https://chromium.googlesource.com/external/github.com/kennethreitz/requests.git@c7e0fc087ceeadb8b4c84a0953a422c474093d6d', 'condition': 'checkout_android', }, 'src/tools': - 'https://chromium.googlesource.com/chromium/src/tools@eb2e55cf816468d0b8899ce5d8429f7eb8c42f01', + 'https://chromium.googlesource.com/chromium/src/tools@4057b98943f73331c3198b46f860533ecd692585', 'src/third_party/accessibility_test_framework': { 'packages': [ @@ -401,7 +405,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_toolchain/android_toolchain', - 'version': 'version:2@r25c.cr1', + 'version': 'R_8suM8m0oHbZ1awdxGXvKEFpAOETscbfZxkkMthyk8C', }, ], 'condition': 'checkout_android', @@ -412,7 +416,7 @@ deps = { 'packages': [ { 'package': 
'chromium/third_party/androidx', - 'version': 'MqkmMx1Ct4Fk2Vb_FY05yLzXxVnH9evr2OqP6tpU9MEC', + 'version': 'y7rF_rx56mD3FGhMiqnlbQ6HOqHJ95xUFNX1m-_a988C', }, ], 'condition': 'checkout_android', @@ -423,7 +427,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_build_tools/manifest_merger', - 'version': 'UNXioFXYvz7k7UmE2WYAaXuYIK3Ky0aSQ0IuDEdS9soC', + 'version': '8fr-1Vf_pfxN9ulzWVaZvIXggDgWDs-0dtlGA1Sue48C', }, ], 'condition': 'checkout_android', @@ -433,8 +437,8 @@ deps = { 'src/third_party/android_sdk/public': { 'packages': [ { - 'package': 'chromium/third_party/android_sdk/public/build-tools/33.0.0', - 'version': '-VRKr36Uw8L_iFqqo9nevIBgNMggND5iWxjidyjnCgsC', + 'package': 'chromium/third_party/android_sdk/public/build-tools/34.0.0', + 'version': 'YK9Rzw3fDzMHVzatNN6VlyoD_81amLZpN1AbmkdOd6AC', }, { 'package': 'chromium/third_party/android_sdk/public/emulator', @@ -446,11 +450,11 @@ deps = { }, { 'package': 'chromium/third_party/android_sdk/public/platform-tools', - 'version': 'RSI3iwryh7URLGRgJHsCvUxj092woTPnKt4pwFcJ6L8C', + 'version': 'HWVsGs2HCKgSVv41FsOcsfJbNcB0UFiNrF6Tc4yRArYC', }, { - 'package': 'chromium/third_party/android_sdk/public/platforms/android-33', - 'version': 'eo5KvW6UVor92LwZai8Zulc624BQZoCu-yn7wa1z_YcC', + 'package': 'chromium/third_party/android_sdk/public/platforms/android-34', + 'version': 'u-bhWbTME6u-DjypTgr3ZikCyeAeU6txkR9ET6Uudc8C', }, { 'package': 'chromium/third_party/android_sdk/public/platforms/android-tiramisuprivacysandbox', @@ -517,7 +521,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/turbine', - 'version': 'G8Cku1fztaDd9to_8gk3PNWM2iRacAxD9zcUAgUPUEAC', + 'version': '2I2Nz480QsuCxpQ1lMfbigX8l5HAhX3_ykWU4TKRGo4C', }, ], 'condition': 'checkout_android', @@ -528,11 +532,11 @@ deps = { 'packages': [ { 'package': 'infra/tools/luci/isolate/${{platform}}', - 'version': 'git_revision:39f255d5875293d3e1d978888b819ac124a8b0cc', + 'version': 'git_revision:f02582af78f530a7bbfe2f059fa5d211c9517756', }, { 'package': 'infra/tools/luci/swarming/${{platform}}', - 'version': 'git_revision:39f255d5875293d3e1d978888b819ac124a8b0cc', + 'version': 'git_revision:f02582af78f530a7bbfe2f059fa5d211c9517756', }, ], 'dep_type': 'cipd', @@ -1748,7 +1752,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_squareup_okio_okio_jvm', - 'version': 'version:2@3.0.0.cr1', + 'version': 'version:2@3.3.0.cr1', }, ], 'condition': 'checkout_android', @@ -1759,7 +1763,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_squareup_wire_wire_runtime_jvm', - 'version': 'version:2@4.5.1.cr1', + 'version': 'version:2@4.7.0.cr1', }, ], 'condition': 'checkout_android', @@ -1891,7 +1895,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/net_bytebuddy_byte_buddy', - 'version': 'version:2@1.14.4.cr1', + 'version': 'version:2@1.14.5.cr1', }, ], 'condition': 'checkout_android', @@ -1902,7 +1906,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/net_bytebuddy_byte_buddy_agent', - 'version': 'version:2@1.14.4.cr1', + 'version': 'version:2@1.14.5.cr1', }, ], 'condition': 'checkout_android', @@ -2111,7 +2115,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_mockito_mockito_android', - 'version': 'version:2@5.3.1.cr1', + 'version': 'version:2@5.4.0.cr1', }, ], 'condition': 'checkout_android', @@ -2122,7 +2126,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_mockito_mockito_core', - 
'version': 'version:2@5.3.1.cr1', + 'version': 'version:2@5.4.0.cr1', }, ], 'condition': 'checkout_android', @@ -2133,7 +2137,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_mockito_mockito_subclass', - 'version': 'version:2@5.3.1.cr1', + 'version': 'version:2@5.4.0.cr1', }, ], 'condition': 'checkout_android', @@ -2693,6 +2697,21 @@ hooks = [ '-vpython-tool', 'install', ], }, + # Download remote exec cfg files + { + 'name': 'fetch_reclient_cfgs', + 'pattern': '.', + 'condition': 'download_remoteexec_cfg', + 'action': ['python3', + 'src/buildtools/reclient_cfgs/fetch_reclient_cfgs.py', + '--rbe_instance', + Var('rbe_instance'), + '--reproxy_cfg_template', + 'reproxy.cfg.template', + '--quiet', + '--hook', + ], + }, ] recursedeps = [] diff --git a/third_party/libwebrtc/README.moz-ff-commit b/third_party/libwebrtc/README.moz-ff-commit index 1a7b3de8e657..f3bf82b28fe7 100644 --- a/third_party/libwebrtc/README.moz-ff-commit +++ b/third_party/libwebrtc/README.moz-ff-commit @@ -24660,3 +24660,624 @@ aa6c910f47 # MOZ_LIBWEBRTC_SRC=/Users/mfroman/no_tm_backup/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh # base of lastest vendoring 402f60c2ea +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +c4e0254909 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d1780836f4 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +ee97e6ad88 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +4e434c313e +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d7c7b07c5d +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +17e8a5cc7d +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d20bbc4a15 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +0510463439 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +84fdf990e8 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +5246ae20a2 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash 
dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +c3a74024bf +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +afa0f22070 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +eec1810760 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +423faa6067 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +656817c485 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +eee10391ca +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +00a8576a67 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +58e97b8600 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +93008bde6c +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +8beb6314ef +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d0c86830d0 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +b55c63b8b5 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +c9d96dfebe +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +589ee5ae62 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +fc68f1f7d9 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +dcf600d7a5 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +48a2af35e1 +# 
MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +0776415a41 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +cde980fa46 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +269a3d415e +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +0c1c722185 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +38aa4ef5f4 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +34589929fe +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +9a3ab3dcca +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +69998be21c +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +e4e33b8ee3 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +365a5717ae +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +299cdc9057 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +0606eafb9f +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +58ee9dff08 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +3adaeefbc6 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +c26d96ac56 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +be90237a0a +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash 
dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +a2e945e042 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +ceabb9e8e5 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +96293f0876 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +1f31c201cd +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +533a97be2b +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +54d7547faf +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +7a24f2a7eb +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +ea668e36a9 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +7e4b2a5265 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +2d7ccb4149 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +396d8c8a44 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +9e2567cfa1 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +9c62b32d28 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d797cb6ca7 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +cabd77a5c7 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +b4062e5611 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +f92cc6d7b4 +# 
MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +ba50cb322c +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +fad6617506 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +079b36bd4f +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +9948623027 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +415e30fdbb +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +becfe2e571 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +6fc700ec3d +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +2cf8eb9f78 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d51dada719 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +7a35f108c7 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +e21745a78b +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +283c3df2ac +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +aad4ae54d4 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +0725072604 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d4bd50b018 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +758f26852d +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash 
dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +c2919fe1e2 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +5f4a7e004e +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +804ff860e6 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +1a8c1aedbc +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +a8b955def2 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +51712a3196 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +cc6042d876 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +26f72901c6 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +3f10b4917e +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +e9ae738c7c +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +2feee9da60 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +599367595d +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +1043047243 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +df81547595 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +f186e32d9e +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +4e5b89f77b +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +a6c4f12fad +# 
MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +8fcc6df79d +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +32457632a6 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +dfe026ce08 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +9d677f4cdc +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +ebf71114a3 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +6a4f409241 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +4200233adc +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +32a8169a65 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +99b984a1c5 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +bf6e60e5ce +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +7b07ab93e8 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +73d51f8e84 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +0e9556a90c +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +9e5defcf74 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +cbfbb591cf +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +775470214a +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash 
dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +aa8faa6423 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +94abc09246 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +c0ed83eac2 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +fe5be2eb4f +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +a4592cec2c +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +c9302855d8 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +4272c47782 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +726992d7a4 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +fd5bdca28c +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +e2dff95ac6 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +50d62c1adf +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +e66a85c278 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +2a19c68d7b +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +ab9458408d +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +b6c0ddc48d +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d3685676bf +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +dbb89430ef +# 
MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d3cb2f8b95 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +2206b63af0 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +e9e03a9160 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +2e48e4b112 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +630c40d716 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +e546ff99a6 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +0689cfc6ce +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +97094eb530 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d48638687b +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +090a8a0c42 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +f58c818148 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +00f11224fd +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +612d0f9a06 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +efec6e28a5 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +4d752ec647 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d351ac6200 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash 
dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +f09fba81be +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +15f0fabfb3 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +666d707450 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +950e231b63 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +00c0660469 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +92665682fe +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +7dbf55437f +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +b81bf53f0e +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +7ee2a38527 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +7b42f35bcc +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +ac412a4ee3 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +ea06be2682 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +87e22fe0ab +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +8c9e035edb +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +9130431b54 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +0145db4091 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +86cfe50c0e +# 
MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +db7a947172 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +3d7889a4ca +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +6143ec939a +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +b90cd91983 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +920abcc9bc +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +6e937574f7 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +4f4e989436 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +c787adcfbf +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +a6c76d0c29 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +bcb0b8eb04 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +5eb521955a +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d212551ad3 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +0d8b79eb40 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +4b87d7ac2a +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +96137813f7 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +875cd32eac +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash 
dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +b39633462c +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +787e326c18 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +a9d5141367 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +fe2e7eaa20 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +9a09ed73c5 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +86a7969a6d +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +23fc2bee6b +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +55b89a8068 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +50ca701e68 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +ebb2bfd239 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +33eff43a12 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +fb5a4a366b +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +7f41b0b073 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +9f3ea9d934 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +73ae5ca594 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +9b82b2f8d6 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +cad3aed5fc +# 
MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +b38d9d2b6f +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +b17806a4cf +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +e2e04513e7 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +c71bfccaa5 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +8dc55689d2 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +34d82df2ba +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +2bfa071d09 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +fc92d33327 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +c59be6d261 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +4f51b34ab8 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +c479e46618 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +7bd90baca8 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +82e5f91a2b +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +d95382fab7 +# MOZ_LIBWEBRTC_SRC=/home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc MOZ_LIBWEBRTC_BRANCH=mozpatches bash dom/media/webrtc/third_party_build/fast-forward-libwebrtc.sh +# base of lastest vendoring +70aa7e99e4 diff --git a/third_party/libwebrtc/README.mozilla b/third_party/libwebrtc/README.mozilla index cca98988d489..2eacfa852b29 100644 --- a/third_party/libwebrtc/README.mozilla +++ b/third_party/libwebrtc/README.mozilla @@ -16464,3 +16464,417 @@ libwebrtc updated from /Users/mfroman/no_tm_backup/elm/.moz-fast-forward/moz-lib libwebrtc 
updated from /Users/mfroman/no_tm_backup/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-09-27T04:11:23.619074. # ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-github https://github.com/mjfroman/moz-libwebrtc-third-party --commit build-31edcc46d68 third_party third_party updated from commit https://github.com/mjfroman/moz-libwebrtc-third-party/archive/build-31edcc46d68.tar.gz on 2023-10-04T17:59:33.430931. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T07:45:49.537748. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T07:48:24.618520. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T07:49:35.602541. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T07:51:40.220841. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T07:52:54.347554. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T07:54:45.474668. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T08:08:38.802184. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T08:11:06.484540. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T08:12:21.983066. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T08:13:40.407085. 
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T08:30:55.334751. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T08:32:24.534077. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T08:33:49.637867. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T08:35:20.635176. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T08:36:47.754766. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T08:38:28.498217. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T08:39:56.809983. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T08:42:08.647186. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T08:43:42.663755. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T08:45:09.759132. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T08:46:51.749685. 
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T08:48:19.341351. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T08:49:46.330205. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T08:51:11.477403. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T08:52:38.231722. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T08:54:19.288145. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T08:56:25.903899. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T08:58:29.470875. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:00:38.156016. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:02:34.064258. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:04:08.055191. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:05:44.939191. 
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:07:10.195869. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:08:39.714282. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:10:45.401998. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:12:08.943886. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:14:19.439087. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:15:47.850084. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:17:19.797655. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:18:47.712756. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:20:30.747665. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:34:29.353644. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:35:54.655413. 
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:37:22.522995. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:39:31.122296. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:40:59.546457. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:42:27.254599. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:44:39.769619. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:46:08.273549. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:48:19.621930. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:49:48.013931. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:51:56.809497. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:53:25.050672. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:55:35.332897. 
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:57:02.814880. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:58:31.921187. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T09:59:58.665492. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T10:15:23.063372. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T10:16:50.221646. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T10:18:30.857915. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T10:20:38.621864. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T10:22:46.573556. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T10:24:11.790268. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T10:25:41.563968. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T10:27:12.406802. 
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T10:45:12.565314. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:02:03.026453. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:03:55.414334. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:05:47.524219. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:06:59.449648. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:08:15.705927. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:09:32.105699. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:10:43.250633. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:11:55.086115. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:13:07.591917. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:14:20.695070. 
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:15:35.535979. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:16:48.566402. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:18:09.469977. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:19:20.313414. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:20:32.696634. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:21:44.256480. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:22:58.546258. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:24:52.380679. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:26:24.783045. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:28:16.602500. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:29:32.363762. 
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:30:49.420244. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:32:02.615735. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:33:16.788677. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:34:32.731527. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:35:46.874402. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:36:57.598498. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:38:08.680790. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:39:23.522773. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:40:36.658705. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:41:52.461411. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:43:09.510809. 
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:44:21.207223. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:45:36.657888. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:46:47.932973. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:54:34.085500. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:55:48.271204. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:57:02.678023. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T11:58:17.746978. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T12:45:21.956982. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T13:19:10.281884. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T13:20:23.466624. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T13:21:35.661462. 
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T13:22:48.182253. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T13:24:15.637927. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T13:25:27.731337. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T13:26:43.291947. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T13:28:41.888791. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T13:29:56.181525. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T13:31:10.810919. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T13:32:23.261637. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T13:34:16.596755. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T13:35:31.624197. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T13:36:46.634700. 
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T13:37:57.794254. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T13:39:11.155979. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T13:40:23.629116. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T13:41:36.180997. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T13:42:52.644345. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T13:44:04.957107. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T13:45:17.326543. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T13:46:25.898395. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T13:47:39.737101. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T13:48:54.092939. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T13:56:08.183962. 
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T13:58:19.594668. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:00:15.440278. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:01:28.371595. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:02:41.246581. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:03:54.221031. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:05:07.864390. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:06:19.770508. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:08:38.303394. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:09:52.286458. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:11:05.725322. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:13:00.646822. 
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:14:14.390238. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:15:26.992008. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:16:38.974425. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:17:55.867847. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:26:18.570383. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:27:33.698586. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:28:55.880699. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:30:15.155825. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:31:35.190259. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:32:53.500674. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:34:14.966982. 
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:35:43.202633. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:37:04.548949. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:38:25.160430. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:39:47.433145. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:41:49.672361. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:43:09.370089. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:45:08.768321. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:46:30.389484. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:48:31.655449. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:50:32.846073. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T14:51:58.759595. 
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T15:00:45.689232. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T15:02:06.419396. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T15:03:33.206679. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T15:04:52.434740. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T15:06:13.875097. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T15:07:35.209553. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T15:08:58.300629. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T15:10:35.864368. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T15:20:30.089256. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T15:21:54.774975. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T15:23:57.992803. 
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T15:25:22.564554. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T15:26:45.714223. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T15:28:48.409002. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T15:30:15.716670. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T15:32:16.512800. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T15:33:39.317533. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T15:35:00.137562. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T15:47:09.504049. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T15:48:29.761413. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T15:49:55.503649. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T18:41:27.453459. 
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T18:42:46.643235. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T18:43:58.419983. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T18:45:10.606154. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T18:46:24.998512. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T18:47:38.562537. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T18:48:51.083835. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T18:50:05.237740. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T18:57:08.614369. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T18:59:02.725860. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T19:00:20.038317. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T19:01:36.304303. 
+# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T19:02:51.424089. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T19:04:30.607280. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T19:06:23.250658. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T19:07:37.268851. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T19:08:50.528807. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T19:10:04.476691. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T19:11:20.577695. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T19:12:36.189405. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T19:13:52.595352. +# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc +libwebrtc updated from /home/pehrsons/dev/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2023-10-16T19:15:06.626003. 
diff --git a/third_party/libwebrtc/api/BUILD.gn b/third_party/libwebrtc/api/BUILD.gn index 1c3084a07493..9a60e912ec83 100644 --- a/third_party/libwebrtc/api/BUILD.gn +++ b/third_party/libwebrtc/api/BUILD.gn @@ -824,6 +824,7 @@ rtc_library("transport_api") { "call/transport.h", ] deps = [ + ":array_view", ":refcountedbase", ":scoped_refptr", ] diff --git a/third_party/libwebrtc/api/audio/audio_frame_api_gn/moz.build b/third_party/libwebrtc/api/audio/audio_frame_api_gn/moz.build index 9248b91c2d92..bbe919aa5953 100644 --- a/third_party/libwebrtc/api/audio/audio_frame_api_gn/moz.build +++ b/third_party/libwebrtc/api/audio/audio_frame_api_gn/moz.build @@ -127,6 +127,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/audio/audio_mixer_api_gn/moz.build b/third_party/libwebrtc/api/audio/audio_mixer_api_gn/moz.build index 0ef13b0f4588..520104b4592d 100644 --- a/third_party/libwebrtc/api/audio/audio_mixer_api_gn/moz.build +++ b/third_party/libwebrtc/api/audio/audio_mixer_api_gn/moz.build @@ -122,6 +122,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/audio_codecs/L16/audio_decoder_L16_gn/moz.build b/third_party/libwebrtc/api/audio_codecs/L16/audio_decoder_L16_gn/moz.build index ff00330a684c..026c95751931 100644 --- a/third_party/libwebrtc/api/audio_codecs/L16/audio_decoder_L16_gn/moz.build +++ b/third_party/libwebrtc/api/audio_codecs/L16/audio_decoder_L16_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/audio_codecs/L16/audio_encoder_L16_gn/moz.build b/third_party/libwebrtc/api/audio_codecs/L16/audio_encoder_L16_gn/moz.build index ee8f51ccb440..512385612ee0 100644 --- a/third_party/libwebrtc/api/audio_codecs/L16/audio_encoder_L16_gn/moz.build +++ b/third_party/libwebrtc/api/audio_codecs/L16/audio_encoder_L16_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/audio_codecs/audio_codecs_api_gn/moz.build b/third_party/libwebrtc/api/audio_codecs/audio_codecs_api_gn/moz.build index e9984fe360ce..b003a5c2383c 100644 --- a/third_party/libwebrtc/api/audio_codecs/audio_codecs_api_gn/moz.build +++ b/third_party/libwebrtc/api/audio_codecs/audio_codecs_api_gn/moz.build @@ -129,6 +129,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/audio_codecs/g711/audio_decoder_g711_gn/moz.build b/third_party/libwebrtc/api/audio_codecs/g711/audio_decoder_g711_gn/moz.build index 2a3e692b812d..1367c36ea990 100644 --- a/third_party/libwebrtc/api/audio_codecs/g711/audio_decoder_g711_gn/moz.build +++ b/third_party/libwebrtc/api/audio_codecs/g711/audio_decoder_g711_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/audio_codecs/g711/audio_encoder_g711_gn/moz.build b/third_party/libwebrtc/api/audio_codecs/g711/audio_encoder_g711_gn/moz.build index 7f89c91164f6..b4808ded9c8b 100644 --- 
a/third_party/libwebrtc/api/audio_codecs/g711/audio_encoder_g711_gn/moz.build +++ b/third_party/libwebrtc/api/audio_codecs/g711/audio_encoder_g711_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/audio_codecs/g722/audio_decoder_g722_gn/moz.build b/third_party/libwebrtc/api/audio_codecs/g722/audio_decoder_g722_gn/moz.build index 9dd0927ed536..8714874223a8 100644 --- a/third_party/libwebrtc/api/audio_codecs/g722/audio_decoder_g722_gn/moz.build +++ b/third_party/libwebrtc/api/audio_codecs/g722/audio_decoder_g722_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/audio_codecs/g722/audio_encoder_g722_config_gn/moz.build b/third_party/libwebrtc/api/audio_codecs/g722/audio_encoder_g722_config_gn/moz.build index 4ecf66bd1968..5ebbe0ea5f5f 100644 --- a/third_party/libwebrtc/api/audio_codecs/g722/audio_encoder_g722_config_gn/moz.build +++ b/third_party/libwebrtc/api/audio_codecs/g722/audio_encoder_g722_config_gn/moz.build @@ -122,6 +122,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/audio_codecs/g722/audio_encoder_g722_gn/moz.build b/third_party/libwebrtc/api/audio_codecs/g722/audio_encoder_g722_gn/moz.build index 36bd517e0079..933120c6927d 100644 --- a/third_party/libwebrtc/api/audio_codecs/g722/audio_encoder_g722_gn/moz.build +++ b/third_party/libwebrtc/api/audio_codecs/g722/audio_encoder_g722_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/audio_codecs/opus/audio_decoder_multiopus_gn/moz.build b/third_party/libwebrtc/api/audio_codecs/opus/audio_decoder_multiopus_gn/moz.build index 21c64c783062..c343cfdb5f63 100644 --- a/third_party/libwebrtc/api/audio_codecs/opus/audio_decoder_multiopus_gn/moz.build +++ b/third_party/libwebrtc/api/audio_codecs/opus/audio_decoder_multiopus_gn/moz.build @@ -127,6 +127,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/audio_codecs/opus/audio_decoder_opus_config_gn/moz.build b/third_party/libwebrtc/api/audio_codecs/opus/audio_decoder_opus_config_gn/moz.build index 9b1098290596..f2fee9346445 100644 --- a/third_party/libwebrtc/api/audio_codecs/opus/audio_decoder_opus_config_gn/moz.build +++ b/third_party/libwebrtc/api/audio_codecs/opus/audio_decoder_opus_config_gn/moz.build @@ -122,6 +122,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/audio_codecs/opus/audio_encoder_multiopus_gn/moz.build b/third_party/libwebrtc/api/audio_codecs/opus/audio_encoder_multiopus_gn/moz.build index 2c2a46ea430d..f4310e7a28e1 100644 --- a/third_party/libwebrtc/api/audio_codecs/opus/audio_encoder_multiopus_gn/moz.build +++ b/third_party/libwebrtc/api/audio_codecs/opus/audio_encoder_multiopus_gn/moz.build @@ -127,6 +127,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/call/transport.h 
b/third_party/libwebrtc/api/call/transport.h index 387ce8d15bf5..52c577ba839a 100644 --- a/third_party/libwebrtc/api/call/transport.h +++ b/third_party/libwebrtc/api/call/transport.h @@ -14,6 +14,7 @@ #include #include +#include "api/array_view.h" #include "api/ref_counted_base.h" #include "api/scoped_refptr.h" @@ -44,10 +45,40 @@ struct PacketOptions { class Transport { public: - virtual bool SendRtp(const uint8_t* packet, - size_t length, - const PacketOptions& options) = 0; - virtual bool SendRtcp(const uint8_t* packet, size_t length) = 0; + // New style functions. Default implementations are to accomodate + // subclasses that haven't been converted to new style yet. + // TODO(bugs.webrtc.org/14870): Deprecate and remove old functions. + // Mozilla: Add GCC pragmas for now. They will be removed soon: + // https://webrtc.googlesource.com/src/+/e14d122a7b24bf78c02b8a4ce23716f79451dd23 +#if defined(__clang__) +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" +#elif defined(__GNUC__) +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wdeprecated-declarations" +#endif + virtual bool SendRtp(rtc::ArrayView packet, + const PacketOptions& options) { + return SendRtp(packet.data(), packet.size(), options); + } + virtual bool SendRtcp(rtc::ArrayView packet) { + return SendRtcp(packet.data(), packet.size()); + } +#if defined(__clang__) +#pragma clang diagnostic pop +#elif defined(__GNUC__) +#pragma GCC diagnostic pop +#endif + // Old style functions. + [[deprecated("Use ArrayView version")]] virtual bool + SendRtp(const uint8_t* packet, size_t length, const PacketOptions& options) { + return SendRtp(rtc::MakeArrayView(packet, length), options); + } + [[deprecated("Use ArrayView version")]] virtual bool SendRtcp( + const uint8_t* packet, + size_t length) { + return SendRtcp(rtc::MakeArrayView(packet, length)); + } protected: virtual ~Transport() {} diff --git a/third_party/libwebrtc/api/candidate.cc b/third_party/libwebrtc/api/candidate.cc index 53a48269a043..90cb32682313 100644 --- a/third_party/libwebrtc/api/candidate.cc +++ b/third_party/libwebrtc/api/candidate.cc @@ -78,11 +78,12 @@ std::string Candidate::ToStringInternal(bool sensitive) const { rtc::StringBuilder ost; std::string address = sensitive ? address_.ToSensitiveString() : address_.ToString(); + std::string related_address = sensitive ? 
related_address_.ToSensitiveString() + : related_address_.ToString(); ost << "Cand[" << transport_name_ << ":" << foundation_ << ":" << component_ << ":" << protocol_ << ":" << priority_ << ":" << address << ":" << type_ - << ":" << related_address_.ToString() << ":" << username_ << ":" - << password_ << ":" << network_id_ << ":" << network_cost_ << ":" - << generation_ << "]"; + << ":" << related_address << ":" << username_ << ":" << password_ << ":" + << network_id_ << ":" << network_cost_ << ":" << generation_ << "]"; return ost.Release(); } diff --git a/third_party/libwebrtc/api/crypto_params.h b/third_party/libwebrtc/api/crypto_params.h index 95bd892f9cfb..34906ea0efa9 100644 --- a/third_party/libwebrtc/api/crypto_params.h +++ b/third_party/libwebrtc/api/crypto_params.h @@ -26,14 +26,14 @@ struct CryptoParams { absl::string_view cs, absl::string_view kp, absl::string_view sp) - : tag(t), cipher_suite(cs), key_params(kp), session_params(sp) {} + : tag(t), crypto_suite(cs), key_params(kp), session_params(sp) {} bool Matches(const CryptoParams& params) const { - return (tag == params.tag && cipher_suite == params.cipher_suite); + return (tag == params.tag && crypto_suite == params.crypto_suite); } int tag; - std::string cipher_suite; + std::string crypto_suite; std::string key_params; std::string session_params; }; diff --git a/third_party/libwebrtc/api/fec_controller_api_gn/moz.build b/third_party/libwebrtc/api/fec_controller_api_gn/moz.build index 8f7d88432678..57be6208e57f 100644 --- a/third_party/libwebrtc/api/fec_controller_api_gn/moz.build +++ b/third_party/libwebrtc/api/fec_controller_api_gn/moz.build @@ -53,6 +53,10 @@ if CONFIG["OS_TARGET"] == "Android": DEFINES["__STDC_CONSTANT_MACROS"] = True DEFINES["__STDC_FORMAT_MACROS"] = True + OS_LIBS += [ + "log" + ] + if CONFIG["OS_TARGET"] == "Darwin": DEFINES["WEBRTC_MAC"] = True diff --git a/third_party/libwebrtc/api/field_trials_registry_gn/moz.build b/third_party/libwebrtc/api/field_trials_registry_gn/moz.build index e105574d498d..a7b53adc457b 100644 --- a/third_party/libwebrtc/api/field_trials_registry_gn/moz.build +++ b/third_party/libwebrtc/api/field_trials_registry_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/frame_transformer_interface.h b/third_party/libwebrtc/api/frame_transformer_interface.h index 3e2d7d64ef34..5ad94a06f555 100644 --- a/third_party/libwebrtc/api/frame_transformer_interface.h +++ b/third_party/libwebrtc/api/frame_transformer_interface.h @@ -36,6 +36,8 @@ class TransformableFrameInterface { virtual uint8_t GetPayloadType() const = 0; virtual uint32_t GetSsrc() const = 0; virtual uint32_t GetTimestamp() const = 0; + virtual void SetRTPTimestamp(uint32_t timestamp) = 0; + // TODO(https://bugs.webrtc.org/14878): Change this to pure virtual after it // is implemented everywhere. virtual absl::optional GetCaptureTimeIdentifier() const { @@ -69,13 +71,9 @@ class TransformableAudioFrameInterface : public TransformableFrameInterface { public: virtual ~TransformableAudioFrameInterface() = default; - virtual void SetRTPTimestamp(uint32_t timestamp) = 0; - // Exposes the frame header, enabling the interface clients to use the - // information in the header as needed, for example to compile the list of - // csrcs. - // TODO(crbug.com/1453226): Deprecate and remove once callers have migrated to - // the getters for specific fields. 
- virtual const RTPHeader& GetHeader() const = 0; + // TODO(crbug.com/1453226): Remove after a few weeks. + [[deprecated("Use specific getters instead.")]] virtual const RTPHeader& + GetHeader() const = 0; virtual rtc::ArrayView GetContributingSources() const = 0; @@ -84,6 +82,18 @@ class TransformableAudioFrameInterface : public TransformableFrameInterface { virtual const absl::optional SequenceNumber() const { return absl::nullopt; } + + // TODO(crbug.com/1456628): Change this to pure virtual after it + // is implemented everywhere. + virtual absl::optional AbsoluteCaptureTimestamp() const { + return absl::nullopt; + } + + enum class FrameType { kEmptyFrame, kAudioFrameSpeech, kAudioFrameCN }; + + // TODO(crbug.com/1456628): Change this to pure virtual after it + // is implemented everywhere. + virtual FrameType Type() const { return FrameType::kEmptyFrame; } }; // Objects implement this interface to be notified with the transformed frame. diff --git a/third_party/libwebrtc/api/media_stream_interface_gn/moz.build b/third_party/libwebrtc/api/media_stream_interface_gn/moz.build index 0f053a0e9732..e7255c805bee 100644 --- a/third_party/libwebrtc/api/media_stream_interface_gn/moz.build +++ b/third_party/libwebrtc/api/media_stream_interface_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/rtc_error_gn/moz.build b/third_party/libwebrtc/api/rtc_error_gn/moz.build index aaa86c696fa6..45cad74d51aa 100644 --- a/third_party/libwebrtc/api/rtc_error_gn/moz.build +++ b/third_party/libwebrtc/api/rtc_error_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/rtc_event_log/rtc_event_log_gn/moz.build b/third_party/libwebrtc/api/rtc_event_log/rtc_event_log_gn/moz.build index 180dfb1ff4b4..6d14d068a7fe 100644 --- a/third_party/libwebrtc/api/rtc_event_log/rtc_event_log_gn/moz.build +++ b/third_party/libwebrtc/api/rtc_event_log/rtc_event_log_gn/moz.build @@ -127,6 +127,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/rtp_headers_gn/moz.build b/third_party/libwebrtc/api/rtp_headers_gn/moz.build index 1641b4594ef6..ba0c4f55aa8c 100644 --- a/third_party/libwebrtc/api/rtp_headers_gn/moz.build +++ b/third_party/libwebrtc/api/rtp_headers_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/rtp_packet_info_gn/moz.build b/third_party/libwebrtc/api/rtp_packet_info_gn/moz.build index d331490f9baf..d4d79fc4042d 100644 --- a/third_party/libwebrtc/api/rtp_packet_info_gn/moz.build +++ b/third_party/libwebrtc/api/rtp_packet_info_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/rtp_sender_setparameters_callback_gn/moz.build b/third_party/libwebrtc/api/rtp_sender_setparameters_callback_gn/moz.build index 7e18b2e7cc9d..b5ea7bc20fc2 100644 --- a/third_party/libwebrtc/api/rtp_sender_setparameters_callback_gn/moz.build +++ b/third_party/libwebrtc/api/rtp_sender_setparameters_callback_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == 
"WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/stats/OWNERS b/third_party/libwebrtc/api/stats/OWNERS index 7e98070d5d74..1556231b31a6 100644 --- a/third_party/libwebrtc/api/stats/OWNERS +++ b/third_party/libwebrtc/api/stats/OWNERS @@ -1,2 +1,3 @@ +set noparent hbos@webrtc.org hta@webrtc.org diff --git a/third_party/libwebrtc/api/stats/rtcstats_objects.h b/third_party/libwebrtc/api/stats/rtcstats_objects.h index 36013928ec75..8bb1ff2d0529 100644 --- a/third_party/libwebrtc/api/stats/rtcstats_objects.h +++ b/third_party/libwebrtc/api/stats/rtcstats_objects.h @@ -248,13 +248,18 @@ class RTC_EXPORT RTCInboundRtpStreamStats final RTCStatsMember packets_received; RTCStatsMember packets_discarded; RTCStatsMember fec_packets_received; + RTCStatsMember fec_bytes_received; RTCStatsMember fec_packets_discarded; + // Inbound FEC SSRC. Only present if a mechanism like FlexFEC is negotiated. + RTCStatsMember fec_ssrc; RTCStatsMember bytes_received; RTCStatsMember header_bytes_received; // Inbound RTX stats. Only defined when RTX is used and it is therefore // possible to distinguish retransmissions. RTCStatsMember retransmitted_packets_received; RTCStatsMember retransmitted_bytes_received; + RTCStatsMember rtx_ssrc; + RTCStatsMember last_packet_received_timestamp; RTCStatsMember jitter_buffer_delay; RTCStatsMember jitter_buffer_target_delay; @@ -367,6 +372,9 @@ class RTC_EXPORT RTCOutboundRtpStreamStats final // In JavaScript, this is only exposed if HW exposure is allowed. RTCStatsMember power_efficient_encoder; RTCStatsMember scalability_mode; + + // RTX ssrc. Only present if RTX is negotiated. + RTCStatsMember rtx_ssrc; }; // https://w3c.github.io/webrtc-stats/#remoteinboundrtpstats-dict* diff --git a/third_party/libwebrtc/api/test/mock_transformable_audio_frame.h b/third_party/libwebrtc/api/test/mock_transformable_audio_frame.h index 9b52e5c57918..680d7a0fdc05 100644 --- a/third_party/libwebrtc/api/test/mock_transformable_audio_frame.h +++ b/third_party/libwebrtc/api/test/mock_transformable_audio_frame.h @@ -37,6 +37,14 @@ class MockTransformableAudioFrame : public TransformableAudioFrameInterface { GetDirection, (), (const, override)); + MOCK_METHOD(absl::optional, + AbsoluteCaptureTimestamp, + (), + (const, override)); + MOCK_METHOD(TransformableAudioFrameInterface::FrameType, + Type, + (), + (const, override)); }; } // namespace webrtc diff --git a/third_party/libwebrtc/api/test/mock_transformable_video_frame.h b/third_party/libwebrtc/api/test/mock_transformable_video_frame.h index eab02d7e009c..749d2bddabce 100644 --- a/third_party/libwebrtc/api/test/mock_transformable_video_frame.h +++ b/third_party/libwebrtc/api/test/mock_transformable_video_frame.h @@ -24,6 +24,7 @@ class MockTransformableVideoFrame MOCK_METHOD(rtc::ArrayView, GetData, (), (const, override)); MOCK_METHOD(void, SetData, (rtc::ArrayView data), (override)); MOCK_METHOD(uint32_t, GetTimestamp, (), (const, override)); + MOCK_METHOD(void, SetRTPTimestamp, (uint32_t), (override)); MOCK_METHOD(uint32_t, GetSsrc, (), (const, override)); MOCK_METHOD(bool, IsKeyFrame, (), (const, override)); MOCK_METHOD(void, diff --git a/third_party/libwebrtc/api/test/network_emulation_manager.cc b/third_party/libwebrtc/api/test/network_emulation_manager.cc index 236e2f0e174a..756fe4e7570b 100644 --- a/third_party/libwebrtc/api/test/network_emulation_manager.cc +++ b/third_party/libwebrtc/api/test/network_emulation_manager.cc @@ -82,6 +82,33 @@ 
NetworkEmulationManager::SimulatedNetworkNode::Builder::packet_queue_length( return *this; } +NetworkEmulationManager::SimulatedNetworkNode::Builder& +NetworkEmulationManager::SimulatedNetworkNode::Builder:: + delay_standard_deviation_ms(int delay_standard_deviation_ms) { + config_.delay_standard_deviation_ms = delay_standard_deviation_ms; + return *this; +} + +NetworkEmulationManager::SimulatedNetworkNode::Builder& +NetworkEmulationManager::SimulatedNetworkNode::Builder::allow_reordering() { + config_.allow_reordering = true; + return *this; +} + +NetworkEmulationManager::SimulatedNetworkNode::Builder& +NetworkEmulationManager::SimulatedNetworkNode::Builder::avg_burst_loss_length( + int avg_burst_loss_length) { + config_.avg_burst_loss_length = avg_burst_loss_length; + return *this; +} + +NetworkEmulationManager::SimulatedNetworkNode::Builder& +NetworkEmulationManager::SimulatedNetworkNode::Builder::packet_overhead( + int packet_overhead) { + config_.packet_overhead = packet_overhead; + return *this; +} + NetworkEmulationManager::SimulatedNetworkNode NetworkEmulationManager::SimulatedNetworkNode::Builder::Build( uint64_t random_seed) const { diff --git a/third_party/libwebrtc/api/test/network_emulation_manager.h b/third_party/libwebrtc/api/test/network_emulation_manager.h index bc9279d3067a..6b66d8188afa 100644 --- a/third_party/libwebrtc/api/test/network_emulation_manager.h +++ b/third_party/libwebrtc/api/test/network_emulation_manager.h @@ -184,6 +184,10 @@ class NetworkEmulationManager { Builder& capacity_Mbps(int link_capacity_Mbps); Builder& loss(double loss_rate); Builder& packet_queue_length(int max_queue_length_in_packets); + Builder& delay_standard_deviation_ms(int delay_standard_deviation_ms); + Builder& allow_reordering(); + Builder& avg_burst_loss_length(int avg_burst_loss_length); + Builder& packet_overhead(int packet_overhead); SimulatedNetworkNode Build(uint64_t random_seed = 1) const; SimulatedNetworkNode Build(NetworkEmulationManager* net, uint64_t random_seed = 1) const; diff --git a/third_party/libwebrtc/api/transport/datagram_transport_interface_gn/moz.build b/third_party/libwebrtc/api/transport/datagram_transport_interface_gn/moz.build index 5bd7a14d5ed6..d1d364a13ea1 100644 --- a/third_party/libwebrtc/api/transport/datagram_transport_interface_gn/moz.build +++ b/third_party/libwebrtc/api/transport/datagram_transport_interface_gn/moz.build @@ -122,6 +122,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/transport/field_trial_based_config_gn/moz.build b/third_party/libwebrtc/api/transport/field_trial_based_config_gn/moz.build index 38a5c45915e9..16ae3e2f0daa 100644 --- a/third_party/libwebrtc/api/transport/field_trial_based_config_gn/moz.build +++ b/third_party/libwebrtc/api/transport/field_trial_based_config_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/transport/network_control_gn/moz.build b/third_party/libwebrtc/api/transport/network_control_gn/moz.build index 2a3988f5695d..61b68b20c239 100644 --- a/third_party/libwebrtc/api/transport/network_control_gn/moz.build +++ b/third_party/libwebrtc/api/transport/network_control_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git 
a/third_party/libwebrtc/api/transport/rtp/rtp_source_gn/moz.build b/third_party/libwebrtc/api/transport/rtp/rtp_source_gn/moz.build index 324f779cb8f0..4a3e5f409700 100644 --- a/third_party/libwebrtc/api/transport/rtp/rtp_source_gn/moz.build +++ b/third_party/libwebrtc/api/transport/rtp/rtp_source_gn/moz.build @@ -122,6 +122,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/transport/stun.cc b/third_party/libwebrtc/api/transport/stun.cc index 1098c6720eed..35a65fd8e856 100644 --- a/third_party/libwebrtc/api/transport/stun.cc +++ b/third_party/libwebrtc/api/transport/stun.cc @@ -722,6 +722,10 @@ StunAttributeValueType StunMessage::GetAttributeValueType(int type) const { return STUN_VALUE_BYTE_STRING; case STUN_ATTR_GOOG_MISC_INFO: return STUN_VALUE_UINT16_LIST; + case STUN_ATTR_GOOG_DELTA: + return STUN_VALUE_BYTE_STRING; + case STUN_ATTR_GOOG_DELTA_ACK: + return STUN_VALUE_UINT64; default: return STUN_VALUE_UNKNOWN; } diff --git a/third_party/libwebrtc/api/transport_api_gn/moz.build b/third_party/libwebrtc/api/transport_api_gn/moz.build index 77a120da7d27..3c85fd9c0082 100644 --- a/third_party/libwebrtc/api/transport_api_gn/moz.build +++ b/third_party/libwebrtc/api/transport_api_gn/moz.build @@ -57,6 +57,10 @@ if CONFIG["OS_TARGET"] == "Android": DEFINES["__STDC_CONSTANT_MACROS"] = True DEFINES["__STDC_FORMAT_MACROS"] = True + OS_LIBS += [ + "log" + ] + if CONFIG["OS_TARGET"] == "Darwin": DEFINES["WEBRTC_MAC"] = True diff --git a/third_party/libwebrtc/api/uma_metrics.h b/third_party/libwebrtc/api/uma_metrics.h index a63159e8493f..925ba0757654 100644 --- a/third_party/libwebrtc/api/uma_metrics.h +++ b/third_party/libwebrtc/api/uma_metrics.h @@ -119,16 +119,6 @@ enum AddIceCandidateResult { kAddIceCandidateMax }; -// Metric for recording which api surface was used to enable simulcast. -// These values are persisted to logs. Entries should not be renumbered and -// numeric values should never be reused. -enum SimulcastApiVersion { - kSimulcastApiVersionNone = 0, - kSimulcastApiVersionLegacy = 1, - kSimulcastApiVersionSpecCompliant = 2, - kSimulcastApiVersionMax -}; - // Metrics for reporting usage of BUNDLE. // These values are persisted to logs. Entries should not be renumbered and // numeric values should never be reused. 
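The stun.cc hunk above extends StunMessage::GetAttributeValueType() so the parser knows how to decode the two new Google-specific attributes: GOOG_DELTA as an opaque byte string and GOOG_DELTA_ACK as a 64-bit value. A minimal standalone sketch of that mapping pattern follows; the attribute IDs here are made-up placeholders rather than the real constants from stun.h, and the enum is trimmed to just the values the sketch needs.

```cpp
// Hedged, simplified sketch of the attribute-type dispatch used by
// StunMessage::GetAttributeValueType(). Constants are illustrative only.
#include <cstdint>
#include <iostream>

enum StunAttributeValueType {
  STUN_VALUE_UNKNOWN,
  STUN_VALUE_BYTE_STRING,
  STUN_VALUE_UINT16_LIST,
  STUN_VALUE_UINT64,
};

// Placeholder attribute IDs; the real GOOG_MISC_INFO/GOOG_DELTA/GOOG_DELTA_ACK
// numbers live in the Google-specific STUN attribute range defined in stun.h.
constexpr int kAttrGoogMiscInfo = 0xC001;
constexpr int kAttrGoogDelta = 0xC002;
constexpr int kAttrGoogDeltaAck = 0xC003;

// Maps each known attribute to how its value should be parsed; anything
// unrecognized is reported as "unknown".
StunAttributeValueType GetAttributeValueType(int type) {
  switch (type) {
    case kAttrGoogMiscInfo:
      return STUN_VALUE_UINT16_LIST;
    case kAttrGoogDelta:
      return STUN_VALUE_BYTE_STRING;  // delta payload is an opaque byte string
    case kAttrGoogDeltaAck:
      return STUN_VALUE_UINT64;       // ack carries a single 64-bit value
    default:
      return STUN_VALUE_UNKNOWN;
  }
}

int main() {
  std::cout << (GetAttributeValueType(kAttrGoogDelta) == STUN_VALUE_BYTE_STRING)
            << "\n";  // prints 1
}
```

The default case gives the caller an explicit "unknown" answer for any attribute it does not recognize, so new attribute types can be added with a single extra case.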
diff --git a/third_party/libwebrtc/api/video/BUILD.gn b/third_party/libwebrtc/api/video/BUILD.gn index c46f3d019b0c..ae29d8eaa79f 100644 --- a/third_party/libwebrtc/api/video/BUILD.gn +++ b/third_party/libwebrtc/api/video/BUILD.gn @@ -28,6 +28,7 @@ rtc_library("video_rtp_headers") { deps = [ "..:array_view", + "../../rtc_base:checks", "../../rtc_base:logging", "../../rtc_base:safe_conversions", "../../rtc_base:stringutils", @@ -126,6 +127,7 @@ rtc_source_set("video_frame_type") { visibility = [ "*" ] sources = [ "video_frame_type.h" ] absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] + deps = [ "../../rtc_base:checks" ] } rtc_source_set("render_resolution") { @@ -155,6 +157,7 @@ rtc_library("encoded_image") { "../../rtc_base:checks", "../../rtc_base:refcount", "../../rtc_base/system:rtc_export", + "../units:timestamp", ] absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } diff --git a/third_party/libwebrtc/api/video/color_space.h b/third_party/libwebrtc/api/video/color_space.h index a7ad86b01695..e491c52e7253 100644 --- a/third_party/libwebrtc/api/video/color_space.h +++ b/third_party/libwebrtc/api/video/color_space.h @@ -101,7 +101,7 @@ class RTC_EXPORT ColorSpace { kInvalid = 0, // Limited Rec. 709 color range with RGB values ranging from 16 to 235. kLimited = 1, - // Full RGB color range with RGB valees from 0 to 255. + // Full RGB color range with RGB values from 0 to 255. kFull = 2, // Range is defined by MatrixCoefficients/TransferCharacteristics. kDerived = 3, diff --git a/third_party/libwebrtc/api/video/encoded_image.cc b/third_party/libwebrtc/api/video/encoded_image.cc index ff61994dee04..09224c3c49b2 100644 --- a/third_party/libwebrtc/api/video/encoded_image.cc +++ b/third_party/libwebrtc/api/video/encoded_image.cc @@ -75,6 +75,11 @@ void EncodedImage::SetEncodeTime(int64_t encode_start_ms, timing_.encode_finish_ms = encode_finish_ms; } +webrtc::Timestamp EncodedImage::CaptureTime() const { + return capture_time_ms_ > 0 ? Timestamp::Millis(capture_time_ms_) + : Timestamp::MinusInfinity(); +} + absl::optional EncodedImage::SpatialLayerFrameSize( int spatial_index) const { RTC_DCHECK_GE(spatial_index, 0); diff --git a/third_party/libwebrtc/api/video/encoded_image.h b/third_party/libwebrtc/api/video/encoded_image.h index 7b2f5c88f083..77ce8b605d6e 100644 --- a/third_party/libwebrtc/api/video/encoded_image.h +++ b/third_party/libwebrtc/api/video/encoded_image.h @@ -19,6 +19,7 @@ #include "absl/types/optional.h" #include "api/rtp_packet_infos.h" #include "api/scoped_refptr.h" +#include "api/units/timestamp.h" #include "api/video/color_space.h" #include "api/video/video_codec_constants.h" #include "api/video/video_content_type.h" @@ -87,6 +88,8 @@ class RTC_EXPORT EncodedImage { void SetEncodeTime(int64_t encode_start_ms, int64_t encode_finish_ms); + webrtc::Timestamp CaptureTime() const; + int64_t NtpTimeMs() const { return ntp_time_ms_; } // Every simulcast layer (= encoding) has its own encoder and RTP stream. 
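The encoded_image.cc/encoded_image.h hunks above add an EncodedImage::CaptureTime() accessor that converts the legacy capture_time_ms_ field, where values of zero or less mean "unset", into a typed timestamp, with minus infinity standing in for the unset case. Below is a small self-contained sketch of that sentinel-to-timestamp pattern; SimpleTimestamp is a stand-in for the real webrtc::Timestamp units type, which this sketch does not depend on.

```cpp
// Hedged sketch of the CaptureTime() accessor pattern: a millisecond sentinel
// field is surfaced as a typed timestamp instead of a raw int64_t.
#include <cstdint>
#include <iostream>
#include <limits>

struct SimpleTimestamp {
  // Milliseconds; INT64_MIN models Timestamp::MinusInfinity().
  int64_t ms;
  static SimpleTimestamp Millis(int64_t v) { return {v}; }
  static SimpleTimestamp MinusInfinity() {
    return {std::numeric_limits<int64_t>::min()};
  }
  bool IsFinite() const { return ms != std::numeric_limits<int64_t>::min(); }
};

class EncodedImageSketch {
 public:
  void set_capture_time_ms(int64_t ms) { capture_time_ms_ = ms; }

  // Mirrors the shape of EncodedImage::CaptureTime() in the hunk above:
  // positive capture times become a finite timestamp, anything else is
  // reported as minus infinity rather than a bogus time of zero.
  SimpleTimestamp CaptureTime() const {
    return capture_time_ms_ > 0 ? SimpleTimestamp::Millis(capture_time_ms_)
                                : SimpleTimestamp::MinusInfinity();
  }

 private:
  int64_t capture_time_ms_ = 0;  // 0 means "never set", as in EncodedImage.
};

int main() {
  EncodedImageSketch img;
  std::cout << img.CaptureTime().IsFinite() << "\n";  // 0: unset
  img.set_capture_time_ms(1234);
  std::cout << img.CaptureTime().IsFinite() << "\n";  // 1: finite timestamp
}
```

Callers can then test finiteness (IsFinite() in the real Timestamp API) instead of remembering which sentinel the raw millisecond field uses.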
diff --git a/third_party/libwebrtc/api/video/encoded_image_gn/moz.build b/third_party/libwebrtc/api/video/encoded_image_gn/moz.build index c1c76b29d8a4..53a06a6a7840 100644 --- a/third_party/libwebrtc/api/video/encoded_image_gn/moz.build +++ b/third_party/libwebrtc/api/video/encoded_image_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/video/recordable_encoded_frame_gn/moz.build b/third_party/libwebrtc/api/video/recordable_encoded_frame_gn/moz.build index d6cbcd817ba6..f9e5bdd1122b 100644 --- a/third_party/libwebrtc/api/video/recordable_encoded_frame_gn/moz.build +++ b/third_party/libwebrtc/api/video/recordable_encoded_frame_gn/moz.build @@ -122,6 +122,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/video/video_bitrate_allocator_factory_gn/moz.build b/third_party/libwebrtc/api/video/video_bitrate_allocator_factory_gn/moz.build index 1712dffdc683..b995b23a983b 100644 --- a/third_party/libwebrtc/api/video/video_bitrate_allocator_factory_gn/moz.build +++ b/third_party/libwebrtc/api/video/video_bitrate_allocator_factory_gn/moz.build @@ -122,6 +122,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/video/video_content_type.cc b/third_party/libwebrtc/api/video/video_content_type.cc index 9ba3ece79b6a..79da9ff273fd 100644 --- a/third_party/libwebrtc/api/video/video_content_type.cc +++ b/third_party/libwebrtc/api/video/video_content_type.cc @@ -10,21 +10,7 @@ #include "api/video/video_content_type.h" -// VideoContentType stored as a single byte, which is sent over the network. -// Structure: -// -// 0 1 2 3 4 5 6 7 -// +---------------+ -// |r r e e e s s c| -// -// where: -// r - reserved bits. -// e - 3-bit number of an experiment group counted from 1. 0 means there's no -// experiment ongoing. -// s - 2-bit simulcast stream id or spatial layer, counted from 1. 0 means that -// no simulcast information is set. -// c - content type. 0 means real-time video, 1 means screenshare. -// +#include "rtc_base/checks.h" namespace webrtc { namespace videocontenttypehelpers { @@ -33,57 +19,21 @@ namespace { static constexpr uint8_t kScreenshareBitsSize = 1; static constexpr uint8_t kScreenshareBitsMask = (1u << kScreenshareBitsSize) - 1; - -static constexpr uint8_t kSimulcastShift = 1; -static constexpr uint8_t kSimulcastBitsSize = 2; -static constexpr uint8_t kSimulcastBitsMask = ((1u << kSimulcastBitsSize) - 1) - << kSimulcastShift; // 0b00000110 - -static constexpr uint8_t kExperimentShift = 3; -static constexpr uint8_t kExperimentBitsSize = 3; -static constexpr uint8_t kExperimentBitsMask = - ((1u << kExperimentBitsSize) - 1) << kExperimentShift; // 0b00111000 - -static constexpr uint8_t kTotalBitsSize = - kScreenshareBitsSize + kSimulcastBitsSize + kExperimentBitsSize; } // namespace -bool SetExperimentId(VideoContentType* content_type, uint8_t experiment_id) { - // Store in bits 2-4. - if (experiment_id >= (1 << kExperimentBitsSize)) - return false; - *content_type = static_cast( - (static_cast(*content_type) & ~kExperimentBitsMask) | - ((experiment_id << kExperimentShift) & kExperimentBitsMask)); - return true; -} - -bool SetSimulcastId(VideoContentType* content_type, uint8_t simulcast_id) { - // Store in bits 5-6. 
- if (simulcast_id >= (1 << kSimulcastBitsSize)) - return false; - *content_type = static_cast( - (static_cast(*content_type) & ~kSimulcastBitsMask) | - ((simulcast_id << kSimulcastShift) & kSimulcastBitsMask)); - return true; -} - -uint8_t GetExperimentId(const VideoContentType& content_type) { - return (static_cast(content_type) & kExperimentBitsMask) >> - kExperimentShift; -} -uint8_t GetSimulcastId(const VideoContentType& content_type) { - return (static_cast(content_type) & kSimulcastBitsMask) >> - kSimulcastShift; -} - bool IsScreenshare(const VideoContentType& content_type) { + // Ensure no bits apart from the screenshare bit is set. + // This CHECK is a temporary measure to detect code that introduces + // values according to old versions. + RTC_CHECK((static_cast(content_type) & !kScreenshareBitsMask) == 0); return (static_cast(content_type) & kScreenshareBitsMask) > 0; } bool IsValidContentType(uint8_t value) { - // Any 6-bit value is allowed. - return value < (1 << kTotalBitsSize); + // Only the screenshare bit is allowed. + // However, due to previous usage of the next 5 bits, we allow + // the lower 6 bits to be set. + return value < (1 << 6); } const char* ToString(const VideoContentType& content_type) { diff --git a/third_party/libwebrtc/api/video/video_content_type.h b/third_party/libwebrtc/api/video/video_content_type.h index 2d38a62366c5..b57420182ca8 100644 --- a/third_party/libwebrtc/api/video/video_content_type.h +++ b/third_party/libwebrtc/api/video/video_content_type.h @@ -15,18 +15,15 @@ namespace webrtc { +// VideoContentType stored as a single byte, which is sent over the network +// in the rtp-hdrext/video-content-type extension. +// Only the lowest bit is used, per the enum. enum class VideoContentType : uint8_t { UNSPECIFIED = 0, SCREENSHARE = 1, }; namespace videocontenttypehelpers { -bool SetExperimentId(VideoContentType* content_type, uint8_t experiment_id); -bool SetSimulcastId(VideoContentType* content_type, uint8_t simulcast_id); - -uint8_t GetExperimentId(const VideoContentType& content_type); -uint8_t GetSimulcastId(const VideoContentType& content_type); - bool IsScreenshare(const VideoContentType& content_type); bool IsValidContentType(uint8_t value); diff --git a/third_party/libwebrtc/api/video/video_frame_gn/moz.build b/third_party/libwebrtc/api/video/video_frame_gn/moz.build index 7d21dfb24a71..d9157e5e796b 100644 --- a/third_party/libwebrtc/api/video/video_frame_gn/moz.build +++ b/third_party/libwebrtc/api/video/video_frame_gn/moz.build @@ -137,6 +137,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/video/video_frame_i010_gn/moz.build b/third_party/libwebrtc/api/video/video_frame_i010_gn/moz.build index 01eabd0d3249..e1c033c43c29 100644 --- a/third_party/libwebrtc/api/video/video_frame_i010_gn/moz.build +++ b/third_party/libwebrtc/api/video/video_frame_i010_gn/moz.build @@ -133,6 +133,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/video/video_frame_metadata_gn/moz.build b/third_party/libwebrtc/api/video/video_frame_metadata_gn/moz.build index 48f4a170531a..1adedbe53f37 100644 --- a/third_party/libwebrtc/api/video/video_frame_metadata_gn/moz.build +++ b/third_party/libwebrtc/api/video/video_frame_metadata_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + 
"crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/video/video_frame_type.h b/third_party/libwebrtc/api/video/video_frame_type.h index 9f0a34c89ef7..3665a80cd8d0 100644 --- a/third_party/libwebrtc/api/video/video_frame_type.h +++ b/third_party/libwebrtc/api/video/video_frame_type.h @@ -12,6 +12,7 @@ #define API_VIDEO_VIDEO_FRAME_TYPE_H_ #include "absl/strings/string_view.h" +#include "rtc_base/checks.h" namespace webrtc { @@ -25,15 +26,19 @@ enum class VideoFrameType { inline constexpr absl::string_view VideoFrameTypeToString( VideoFrameType frame_type) { - if (frame_type == VideoFrameType::kEmptyFrame) { - return "empty"; - } - if (frame_type == VideoFrameType::kVideoFrameKey) { - return "key"; - } - if (frame_type == VideoFrameType::kVideoFrameDelta) { - return "delta"; + switch (frame_type) { + case VideoFrameType::kEmptyFrame: + return "empty"; + case VideoFrameType::kVideoFrameKey: + return "key"; + case VideoFrameType::kVideoFrameDelta: + return "delta"; } +// Mozilla: +// gcc-8 complains about a constexpr function calling a non-constexpr ditto. +#if defined(__clang__) || (defined(__GNUC__) && __GNUC__ >= 9) + RTC_CHECK_NOTREACHED(); +#endif return ""; } diff --git a/third_party/libwebrtc/api/video/video_frame_type_gn/moz.build b/third_party/libwebrtc/api/video/video_frame_type_gn/moz.build index 46adc4af5f34..a2a86f471693 100644 --- a/third_party/libwebrtc/api/video/video_frame_type_gn/moz.build +++ b/third_party/libwebrtc/api/video/video_frame_type_gn/moz.build @@ -53,6 +53,10 @@ if CONFIG["OS_TARGET"] == "Android": DEFINES["__STDC_CONSTANT_MACROS"] = True DEFINES["__STDC_FORMAT_MACROS"] = True + OS_LIBS += [ + "log" + ] + if CONFIG["OS_TARGET"] == "Darwin": DEFINES["WEBRTC_MAC"] = True diff --git a/third_party/libwebrtc/api/video/video_rtp_headers_gn/moz.build b/third_party/libwebrtc/api/video/video_rtp_headers_gn/moz.build index ebe08ca9fdf2..498b48088003 100644 --- a/third_party/libwebrtc/api/video/video_rtp_headers_gn/moz.build +++ b/third_party/libwebrtc/api/video/video_rtp_headers_gn/moz.build @@ -129,6 +129,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/video/video_stream_encoder_gn/moz.build b/third_party/libwebrtc/api/video/video_stream_encoder_gn/moz.build index 96a4350fec9b..e9d0fdfa24df 100644 --- a/third_party/libwebrtc/api/video/video_stream_encoder_gn/moz.build +++ b/third_party/libwebrtc/api/video/video_stream_encoder_gn/moz.build @@ -122,6 +122,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/api/video_codecs/h264_profile_level_id.cc b/third_party/libwebrtc/api/video_codecs/h264_profile_level_id.cc index 02b43ba4f29a..5844ca0e32ec 100644 --- a/third_party/libwebrtc/api/video_codecs/h264_profile_level_id.cc +++ b/third_party/libwebrtc/api/video_codecs/h264_profile_level_id.cc @@ -238,7 +238,8 @@ absl::optional H264ProfileLevelIdToString( } char str[7]; - snprintf(str, 7u, "%s%02x", profile_idc_iop_string, profile_level_id.level); + snprintf(str, 7u, "%s%02x", profile_idc_iop_string, + static_cast(profile_level_id.level)); return {str}; } diff --git a/third_party/libwebrtc/api/video_codecs/video_codecs_api_gn/moz.build b/third_party/libwebrtc/api/video_codecs/video_codecs_api_gn/moz.build index b834c671ea9e..0a2207ced912 100644 --- 
a/third_party/libwebrtc/api/video_codecs/video_codecs_api_gn/moz.build +++ b/third_party/libwebrtc/api/video_codecs/video_codecs_api_gn/moz.build @@ -136,6 +136,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/audio/channel_receive.cc b/third_party/libwebrtc/audio/channel_receive.cc index fc7f906e491c..b3aac7a1fbf1 100644 --- a/third_party/libwebrtc/audio/channel_receive.cc +++ b/third_party/libwebrtc/audio/channel_receive.cc @@ -70,7 +70,8 @@ acm2::AcmReceiver::Config AcmConfig( rtc::scoped_refptr decoder_factory, absl::optional codec_pair_id, size_t jitter_buffer_max_packets, - bool jitter_buffer_fast_playout) { + bool jitter_buffer_fast_playout, + int jitter_buffer_min_delay_ms) { acm2::AcmReceiver::Config acm_config; acm_config.neteq_factory = neteq_factory; acm_config.decoder_factory = decoder_factory; @@ -78,6 +79,7 @@ acm2::AcmReceiver::Config AcmConfig( acm_config.neteq_config.max_packets_in_buffer = jitter_buffer_max_packets; acm_config.neteq_config.enable_fast_accelerate = jitter_buffer_fast_playout; acm_config.neteq_config.enable_muted_state = true; + acm_config.neteq_config.min_delay_ms = jitter_buffer_min_delay_ms; return acm_config; } @@ -293,7 +295,8 @@ class ChannelReceive : public ChannelReceiveInterface, webrtc::AbsoluteCaptureTimeInterpolator absolute_capture_time_interpolator_ RTC_GUARDED_BY(worker_thread_checker_); - webrtc::CaptureClockOffsetUpdater capture_clock_offset_updater_; + webrtc::CaptureClockOffsetUpdater capture_clock_offset_updater_ + RTC_GUARDED_BY(ts_stats_lock_); rtc::scoped_refptr frame_transformer_delegate_; @@ -475,6 +478,7 @@ AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo( for (auto& packet_info : audio_frame->packet_infos_) { absl::optional local_capture_clock_offset_q32x32; if (packet_info.absolute_capture_time().has_value()) { + MutexLock lock(&ts_stats_lock_); local_capture_clock_offset_q32x32 = capture_clock_offset_updater_.AdjustEstimatedCaptureClockOffset( packet_info.absolute_capture_time() @@ -551,7 +555,8 @@ ChannelReceive::ChannelReceive( decoder_factory, codec_pair_id, jitter_buffer_max_packets, - jitter_buffer_fast_playout)), + jitter_buffer_fast_playout, + jitter_buffer_min_delay_ms)), _outputAudioLevel(), clock_(clock), ntp_estimator_(clock), diff --git a/third_party/libwebrtc/audio/channel_receive_frame_transformer_delegate.cc b/third_party/libwebrtc/audio/channel_receive_frame_transformer_delegate.cc index 8f817c6fe7d8..3cc9a0120438 100644 --- a/third_party/libwebrtc/audio/channel_receive_frame_transformer_delegate.cc +++ b/third_party/libwebrtc/audio/channel_receive_frame_transformer_delegate.cc @@ -50,6 +50,18 @@ class TransformableIncomingAudioFrame return header_.sequenceNumber; } + absl::optional AbsoluteCaptureTimestamp() const override { + // This could be extracted from received header extensions + extrapolation, + // if required in future, eg for being able to re-send received frames. + return absl::nullopt; + } + const RTPHeader& Header() const { return header_; } + + FrameType Type() const override { + return header_.extension.voiceActivity ? 
FrameType::kAudioFrameSpeech + : FrameType::kAudioFrameCN; + } + private: rtc::Buffer payload_; RTPHeader header_; @@ -106,6 +118,6 @@ void ChannelReceiveFrameTransformerDelegate::ReceiveFrame( auto* transformed_frame = static_cast(frame.get()); receive_frame_callback_(transformed_frame->GetData(), - transformed_frame->GetHeader()); + transformed_frame->Header()); } } // namespace webrtc diff --git a/third_party/libwebrtc/audio/channel_send_frame_transformer_delegate.cc b/third_party/libwebrtc/audio/channel_send_frame_transformer_delegate.cc index 93aaa4324d43..8c9725681bc0 100644 --- a/third_party/libwebrtc/audio/channel_send_frame_transformer_delegate.cc +++ b/third_party/libwebrtc/audio/channel_send_frame_transformer_delegate.cc @@ -15,6 +15,36 @@ namespace webrtc { namespace { +using IfaceFrameType = TransformableAudioFrameInterface::FrameType; + +IfaceFrameType InternalFrameTypeToInterfaceFrameType( + const AudioFrameType frame_type) { + switch (frame_type) { + case AudioFrameType::kEmptyFrame: + return IfaceFrameType::kEmptyFrame; + case AudioFrameType::kAudioFrameSpeech: + return IfaceFrameType::kAudioFrameSpeech; + case AudioFrameType::kAudioFrameCN: + return IfaceFrameType::kAudioFrameCN; + } + RTC_DCHECK_NOTREACHED(); + return IfaceFrameType::kEmptyFrame; +} + +AudioFrameType InterfaceFrameTypeToInternalFrameType( + const IfaceFrameType frame_type) { + switch (frame_type) { + case IfaceFrameType::kEmptyFrame: + return AudioFrameType::kEmptyFrame; + case IfaceFrameType::kAudioFrameSpeech: + return AudioFrameType::kAudioFrameSpeech; + case IfaceFrameType::kAudioFrameCN: + return AudioFrameType::kAudioFrameCN; + } + RTC_DCHECK_NOTREACHED(); + return AudioFrameType::kEmptyFrame; +} + class TransformableOutgoingAudioFrame : public TransformableAudioFrameInterface { public: @@ -44,11 +74,11 @@ class TransformableOutgoingAudioFrame uint32_t GetStartTimestamp() const { return rtp_start_timestamp_; } uint32_t GetSsrc() const override { return ssrc_; } - AudioFrameType GetFrameType() const { return frame_type_; } - uint8_t GetPayloadType() const override { return payload_type_; } - int64_t GetAbsoluteCaptureTimestampMs() const { - return absolute_capture_timestamp_ms_; + IfaceFrameType Type() const override { + return InternalFrameTypeToInterfaceFrameType(frame_type_); } + + uint8_t GetPayloadType() const override { return payload_type_; } Direction GetDirection() const override { return Direction::kSender; } // TODO(crbug.com/1453226): Remove once GetHeader() is removed from @@ -67,6 +97,10 @@ class TransformableOutgoingAudioFrame rtp_timestamp_ = timestamp - rtp_start_timestamp_; } + absl::optional AbsoluteCaptureTimestamp() const override { + return absolute_capture_timestamp_ms_; + } + private: AudioFrameType frame_type_; uint8_t payload_type_; @@ -140,24 +174,21 @@ void ChannelSendFrameTransformerDelegate::SendFrame( return; auto* transformed_frame = static_cast(frame.get()); - send_frame_callback_(transformed_frame->GetFrameType(), - transformed_frame->GetPayloadType(), - transformed_frame->GetTimestamp() - - transformed_frame->GetStartTimestamp(), - transformed_frame->GetData(), - transformed_frame->GetAbsoluteCaptureTimestampMs()); + send_frame_callback_( + InterfaceFrameTypeToInternalFrameType(transformed_frame->Type()), + transformed_frame->GetPayloadType(), + transformed_frame->GetTimestamp() - + transformed_frame->GetStartTimestamp(), + transformed_frame->GetData(), + *transformed_frame->AbsoluteCaptureTimestamp()); } std::unique_ptr CloneSenderAudioFrame( 
TransformableAudioFrameInterface* original) { - AudioFrameType audio_frame_type = - original->GetHeader().extension.voiceActivity - ? AudioFrameType::kAudioFrameSpeech - : AudioFrameType::kAudioFrameCN; - // TODO(crbug.com/webrtc/14949): Ensure the correct timestamps are passed. return std::make_unique( - audio_frame_type, original->GetPayloadType(), original->GetTimestamp(), + InterfaceFrameTypeToInternalFrameType(original->Type()), + original->GetPayloadType(), original->GetTimestamp(), /*rtp_start_timestamp=*/0u, original->GetData().data(), original->GetData().size(), original->GetTimestamp(), original->GetSsrc()); diff --git a/third_party/libwebrtc/call/BUILD.gn b/third_party/libwebrtc/call/BUILD.gn index 20e9241a8303..161cce992fd2 100644 --- a/third_party/libwebrtc/call/BUILD.gn +++ b/third_party/libwebrtc/call/BUILD.gn @@ -71,6 +71,7 @@ rtc_library("call_interfaces") { "../modules/audio_processing", "../modules/audio_processing:api", "../modules/audio_processing:audio_processing_statistics", + "../modules/rtp_rtcp", "../modules/rtp_rtcp:rtp_rtcp_format", "../rtc_base:audio_format_to_string", "../rtc_base:checks", diff --git a/third_party/libwebrtc/call/audio_sender_interface_gn/moz.build b/third_party/libwebrtc/call/audio_sender_interface_gn/moz.build index a76d7380394a..c236d2d56fdb 100644 --- a/third_party/libwebrtc/call/audio_sender_interface_gn/moz.build +++ b/third_party/libwebrtc/call/audio_sender_interface_gn/moz.build @@ -122,6 +122,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/call/bitrate_estimator_tests.cc b/third_party/libwebrtc/call/bitrate_estimator_tests.cc index f44cdfd5099b..f17a037ed297 100644 --- a/third_party/libwebrtc/call/bitrate_estimator_tests.cc +++ b/third_party/libwebrtc/call/bitrate_estimator_tests.cc @@ -186,6 +186,7 @@ class BitrateEstimatorTest : public test::CallTest { test::VideoTestConstants::kDefaultFramerate, *test->task_queue_factory_); frame_generator_capturer_->Init(); + frame_generator_capturer_->Start(); send_stream_->SetSource(frame_generator_capturer_.get(), DegradationPreference::MAINTAIN_FRAMERATE); send_stream_->Start(); diff --git a/third_party/libwebrtc/call/call.cc b/third_party/libwebrtc/call/call.cc index 0421a21ee3b1..bca607204239 100644 --- a/third_party/libwebrtc/call/call.cc +++ b/third_party/libwebrtc/call/call.cc @@ -1274,7 +1274,7 @@ void Call::OnSentPacket(const rtc::SentPacket& sent_packet) { // on a ProcessThread. This is alright as is since we forward the call to // implementations that either just do a PostTask or use locking. 
video_send_delay_stats_->OnSentPacket(sent_packet.packet_id, - clock_->TimeInMilliseconds()); + clock_->CurrentTime()); transport_send_->OnSentPacket(sent_packet); } diff --git a/third_party/libwebrtc/call/call_interfaces_gn/moz.build b/third_party/libwebrtc/call/call_interfaces_gn/moz.build index 2a5fa2a72c2f..ab6f9566ac93 100644 --- a/third_party/libwebrtc/call/call_interfaces_gn/moz.build +++ b/third_party/libwebrtc/call/call_interfaces_gn/moz.build @@ -64,6 +64,7 @@ if CONFIG["OS_TARGET"] == "Android": DEFINES["__STDC_FORMAT_MACROS"] = True OS_LIBS += [ + "GLESv2", "log" ] diff --git a/third_party/libwebrtc/call/fake_network_pipe.cc b/third_party/libwebrtc/call/fake_network_pipe.cc index 8879927a5bdb..f5f3658022f7 100644 --- a/third_party/libwebrtc/call/fake_network_pipe.cc +++ b/third_party/libwebrtc/call/fake_network_pipe.cc @@ -356,10 +356,12 @@ void FakeNetworkPipe::DeliverNetworkPacket(NetworkPacket* packet) { return; } if (packet->is_rtcp()) { - transport->SendRtcp(packet->data(), packet->data_length()); + transport->SendRtcp( + rtc::MakeArrayView(packet->data(), packet->data_length())); } else { - transport->SendRtp(packet->data(), packet->data_length(), - packet->packet_options()); + transport->SendRtp( + rtc::MakeArrayView(packet->data(), packet->data_length()), + packet->packet_options()); } } else if (receiver_) { int64_t packet_time_us = packet->packet_time_us().value_or(-1); diff --git a/third_party/libwebrtc/call/flexfec_receive_stream.h b/third_party/libwebrtc/call/flexfec_receive_stream.h index 4f6fe44afabe..c5ac0f9fb6e0 100644 --- a/third_party/libwebrtc/call/flexfec_receive_stream.h +++ b/third_party/libwebrtc/call/flexfec_receive_stream.h @@ -21,6 +21,7 @@ #include "api/rtp_parameters.h" #include "call/receive_stream.h" #include "call/rtp_packet_sink_interface.h" +#include "modules/rtp_rtcp/include/receive_statistics.h" namespace webrtc { @@ -69,6 +70,8 @@ class FlexfecReceiveStream : public RtpPacketSinkInterface, // Called to change the payload type after initialization. 
virtual void SetPayloadType(int payload_type) = 0; virtual int payload_type() const = 0; + + virtual const ReceiveStatistics* GetStats() const = 0; }; } // namespace webrtc diff --git a/third_party/libwebrtc/call/flexfec_receive_stream_impl.cc b/third_party/libwebrtc/call/flexfec_receive_stream_impl.cc index cbfbe55297ee..e20f1b6ac528 100644 --- a/third_party/libwebrtc/call/flexfec_receive_stream_impl.cc +++ b/third_party/libwebrtc/call/flexfec_receive_stream_impl.cc @@ -21,7 +21,6 @@ #include "api/rtp_parameters.h" #include "call/rtp_stream_receiver_controller_interface.h" #include "modules/rtp_rtcp/include/flexfec_receiver.h" -#include "modules/rtp_rtcp/include/receive_statistics.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" diff --git a/third_party/libwebrtc/call/flexfec_receive_stream_impl.h b/third_party/libwebrtc/call/flexfec_receive_stream_impl.h index 73cdff8b73bf..5ce2cb6f0ed9 100644 --- a/third_party/libwebrtc/call/flexfec_receive_stream_impl.h +++ b/third_party/libwebrtc/call/flexfec_receive_stream_impl.h @@ -70,6 +70,10 @@ class FlexfecReceiveStreamImpl : public FlexfecReceiveStream { rtp_rtcp_->SetRTCPStatus(mode); } + const ReceiveStatistics* GetStats() const override { + return rtp_receive_statistics_.get(); + } + private: RTC_NO_UNIQUE_ADDRESS SequenceChecker packet_sequence_checker_; diff --git a/third_party/libwebrtc/call/rtp_payload_params.cc b/third_party/libwebrtc/call/rtp_payload_params.cc index f0347bc74ea2..e9bfb30ae377 100644 --- a/third_party/libwebrtc/call/rtp_payload_params.cc +++ b/third_party/libwebrtc/call/rtp_payload_params.cc @@ -542,7 +542,8 @@ void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info, const auto& vp9_header = absl::get(rtp_video_header.video_type_header); const int num_spatial_layers = kMaxSimulatedSpatialLayers; - const int num_active_spatial_layers = vp9_header.num_spatial_layers; + const int first_active_spatial_id = vp9_header.first_active_layer; + const int last_active_spatial_id = vp9_header.num_spatial_layers - 1; const int num_temporal_layers = kMaxTemporalStreams; static_assert(num_spatial_layers <= RtpGenericFrameDescriptor::kMaxSpatialLayers); @@ -556,10 +557,16 @@ void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info, int temporal_index = vp9_header.temporal_idx != kNoTemporalIdx ? vp9_header.temporal_idx : 0; - if (spatial_index >= num_spatial_layers || - temporal_index >= num_temporal_layers || - num_active_spatial_layers > num_spatial_layers) { + if (!(temporal_index < num_temporal_layers && + first_active_spatial_id <= spatial_index && + spatial_index <= last_active_spatial_id && + last_active_spatial_id < num_spatial_layers)) { // Prefer to generate no generic layering than an inconsistent one. + RTC_LOG(LS_ERROR) << "Inconsistent layer id sid=" << spatial_index + << ",tid=" << temporal_index + << " in VP9 header. 
Active spatial ids: [" + << first_active_spatial_id << "," + << last_active_spatial_id << "]"; return; } @@ -642,28 +649,39 @@ void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info, } result.active_decode_targets = - ((uint32_t{1} << num_temporal_layers * num_active_spatial_layers) - 1); + ((uint32_t{1} << num_temporal_layers * (last_active_spatial_id + 1)) - + 1) ^ + ((uint32_t{1} << num_temporal_layers * first_active_spatial_id) - 1); // Calculate chains, asuming chain includes all frames with temporal_id = 0 if (!vp9_header.inter_pic_predicted && !vp9_header.inter_layer_predicted) { // Assume frames without dependencies also reset chains. - for (int sid = spatial_index; sid < num_spatial_layers; ++sid) { + for (int sid = spatial_index; sid <= last_active_spatial_id; ++sid) { chain_last_frame_id_[sid] = -1; } } result.chain_diffs.resize(num_spatial_layers, 0); - for (int sid = 0; sid < num_active_spatial_layers; ++sid) { + for (int sid = first_active_spatial_id; sid <= last_active_spatial_id; + ++sid) { if (chain_last_frame_id_[sid] == -1) { result.chain_diffs[sid] = 0; continue; } - result.chain_diffs[sid] = shared_frame_id - chain_last_frame_id_[sid]; + int64_t chain_diff = shared_frame_id - chain_last_frame_id_[sid]; + if (chain_diff >= 256) { + RTC_LOG(LS_ERROR) + << "Too many frames since last VP9 T0 frame for spatial layer #" + << sid << " at frame#" << shared_frame_id; + chain_last_frame_id_[sid] = -1; + chain_diff = 0; + } + result.chain_diffs[sid] = chain_diff; } if (temporal_index == 0) { chain_last_frame_id_[spatial_index] = shared_frame_id; if (!vp9_header.non_ref_for_inter_layer_pred) { - for (int sid = spatial_index + 1; sid < num_spatial_layers; ++sid) { + for (int sid = spatial_index + 1; sid <= last_active_spatial_id; ++sid) { chain_last_frame_id_[sid] = shared_frame_id; } } diff --git a/third_party/libwebrtc/call/rtp_payload_params_unittest.cc b/third_party/libwebrtc/call/rtp_payload_params_unittest.cc index 8481b5f93f21..45f00061ee78 100644 --- a/third_party/libwebrtc/call/rtp_payload_params_unittest.cc +++ b/third_party/libwebrtc/call/rtp_payload_params_unittest.cc @@ -1136,6 +1136,170 @@ TEST(RtpPayloadParamsVp9ToGenericTest, EXPECT_EQ(headers[2].generic->chain_diffs[1], 2); } +TEST(RtpPayloadParamsVp9ToGenericTest, ChangeFirstActiveLayer) { + // S2 4---5 + // + // S1 1---3 7 + // + // S0 0---2 6 + RtpPayloadState state; + RtpPayloadParams params(/*ssrc=*/123, &state, FieldTrialBasedConfig()); + + EncodedImage image; + CodecSpecificInfo info; + info.codecType = kVideoCodecVP9; + info.codecSpecific.VP9.flexible_mode = true; + info.codecSpecific.VP9.first_frame_in_picture = true; + info.codecSpecific.VP9.inter_layer_predicted = false; + info.codecSpecific.VP9.non_ref_for_inter_layer_pred = true; + info.codecSpecific.VP9.first_frame_in_picture = true; + info.end_of_picture = true; + + RTPVideoHeader headers[8]; + // S0 key frame. + info.codecSpecific.VP9.num_spatial_layers = 2; + info.codecSpecific.VP9.first_active_layer = 0; + image._frameType = VideoFrameType::kVideoFrameKey; + image.SetSpatialIndex(0); + info.codecSpecific.VP9.inter_pic_predicted = false; + info.codecSpecific.VP9.num_ref_pics = 0; + headers[0] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/0); + + // S1 key frame. 
+ image._frameType = VideoFrameType::kVideoFrameKey; + image.SetSpatialIndex(1); + info.codecSpecific.VP9.inter_pic_predicted = false; + info.codecSpecific.VP9.num_ref_pics = 0; + headers[1] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/1); + + // S0 delta frame. + image._frameType = VideoFrameType::kVideoFrameDelta; + image.SetSpatialIndex(0); + info.codecSpecific.VP9.inter_pic_predicted = true; + info.codecSpecific.VP9.num_ref_pics = 1; + info.codecSpecific.VP9.p_diff[0] = 1; + headers[2] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/2); + + // S1 delta frame. + image._frameType = VideoFrameType::kVideoFrameDelta; + info.codecSpecific.VP9.inter_pic_predicted = true; + info.codecSpecific.VP9.num_ref_pics = 1; + info.codecSpecific.VP9.p_diff[0] = 1; + headers[3] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/3); + + // S2 key frame + info.codecSpecific.VP9.num_spatial_layers = 3; + info.codecSpecific.VP9.first_active_layer = 2; + image._frameType = VideoFrameType::kVideoFrameKey; + image.SetSpatialIndex(2); + info.codecSpecific.VP9.inter_pic_predicted = false; + info.codecSpecific.VP9.num_ref_pics = 0; + headers[4] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/4); + + // S2 delta frame. + image._frameType = VideoFrameType::kVideoFrameDelta; + info.codecSpecific.VP9.inter_pic_predicted = true; + info.codecSpecific.VP9.num_ref_pics = 1; + info.codecSpecific.VP9.p_diff[0] = 1; + headers[5] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/5); + + // S0 key frame after pause. + info.codecSpecific.VP9.num_spatial_layers = 2; + info.codecSpecific.VP9.first_active_layer = 0; + image._frameType = VideoFrameType::kVideoFrameKey; + image.SetSpatialIndex(0); + info.codecSpecific.VP9.inter_pic_predicted = false; + info.codecSpecific.VP9.num_ref_pics = 0; + headers[6] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/6); + + // S1 key frame. + image._frameType = VideoFrameType::kVideoFrameKey; + image.SetSpatialIndex(1); + info.codecSpecific.VP9.inter_pic_predicted = false; + info.codecSpecific.VP9.num_ref_pics = 0; + headers[7] = params.GetRtpVideoHeader(image, &info, /*shared_frame_id=*/7); + + ASSERT_TRUE(headers[0].generic); + int num_decode_targets = headers[0].generic->decode_target_indications.size(); + int num_chains = headers[0].generic->chain_diffs.size(); + // Rely on implementation detail there are always kMaxTemporalStreams temporal + // layers. In particular assume Decode Target#0 matches layer S0T0, and + // Decode Target#kMaxTemporalStreams matches layer S1T0. 
+ static constexpr int kS0T0 = 0; + static constexpr int kS1T0 = kMaxTemporalStreams; + static constexpr int kS2T0 = 2 * kMaxTemporalStreams; + ASSERT_GE(num_decode_targets, 3); + ASSERT_GE(num_chains, 3); + + for (int frame_idx = 0; frame_idx < int{std::size(headers)}; ++frame_idx) { + const RTPVideoHeader& header = headers[frame_idx]; + ASSERT_TRUE(header.generic); + EXPECT_EQ(header.generic->temporal_index, 0); + ASSERT_THAT(header.generic->decode_target_indications, + SizeIs(num_decode_targets)); + ASSERT_THAT(header.generic->chain_diffs, SizeIs(num_chains)); + EXPECT_EQ(header.generic->frame_id, frame_idx); + } + + EXPECT_TRUE(headers[0].generic->active_decode_targets[kS0T0]); + EXPECT_TRUE(headers[0].generic->active_decode_targets[kS1T0]); + EXPECT_FALSE(headers[0].generic->active_decode_targets[kS2T0]); + + EXPECT_FALSE(headers[4].generic->active_decode_targets[kS0T0]); + EXPECT_FALSE(headers[4].generic->active_decode_targets[kS1T0]); + EXPECT_TRUE(headers[4].generic->active_decode_targets[kS2T0]); + + EXPECT_EQ(headers[1].generic->active_decode_targets, + headers[0].generic->active_decode_targets); + + EXPECT_EQ(headers[2].generic->active_decode_targets, + headers[0].generic->active_decode_targets); + + EXPECT_EQ(headers[3].generic->active_decode_targets, + headers[0].generic->active_decode_targets); + + EXPECT_EQ(headers[5].generic->active_decode_targets, + headers[4].generic->active_decode_targets); + + EXPECT_EQ(headers[6].generic->active_decode_targets, + headers[0].generic->active_decode_targets); + + EXPECT_EQ(headers[7].generic->active_decode_targets, + headers[0].generic->active_decode_targets); + + EXPECT_EQ(headers[0].generic->chain_diffs[0], 0); + EXPECT_EQ(headers[0].generic->chain_diffs[1], 0); + EXPECT_EQ(headers[0].generic->chain_diffs[2], 0); + + EXPECT_EQ(headers[1].generic->chain_diffs[0], 1); + EXPECT_EQ(headers[1].generic->chain_diffs[1], 0); + EXPECT_EQ(headers[1].generic->chain_diffs[2], 0); + + EXPECT_EQ(headers[2].generic->chain_diffs[0], 2); + EXPECT_EQ(headers[2].generic->chain_diffs[1], 1); + EXPECT_EQ(headers[2].generic->chain_diffs[2], 0); + + EXPECT_EQ(headers[3].generic->chain_diffs[0], 1); + EXPECT_EQ(headers[3].generic->chain_diffs[1], 2); + EXPECT_EQ(headers[3].generic->chain_diffs[2], 0); + + EXPECT_EQ(headers[4].generic->chain_diffs[0], 0); + EXPECT_EQ(headers[4].generic->chain_diffs[1], 0); + EXPECT_EQ(headers[4].generic->chain_diffs[2], 0); + + EXPECT_EQ(headers[5].generic->chain_diffs[0], 0); + EXPECT_EQ(headers[5].generic->chain_diffs[1], 0); + EXPECT_EQ(headers[5].generic->chain_diffs[2], 1); + + EXPECT_EQ(headers[6].generic->chain_diffs[0], 0); + EXPECT_EQ(headers[6].generic->chain_diffs[1], 0); + EXPECT_EQ(headers[6].generic->chain_diffs[2], 0); + + EXPECT_EQ(headers[7].generic->chain_diffs[0], 1); + EXPECT_EQ(headers[7].generic->chain_diffs[1], 0); + EXPECT_EQ(headers[7].generic->chain_diffs[2], 0); +} + class RtpPayloadParamsH264ToGenericTest : public ::testing::Test { public: enum LayerSync { kNoSync, kSync }; diff --git a/third_party/libwebrtc/call/rtp_video_sender.cc b/third_party/libwebrtc/call/rtp_video_sender.cc index 35288457f906..b7616d1a1fa7 100644 --- a/third_party/libwebrtc/call/rtp_video_sender.cc +++ b/third_party/libwebrtc/call/rtp_video_sender.cc @@ -21,6 +21,7 @@ #include "api/array_view.h" #include "api/task_queue/task_queue_factory.h" #include "api/transport/field_trial_based_config.h" +#include "api/units/time_delta.h" #include "api/video_codecs/video_codec.h" #include "call/rtp_transport_controller_send_interface.h" 
#include "modules/pacing/packet_router.h" @@ -497,7 +498,7 @@ void RtpVideoSender::SetActiveModules(const std::vector& active_modules) { void RtpVideoSender::SetActiveModulesLocked( const std::vector& active_modules) { RTC_DCHECK_RUN_ON(&transport_checker_); - RTC_DCHECK_EQ(rtp_streams_.size(), active_modules.size()); + RTC_CHECK_EQ(rtp_streams_.size(), active_modules.size()); active_ = false; for (size_t i = 0; i < active_modules.size(); ++i) { if (active_modules[i]) { @@ -591,10 +592,10 @@ EncodedImageCallback::Result RtpVideoSender::OnEncodedImage( return Result(Result::ERROR_SEND_FAILED); } - absl::optional expected_retransmission_time_ms; + TimeDelta expected_retransmission_time = TimeDelta::PlusInfinity(); if (encoded_image.RetransmissionAllowed()) { - expected_retransmission_time_ms = - rtp_streams_[simulcast_index].rtp_rtcp->ExpectedRetransmissionTimeMs(); + expected_retransmission_time = + rtp_streams_[simulcast_index].rtp_rtcp->ExpectedRetransmissionTime(); } if (IsFirstFrameOfACodedVideoSequence(encoded_image, codec_specific_info)) { @@ -623,7 +624,7 @@ EncodedImageCallback::Result RtpVideoSender::OnEncodedImage( rtp_config_.payload_type, codec_type_, rtp_timestamp, encoded_image, params_[simulcast_index].GetRtpVideoHeader( encoded_image, codec_specific_info, shared_frame_id_), - expected_retransmission_time_ms); + expected_retransmission_time); if (frame_count_observer_) { FrameCounts& counts = frame_counts_[simulcast_index]; if (encoded_image._frameType == VideoFrameType::kVideoFrameKey) { diff --git a/third_party/libwebrtc/call/version.cc b/third_party/libwebrtc/call/version.cc index 2d1db90079ba..b2f262f1e5c4 100644 --- a/third_party/libwebrtc/call/version.cc +++ b/third_party/libwebrtc/call/version.cc @@ -13,7 +13,7 @@ namespace webrtc { // The timestamp is always in UTC. -const char* const kSourceTimestamp = "WebRTC source stamp 2023-06-20T04:03:02"; +const char* const kSourceTimestamp = "WebRTC source stamp 2023-08-08T04:02:52"; void LoadWebRTCVersionInRegister() { // Using volatile to instruct the compiler to not optimize `p` away even diff --git a/third_party/libwebrtc/call/video_receive_stream.h b/third_party/libwebrtc/call/video_receive_stream.h index aafe47275df5..0dc7dee71d7f 100644 --- a/third_party/libwebrtc/call/video_receive_stream.h +++ b/third_party/libwebrtc/call/video_receive_stream.h @@ -89,7 +89,7 @@ class VideoReceiveStreamInterface : public MediaReceiveStreamInterface { uint32_t frames_rendered = 0; // Decoder stats. 
- std::string decoder_implementation_name = "unknown"; + absl::optional decoder_implementation_name; absl::optional power_efficient_decoder; FrameCounts frame_counts; int decode_ms = 0; diff --git a/third_party/libwebrtc/call/video_send_stream.h b/third_party/libwebrtc/call/video_send_stream.h index de18fc7b92e9..1a0261be1b2e 100644 --- a/third_party/libwebrtc/call/video_send_stream.h +++ b/third_party/libwebrtc/call/video_send_stream.h @@ -101,7 +101,7 @@ class VideoSendStream { Stats(); ~Stats(); std::string ToString(int64_t time_ms) const; - std::string encoder_implementation_name = "unknown"; + absl::optional encoder_implementation_name; double input_frame_rate = 0; int encode_frame_rate = 0; int avg_encode_time_ms = 0; diff --git a/third_party/libwebrtc/common_audio/third_party/ooura/README.chromium b/third_party/libwebrtc/common_audio/third_party/ooura/README.chromium index 9df2ddb5e289..459df13042c6 100644 --- a/third_party/libwebrtc/common_audio/third_party/ooura/README.chromium +++ b/third_party/libwebrtc/common_audio/third_party/ooura/README.chromium @@ -6,6 +6,7 @@ Date: 2018-06-19 License: Custome license License File: LICENSE Security Critical: yes +Shipped: yes Description: This is a package to calculate Discrete Fourier/Cosine/Sine Transforms of diff --git a/third_party/libwebrtc/common_audio/third_party/spl_sqrt_floor/README.chromium b/third_party/libwebrtc/common_audio/third_party/spl_sqrt_floor/README.chromium index b226490e85f8..b2c4309bd645 100644 --- a/third_party/libwebrtc/common_audio/third_party/spl_sqrt_floor/README.chromium +++ b/third_party/libwebrtc/common_audio/third_party/spl_sqrt_floor/README.chromium @@ -6,6 +6,7 @@ Date: 2018-03-22 License: Custom license License File: LICENSE Security Critical: yes +Shipped: yes Description: Sqrt routine, originally was posted to the USENET group comp.sys.arm on diff --git a/third_party/libwebrtc/common_video/common_video_gn/moz.build b/third_party/libwebrtc/common_video/common_video_gn/moz.build index d6fc7c3632a4..565886609808 100644 --- a/third_party/libwebrtc/common_video/common_video_gn/moz.build +++ b/third_party/libwebrtc/common_video/common_video_gn/moz.build @@ -138,6 +138,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/docs/bug-reporting.md b/third_party/libwebrtc/docs/bug-reporting.md index c21186a9b7dd..7948cda8b7f3 100644 --- a/third_party/libwebrtc/docs/bug-reporting.md +++ b/third_party/libwebrtc/docs/bug-reporting.md @@ -22,9 +22,10 @@ Anyone with a [Google account][1] can file bugs in the Chrome and WebRTC tracker * Identify which bug tracker to use: * If you're hitting a problem in Chrome, file the bug using the - [the Chromium issue wizard](https://chromiumbugs.appspot.com/?token=0) + [the Chromium issue wizard](https://crbug.com/new) Choose "Web Developer" and "API", then fill out the form. For the component choose * Blink>GetUserMedia for camera/microphone issues + * Blink>GetDisplayMedia for screen capture issues * Blink>MediaRecording for issues with the MediaRecorder API * Blink>WebRTC for issues with the RTCPeerConnection API This ensures the right people will look at your bug. @@ -51,10 +52,10 @@ Anyone with a [Google account][1] can file bugs in the Chrome and WebRTC tracker * Camera and microphone model and version (if applicable) - * For Chrome audio and video device issues, please run the tests at - . 
After the tests finish running, click the bug - icon at the top, download the report, and attach the report to the issue - tracker. + * Try reproducing with the minimal samples at + https://webrtc.github.io/samples/src/content/getusermedia/audio/ + and + https://webrtc.github.io/samples/src/content/getusermedia/gum/ * Web site URL @@ -76,17 +77,19 @@ Anyone with a [Google account][1] can file bugs in the Chrome and WebRTC tracker * For **connectivity** issues on Chrome, ensure **chrome://webrtc-internals** is open in another tab before starting the call and while the call is in progress, - * expand the **Create Dump** section, + * expand the **Create a WebRTC-Internals dump** section, - * click the **Download the PeerConnection updates and stats data** button. + * click the **Download the webrtc-internals dump** button. You will be prompted to save the dump to your local machine. Please - attach that dump to the bug report. + attach that dump to the bug report. You can inspect the dump and + remove any information you consider personally identifiable such as + IP addresses. * For **audio quality** issues on Chrome, while the call is in progress, * please open **chrome://webrtc-internals** in another tab, - * expand the **Create Dump** section, + * expand the **Create a WebRTC-Internals dump** section, * fill in the **Enable diagnostic audio recordings** checkbox. You will be prompted to save the recording to your local machine. After ending the diff --git a/third_party/libwebrtc/docs/native-code/development/contributing.md b/third_party/libwebrtc/docs/native-code/development/contributing.md index 762e9a9c3665..918948166ec6 100644 --- a/third_party/libwebrtc/docs/native-code/development/contributing.md +++ b/third_party/libwebrtc/docs/native-code/development/contributing.md @@ -38,6 +38,7 @@ You will not have to repeat the above. 
After all that, you’re ready to upload: [AUTHORS]: https://webrtc.googlesource.com/src/+/refs/heads/main/AUTHORS [new-password]: https://webrtc.googlesource.com/new-password [discuss-webrtc]: https://groups.google.com/forum/#!forum/discuss-webrtc +[Chromium recommendations for code reviews]: https://chromium.googlesource.com/chromium/src/+/main/docs/cl_tips.md ### Uploading your First Patch Now that you have your account set up, you can do the actual upload: diff --git a/third_party/libwebrtc/examples/BUILD.gn b/third_party/libwebrtc/examples/BUILD.gn index 9fa5d6f6187c..6ae2b71d854d 100644 --- a/third_party/libwebrtc/examples/BUILD.gn +++ b/third_party/libwebrtc/examples/BUILD.gn @@ -352,7 +352,7 @@ if (is_ios || (is_mac && target_cpu != "x86")) { "../sdk:videocapture_objc", "../sdk:videocodec_objc", ] - if (rtc_ios_macos_use_opengl_rendering) { + if (rtc_ios_use_opengl_rendering) { deps += [ "../sdk:opengl_ui_objc" ] } @@ -509,7 +509,7 @@ if (is_ios || (is_mac && target_cpu != "x86")) { "../sdk:videotoolbox_objc", ] - if (rtc_ios_macos_use_opengl_rendering) { + if (rtc_ios_use_opengl_rendering) { deps += [ "../sdk:opengl_ui_objc" ] } } @@ -548,7 +548,6 @@ if (is_ios || (is_mac && target_cpu != "x86")) { "../sdk:helpers_objc", "../sdk:mediaconstraints_objc", "../sdk:metal_objc", - "../sdk:opengl_ui_objc", "../sdk:peerconnectionfactory_base_objc", "../sdk:peerconnectionfactory_base_objc", "../sdk:videocapture_objc", diff --git a/third_party/libwebrtc/examples/androidnativeapi/BUILD.gn b/third_party/libwebrtc/examples/androidnativeapi/BUILD.gn index e4c48a2b3612..9aba1fbd92bd 100644 --- a/third_party/libwebrtc/examples/androidnativeapi/BUILD.gn +++ b/third_party/libwebrtc/examples/androidnativeapi/BUILD.gn @@ -15,7 +15,6 @@ if (is_android) { deps = [ ":resources", - "//modules/audio_device:audio_device_java", "//rtc_base:base_java", "//sdk/android:camera_java", "//sdk/android:surfaceviewrenderer_java", diff --git a/third_party/libwebrtc/examples/androidvoip/BUILD.gn b/third_party/libwebrtc/examples/androidvoip/BUILD.gn index 3d5186f279e2..b4d53f81be57 100644 --- a/third_party/libwebrtc/examples/androidvoip/BUILD.gn +++ b/third_party/libwebrtc/examples/androidvoip/BUILD.gn @@ -24,7 +24,6 @@ if (is_android) { deps = [ ":resources", - "//modules/audio_device:audio_device_java", "//rtc_base:base_java", "//sdk/android:base_java", "//sdk/android:java_audio_device_module_java", diff --git a/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCViewController.m b/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCViewController.m index 8904187215fd..982fa56b4316 100644 --- a/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCViewController.m +++ b/third_party/libwebrtc/examples/objc/AppRTCMobile/mac/APPRTCViewController.m @@ -14,7 +14,6 @@ #import "sdk/objc/api/peerconnection/RTCVideoTrack.h" #import "sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h" -#import "sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h" #import "ARDAppClient.h" #import "ARDCaptureController.h" @@ -45,7 +44,7 @@ static NSUInteger const kBottomViewHeight = 200; @end -@interface APPRTCMainView () +@interface APPRTCMainView () @end @implementation APPRTCMainView { NSScrollView* _scrollView; @@ -178,9 +177,9 @@ static NSUInteger const kBottomViewHeight = 200; [self setNeedsUpdateConstraints:YES]; } -#pragma mark - RTC_OBJC_TYPE(RTCNSGLVideoViewDelegate) +#pragma mark - RTCVideoViewDelegate -- (void)videoView:(RTC_OBJC_TYPE(RTCNSGLVideoView) *)videoView didChangeVideoSize:(NSSize)size { +- 
(void)videoView:(id)videoView didChangeVideoSize:(CGSize)size { if (videoView == _remoteVideoView) { _remoteVideoSize = size; } else if (videoView == _localVideoView) { @@ -216,38 +215,8 @@ static NSUInteger const kBottomViewHeight = 200; [_scrollView setDocumentView:_logView]; [self addSubview:_scrollView]; -// NOTE (daniela): Ignoring Clang diagonstic here. -// We're performing run time check to make sure class is available on runtime. -// If not we're providing sensible default. -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Wpartial-availability" - if ([RTC_OBJC_TYPE(RTCMTLNSVideoView) class] && - [RTC_OBJC_TYPE(RTCMTLNSVideoView) isMetalAvailable]) { - _remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect]; - _localVideoView = [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect]; - } -#pragma clang diagnostic pop - if (_remoteVideoView == nil) { - NSOpenGLPixelFormatAttribute attributes[] = { - NSOpenGLPFADoubleBuffer, - NSOpenGLPFADepthSize, 24, - NSOpenGLPFAOpenGLProfile, - NSOpenGLProfileVersion3_2Core, - 0 - }; - NSOpenGLPixelFormat* pixelFormat = - [[NSOpenGLPixelFormat alloc] initWithAttributes:attributes]; - - RTC_OBJC_TYPE(RTCNSGLVideoView)* remote = - [[RTC_OBJC_TYPE(RTCNSGLVideoView) alloc] initWithFrame:NSZeroRect pixelFormat:pixelFormat]; - remote.delegate = self; - _remoteVideoView = remote; - - RTC_OBJC_TYPE(RTCNSGLVideoView)* local = - [[RTC_OBJC_TYPE(RTCNSGLVideoView) alloc] initWithFrame:NSZeroRect pixelFormat:pixelFormat]; - local.delegate = self; - _localVideoView = local; - } + _remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect]; + _localVideoView = [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect]; [_remoteVideoView setTranslatesAutoresizingMaskIntoConstraints:NO]; [self addSubview:_remoteVideoView]; @@ -269,7 +238,7 @@ static NSUInteger const kBottomViewHeight = 200; _connectButton = [[NSButton alloc] initWithFrame:NSZeroRect]; [_connectButton setTranslatesAutoresizingMaskIntoConstraints:NO]; _connectButton.title = @"Start call"; - _connectButton.bezelStyle = NSRoundedBezelStyle; + _connectButton.bezelStyle = NSBezelStyleSmallSquare; _connectButton.target = self; _connectButton.action = @selector(startCall:); [_actionItemsView addSubview:_connectButton]; @@ -277,7 +246,7 @@ static NSUInteger const kBottomViewHeight = 200; _loopbackButton = [[NSButton alloc] initWithFrame:NSZeroRect]; [_loopbackButton setTranslatesAutoresizingMaskIntoConstraints:NO]; _loopbackButton.title = @"Loopback"; - [_loopbackButton setButtonType:NSSwitchButton]; + [_loopbackButton setButtonType:NSButtonTypeSwitch]; [_actionItemsView addSubview:_loopbackButton]; } diff --git a/third_party/libwebrtc/infra/config/config.star b/third_party/libwebrtc/infra/config/config.star index 35ed6ee20139..f7a1827d2115 100755 --- a/third_party/libwebrtc/infra/config/config.star +++ b/third_party/libwebrtc/infra/config/config.star @@ -693,14 +693,12 @@ ci_builder("Android32", "Android|arm|rel") try_builder("android_arm_rel") ci_builder("Android32 Builder arm", "Android|arm|size", perf_cat = "Android|arm|Builder|", prioritized = True) try_builder("android_compile_arm_rel") -perf_builder("Perf Android32 (O Pixel2)", "Android|arm|Tester|O Pixel2", triggered_by = ["Android32 Builder arm"]) perf_builder("Perf Android32 (R Pixel5)", "Android|arm|Tester|R Pixel5", triggered_by = ["Android32 Builder arm"]) try_builder("android_compile_arm64_dbg", cq = None) try_builder("android_arm64_dbg", cq = 
None) ci_builder("Android64", "Android|arm64|rel") try_builder("android_arm64_rel") ci_builder("Android64 Builder arm64", "Android|arm64|size", perf_cat = "Android|arm64|Builder|", prioritized = True) -perf_builder("Perf Android64 (O Pixel2)", "Android|arm64|Tester|O Pixel2", triggered_by = ["Android64 Builder arm64"]) perf_builder("Perf Android64 (R Pixel5)", "Android|arm64|Tester|R Pixel5", triggered_by = ["Android64 Builder arm64"]) try_builder("android_compile_arm64_rel") ci_builder("Android64 Builder x64 (dbg)", "Android|x64|dbg") diff --git a/third_party/libwebrtc/infra/config/cr-buildbucket.cfg b/third_party/libwebrtc/infra/config/cr-buildbucket.cfg index 6555031b5dbd..3f387cf506d2 100644 --- a/third_party/libwebrtc/infra/config/cr-buildbucket.cfg +++ b/third_party/libwebrtc/infra/config/cr-buildbucket.cfg @@ -2278,52 +2278,6 @@ buckets { } } } - builders { - name: "Perf Android32 (O Pixel2)" - swarming_host: "chromium-swarm.appspot.com" - swarming_tags: "vpython:native-python-wrapper" - dimensions: "cores:8" - dimensions: "os:Linux" - dimensions: "pool:luci.webrtc.perf" - exe { - cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" - cipd_version: "refs/heads/main" - cmd: "luciexe" - } - properties: - '{' - ' "$build/reclient": {' - ' "instance": "rbe-webrtc-trusted",' - ' "metrics_project": "chromium-reclient-metrics"' - ' },' - ' "$recipe_engine/resultdb/test_presentation": {' - ' "column_keys": [],' - ' "grouping_keys": [' - ' "status",' - ' "v.test_suite"' - ' ]' - ' },' - ' "builder_group": "client.webrtc.perf",' - ' "recipe": "webrtc/standalone"' - '}' - priority: 30 - execution_timeout_secs: 10800 - build_numbers: YES - service_account: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" - experiments { - key: "luci.recipes.use_python3" - value: 100 - } - resultdb { - enable: true - bq_exports { - project: "webrtc-ci" - dataset: "resultdb" - table: "perf_test_results" - test_results {} - } - } - } builders { name: "Perf Android32 (R Pixel5)" swarming_host: "chromium-swarm.appspot.com" @@ -2370,52 +2324,6 @@ buckets { } } } - builders { - name: "Perf Android64 (O Pixel2)" - swarming_host: "chromium-swarm.appspot.com" - swarming_tags: "vpython:native-python-wrapper" - dimensions: "cores:8" - dimensions: "os:Linux" - dimensions: "pool:luci.webrtc.perf" - exe { - cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" - cipd_version: "refs/heads/main" - cmd: "luciexe" - } - properties: - '{' - ' "$build/reclient": {' - ' "instance": "rbe-webrtc-trusted",' - ' "metrics_project": "chromium-reclient-metrics"' - ' },' - ' "$recipe_engine/resultdb/test_presentation": {' - ' "column_keys": [],' - ' "grouping_keys": [' - ' "status",' - ' "v.test_suite"' - ' ]' - ' },' - ' "builder_group": "client.webrtc.perf",' - ' "recipe": "webrtc/standalone"' - '}' - priority: 30 - execution_timeout_secs: 10800 - build_numbers: YES - service_account: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" - experiments { - key: "luci.recipes.use_python3" - value: 100 - } - resultdb { - enable: true - bq_exports { - project: "webrtc-ci" - dataset: "resultdb" - table: "perf_test_results" - test_results {} - } - } - } builders { name: "Perf Android64 (R Pixel5)" swarming_host: "chromium-swarm.appspot.com" diff --git a/third_party/libwebrtc/infra/config/luci-milo.cfg b/third_party/libwebrtc/infra/config/luci-milo.cfg index 191ad18051d1..c3319ddb636c 100644 --- a/third_party/libwebrtc/infra/config/luci-milo.cfg +++ 
b/third_party/libwebrtc/infra/config/luci-milo.cfg @@ -282,11 +282,6 @@ consoles { name: "buildbucket/luci.webrtc.perf/Android32 Builder arm" category: "Android|arm|Builder" } - builders { - name: "buildbucket/luci.webrtc.perf/Perf Android32 (O Pixel2)" - category: "Android|arm|Tester" - short_name: "O Pixel2" - } builders { name: "buildbucket/luci.webrtc.perf/Perf Android32 (R Pixel5)" category: "Android|arm|Tester" @@ -296,11 +291,6 @@ consoles { name: "buildbucket/luci.webrtc.perf/Android64 Builder arm64" category: "Android|arm64|Builder" } - builders { - name: "buildbucket/luci.webrtc.perf/Perf Android64 (O Pixel2)" - category: "Android|arm64|Tester" - short_name: "O Pixel2" - } builders { name: "buildbucket/luci.webrtc.perf/Perf Android64 (R Pixel5)" category: "Android|arm64|Tester" diff --git a/third_party/libwebrtc/infra/config/luci-notify.cfg b/third_party/libwebrtc/infra/config/luci-notify.cfg index 123d0eab099b..0c22a488fa40 100644 --- a/third_party/libwebrtc/infra/config/luci-notify.cfg +++ b/third_party/libwebrtc/infra/config/luci-notify.cfg @@ -1207,28 +1207,6 @@ notifiers { failed_step_regexp_exclude: ".*\\(experimental\\).*" } } -notifiers { - notifications { - on_new_status: INFRA_FAILURE - email { - recipients: "webrtc-troopers-robots@google.com" - } - template: "infra_failure" - } - notifications { - on_new_status: FAILURE - email { - recipients: "webrtc-troopers-robots@google.com" - } - template: "build_failure" - notify_blamelist {} - } - builders { - bucket: "perf" - name: "Perf Android32 (O Pixel2)" - repository: "https://webrtc.googlesource.com/src" - } -} notifiers { notifications { on_new_status: INFRA_FAILURE @@ -1251,28 +1229,6 @@ notifiers { repository: "https://webrtc.googlesource.com/src" } } -notifiers { - notifications { - on_new_status: INFRA_FAILURE - email { - recipients: "webrtc-troopers-robots@google.com" - } - template: "infra_failure" - } - notifications { - on_new_status: FAILURE - email { - recipients: "webrtc-troopers-robots@google.com" - } - template: "build_failure" - notify_blamelist {} - } - builders { - bucket: "perf" - name: "Perf Android64 (O Pixel2)" - repository: "https://webrtc.googlesource.com/src" - } -} notifiers { notifications { on_new_status: INFRA_FAILURE diff --git a/third_party/libwebrtc/infra/config/luci-scheduler.cfg b/third_party/libwebrtc/infra/config/luci-scheduler.cfg index f879b73a1bcf..c1bafa7a6622 100644 --- a/third_party/libwebrtc/infra/config/luci-scheduler.cfg +++ b/third_party/libwebrtc/infra/config/luci-scheduler.cfg @@ -365,24 +365,6 @@ job { builder: "MacArm64 Builder" } } -job { - id: "Perf Android32 (O Pixel2)" - realm: "perf" - acls { - role: TRIGGERER - granted_to: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" - } - acl_sets: "perf" - triggering_policy { - kind: LOGARITHMIC_BATCHING - log_base: 1.7 - } - buildbucket { - server: "cr-buildbucket.appspot.com" - bucket: "perf" - builder: "Perf Android32 (O Pixel2)" - } -} job { id: "Perf Android32 (R Pixel5)" realm: "perf" @@ -401,24 +383,6 @@ job { builder: "Perf Android32 (R Pixel5)" } } -job { - id: "Perf Android64 (O Pixel2)" - realm: "perf" - acls { - role: TRIGGERER - granted_to: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" - } - acl_sets: "perf" - triggering_policy { - kind: LOGARITHMIC_BATCHING - log_base: 1.7 - } - buildbucket { - server: "cr-buildbucket.appspot.com" - bucket: "perf" - builder: "Perf Android64 (O Pixel2)" - } -} job { id: "Perf Android64 (R Pixel5)" realm: "perf" diff --git 
a/third_party/libwebrtc/infra/config/project.cfg b/third_party/libwebrtc/infra/config/project.cfg index 640686739d96..07d86e6eed4c 100644 --- a/third_party/libwebrtc/infra/config/project.cfg +++ b/third_party/libwebrtc/infra/config/project.cfg @@ -7,7 +7,7 @@ name: "webrtc" access: "group:all" lucicfg { - version: "1.39.4" + version: "1.39.10" package_dir: "." config_dir: "." entry_point: "config.star" diff --git a/third_party/libwebrtc/infra/config/realms.cfg b/third_party/libwebrtc/infra/config/realms.cfg index d21fe3542dcb..6d5937a89b24 100644 --- a/third_party/libwebrtc/infra/config/realms.cfg +++ b/third_party/libwebrtc/infra/config/realms.cfg @@ -111,9 +111,7 @@ realms { conditions { restrict { attribute: "scheduler.job.name" - values: "Perf Android32 (O Pixel2)" values: "Perf Android32 (R Pixel5)" - values: "Perf Android64 (O Pixel2)" values: "Perf Android64 (R Pixel5)" values: "Perf Fuchsia" values: "Perf Linux Bionic" diff --git a/third_party/libwebrtc/infra/specs/client.webrtc.json b/third_party/libwebrtc/infra/specs/client.webrtc.json index 3560c13666f7..c9fd0ee9b41f 100644 --- a/third_party/libwebrtc/infra/specs/client.webrtc.json +++ b/third_party/libwebrtc/infra/specs/client.webrtc.json @@ -12,7 +12,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -34,7 +33,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -56,7 +54,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -78,7 +75,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -100,7 +96,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -122,7 +117,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -144,7 +138,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -167,7 +160,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -190,29 +182,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "android_devices": "1", - "device_type": "walleye", - "os": "Android" - } - ], - "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" - }, - "test": "peer_connection_mediachannel_split_unittests", - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, - { - "merge": { - "script": "//testing/merge_scripts/standard_gtest_merge.py" - }, - "resultdb": { - "enable": true, - "has_native_resultdb_integration": true - }, - "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -235,7 +204,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -257,7 +225,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { 
"android_devices": "1", @@ -279,7 +246,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -301,7 +267,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -324,7 +289,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -346,7 +310,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -369,7 +332,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -391,7 +353,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -413,7 +374,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -435,7 +395,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -458,7 +417,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -480,7 +438,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -496,12 +453,14 @@ ], "junit_tests": [ { + "args": [ + "--shards=1" + ], "name": "android_examples_junit_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true }, - "swarming": {}, "test": "android_examples_junit_tests", "test_id_prefix": "ninja://examples:android_examples_junit_tests/" }, @@ -511,7 +470,6 @@ "enable": true, "has_native_resultdb_integration": true }, - "swarming": {}, "test": "android_sdk_junit_tests", "test_id_prefix": "ninja://sdk/android:android_sdk_junit_tests/" } @@ -528,7 +486,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -550,7 +507,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -572,7 +528,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -594,7 +549,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -616,7 +570,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -638,7 +591,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -660,7 +612,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -683,7 +634,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -706,29 +656,6 @@ "has_native_resultdb_integration": true }, "swarming": { - 
"can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "android_devices": "1", - "device_type": "walleye", - "os": "Android" - } - ], - "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" - }, - "test": "peer_connection_mediachannel_split_unittests", - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, - { - "merge": { - "script": "//testing/merge_scripts/standard_gtest_merge.py" - }, - "resultdb": { - "enable": true, - "has_native_resultdb_integration": true - }, - "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -751,7 +678,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -773,7 +699,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -795,7 +720,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -817,7 +741,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -840,7 +763,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -862,7 +784,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -885,7 +806,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -907,7 +827,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -929,7 +848,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -951,7 +869,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -974,7 +891,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -996,7 +912,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1012,12 +927,14 @@ ], "junit_tests": [ { + "args": [ + "--shards=1" + ], "name": "android_examples_junit_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true }, - "swarming": {}, "test": "android_examples_junit_tests", "test_id_prefix": "ninja://examples:android_examples_junit_tests/" }, @@ -1027,7 +944,6 @@ "enable": true, "has_native_resultdb_integration": true }, - "swarming": {}, "test": "android_sdk_junit_tests", "test_id_prefix": "ninja://sdk/android:android_sdk_junit_tests/" } @@ -1044,7 +960,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1074,7 +989,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1096,7 +1010,6 @@ "has_native_resultdb_integration": true }, "swarming": { - 
"can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1118,7 +1031,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1140,7 +1052,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1162,7 +1073,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1184,7 +1094,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1206,7 +1115,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1229,7 +1137,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1252,29 +1159,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "android_devices": "1", - "device_type": "walleye", - "os": "Android" - } - ], - "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" - }, - "test": "peer_connection_mediachannel_split_unittests", - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, - { - "merge": { - "script": "//testing/merge_scripts/standard_gtest_merge.py" - }, - "resultdb": { - "enable": true, - "has_native_resultdb_integration": true - }, - "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1297,7 +1181,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1319,7 +1202,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1341,7 +1223,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1363,7 +1244,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1386,7 +1266,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1408,7 +1287,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1431,7 +1309,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1453,7 +1330,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1475,7 +1351,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1497,7 +1372,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1520,7 +1394,6 @@ "has_native_resultdb_integration": true }, "swarming": { - 
"can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1542,7 +1415,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1558,12 +1430,14 @@ ], "junit_tests": [ { + "args": [ + "--shards=1" + ], "name": "android_examples_junit_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true }, - "swarming": {}, "test": "android_examples_junit_tests", "test_id_prefix": "ninja://examples:android_examples_junit_tests/" }, @@ -1573,7 +1447,6 @@ "enable": true, "has_native_resultdb_integration": true }, - "swarming": {}, "test": "android_sdk_junit_tests", "test_id_prefix": "ninja://sdk/android:android_sdk_junit_tests/" } @@ -1590,7 +1463,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1612,7 +1484,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1634,7 +1505,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1656,7 +1526,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1678,7 +1547,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1700,7 +1568,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1722,7 +1589,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1745,7 +1611,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1768,29 +1633,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "android_devices": "1", - "device_type": "walleye", - "os": "Android" - } - ], - "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" - }, - "test": "peer_connection_mediachannel_split_unittests", - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, - { - "merge": { - "script": "//testing/merge_scripts/standard_gtest_merge.py" - }, - "resultdb": { - "enable": true, - "has_native_resultdb_integration": true - }, - "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1813,7 +1655,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1835,7 +1676,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1857,7 +1697,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1879,7 +1718,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1902,7 +1740,6 @@ "has_native_resultdb_integration": true }, "swarming": { 
- "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1924,7 +1761,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1947,7 +1783,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1969,7 +1804,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1991,7 +1825,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -2013,7 +1846,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -2036,7 +1868,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -2058,7 +1889,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -2074,12 +1904,14 @@ ], "junit_tests": [ { + "args": [ + "--shards=1" + ], "name": "android_examples_junit_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true }, - "swarming": {}, "test": "android_examples_junit_tests", "test_id_prefix": "ninja://examples:android_examples_junit_tests/" }, @@ -2089,7 +1921,6 @@ "enable": true, "has_native_resultdb_integration": true }, - "swarming": {}, "test": "android_sdk_junit_tests", "test_id_prefix": "ninja://sdk/android:android_sdk_junit_tests/" } @@ -2117,7 +1948,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2144,7 +1974,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2171,7 +2000,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2198,7 +2026,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2225,7 +2052,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2252,7 +2078,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2279,7 +2104,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2307,7 +2131,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2334,7 +2157,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2362,7 +2184,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2389,7 +2210,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2413,7 +2233,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2438,7 +2257,6 @@ 
"result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2458,7 +2276,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2478,7 +2295,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2498,7 +2314,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2518,7 +2333,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2539,7 +2353,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2550,26 +2363,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -2580,7 +2373,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2601,7 +2393,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2621,7 +2412,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2641,7 +2431,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2661,7 +2450,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2682,7 +2470,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2702,7 +2489,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2722,7 +2508,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2743,7 +2528,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2763,7 +2547,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2783,7 +2566,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2803,7 +2585,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2824,7 +2605,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2844,7 +2624,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2868,7 +2647,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, 
"dimension_sets": [ { "cpu": "x86-64", @@ -2888,7 +2666,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2908,7 +2685,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2928,7 +2704,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2948,7 +2723,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2969,7 +2743,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2980,26 +2753,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-20.04" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -3010,7 +2763,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3031,7 +2783,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3051,7 +2802,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3071,7 +2821,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3091,7 +2840,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3112,7 +2860,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3132,7 +2879,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3153,7 +2899,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3173,7 +2918,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3193,7 +2937,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3213,7 +2956,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3234,7 +2976,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3254,7 +2995,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3278,7 +3018,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3298,7 +3037,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3318,7 +3056,6 @@ "result_format": "json" }, 
"swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3338,7 +3075,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3358,7 +3094,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3379,7 +3114,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3390,26 +3124,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -3420,7 +3134,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3441,7 +3154,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3461,7 +3173,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3481,7 +3192,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3501,7 +3211,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3522,7 +3231,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3542,7 +3250,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3563,7 +3270,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3583,7 +3289,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3603,7 +3308,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3623,7 +3327,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3644,7 +3347,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3664,7 +3366,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3688,7 +3389,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3708,7 +3408,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3728,7 +3427,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3748,7 +3446,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": 
"x86-64", @@ -3768,7 +3465,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3789,7 +3485,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3800,26 +3495,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -3830,7 +3505,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3851,7 +3525,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3871,7 +3544,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3891,7 +3563,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3911,7 +3582,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3932,7 +3602,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3952,7 +3621,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3972,7 +3640,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -3993,7 +3660,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4013,7 +3679,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4033,7 +3698,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4053,7 +3717,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4074,7 +3737,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4094,7 +3756,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4118,7 +3779,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4138,7 +3798,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4158,7 +3817,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4178,7 +3836,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4198,7 +3855,6 @@ "result_format": "json" }, "swarming": { - 
"can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4219,7 +3875,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4230,26 +3885,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -4260,7 +3895,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4281,7 +3915,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4301,7 +3934,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4321,7 +3953,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4341,7 +3972,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4362,7 +3992,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4382,7 +4011,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4402,7 +4030,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4423,7 +4050,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4443,7 +4069,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4463,7 +4088,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4483,7 +4107,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4504,7 +4127,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4524,7 +4146,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4548,7 +4169,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4568,7 +4188,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4588,7 +4207,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4608,7 +4226,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4628,7 +4245,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4649,7 
+4265,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4660,26 +4275,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -4690,7 +4285,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4711,7 +4305,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4731,7 +4324,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4751,7 +4343,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4771,7 +4362,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4792,7 +4382,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4812,7 +4401,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4833,7 +4421,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4853,7 +4440,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4873,7 +4459,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4893,7 +4478,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4914,7 +4498,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4934,7 +4517,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4959,7 +4541,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4979,7 +4560,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -4999,7 +4579,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5019,7 +4598,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5039,7 +4617,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5060,7 +4637,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5071,26 +4647,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - 
"isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -5101,7 +4657,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5122,7 +4677,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5142,7 +4696,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5162,7 +4715,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5182,7 +4734,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5203,7 +4754,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5223,7 +4773,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5244,7 +4793,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5264,7 +4812,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5284,7 +4831,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5304,7 +4850,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5325,7 +4870,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5345,7 +4889,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5371,7 +4914,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5391,7 +4933,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5411,7 +4952,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5431,7 +4971,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5451,7 +4990,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5472,7 +5010,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5483,26 +5020,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - 
"result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -5513,7 +5030,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5534,7 +5050,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5554,7 +5069,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5574,7 +5088,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5594,7 +5107,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5615,7 +5127,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5635,7 +5146,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5655,7 +5165,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5676,7 +5185,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5696,7 +5204,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5716,7 +5223,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5736,7 +5242,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5757,7 +5262,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5777,7 +5281,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5802,7 +5305,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5822,7 +5324,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5842,7 +5343,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5862,7 +5362,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5882,7 +5381,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5903,7 +5401,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5914,26 +5411,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - 
"dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -5944,7 +5421,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5965,7 +5441,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5985,7 +5460,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6005,7 +5479,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6025,7 +5498,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6046,7 +5518,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6066,7 +5537,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6086,7 +5556,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6107,7 +5576,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6127,7 +5595,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6147,7 +5614,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6167,7 +5633,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6188,7 +5653,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6209,7 +5673,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6229,7 +5692,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6255,7 +5717,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6276,7 +5737,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6297,7 +5757,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6318,7 +5777,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6339,7 +5797,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6361,7 +5818,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6373,27 +5829,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { 
- "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cores": "12", - "cpu": "x86-64", - "os": "Mac-12" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -6404,7 +5839,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6426,7 +5860,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6447,7 +5880,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6468,7 +5900,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6489,7 +5920,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6511,7 +5941,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6532,7 +5961,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6554,7 +5982,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6575,7 +6002,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6596,7 +6022,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6617,7 +6042,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6639,7 +6063,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6660,7 +6083,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6686,7 +6108,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6707,7 +6128,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6728,7 +6148,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6749,7 +6168,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6770,7 +6188,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6792,7 +6209,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6804,27 +6220,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cores": "12", - "cpu": "x86-64", - "os": "Mac-12" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": 
"peerconnection_unittests", "merge": { @@ -6835,7 +6230,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6857,7 +6251,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6878,7 +6271,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6899,7 +6291,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6920,7 +6311,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6942,7 +6332,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6963,7 +6352,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -6985,7 +6373,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -7006,7 +6393,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -7027,7 +6413,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -7048,7 +6433,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -7070,7 +6454,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -7091,7 +6474,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -7116,7 +6498,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7136,7 +6517,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7156,7 +6536,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7176,7 +6555,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7196,7 +6574,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7217,7 +6594,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7228,26 +6604,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Mac-12" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -7258,7 +6614,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7279,7 +6634,6 @@ "result_format": "json" }, "swarming": { - 
"can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7299,7 +6653,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7319,7 +6672,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7339,7 +6691,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7360,7 +6711,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7380,7 +6730,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7401,7 +6750,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7421,7 +6769,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7441,7 +6788,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7461,7 +6807,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7482,7 +6827,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7503,7 +6847,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7523,7 +6866,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7547,7 +6889,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -7567,7 +6908,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -7587,7 +6927,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -7607,7 +6946,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -7627,7 +6965,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -7648,7 +6985,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -7659,26 +6995,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -7689,7 +7005,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -7710,7 +7025,6 @@ "result_format": "json" }, "swarming": { - 
"can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -7730,7 +7044,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -7750,7 +7063,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -7770,7 +7082,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -7791,7 +7102,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -7811,7 +7121,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -7832,7 +7141,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -7852,7 +7160,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -7872,7 +7179,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -7892,7 +7198,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -7913,7 +7218,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -7933,7 +7237,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -7958,7 +7261,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7985,7 +7287,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8005,7 +7306,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8025,7 +7325,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8045,7 +7344,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8065,7 +7363,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8086,7 +7383,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8097,26 +7393,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19045" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -8127,7 +7403,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8148,7 
+7423,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8168,7 +7442,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8188,7 +7461,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8208,7 +7480,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8229,7 +7500,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8249,7 +7519,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8270,7 +7539,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8290,7 +7558,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8310,7 +7577,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8330,7 +7596,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8351,7 +7616,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8371,7 +7635,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8395,7 +7658,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8415,7 +7677,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8435,7 +7696,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8455,7 +7715,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8475,7 +7734,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8496,7 +7754,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8507,26 +7764,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19045" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -8537,7 +7774,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8558,7 +7794,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8578,7 +7813,6 @@ "result_format": "json" }, "swarming": { - 
"can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8598,7 +7832,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8618,7 +7851,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8639,7 +7871,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8659,7 +7890,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8680,7 +7910,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8700,7 +7929,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8720,7 +7948,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8740,7 +7967,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8761,7 +7987,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8781,7 +8006,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8805,7 +8029,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8825,7 +8048,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8845,7 +8067,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8865,7 +8086,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8885,7 +8105,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8906,7 +8125,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8917,26 +8135,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19045" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -8947,7 +8145,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8968,7 +8165,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8988,7 +8184,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9008,7 +8203,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ 
-9028,7 +8222,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9049,7 +8242,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9069,7 +8261,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9090,7 +8281,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9110,7 +8300,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9130,7 +8319,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9150,7 +8338,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9171,7 +8358,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9191,7 +8377,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9215,7 +8400,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9235,7 +8419,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9255,7 +8438,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9275,7 +8457,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9295,7 +8476,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9316,7 +8496,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9327,26 +8506,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19045" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -9357,7 +8516,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9378,7 +8536,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9398,7 +8555,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9418,7 +8574,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9438,7 +8593,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9459,7 +8613,6 @@ "result_format": "json" }, "swarming": { - 
"can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9479,7 +8632,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9500,7 +8652,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9520,7 +8671,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9540,7 +8690,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9560,7 +8709,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9581,7 +8729,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9602,7 +8749,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9622,7 +8768,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9659,12 +8804,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -9711,12 +8855,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -9763,12 +8906,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -9814,12 +8956,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -9865,12 +9006,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -9916,12 +9056,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -9967,12 +9106,11 @@ 
"has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -10018,12 +9156,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -10069,12 +9206,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -10120,12 +9256,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -10171,12 +9306,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -10222,12 +9356,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -10273,12 +9406,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -10324,12 +9456,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -10375,12 +9506,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -10426,12 +9556,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ 
{ "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -10478,12 +9607,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -10530,12 +9658,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -10582,12 +9709,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -10635,12 +9761,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -10688,12 +9813,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -10741,12 +9865,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -10792,12 +9915,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -10843,12 +9965,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -10894,12 +10015,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": 
"git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -10945,12 +10065,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -10996,12 +10115,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -11047,12 +10165,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -11098,12 +10215,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -11149,12 +10265,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -11200,12 +10315,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -11252,12 +10366,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -11304,12 +10417,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -11357,12 +10469,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": 
"git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -11409,12 +10520,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -11461,12 +10571,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -11513,12 +10622,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -11565,12 +10673,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -11617,12 +10724,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -11668,12 +10774,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -11721,12 +10826,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -11774,12 +10878,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -11827,12 +10930,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -11878,12 +10980,11 @@ 
"has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -11929,12 +11030,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -11980,12 +11080,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -12031,12 +11130,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -12082,12 +11180,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -12133,12 +11230,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -12184,12 +11280,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -12235,12 +11330,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -12286,12 +11380,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -12337,12 +11430,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, 
"cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -12388,12 +11480,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -12439,12 +11530,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -12491,12 +11581,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -12543,12 +11632,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -12595,12 +11683,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -12646,12 +11733,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -12697,12 +11783,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -12748,12 +11833,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -12799,12 +11883,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - 
"revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -12850,12 +11933,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ diff --git a/third_party/libwebrtc/infra/specs/client.webrtc.perf.json b/third_party/libwebrtc/infra/specs/client.webrtc.perf.json index 9194fcf9463f..74393b560a05 100644 --- a/third_party/libwebrtc/infra/specs/client.webrtc.perf.json +++ b/third_party/libwebrtc/infra/specs/client.webrtc.perf.json @@ -1,81 +1,6 @@ { "AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {}, "AAAAA2 See generate_buildbot_json.py to make changes": {}, - "Perf Android32 (O Pixel2)": { - "gtest_tests": [ - { - "args": [ - "--isolated-script-test-perf-output=${ISOLATED_OUTDIR}/perftest-output.pb" - ], - "merge": { - "args": [ - "--test-suite", - "video_codec_perf_tests" - ], - "script": "//tools_webrtc/perf/process_perf_results.py" - }, - "resultdb": { - "enable": true, - "has_native_resultdb_integration": true - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "android_devices": "1", - "device_type": "walleye", - "gce": "0", - "os": "Android", - "pool": "WebRTC-perf" - } - ], - "expiration": 10800, - "hard_timeout": 10800, - "idempotent": false, - "io_timeout": 10800, - "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" - }, - "test": "video_codec_perf_tests", - "test_id_prefix": "ninja://modules/video_coding:video_codec_perf_tests/" - }, - { - "args": [ - "--isolated-script-test-perf-output=${ISOLATED_OUTDIR}/perftest-output.pb", - "--nologs" - ], - "merge": { - "args": [ - "--test-suite", - "webrtc_perf_tests" - ], - "script": "//tools_webrtc/perf/process_perf_results.py" - }, - "resultdb": { - "enable": true, - "has_native_resultdb_integration": true - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "android_devices": "1", - "device_type": "walleye", - "gce": "0", - "os": "Android", - "pool": "WebRTC-perf" - } - ], - "expiration": 10800, - "hard_timeout": 10800, - "idempotent": false, - "io_timeout": 10800, - "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" - }, - "test": "webrtc_perf_tests", - "test_id_prefix": "ninja://:webrtc_perf_tests/" - } - ] - }, "Perf Android32 (R Pixel5)": { "gtest_tests": [ { @@ -94,7 +19,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -130,7 +54,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -151,81 +74,6 @@ } ] }, - "Perf Android64 (O Pixel2)": { - "gtest_tests": [ - { - "args": [ - "--isolated-script-test-perf-output=${ISOLATED_OUTDIR}/perftest-output.pb" - ], - "merge": { - "args": [ - "--test-suite", - "video_codec_perf_tests" - ], - "script": "//tools_webrtc/perf/process_perf_results.py" - }, - "resultdb": { - "enable": true, - "has_native_resultdb_integration": true - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "android_devices": "1", - "device_type": "walleye", - 
"gce": "0", - "os": "Android", - "pool": "WebRTC-perf" - } - ], - "expiration": 10800, - "hard_timeout": 10800, - "idempotent": false, - "io_timeout": 10800, - "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" - }, - "test": "video_codec_perf_tests", - "test_id_prefix": "ninja://modules/video_coding:video_codec_perf_tests/" - }, - { - "args": [ - "--isolated-script-test-perf-output=${ISOLATED_OUTDIR}/perftest-output.pb", - "--nologs" - ], - "merge": { - "args": [ - "--test-suite", - "webrtc_perf_tests" - ], - "script": "//tools_webrtc/perf/process_perf_results.py" - }, - "resultdb": { - "enable": true, - "has_native_resultdb_integration": true - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "android_devices": "1", - "device_type": "walleye", - "gce": "0", - "os": "Android", - "pool": "WebRTC-perf" - } - ], - "expiration": 10800, - "hard_timeout": 10800, - "idempotent": false, - "io_timeout": 10800, - "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" - }, - "test": "webrtc_perf_tests", - "test_id_prefix": "ninja://:webrtc_perf_tests/" - } - ] - }, "Perf Android64 (R Pixel5)": { "gtest_tests": [ { @@ -244,7 +92,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -280,7 +127,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -325,7 +171,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -364,7 +209,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -402,7 +246,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "gce": "0", @@ -438,7 +281,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "gce": "0", @@ -475,7 +317,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -512,7 +353,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -550,7 +390,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -587,7 +426,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -625,7 +463,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "gce": "0", @@ -661,7 +498,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "gce": "0", diff --git a/third_party/libwebrtc/infra/specs/gn_isolate_map.pyl b/third_party/libwebrtc/infra/specs/gn_isolate_map.pyl index 508d80dab704..7e31965b4e7d 100644 --- a/third_party/libwebrtc/infra/specs/gn_isolate_map.pyl +++ b/third_party/libwebrtc/infra/specs/gn_isolate_map.pyl @@ -71,10 +71,6 @@ "label": "//modules:modules_unittests", "type": "windowed_test_launcher", }, - "peer_connection_mediachannel_split_unittests": { - "label": "//pc:peer_connection_mediachannel_split_unittests", - "type": "console_test_launcher", - }, 
"peerconnection_unittests": { "label": "//pc:peerconnection_unittests", "type": "console_test_launcher", diff --git a/third_party/libwebrtc/infra/specs/internal.client.webrtc.json b/third_party/libwebrtc/infra/specs/internal.client.webrtc.json index 9fcb9225e771..ba1d10911269 100644 --- a/third_party/libwebrtc/infra/specs/internal.client.webrtc.json +++ b/third_party/libwebrtc/infra/specs/internal.client.webrtc.json @@ -7,7 +7,7 @@ "args": [ "--xctest", "--xcode-build-version", - "13c100", + "14c18", "--out-dir", "${ISOLATED_OUTDIR}" ], @@ -21,12 +21,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -37,7 +36,7 @@ ], "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_14c18", "path": "Xcode.app" } ], @@ -49,7 +48,7 @@ "args": [ "--xctest", "--xcode-build-version", - "13c100", + "14c18", "--out-dir", "${ISOLATED_OUTDIR}" ], @@ -63,12 +62,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -79,7 +77,7 @@ ], "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_14c18", "path": "Xcode.app" } ], @@ -92,7 +90,7 @@ "--readline-timeout=1200", "--xctest", "--xcode-build-version", - "13c100", + "14c18", "--out-dir", "${ISOLATED_OUTDIR}" ], @@ -106,12 +104,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -124,7 +121,7 @@ "io_timeout": 7200, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_14c18", "path": "Xcode.app" } ], @@ -137,7 +134,7 @@ "args": [ "--xctest", "--xcode-build-version", - "13c100", + "14c18", "--out-dir", "${ISOLATED_OUTDIR}" ], @@ -151,12 +148,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -167,7 +163,7 @@ ], "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_14c18", "path": "Xcode.app" } ], @@ -180,7 +176,7 @@ "args": [ "--xctest", "--xcode-build-version", - "13c100", + "14c18", "--out-dir", "${ISOLATED_OUTDIR}" ], @@ -194,12 +190,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -210,7 +205,7 @@ ], "named_caches": [ { - "name": "xcode_ios_13c100", + 
"name": "xcode_ios_14c18", "path": "Xcode.app" } ], @@ -222,7 +217,7 @@ "args": [ "--xctest", "--xcode-build-version", - "13c100", + "14c18", "--out-dir", "${ISOLATED_OUTDIR}" ], @@ -236,12 +231,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -252,7 +246,7 @@ ], "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_14c18", "path": "Xcode.app" } ], @@ -264,7 +258,7 @@ "args": [ "--xctest", "--xcode-build-version", - "13c100", + "14c18", "--out-dir", "${ISOLATED_OUTDIR}" ], @@ -278,12 +272,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -294,7 +287,7 @@ ], "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_14c18", "path": "Xcode.app" } ], @@ -306,7 +299,7 @@ "args": [ "--xctest", "--xcode-build-version", - "13c100", + "14c18", "--out-dir", "${ISOLATED_OUTDIR}" ], @@ -320,12 +313,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -336,7 +328,7 @@ ], "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_14c18", "path": "Xcode.app" } ], @@ -348,7 +340,7 @@ "args": [ "--xctest", "--xcode-build-version", - "13c100", + "14c18", "--out-dir", "${ISOLATED_OUTDIR}" ], @@ -362,12 +354,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -378,7 +369,7 @@ ], "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_14c18", "path": "Xcode.app" } ], @@ -390,7 +381,7 @@ "args": [ "--xctest", "--xcode-build-version", - "13c100", + "14c18", "--out-dir", "${ISOLATED_OUTDIR}" ], @@ -404,12 +395,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -420,7 +410,7 @@ ], "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_14c18", "path": "Xcode.app" } ], @@ -432,7 +422,7 @@ "args": [ "--xctest", "--xcode-build-version", - "13c100", + "14c18", "--out-dir", "${ISOLATED_OUTDIR}" ], @@ -446,12 +436,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": 
".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -462,7 +451,7 @@ ], "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_14c18", "path": "Xcode.app" } ], @@ -499,12 +488,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -535,7 +523,7 @@ "args": [ "--xctest", "--xcode-build-version", - "13c100", + "14c18", "--out-dir", "${ISOLATED_OUTDIR}" ], @@ -549,12 +537,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -565,7 +552,7 @@ ], "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_14c18", "path": "Xcode.app" } ], @@ -577,7 +564,7 @@ "args": [ "--xctest", "--xcode-build-version", - "13c100", + "14c18", "--out-dir", "${ISOLATED_OUTDIR}" ], @@ -591,12 +578,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -607,7 +593,7 @@ ], "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_14c18", "path": "Xcode.app" } ], @@ -620,7 +606,7 @@ "--readline-timeout=1200", "--xctest", "--xcode-build-version", - "13c100", + "14c18", "--out-dir", "${ISOLATED_OUTDIR}" ], @@ -634,12 +620,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -652,7 +637,7 @@ "io_timeout": 7200, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_14c18", "path": "Xcode.app" } ], @@ -665,7 +650,7 @@ "args": [ "--xctest", "--xcode-build-version", - "13c100", + "14c18", "--out-dir", "${ISOLATED_OUTDIR}" ], @@ -679,12 +664,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -695,7 +679,7 @@ ], "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_14c18", "path": "Xcode.app" } ], @@ -708,7 +692,7 @@ "args": [ "--xctest", "--xcode-build-version", - "13c100", + "14c18", "--out-dir", "${ISOLATED_OUTDIR}" ], @@ -722,12 +706,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": 
"infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -738,7 +721,7 @@ ], "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_14c18", "path": "Xcode.app" } ], @@ -750,7 +733,7 @@ "args": [ "--xctest", "--xcode-build-version", - "13c100", + "14c18", "--out-dir", "${ISOLATED_OUTDIR}" ], @@ -764,12 +747,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -780,7 +762,7 @@ ], "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_14c18", "path": "Xcode.app" } ], @@ -792,7 +774,7 @@ "args": [ "--xctest", "--xcode-build-version", - "13c100", + "14c18", "--out-dir", "${ISOLATED_OUTDIR}" ], @@ -806,12 +788,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -822,7 +803,7 @@ ], "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_14c18", "path": "Xcode.app" } ], @@ -834,7 +815,7 @@ "args": [ "--xctest", "--xcode-build-version", - "13c100", + "14c18", "--out-dir", "${ISOLATED_OUTDIR}" ], @@ -848,12 +829,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -864,7 +844,7 @@ ], "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_14c18", "path": "Xcode.app" } ], @@ -876,7 +856,7 @@ "args": [ "--xctest", "--xcode-build-version", - "13c100", + "14c18", "--out-dir", "${ISOLATED_OUTDIR}" ], @@ -890,12 +870,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -906,7 +885,7 @@ ], "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_14c18", "path": "Xcode.app" } ], @@ -918,7 +897,7 @@ "args": [ "--xctest", "--xcode-build-version", - "13c100", + "14c18", "--out-dir", "${ISOLATED_OUTDIR}" ], @@ -932,12 +911,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -948,7 +926,7 @@ ], "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_14c18", "path": "Xcode.app" } ], @@ -960,7 +938,7 @@ "args": [ 
"--xctest", "--xcode-build-version", - "13c100", + "14c18", "--out-dir", "${ISOLATED_OUTDIR}" ], @@ -974,12 +952,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -990,7 +967,7 @@ ], "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_14c18", "path": "Xcode.app" } ], diff --git a/third_party/libwebrtc/infra/specs/mixins.pyl b/third_party/libwebrtc/infra/specs/mixins.pyl index 2767f67e5f24..06a763b16bfa 100644 --- a/third_party/libwebrtc/infra/specs/mixins.pyl +++ b/third_party/libwebrtc/infra/specs/mixins.pyl @@ -52,13 +52,11 @@ } }, 'fuchsia-gtest-output': { - '$mixin_append': { - 'args': [ - '--test-launcher-summary-output=${ISOLATED_OUTDIR}/gtest_output.json', - '--test-arg=--gtest_output=json:/custom_artifacts/gtest_output.json', - '--test-arg=--undefok=test_launcher_summary_output' - ] - } + 'args': [ + '--test-launcher-summary-output=${ISOLATED_OUTDIR}/gtest_output.json', + '--test-arg=--gtest_output=json:/custom_artifacts/gtest_output.json', + '--test-arg=--undefok=test_launcher_summary_output' + ] }, 'has_native_resultdb_integration': { 'resultdb': { @@ -85,33 +83,27 @@ } }, 'ios_runtime_cache_14_5': { - '$mixin_append': { - 'swarming': { - 'named_caches': [{ - 'name': 'runtime_ios_14_5', - 'path': 'Runtime-ios-14.5' - }] - } + 'swarming': { + 'named_caches': [{ + 'name': 'runtime_ios_14_5', + 'path': 'Runtime-ios-14.5' + }] } }, 'ios_runtime_cache_15_5': { - '$mixin_append': { - 'swarming': { - 'named_caches': [{ - 'name': 'runtime_ios_15_5', - 'path': 'Runtime-ios-15.5' - }] - } + 'swarming': { + 'named_caches': [{ + 'name': 'runtime_ios_15_5', + 'path': 'Runtime-ios-15.5' + }] } }, 'ios_runtime_cache_16_2': { - '$mixin_append': { - 'swarming': { - 'named_caches': [{ - 'name': 'runtime_ios_16_2', - 'path': 'Runtime-ios-16.2' - }] - } + 'swarming': { + 'named_caches': [{ + 'name': 'runtime_ios_16_2', + 'path': 'Runtime-ios-16.2' + }] } }, 'isolate_profile_data': { @@ -174,21 +166,17 @@ 'location': '.', 'revision': - 'git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118' + 'git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb' }] } }, 'out_dir_arg': { - '$mixin_append': { - 'args': ['--out-dir', '${ISOLATED_OUTDIR}'] - } + 'args': ['--out-dir', '${ISOLATED_OUTDIR}'] }, 'perf-output': { - '$mixin_append': { - 'args': [ - '--isolated-script-test-perf-output=${ISOLATED_OUTDIR}/perftest-output.pb' - ] - } + 'args': [ + '--isolated-script-test-perf-output=${ISOLATED_OUTDIR}/perftest-output.pb' + ] }, 'perf-pool': { 'swarming': { @@ -219,14 +207,10 @@ 'script': '//tools_webrtc/perf/process_perf_results.py', 'args': ['--test-suite', 'webrtc_perf_tests'] }, - '$mixin_append': { - 'args': ['--nologs'] - } + 'args': ['--nologs'] }, 'quick-perf-tests': { - '$mixin_append': { - 'args': ['--force_fieldtrials=WebRTC-QuickPerfTest/Enabled/', '--nologs'] - } + 'args': ['--force_fieldtrials=WebRTC-QuickPerfTest/Enabled/', '--nologs'] }, 'redfin': { 'swarming': { @@ -237,9 +221,7 @@ } }, 'resultdb-gtest-json-format': { - '$mixin_append': { - 'args': ['--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json'] - }, + 'args': ['--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json'], 'resultdb': { 'result_format': 'gtest_json', 'result_file': 
'${ISOLATED_OUTDIR}/gtest_output.json' @@ -286,9 +268,7 @@ } }, 'webrtc-xctest': { - '$mixin_append': { - 'args': ['--xctest'] - } + 'args': ['--xctest'] }, 'win10': { 'swarming': { @@ -312,9 +292,7 @@ } }, 'xcode_13_main': { - '$mixin_append': { - 'args': ['--xcode-build-version', '13c100'] - }, + 'args': ['--xcode-build-version', '13c100'], 'swarming': { 'named_caches': [{ 'name': 'xcode_ios_13c100', @@ -323,9 +301,7 @@ } }, 'xcode_14_main': { - '$mixin_append': { - 'args': ['--xcode-build-version', '14c18'] - }, + 'args': ['--xcode-build-version', '14c18'], 'swarming': { 'named_caches': [{ 'name': 'xcode_ios_14c18', @@ -334,8 +310,6 @@ } }, 'xcode_parallelization': { - '$mixin_append': { - 'args': ['--xcode-parallelization'] - } + 'args': ['--xcode-parallelization'] } } diff --git a/third_party/libwebrtc/infra/specs/mixins_webrtc.pyl b/third_party/libwebrtc/infra/specs/mixins_webrtc.pyl index e5810b97d090..691aa139ea35 100644 --- a/third_party/libwebrtc/infra/specs/mixins_webrtc.pyl +++ b/third_party/libwebrtc/infra/specs/mixins_webrtc.pyl @@ -38,13 +38,11 @@ # Hack to use the test-launcher-summary-output flag + emulator folders for gtest-output # but it's currently the only way to get the file out of the emulator. 'fuchsia-gtest-output': { - '$mixin_append': { - 'args': [ - '--test-launcher-summary-output=${ISOLATED_OUTDIR}/gtest_output.json', - '--test-arg=--gtest_output=json:/custom_artifacts/gtest_output.json', - '--test-arg=--undefok=test_launcher_summary_output' - ], - }, + 'args': [ + '--test-launcher-summary-output=${ISOLATED_OUTDIR}/gtest_output.json', + '--test-arg=--gtest_output=json:/custom_artifacts/gtest_output.json', + '--test-arg=--undefok=test_launcher_summary_output' + ], }, 'ios-device-15.7': { 'swarming': { @@ -65,13 +63,11 @@ }, }, 'ios_runtime_cache_14_5': { - '$mixin_append': { - 'swarming': { - 'named_caches': [{ - 'name': 'runtime_ios_14_5', - 'path': 'Runtime-ios-14.5' - }] - } + 'swarming': { + 'named_caches': [{ + 'name': 'runtime_ios_14_5', + 'path': 'Runtime-ios-14.5' + }] } }, 'limited-capacity': { @@ -96,12 +92,17 @@ } } }, + 'linux-bionic': { + 'swarming': { + 'dimensions': { + 'os': 'Ubuntu-18.04', + } + } + }, 'perf-output': { - '$mixin_append': { - 'args': [ - '--isolated-script-test-perf-output=${ISOLATED_OUTDIR}/perftest-output.pb', - ], - }, + 'args': [ + '--isolated-script-test-perf-output=${ISOLATED_OUTDIR}/perftest-output.pb', + ], }, 'perf-pool': { 'swarming': { @@ -129,9 +130,7 @@ 'script': '//tools_webrtc/perf/process_perf_results.py', 'args': ['--test-suite', 'webrtc_perf_tests'], }, - '$mixin_append': { - 'args': ['--nologs'] - } + 'args': ['--nologs'] }, 'perf-video-codec-perf-tests': { 'merge': { @@ -140,12 +139,10 @@ }, }, 'quick-perf-tests': { - '$mixin_append': { - 'args': [ - '--force_fieldtrials=WebRTC-QuickPerfTest/Enabled/', - '--nologs', - ], - } + 'args': [ + '--force_fieldtrials=WebRTC-QuickPerfTest/Enabled/', + '--nologs', + ], }, 'redfin': { 'swarming': { @@ -156,11 +153,9 @@ } }, 'resultdb-gtest-json-format': { - '$mixin_append': { - 'args': [ - '--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json', - ], - }, + 'args': [ + '--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json', + ], 'resultdb': { 'result_format': 'gtest_json', 'result_file': '${ISOLATED_OUTDIR}/gtest_output.json', @@ -199,16 +194,12 @@ }, }, 'webrtc-xctest': { - '$mixin_append': { - 'args': [ - '--xctest', - ], - }, + 'args': [ + '--xctest', + ], }, 'xcode_13_main': { - '$mixin_append': { - 'args': ['--xcode-build-version', '13c100'] - }, + 'args': 
['--xcode-build-version', '13c100'], 'swarming': { 'named_caches': [{ 'name': 'xcode_ios_13c100', diff --git a/third_party/libwebrtc/infra/specs/test_suites.pyl b/third_party/libwebrtc/infra/specs/test_suites.pyl index 2e440a67d9cf..9430d45a3952 100644 --- a/third_party/libwebrtc/infra/specs/test_suites.pyl +++ b/third_party/libwebrtc/infra/specs/test_suites.pyl @@ -9,7 +9,13 @@ { 'basic_suites': { 'android_junit_tests': { - 'android_examples_junit_tests': {}, + 'android_examples_junit_tests': { + 'args': [ + # Force number of shards to be one because of none hermetic tests + # in TCPChannelClientTest and DirectRTCClientTest. + '--shards=1', + ] + }, 'android_sdk_junit_tests': {}, }, 'android_tests': { @@ -25,7 +31,6 @@ 'modules_unittests': { 'mixins': ['shards-6'], }, - 'peer_connection_mediachannel_split_unittests': {}, 'peerconnection_unittests': { 'mixins': ['shards-4'], }, @@ -64,7 +69,6 @@ 'modules_unittests': { 'mixins': ['shards-6'], }, - 'peer_connection_mediachannel_split_unittests': {}, 'peerconnection_unittests': { 'mixins': ['shards-4'], }, diff --git a/third_party/libwebrtc/infra/specs/tryserver.webrtc.json b/third_party/libwebrtc/infra/specs/tryserver.webrtc.json index 2e1ca1661919..dff7c3dfd489 100644 --- a/third_party/libwebrtc/infra/specs/tryserver.webrtc.json +++ b/third_party/libwebrtc/infra/specs/tryserver.webrtc.json @@ -12,7 +12,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -34,7 +33,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -56,7 +54,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -78,7 +75,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -100,7 +96,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -122,7 +117,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -144,7 +138,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -167,7 +160,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -190,29 +182,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "android_devices": "1", - "device_type": "walleye", - "os": "Android" - } - ], - "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" - }, - "test": "peer_connection_mediachannel_split_unittests", - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, - { - "merge": { - "script": "//testing/merge_scripts/standard_gtest_merge.py" - }, - "resultdb": { - "enable": true, - "has_native_resultdb_integration": true - }, - "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -235,7 +204,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -257,7 
+225,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -279,7 +246,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -301,7 +267,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -324,7 +289,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -346,7 +310,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -369,7 +332,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -391,7 +353,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -413,7 +374,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -435,7 +395,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -458,7 +417,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -480,7 +438,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -506,7 +463,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -522,12 +478,14 @@ ], "junit_tests": [ { + "args": [ + "--shards=1" + ], "name": "android_examples_junit_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true }, - "swarming": {}, "test": "android_examples_junit_tests", "test_id_prefix": "ninja://examples:android_examples_junit_tests/" }, @@ -537,7 +495,6 @@ "enable": true, "has_native_resultdb_integration": true }, - "swarming": {}, "test": "android_sdk_junit_tests", "test_id_prefix": "ninja://sdk/android:android_sdk_junit_tests/" } @@ -554,7 +511,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -576,7 +532,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -598,7 +553,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -620,7 +574,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -642,7 +595,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -664,7 +616,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -686,7 +637,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": 
true, "dimension_sets": [ { "android_devices": "1", @@ -709,7 +659,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -732,29 +681,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "android_devices": "1", - "device_type": "walleye", - "os": "Android" - } - ], - "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" - }, - "test": "peer_connection_mediachannel_split_unittests", - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, - { - "merge": { - "script": "//testing/merge_scripts/standard_gtest_merge.py" - }, - "resultdb": { - "enable": true, - "has_native_resultdb_integration": true - }, - "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -777,7 +703,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -799,7 +724,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -821,7 +745,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -843,7 +766,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -866,7 +788,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -888,7 +809,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -911,7 +831,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -933,7 +852,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -955,7 +873,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -977,7 +894,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1000,7 +916,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1022,7 +937,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1048,7 +962,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1064,12 +977,14 @@ ], "junit_tests": [ { + "args": [ + "--shards=1" + ], "name": "android_examples_junit_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true }, - "swarming": {}, "test": "android_examples_junit_tests", "test_id_prefix": "ninja://examples:android_examples_junit_tests/" }, @@ -1079,7 +994,6 @@ "enable": true, "has_native_resultdb_integration": true }, - "swarming": {}, "test": "android_sdk_junit_tests", "test_id_prefix": 
"ninja://sdk/android:android_sdk_junit_tests/" } @@ -1096,7 +1010,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1118,7 +1031,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1140,7 +1052,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1162,7 +1073,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1184,7 +1094,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1206,7 +1115,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1228,7 +1136,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1251,7 +1158,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1274,29 +1180,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "android_devices": "1", - "device_type": "walleye", - "os": "Android" - } - ], - "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" - }, - "test": "peer_connection_mediachannel_split_unittests", - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, - { - "merge": { - "script": "//testing/merge_scripts/standard_gtest_merge.py" - }, - "resultdb": { - "enable": true, - "has_native_resultdb_integration": true - }, - "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1319,7 +1202,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1341,7 +1223,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1363,7 +1244,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1385,7 +1265,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1408,7 +1287,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1430,7 +1308,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1453,7 +1330,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1475,7 +1351,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1497,7 +1372,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ 
{ "android_devices": "1", @@ -1519,7 +1393,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1542,7 +1415,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1564,7 +1436,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1590,7 +1461,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1606,12 +1476,14 @@ ], "junit_tests": [ { + "args": [ + "--shards=1" + ], "name": "android_examples_junit_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true }, - "swarming": {}, "test": "android_examples_junit_tests", "test_id_prefix": "ninja://examples:android_examples_junit_tests/" }, @@ -1621,7 +1493,6 @@ "enable": true, "has_native_resultdb_integration": true }, - "swarming": {}, "test": "android_sdk_junit_tests", "test_id_prefix": "ninja://sdk/android:android_sdk_junit_tests/" } @@ -1638,7 +1509,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1665,7 +1535,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1687,7 +1556,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1709,7 +1577,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1731,7 +1598,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1753,7 +1619,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1775,7 +1640,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1797,7 +1661,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1820,7 +1683,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1843,29 +1705,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "android_devices": "1", - "device_type": "walleye", - "os": "Android" - } - ], - "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" - }, - "test": "peer_connection_mediachannel_split_unittests", - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, - { - "merge": { - "script": "//testing/merge_scripts/standard_gtest_merge.py" - }, - "resultdb": { - "enable": true, - "has_native_resultdb_integration": true - }, - "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1888,7 +1727,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": 
[ { "android_devices": "1", @@ -1910,7 +1748,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1932,7 +1769,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1954,7 +1790,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1977,7 +1812,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -1999,7 +1833,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -2022,7 +1855,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -2044,7 +1876,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -2066,7 +1897,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -2088,7 +1918,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -2111,7 +1940,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -2133,7 +1961,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -2159,7 +1986,6 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "android_devices": "1", @@ -2175,12 +2001,14 @@ ], "junit_tests": [ { + "args": [ + "--shards=1" + ], "name": "android_examples_junit_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true }, - "swarming": {}, "test": "android_examples_junit_tests", "test_id_prefix": "ninja://examples:android_examples_junit_tests/" }, @@ -2190,7 +2018,6 @@ "enable": true, "has_native_resultdb_integration": true }, - "swarming": {}, "test": "android_sdk_junit_tests", "test_id_prefix": "ninja://sdk/android:android_sdk_junit_tests/" } @@ -2223,7 +2050,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2250,7 +2076,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2277,7 +2102,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2304,7 +2128,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2331,7 +2154,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2358,7 +2180,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2385,7 +2206,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": 
"x86-64", @@ -2413,7 +2233,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2440,7 +2259,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2468,7 +2286,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2495,7 +2312,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -2534,12 +2350,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -2586,12 +2401,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -2638,12 +2452,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -2689,12 +2502,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -2740,12 +2552,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -2791,12 +2602,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -2842,12 +2652,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -2893,12 +2702,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" 
+ "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -2944,12 +2752,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -2995,12 +2802,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -3046,12 +2852,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -3097,12 +2902,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -3148,12 +2952,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -3199,12 +3002,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -3250,12 +3052,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -3301,12 +3102,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -3353,12 +3153,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -3405,12 +3204,11 @@ 
"has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -3457,12 +3255,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -3510,12 +3307,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -3563,12 +3359,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -3616,12 +3411,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -3667,12 +3461,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -3718,12 +3511,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -3769,12 +3561,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -3820,12 +3611,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -3871,12 +3661,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { 
"cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -3922,12 +3711,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -3973,12 +3761,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -4024,12 +3811,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -4075,12 +3861,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -4127,12 +3912,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -4179,12 +3963,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -4232,12 +4015,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -4284,12 +4066,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -4336,12 +4117,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": 
"git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -4388,12 +4168,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -4440,12 +4219,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -4492,12 +4270,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -4543,12 +4320,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -4596,12 +4372,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -4649,12 +4424,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -4702,12 +4476,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -4753,12 +4526,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -4804,12 +4576,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" 
} ], "dimension_sets": [ @@ -4855,12 +4626,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -4906,12 +4676,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -4957,12 +4726,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -5008,12 +4776,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -5059,12 +4826,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -5110,12 +4876,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -5161,12 +4926,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -5212,12 +4976,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -5263,12 +5026,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -5314,12 +5076,11 @@ "has_native_resultdb_integration": true }, "swarming": { - 
"can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -5366,12 +5127,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -5418,12 +5178,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -5470,12 +5229,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -5521,12 +5279,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -5572,12 +5329,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -5623,12 +5379,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -5674,12 +5429,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -5725,12 +5479,11 @@ "has_native_resultdb_integration": true }, "swarming": { - "can_use_on_swarming_builders": true, "cipd_packages": [ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:a598cd77abc01089647a79e3cfa3cc8e8ecb5118" + "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" } ], "dimension_sets": [ @@ -5768,7 +5521,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5788,7 +5540,6 @@ "result_format": "json" }, "swarming": { - 
"can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5808,7 +5559,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5828,7 +5578,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5848,7 +5597,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5869,7 +5617,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5880,26 +5627,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -5910,7 +5637,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5931,7 +5657,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5951,7 +5676,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5971,7 +5695,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -5991,7 +5714,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6012,7 +5734,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6032,7 +5753,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6052,7 +5772,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6073,7 +5792,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6093,7 +5811,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6113,7 +5830,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6133,7 +5849,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6154,7 +5869,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6174,7 +5888,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6205,7 +5918,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6226,7 +5938,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6247,7 
+5958,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6268,7 +5978,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6289,7 +5998,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6311,7 +6019,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6322,27 +6029,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "isolate_profile_data": true, - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "isolate_profile_data": true, @@ -6354,7 +6040,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6376,7 +6061,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6397,7 +6081,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6418,7 +6101,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6439,7 +6121,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6461,7 +6142,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6482,7 +6162,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6503,7 +6182,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6525,7 +6203,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6546,7 +6223,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6567,7 +6243,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6588,7 +6263,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6616,7 +6290,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6637,7 +6310,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6659,7 +6331,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6680,7 +6351,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6707,7 +6377,6 @@ "result_format": 
"gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6731,7 +6400,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6751,7 +6419,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6771,7 +6438,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6791,7 +6457,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6811,7 +6476,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6832,7 +6496,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6843,26 +6506,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -6873,7 +6516,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6894,7 +6536,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6914,7 +6555,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6934,7 +6574,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6954,7 +6593,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6975,7 +6613,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -6995,7 +6632,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7015,7 +6651,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7036,7 +6671,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7056,7 +6690,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7076,7 +6709,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7096,7 +6728,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7117,7 +6748,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7137,7 +6767,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": 
[ { "cpu": "x86-64", @@ -7162,7 +6791,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7182,7 +6810,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7202,7 +6829,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7222,7 +6848,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7242,7 +6867,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7263,7 +6887,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7274,26 +6897,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -7304,7 +6907,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7325,7 +6927,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7345,7 +6946,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7365,7 +6965,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7385,7 +6984,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7406,7 +7004,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7426,7 +7023,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7446,7 +7042,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7467,7 +7062,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7487,7 +7081,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7507,7 +7100,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7527,7 +7119,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7548,7 +7139,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7568,7 +7158,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7592,7 +7181,6 @@ "result_format": "json" }, "swarming": { - 
"can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7617,7 +7205,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7637,7 +7224,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7657,7 +7243,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7677,7 +7262,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7697,7 +7281,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7718,7 +7301,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7729,26 +7311,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-20.04" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -7759,7 +7321,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7780,7 +7341,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7800,7 +7360,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7820,7 +7379,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7840,7 +7398,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7861,7 +7418,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7881,7 +7437,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7902,7 +7457,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7922,7 +7476,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7942,7 +7495,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7962,7 +7514,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -7983,7 +7534,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8003,7 +7553,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8027,7 +7576,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8047,7 
+7595,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8067,7 +7614,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8087,7 +7633,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8107,7 +7652,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8128,7 +7672,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8139,26 +7682,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -8169,7 +7692,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8190,7 +7712,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8210,7 +7731,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8230,7 +7750,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8250,7 +7769,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8271,7 +7789,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8291,7 +7808,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8311,7 +7827,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8332,7 +7847,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8352,7 +7866,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8372,7 +7885,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8392,7 +7904,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8419,7 +7930,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8439,7 +7949,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8460,7 +7969,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8480,7 +7988,6 @@ "result_format": "json" }, "swarming": { - 
"can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8506,7 +8013,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8530,7 +8036,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8550,7 +8055,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8570,7 +8074,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8590,7 +8093,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8610,7 +8112,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8631,7 +8132,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8642,26 +8142,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -8672,7 +8152,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8693,7 +8172,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8713,7 +8191,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8733,7 +8210,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8753,7 +8229,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8774,7 +8249,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8794,7 +8268,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8815,7 +8288,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8835,7 +8307,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8855,7 +8326,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8875,7 +8345,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8896,7 +8365,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8916,7 +8384,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ 
-8940,7 +8407,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8960,7 +8426,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -8980,7 +8445,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9000,7 +8464,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9020,7 +8483,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9041,7 +8503,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9052,26 +8513,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -9082,7 +8523,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9103,7 +8543,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9123,7 +8562,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9143,7 +8581,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9163,7 +8600,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9184,7 +8620,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9204,7 +8639,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9224,7 +8658,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9245,7 +8678,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9265,7 +8697,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9285,7 +8716,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9305,7 +8735,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9326,7 +8755,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9346,7 +8774,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9370,7 +8797,6 @@ "result_format": "json" }, "swarming": { - 
"can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9390,7 +8816,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9410,7 +8835,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9430,7 +8854,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9450,7 +8873,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9471,7 +8893,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9482,26 +8903,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -9512,7 +8913,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9533,7 +8933,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9553,7 +8952,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9573,7 +8971,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9593,7 +8990,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9614,7 +9010,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9634,7 +9029,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9654,7 +9048,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9675,7 +9068,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9695,7 +9087,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9715,7 +9106,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9735,7 +9125,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9756,7 +9145,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9776,7 +9164,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9800,7 +9187,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9820,7 
+9206,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9840,7 +9225,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9860,7 +9244,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9880,7 +9263,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9901,7 +9283,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9912,26 +9293,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -9942,7 +9303,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9963,7 +9323,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -9983,7 +9342,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10003,7 +9361,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10023,7 +9380,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10044,7 +9400,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10064,7 +9419,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10085,7 +9439,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10105,7 +9458,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10125,7 +9477,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10145,7 +9496,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10166,7 +9516,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10186,7 +9535,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10210,7 +9558,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10230,7 +9577,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10250,7 +9596,6 @@ "result_format": "json" }, "swarming": { - 
"can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10270,7 +9615,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10290,7 +9634,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10311,7 +9654,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10322,26 +9664,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -10352,7 +9674,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10373,7 +9694,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10393,7 +9713,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10413,7 +9732,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10433,7 +9751,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10454,7 +9771,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10474,7 +9790,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10495,7 +9810,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10515,7 +9829,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10535,7 +9848,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10555,7 +9867,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10576,7 +9887,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10596,7 +9906,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -10620,7 +9929,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -10641,7 +9949,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -10662,7 +9969,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -10683,7 +9989,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": 
"12", @@ -10704,7 +10009,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -10726,7 +10030,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -10738,27 +10041,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cores": "12", - "cpu": "x86-64", - "os": "Mac-12" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -10769,7 +10051,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -10791,7 +10072,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -10812,7 +10092,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -10833,7 +10112,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -10854,7 +10132,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -10876,7 +10153,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -10897,7 +10173,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -10919,7 +10194,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -10940,7 +10214,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -10961,7 +10234,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -10982,7 +10254,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -11004,7 +10275,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -11025,7 +10295,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -11052,7 +10321,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -11073,7 +10341,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -11094,7 +10361,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -11115,7 +10381,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -11136,7 +10401,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -11158,7 +10422,6 @@ "result_format": "json" }, "swarming": { - 
"can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -11170,27 +10433,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cores": "12", - "cpu": "x86-64", - "os": "Mac-12" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -11201,7 +10443,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -11223,7 +10464,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -11244,7 +10484,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -11265,7 +10504,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -11286,7 +10524,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -11308,7 +10545,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -11329,7 +10565,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -11351,7 +10586,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -11372,7 +10606,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -11393,7 +10626,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -11414,7 +10646,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -11436,7 +10667,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -11457,7 +10687,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cores": "12", @@ -11482,7 +10711,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -11502,7 +10730,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -11522,7 +10749,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -11542,7 +10768,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -11562,7 +10787,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -11583,7 +10807,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -11594,26 +10817,6 @@ }, "test_id_prefix": 
"ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -11624,7 +10827,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -11645,7 +10847,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -11665,7 +10866,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -11685,7 +10885,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -11705,7 +10904,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -11726,7 +10924,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -11746,7 +10943,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -11767,7 +10963,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -11787,7 +10982,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -11807,7 +11001,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -11827,7 +11020,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -11848,7 +11040,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -11868,7 +11059,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -11892,7 +11082,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -11912,7 +11101,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -11932,7 +11120,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -11952,7 +11139,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -11972,7 +11158,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -11993,7 +11178,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -12004,26 +11188,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - 
{ - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Mac-12" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -12034,7 +11198,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -12055,7 +11218,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -12075,7 +11237,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -12095,7 +11256,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -12115,7 +11275,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -12136,7 +11295,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -12156,7 +11314,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -12177,7 +11334,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -12197,7 +11353,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -12217,7 +11372,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -12237,7 +11391,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -12264,7 +11417,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -12284,7 +11436,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -12305,7 +11456,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -12325,7 +11475,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -12351,7 +11500,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -12375,7 +11523,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -12395,7 +11542,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -12415,7 +11561,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -12435,7 +11580,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -12455,7 +11599,6 @@ "result_format": "json" }, "swarming": { - 
"can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -12476,7 +11619,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -12487,26 +11629,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -12517,7 +11639,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -12538,7 +11659,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -12558,7 +11678,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -12578,7 +11697,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -12598,7 +11716,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -12619,7 +11736,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -12639,7 +11755,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -12660,7 +11775,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -12680,7 +11794,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -12700,7 +11813,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -12720,7 +11832,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -12741,7 +11852,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -12761,7 +11871,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "arm64-64-Apple_M1", @@ -12785,7 +11894,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -12805,7 +11913,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -12825,7 +11932,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -12845,7 +11951,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -12865,7 +11970,6 @@ "result_format": "json" }, "swarming": { - 
"can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -12886,7 +11990,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -12897,26 +12000,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19045" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -12927,7 +12010,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -12948,7 +12030,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -12968,7 +12049,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -12988,7 +12068,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13008,7 +12087,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13029,7 +12107,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13049,7 +12126,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13070,7 +12146,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13090,7 +12165,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13110,7 +12184,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13130,7 +12203,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13151,7 +12223,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13171,7 +12242,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13199,7 +12269,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13219,7 +12288,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13239,7 +12307,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13259,7 +12326,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13279,7 +12345,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13300,7 +12365,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, 
"dimension_sets": [ { "cpu": "x86-64", @@ -13311,26 +12375,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19045" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -13341,7 +12385,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13362,7 +12405,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13382,7 +12424,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13402,7 +12443,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13422,7 +12462,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13443,7 +12482,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13463,7 +12501,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13484,7 +12521,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13504,7 +12540,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13524,7 +12559,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13544,7 +12578,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13565,7 +12598,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13585,7 +12617,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13609,7 +12640,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13629,7 +12659,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13649,7 +12678,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13669,7 +12697,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13689,7 +12716,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13710,7 +12736,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13721,26 +12746,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": 
"peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19045" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -13751,7 +12756,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13772,7 +12776,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13792,7 +12795,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13812,7 +12814,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13832,7 +12833,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13853,7 +12853,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13873,7 +12872,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13894,7 +12892,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13914,7 +12911,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13934,7 +12930,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13954,7 +12949,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13975,7 +12969,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -13995,7 +12988,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14019,7 +13011,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14039,7 +13030,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14059,7 +13049,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14079,7 +13068,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14099,7 +13087,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14120,7 +13107,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14131,26 +13117,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": 
"peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": "x86-64", - "os": "Windows-10-19045" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -14161,7 +13127,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14182,7 +13147,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14202,7 +13166,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14222,7 +13185,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14242,7 +13204,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14263,7 +13224,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14283,7 +13243,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14304,7 +13263,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14324,7 +13282,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14344,7 +13301,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14364,7 +13320,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14385,7 +13340,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14405,7 +13359,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14429,7 +13382,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14449,7 +13401,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14469,7 +13420,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14489,7 +13439,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14509,7 +13458,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14530,7 +13478,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14541,26 +13488,6 @@ }, "test_id_prefix": "ninja://modules:modules_unittests/" }, - { - "isolate_name": "peer_connection_mediachannel_split_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peer_connection_mediachannel_split_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "can_use_on_swarming_builders": true, - "dimension_sets": [ - { - "cpu": 
"x86-64", - "os": "Windows-10-19045" - } - ] - }, - "test_id_prefix": "ninja://pc:peer_connection_mediachannel_split_unittests/" - }, { "isolate_name": "peerconnection_unittests", "merge": { @@ -14571,7 +13498,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14592,7 +13518,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14612,7 +13537,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14632,7 +13556,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14652,7 +13575,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14673,7 +13595,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14693,7 +13614,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14714,7 +13634,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14734,7 +13653,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14754,7 +13672,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14774,7 +13691,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14801,7 +13717,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14821,7 +13736,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14842,7 +13756,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14862,7 +13775,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14888,7 +13800,6 @@ "result_format": "gtest_json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", @@ -14912,7 +13823,6 @@ "result_format": "json" }, "swarming": { - "can_use_on_swarming_builders": true, "dimension_sets": [ { "cpu": "x86-64", diff --git a/third_party/libwebrtc/infra/specs/waterfalls.pyl b/third_party/libwebrtc/infra/specs/waterfalls.pyl index d66da0fdfa37..8b9f9f749573 100644 --- a/third_party/libwebrtc/infra/specs/waterfalls.pyl +++ b/third_party/libwebrtc/infra/specs/waterfalls.pyl @@ -245,16 +245,6 @@ 'name': 'client.webrtc.perf', 'mixins': ['limited-capacity'], 'machines': { - 'Perf Android32 (O Pixel2)': { - 'mixins': [ - 'walleye', 'android-devices', 'perf-pool', 'timeout-3h', - 'perf-output', 'has_native_resultdb_integration', - 'chromium-tester-service-account' - ], - 'test_suites': { - 'gtest_tests': 'perf_tests', - }, - }, 'Perf Android32 (R Pixel5)': { 'mixins': [ 'redfin', 'android-devices', 'perf-pool', 'timeout-3h', 'perf-output', @@ -264,16 +254,6 @@ 'gtest_tests': 'perf_tests', }, }, - 'Perf Android64 (O Pixel2)': { - 'mixins': [ - 'walleye', 'android-devices', 'perf-pool', 'timeout-3h', - 
'perf-output', 'has_native_resultdb_integration', - 'chromium-tester-service-account' - ], - 'test_suites': { - 'gtest_tests': 'perf_tests', - }, - }, 'Perf Android64 (R Pixel5)': { 'mixins': [ 'redfin', 'android-devices', 'perf-pool', 'timeout-3h', 'perf-output', @@ -345,7 +325,7 @@ 'iOS64 Debug': { 'mixins': [ 'ios-device-15.7', 'webrtc-xctest', 'chrome-tester-service-account', - 'xcode_13_main', 'mac_toolchain', 'has_native_resultdb_integration', + 'xcode_14_main', 'mac_toolchain', 'has_native_resultdb_integration', 'out_dir_arg' ], 'test_suites': { @@ -368,7 +348,7 @@ 'iOS64 Release': { 'mixins': [ 'ios-device-15.7', 'webrtc-xctest', 'chrome-tester-service-account', - 'xcode_13_main', 'mac_toolchain', 'has_native_resultdb_integration', + 'xcode_14_main', 'mac_toolchain', 'has_native_resultdb_integration', 'out_dir_arg' ], 'test_suites': { diff --git a/third_party/libwebrtc/logging/rtc_event_audio_gn/moz.build b/third_party/libwebrtc/logging/rtc_event_audio_gn/moz.build index f70052f36f23..9b4966439ea0 100644 --- a/third_party/libwebrtc/logging/rtc_event_audio_gn/moz.build +++ b/third_party/libwebrtc/logging/rtc_event_audio_gn/moz.build @@ -130,6 +130,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/logging/rtc_event_bwe_gn/moz.build b/third_party/libwebrtc/logging/rtc_event_bwe_gn/moz.build index 8fc25dce315c..3af2a8fa34b1 100644 --- a/third_party/libwebrtc/logging/rtc_event_bwe_gn/moz.build +++ b/third_party/libwebrtc/logging/rtc_event_bwe_gn/moz.build @@ -131,6 +131,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/logging/rtc_event_field_gn/moz.build b/third_party/libwebrtc/logging/rtc_event_field_gn/moz.build index 74aa081cb186..feb4a3f8c676 100644 --- a/third_party/libwebrtc/logging/rtc_event_field_gn/moz.build +++ b/third_party/libwebrtc/logging/rtc_event_field_gn/moz.build @@ -129,6 +129,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/logging/rtc_event_pacing_gn/moz.build b/third_party/libwebrtc/logging/rtc_event_pacing_gn/moz.build index 78992d519f70..8e2c1dcddf7f 100644 --- a/third_party/libwebrtc/logging/rtc_event_pacing_gn/moz.build +++ b/third_party/libwebrtc/logging/rtc_event_pacing_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/logging/rtc_event_video_gn/moz.build b/third_party/libwebrtc/logging/rtc_event_video_gn/moz.build index 5f3539a0f37d..fa653e5a268d 100644 --- a/third_party/libwebrtc/logging/rtc_event_video_gn/moz.build +++ b/third_party/libwebrtc/logging/rtc_event_video_gn/moz.build @@ -127,6 +127,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/logging/rtc_stream_config_gn/moz.build b/third_party/libwebrtc/logging/rtc_stream_config_gn/moz.build index 690d57689b4c..4db9d43b22ce 100644 --- a/third_party/libwebrtc/logging/rtc_stream_config_gn/moz.build +++ b/third_party/libwebrtc/logging/rtc_stream_config_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git 
a/third_party/libwebrtc/media/BUILD.gn b/third_party/libwebrtc/media/BUILD.gn index a7d18b34101a..9dd837c8515c 100644 --- a/third_party/libwebrtc/media/BUILD.gn +++ b/third_party/libwebrtc/media/BUILD.gn @@ -150,40 +150,6 @@ rtc_library("rtc_media_base") { } } -rtc_library("media_channel_shim") { - sources = [ - "base/media_channel_shim.cc", - "base/media_channel_shim.h", - ] - deps = [ - ":codec", - ":media_channel", - ":media_channel_impl", - ":stream_params", - "../api:frame_transformer_interface", - "../api:rtc_error", - "../api:rtp_headers", - "../api:rtp_parameters", - "../api:rtp_sender_interface", - "../api:scoped_refptr", - "../api/crypto:frame_decryptor_interface", - "../api/crypto:frame_encryptor_interface", - "../api/transport/rtp:rtp_source", - "../api/video:recordable_encoded_frame", - "../api/video:video_frame", - "../api/video_codecs:video_codecs_api", - "../modules/rtp_rtcp:rtp_rtcp_format", - "../rtc_base:checks", - "../rtc_base:network_route", - "../rtc_base/network:sent_packet", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/functional:any_invocable", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] -} - rtc_library("media_channel_impl") { if (!build_with_mozilla) { sources = [ @@ -241,7 +207,6 @@ rtc_source_set("media_channel") { sources = [ "base/media_channel.h" ] deps = [ ":codec", - ":delayable", ":media_constants", ":rtp_utils", ":stream_params", @@ -290,11 +255,6 @@ rtc_source_set("media_channel") { ] } -rtc_source_set("delayable") { - sources = [ "base/delayable.h" ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] -} - rtc_library("codec") { sources = [ "base/codec.cc", @@ -304,6 +264,7 @@ rtc_library("codec") { ":media_constants", "../api:field_trials_view", "../api:rtp_parameters", + "../api/audio_codecs:audio_codecs_api", "../api/video_codecs:video_codecs_api", "../rtc_base:checks", "../rtc_base:logging", @@ -506,7 +467,6 @@ rtc_library("rtc_audio_video") { ":codec", ":media_channel", ":media_channel_impl", - ":media_channel_shim", ":media_constants", ":rid_description", ":rtc_media_base", @@ -570,6 +530,7 @@ rtc_library("rtc_audio_video") { "../modules/audio_processing:audio_processing_statistics", "../modules/audio_processing/aec_dump", "../modules/audio_processing/agc:gain_control_interface", + "../modules/rtp_rtcp", "../modules/rtp_rtcp:rtp_rtcp_format", "../modules/video_coding", "../modules/video_coding:video_codec_interface", @@ -768,6 +729,7 @@ if (rtc_include_tests) { deps = [ ":codec", ":media_channel", + ":media_channel_impl", ":media_constants", ":rtc_audio_video", ":rtc_internal_video_codecs", @@ -778,6 +740,7 @@ if (rtc_include_tests) { ":stream_params", "../api:call_api", "../api:fec_controller_api", + "../api:rtp_parameters", "../api:scoped_refptr", "../api/task_queue", "../api/task_queue:pending_task_safety_flag", @@ -821,6 +784,7 @@ if (rtc_include_tests) { ] absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/functional:any_invocable", "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -868,7 +832,7 @@ if (rtc_include_tests) { defines = [] deps = [ ":codec", - ":media_channel_shim", + ":media_channel", ":media_constants", ":rtc_audio_video", ":rtc_internal_video_codecs", diff --git a/third_party/libwebrtc/media/base/codec.cc b/third_party/libwebrtc/media/base/codec.cc index 7a238cfae4ae..70a8d90e25a3 100644 --- a/third_party/libwebrtc/media/base/codec.cc +++ 
b/third_party/libwebrtc/media/base/codec.cc @@ -12,6 +12,7 @@ #include "absl/algorithm/container.h" #include "absl/strings/match.h" +#include "api/audio_codecs/audio_format.h" #include "api/video_codecs/av1_profile.h" #include "api/video_codecs/h264_profile_level_id.h" #include "api/video_codecs/vp9_profile.h" @@ -131,6 +132,11 @@ Codec::Codec(Type type, Codec::Codec(Type type) : Codec(type, 0, "", 0) {} +Codec::Codec(const webrtc::SdpAudioFormat& c) + : Codec(Type::kAudio, 0, c.name, c.clockrate_hz, c.num_channels) { + params = c.parameters; +} + Codec::Codec(const webrtc::SdpVideoFormat& c) : Codec(Type::kVideo, 0, c.name, kVideoCodecClockrate) { params = c.parameters; @@ -442,6 +448,10 @@ Codec CreateAudioCodec(int id, return Codec(Codec::Type::kAudio, id, name, clockrate, channels); } +Codec CreateAudioCodec(const webrtc::SdpAudioFormat& c) { + return Codec(c); +} + Codec CreateVideoCodec(const std::string& name) { return CreateVideoCodec(0, name); } diff --git a/third_party/libwebrtc/media/base/codec.h b/third_party/libwebrtc/media/base/codec.h index 1ca0931b660c..74c5cc2add96 100644 --- a/third_party/libwebrtc/media/base/codec.h +++ b/third_party/libwebrtc/media/base/codec.h @@ -19,6 +19,7 @@ #include "absl/container/inlined_vector.h" #include "absl/strings/string_view.h" #include "absl/types/optional.h" +#include "api/audio_codecs/audio_format.h" #include "api/field_trials_view.h" #include "api/rtp_parameters.h" #include "api/video_codecs/sdp_video_format.h" @@ -166,12 +167,14 @@ struct RTC_EXPORT Codec { int clockrate, size_t channels); + explicit Codec(const webrtc::SdpAudioFormat& c); explicit Codec(const webrtc::SdpVideoFormat& c); friend Codec CreateAudioCodec(int id, const std::string& name, int clockrate, size_t channels); + friend Codec CreateAudioCodec(const webrtc::SdpAudioFormat& c); friend Codec CreateAudioRtxCodec(int rtx_payload_type, int associated_payload_type); friend Codec CreateVideoCodec(int id, const std::string& name); @@ -188,6 +191,7 @@ Codec CreateAudioCodec(int id, const std::string& name, int clockrate, size_t channels); +Codec CreateAudioCodec(const webrtc::SdpAudioFormat& c); Codec CreateAudioRtxCodec(int rtx_payload_type, int associated_payload_type); Codec CreateVideoCodec(const std::string& name); Codec CreateVideoCodec(int id, const std::string& name); diff --git a/third_party/libwebrtc/media/base/delayable.h b/third_party/libwebrtc/media/base/delayable.h deleted file mode 100644 index f0344c5decd5..000000000000 --- a/third_party/libwebrtc/media/base/delayable.h +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MEDIA_BASE_DELAYABLE_H_ -#define MEDIA_BASE_DELAYABLE_H_ - -#include - -#include "absl/types/optional.h" - -namespace cricket { - -// Delayable is used by user code through ApplyConstraints algorithm. Its -// methods must take precendence over similar functional in `syncable.h`. -class Delayable { - public: - virtual ~Delayable() {} - // Set base minimum delay of the receive stream with specified ssrc. - // Base minimum delay sets lower bound on minimum delay value which - // determines minimum delay until audio playout. 
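The codec.cc/codec.h hunks above add a webrtc::SdpAudioFormat-based Codec constructor and a matching CreateAudioCodec() overload. The following sketch is illustrative only and is not part of the vendored patch; the MakeOpusCodec helper and the Opus fmtp values are invented example values, while the CreateAudioCodec(const webrtc::SdpAudioFormat&) signature comes straight from the hunk above.

#include "api/audio_codecs/audio_format.h"
#include "media/base/codec.h"

// Hypothetical helper: build a cricket audio Codec directly from an
// SdpAudioFormat (name, clock rate, channel count and fmtp parameters).
cricket::Codec MakeOpusCodec() {
  webrtc::SdpAudioFormat opus("opus", 48000, 2, {{"minptime", "10"}});
  // New overload added by this update; per the constructor above, the codec
  // id is left at 0 and name/clockrate/channels/params are copied from `opus`.
  return cricket::CreateAudioCodec(opus);
}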
- // Returns false if there is no stream with given ssrc. - virtual bool SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, int delay_ms) = 0; - - // Returns current value of base minimum delay in milliseconds. - virtual absl::optional GetBaseMinimumPlayoutDelayMs( - uint32_t ssrc) const = 0; -}; - -} // namespace cricket - -#endif // MEDIA_BASE_DELAYABLE_H_ diff --git a/third_party/libwebrtc/media/base/fake_media_engine.cc b/third_party/libwebrtc/media/base/fake_media_engine.cc index 5e4ecd310d47..daf62894da42 100644 --- a/third_party/libwebrtc/media/base/fake_media_engine.cc +++ b/third_party/libwebrtc/media/base/fake_media_engine.cc @@ -16,103 +16,246 @@ #include "absl/algorithm/container.h" #include "absl/strings/match.h" #include "absl/types/optional.h" +#include "media/base/media_channel.h" #include "rtc_base/checks.h" namespace cricket { using webrtc::TaskQueueBase; -FakeVoiceMediaChannel::DtmfInfo::DtmfInfo(uint32_t ssrc, - int event_code, - int duration) +FakeVoiceMediaReceiveChannel::DtmfInfo::DtmfInfo(uint32_t ssrc, + int event_code, + int duration) : ssrc(ssrc), event_code(event_code), duration(duration) {} -FakeVoiceMediaChannel::VoiceChannelAudioSink::VoiceChannelAudioSink( +FakeVoiceMediaReceiveChannel::VoiceChannelAudioSink::VoiceChannelAudioSink( AudioSource* source) : source_(source) { source_->SetSink(this); } -FakeVoiceMediaChannel::VoiceChannelAudioSink::~VoiceChannelAudioSink() { +FakeVoiceMediaReceiveChannel::VoiceChannelAudioSink::~VoiceChannelAudioSink() { if (source_) { source_->SetSink(nullptr); } } -void FakeVoiceMediaChannel::VoiceChannelAudioSink::OnData( +void FakeVoiceMediaReceiveChannel::VoiceChannelAudioSink::OnData( const void* audio_data, int bits_per_sample, int sample_rate, size_t number_of_channels, size_t number_of_frames, absl::optional absolute_capture_timestamp_ms) {} -void FakeVoiceMediaChannel::VoiceChannelAudioSink::OnClose() { +void FakeVoiceMediaReceiveChannel::VoiceChannelAudioSink::OnClose() { source_ = nullptr; } -AudioSource* FakeVoiceMediaChannel::VoiceChannelAudioSink::source() const { +AudioSource* FakeVoiceMediaReceiveChannel::VoiceChannelAudioSink::source() + const { return source_; } -FakeVoiceMediaChannel::FakeVoiceMediaChannel(MediaChannel::Role role, - FakeVoiceEngine* engine, - const AudioOptions& options, - TaskQueueBase* network_thread) - : RtpHelper(role, network_thread), - engine_(engine), +FakeVoiceMediaReceiveChannel::FakeVoiceMediaReceiveChannel( + const AudioOptions& options, + TaskQueueBase* network_thread) + : RtpReceiveChannelHelper( + network_thread), max_bps_(-1) { output_scalings_[0] = 1.0; // For default channel. 
SetOptions(options); } -FakeVoiceMediaChannel::~FakeVoiceMediaChannel() { - if (engine_) { - engine_->UnregisterChannel(this); - } -} -const std::vector& FakeVoiceMediaChannel::recv_codecs() const { +FakeVoiceMediaReceiveChannel::~FakeVoiceMediaReceiveChannel() = default; +const std::vector& FakeVoiceMediaReceiveChannel::recv_codecs() + const { return recv_codecs_; } -const std::vector& FakeVoiceMediaChannel::send_codecs() const { - return send_codecs_; -} -const std::vector& FakeVoiceMediaChannel::codecs() const { - return send_codecs(); -} -const std::vector& -FakeVoiceMediaChannel::dtmf_info_queue() const { +const std::vector& +FakeVoiceMediaReceiveChannel::dtmf_info_queue() const { return dtmf_info_queue_; } -const AudioOptions& FakeVoiceMediaChannel::options() const { +const AudioOptions& FakeVoiceMediaReceiveChannel::options() const { return options_; } -int FakeVoiceMediaChannel::max_bps() const { +int FakeVoiceMediaReceiveChannel::max_bps() const { return max_bps_; } -bool FakeVoiceMediaChannel::SetSendParameters( - const AudioSendParameters& params) { - set_send_rtcp_parameters(params.rtcp); - return (SetSendCodecs(params.codecs) && - SetSendExtmapAllowMixed(params.extmap_allow_mixed) && - SetSendRtpHeaderExtensions(params.extensions) && - SetMaxSendBandwidth(params.max_bandwidth_bps) && - SetOptions(params.options)); -} -bool FakeVoiceMediaChannel::SetRecvParameters( - const AudioRecvParameters& params) { +bool FakeVoiceMediaReceiveChannel::SetRecvParameters( + const AudioReceiverParameters& params) { set_recv_rtcp_parameters(params.rtcp); return (SetRecvCodecs(params.codecs) && SetRecvRtpHeaderExtensions(params.extensions)); } -void FakeVoiceMediaChannel::SetPlayout(bool playout) { +void FakeVoiceMediaReceiveChannel::SetPlayout(bool playout) { set_playout(playout); } -void FakeVoiceMediaChannel::SetSend(bool send) { +bool FakeVoiceMediaReceiveChannel::HasSource(uint32_t ssrc) const { + return local_sinks_.find(ssrc) != local_sinks_.end(); +} +bool FakeVoiceMediaReceiveChannel::AddRecvStream(const StreamParams& sp) { + if (!RtpReceiveChannelHelper< + VoiceMediaReceiveChannelInterface>::AddRecvStream(sp)) + return false; + output_scalings_[sp.first_ssrc()] = 1.0; + output_delays_[sp.first_ssrc()] = 0; + return true; +} +bool FakeVoiceMediaReceiveChannel::RemoveRecvStream(uint32_t ssrc) { + if (!RtpReceiveChannelHelper< + VoiceMediaReceiveChannelInterface>::RemoveRecvStream(ssrc)) + return false; + output_scalings_.erase(ssrc); + output_delays_.erase(ssrc); + return true; +} +bool FakeVoiceMediaReceiveChannel::SetOutputVolume(uint32_t ssrc, + double volume) { + if (output_scalings_.find(ssrc) != output_scalings_.end()) { + output_scalings_[ssrc] = volume; + return true; + } + return false; +} +bool FakeVoiceMediaReceiveChannel::SetDefaultOutputVolume(double volume) { + for (auto& entry : output_scalings_) { + entry.second = volume; + } + return true; +} +bool FakeVoiceMediaReceiveChannel::GetOutputVolume(uint32_t ssrc, + double* volume) { + if (output_scalings_.find(ssrc) == output_scalings_.end()) + return false; + *volume = output_scalings_[ssrc]; + return true; +} +bool FakeVoiceMediaReceiveChannel::SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, + int delay_ms) { + if (output_delays_.find(ssrc) == output_delays_.end()) { + return false; + } else { + output_delays_[ssrc] = delay_ms; + return true; + } +} +absl::optional FakeVoiceMediaReceiveChannel::GetBaseMinimumPlayoutDelayMs( + uint32_t ssrc) const { + const auto it = output_delays_.find(ssrc); + if (it != output_delays_.end()) { 
+ return it->second; + } + return absl::nullopt; +} +bool FakeVoiceMediaReceiveChannel::GetStats(VoiceMediaReceiveInfo* info, + bool get_and_clear_legacy_stats) { + return false; +} +void FakeVoiceMediaReceiveChannel::SetRawAudioSink( + uint32_t ssrc, + std::unique_ptr sink) { + sink_ = std::move(sink); +} +void FakeVoiceMediaReceiveChannel::SetDefaultRawAudioSink( + std::unique_ptr sink) { + sink_ = std::move(sink); +} +std::vector FakeVoiceMediaReceiveChannel::GetSources( + uint32_t ssrc) const { + return std::vector(); +} +bool FakeVoiceMediaReceiveChannel::SetRecvCodecs( + const std::vector& codecs) { + if (fail_set_recv_codecs()) { + // Fake the failure in SetRecvCodecs. + return false; + } + recv_codecs_ = codecs; + return true; +} +bool FakeVoiceMediaReceiveChannel::SetMaxSendBandwidth(int bps) { + max_bps_ = bps; + return true; +} +bool FakeVoiceMediaReceiveChannel::SetOptions(const AudioOptions& options) { + // Does a "merge" of current options and set options. + options_.SetAll(options); + return true; +} + +FakeVoiceMediaSendChannel::DtmfInfo::DtmfInfo(uint32_t ssrc, + int event_code, + int duration) + : ssrc(ssrc), event_code(event_code), duration(duration) {} + +FakeVoiceMediaSendChannel::VoiceChannelAudioSink::VoiceChannelAudioSink( + AudioSource* source) + : source_(source) { + source_->SetSink(this); +} +FakeVoiceMediaSendChannel::VoiceChannelAudioSink::~VoiceChannelAudioSink() { + if (source_) { + source_->SetSink(nullptr); + } +} +void FakeVoiceMediaSendChannel::VoiceChannelAudioSink::OnData( + const void* audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames, + absl::optional absolute_capture_timestamp_ms) {} +void FakeVoiceMediaSendChannel::VoiceChannelAudioSink::OnClose() { + source_ = nullptr; +} +AudioSource* FakeVoiceMediaSendChannel::VoiceChannelAudioSink::source() const { + return source_; +} + +FakeVoiceMediaSendChannel::FakeVoiceMediaSendChannel( + const AudioOptions& options, + TaskQueueBase* network_thread) + : RtpSendChannelHelper(network_thread), + max_bps_(-1) { + output_scalings_[0] = 1.0; // For default channel. 
+ SetOptions(options); +} +FakeVoiceMediaSendChannel::~FakeVoiceMediaSendChannel() = default; +const std::vector& FakeVoiceMediaSendChannel::send_codecs() const { + return send_codecs_; +} +absl::optional FakeVoiceMediaSendChannel::GetSendCodec() const { + if (!send_codecs_.empty()) { + return send_codecs_.front(); + } + return absl::nullopt; +} +const std::vector& +FakeVoiceMediaSendChannel::dtmf_info_queue() const { + return dtmf_info_queue_; +} +const AudioOptions& FakeVoiceMediaSendChannel::options() const { + return options_; +} +int FakeVoiceMediaSendChannel::max_bps() const { + return max_bps_; +} +bool FakeVoiceMediaSendChannel::SetSendParameters( + const AudioSenderParameter& params) { + set_send_rtcp_parameters(params.rtcp); + SetExtmapAllowMixed(params.extmap_allow_mixed); + return (SetSendCodecs(params.codecs) && + SetSendRtpHeaderExtensions(params.extensions) && + SetMaxSendBandwidth(params.max_bandwidth_bps) && + SetOptions(params.options)); +} +void FakeVoiceMediaSendChannel::SetSend(bool send) { set_sending(send); } -bool FakeVoiceMediaChannel::SetAudioSend(uint32_t ssrc, - bool enable, - const AudioOptions* options, - AudioSource* source) { +bool FakeVoiceMediaSendChannel::SetAudioSend(uint32_t ssrc, + bool enable, + const AudioOptions* options, + AudioSource* source) { if (!SetLocalSource(ssrc, source)) { return false; } - if (!RtpHelper::MuteStream(ssrc, !enable)) { + if (!RtpSendChannelHelper::MuteStream( + ssrc, !enable)) { return false; } if (enable && options) { @@ -120,24 +263,10 @@ bool FakeVoiceMediaChannel::SetAudioSend(uint32_t ssrc, } return true; } -bool FakeVoiceMediaChannel::HasSource(uint32_t ssrc) const { +bool FakeVoiceMediaSendChannel::HasSource(uint32_t ssrc) const { return local_sinks_.find(ssrc) != local_sinks_.end(); } -bool FakeVoiceMediaChannel::AddRecvStream(const StreamParams& sp) { - if (!RtpHelper::AddRecvStream(sp)) - return false; - output_scalings_[sp.first_ssrc()] = 1.0; - output_delays_[sp.first_ssrc()] = 0; - return true; -} -bool FakeVoiceMediaChannel::RemoveRecvStream(uint32_t ssrc) { - if (!RtpHelper::RemoveRecvStream(ssrc)) - return false; - output_scalings_.erase(ssrc); - output_delays_.erase(ssrc); - return true; -} -bool FakeVoiceMediaChannel::CanInsertDtmf() { +bool FakeVoiceMediaSendChannel::CanInsertDtmf() { for (std::vector::const_iterator it = send_codecs_.begin(); it != send_codecs_.end(); ++it) { // Find the DTMF telephone event "codec". 
@@ -147,78 +276,22 @@ bool FakeVoiceMediaChannel::CanInsertDtmf() { } return false; } -bool FakeVoiceMediaChannel::InsertDtmf(uint32_t ssrc, - int event_code, - int duration) { +bool FakeVoiceMediaSendChannel::InsertDtmf(uint32_t ssrc, + int event_code, + int duration) { dtmf_info_queue_.push_back(DtmfInfo(ssrc, event_code, duration)); return true; } -bool FakeVoiceMediaChannel::SetOutputVolume(uint32_t ssrc, double volume) { - if (output_scalings_.find(ssrc) != output_scalings_.end()) { - output_scalings_[ssrc] = volume; - return true; - } - return false; -} -bool FakeVoiceMediaChannel::SetDefaultOutputVolume(double volume) { - for (auto& entry : output_scalings_) { - entry.second = volume; - } - return true; -} -bool FakeVoiceMediaChannel::GetOutputVolume(uint32_t ssrc, double* volume) { +bool FakeVoiceMediaSendChannel::GetOutputVolume(uint32_t ssrc, double* volume) { if (output_scalings_.find(ssrc) == output_scalings_.end()) return false; *volume = output_scalings_[ssrc]; return true; } -bool FakeVoiceMediaChannel::SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, - int delay_ms) { - if (output_delays_.find(ssrc) == output_delays_.end()) { - return false; - } else { - output_delays_[ssrc] = delay_ms; - return true; - } -} -absl::optional FakeVoiceMediaChannel::GetBaseMinimumPlayoutDelayMs( - uint32_t ssrc) const { - const auto it = output_delays_.find(ssrc); - if (it != output_delays_.end()) { - return it->second; - } - return absl::nullopt; -} -bool FakeVoiceMediaChannel::GetSendStats(VoiceMediaSendInfo* info) { +bool FakeVoiceMediaSendChannel::GetStats(VoiceMediaSendInfo* info) { return false; } -bool FakeVoiceMediaChannel::GetReceiveStats(VoiceMediaReceiveInfo* info, - bool get_and_clear_legacy_stats) { - return false; -} -void FakeVoiceMediaChannel::SetRawAudioSink( - uint32_t ssrc, - std::unique_ptr sink) { - sink_ = std::move(sink); -} -void FakeVoiceMediaChannel::SetDefaultRawAudioSink( - std::unique_ptr sink) { - sink_ = std::move(sink); -} -std::vector FakeVoiceMediaChannel::GetSources( - uint32_t ssrc) const { - return std::vector(); -} -bool FakeVoiceMediaChannel::SetRecvCodecs( - const std::vector& codecs) { - if (fail_set_recv_codecs()) { - // Fake the failure in SetRecvCodecs. - return false; - } - recv_codecs_ = codecs; - return true; -} -bool FakeVoiceMediaChannel::SetSendCodecs( +bool FakeVoiceMediaSendChannel::SetSendCodecs( const std::vector& codecs) { if (fail_set_send_codecs()) { // Fake the failure in SetSendCodecs. @@ -227,16 +300,17 @@ bool FakeVoiceMediaChannel::SetSendCodecs( send_codecs_ = codecs; return true; } -bool FakeVoiceMediaChannel::SetMaxSendBandwidth(int bps) { +bool FakeVoiceMediaSendChannel::SetMaxSendBandwidth(int bps) { max_bps_ = bps; return true; } -bool FakeVoiceMediaChannel::SetOptions(const AudioOptions& options) { +bool FakeVoiceMediaSendChannel::SetOptions(const AudioOptions& options) { // Does a "merge" of current options and set options. 
options_.SetAll(options); return true; } -bool FakeVoiceMediaChannel::SetLocalSource(uint32_t ssrc, AudioSource* source) { +bool FakeVoiceMediaSendChannel::SetLocalSource(uint32_t ssrc, + AudioSource* source) { auto it = local_sinks_.find(ssrc); if (source) { if (it != local_sinks_.end()) { @@ -253,7 +327,7 @@ bool FakeVoiceMediaChannel::SetLocalSource(uint32_t ssrc, AudioSource* source) { return true; } -bool CompareDtmfInfo(const FakeVoiceMediaChannel::DtmfInfo& info, +bool CompareDtmfInfo(const FakeVoiceMediaSendChannel::DtmfInfo& info, uint32_t ssrc, int event_code, int duration) { @@ -261,87 +335,44 @@ bool CompareDtmfInfo(const FakeVoiceMediaChannel::DtmfInfo& info, info.ssrc == ssrc); } -FakeVideoMediaChannel::FakeVideoMediaChannel(MediaChannel::Role role, - FakeVideoEngine* engine, - const VideoOptions& options, - TaskQueueBase* network_thread) - : RtpHelper(role, network_thread), - engine_(engine), +FakeVideoMediaSendChannel::FakeVideoMediaSendChannel( + const VideoOptions& options, + TaskQueueBase* network_thread) + : RtpSendChannelHelper(network_thread), max_bps_(-1) { SetOptions(options); } -FakeVideoMediaChannel::~FakeVideoMediaChannel() { - if (engine_) { - engine_->UnregisterChannel(this); - } -} -const std::vector& FakeVideoMediaChannel::recv_codecs() const { - return recv_codecs_; -} -const std::vector& FakeVideoMediaChannel::send_codecs() const { +FakeVideoMediaSendChannel::~FakeVideoMediaSendChannel() = default; +const std::vector& FakeVideoMediaSendChannel::send_codecs() const { return send_codecs_; } -const std::vector& FakeVideoMediaChannel::codecs() const { +const std::vector& FakeVideoMediaSendChannel::codecs() const { return send_codecs(); } -bool FakeVideoMediaChannel::rendering() const { - return playout(); -} -const VideoOptions& FakeVideoMediaChannel::options() const { +const VideoOptions& FakeVideoMediaSendChannel::options() const { return options_; } -const std::map*>& -FakeVideoMediaChannel::sinks() const { - return sinks_; -} -int FakeVideoMediaChannel::max_bps() const { +int FakeVideoMediaSendChannel::max_bps() const { return max_bps_; } -bool FakeVideoMediaChannel::SetSendParameters( - const VideoSendParameters& params) { +bool FakeVideoMediaSendChannel::SetSendParameters( + const VideoSenderParameters& params) { set_send_rtcp_parameters(params.rtcp); + SetExtmapAllowMixed(params.extmap_allow_mixed); return (SetSendCodecs(params.codecs) && - SetSendExtmapAllowMixed(params.extmap_allow_mixed) && SetSendRtpHeaderExtensions(params.extensions) && SetMaxSendBandwidth(params.max_bandwidth_bps)); } -bool FakeVideoMediaChannel::SetRecvParameters( - const VideoRecvParameters& params) { - set_recv_rtcp_parameters(params.rtcp); - return (SetRecvCodecs(params.codecs) && - SetRecvRtpHeaderExtensions(params.extensions)); -} -bool FakeVideoMediaChannel::AddSendStream(const StreamParams& sp) { - return RtpHelper::AddSendStream(sp); -} -bool FakeVideoMediaChannel::RemoveSendStream(uint32_t ssrc) { - return RtpHelper::RemoveSendStream(ssrc); -} -absl::optional FakeVideoMediaChannel::GetSendCodec() { +absl::optional FakeVideoMediaSendChannel::GetSendCodec() const { if (send_codecs_.empty()) { return absl::nullopt; } return send_codecs_[0]; } -bool FakeVideoMediaChannel::SetSink( - uint32_t ssrc, - rtc::VideoSinkInterface* sink) { - auto it = sinks_.find(ssrc); - if (it == sinks_.end()) { - return false; - } - it->second = sink; - return true; -} -void FakeVideoMediaChannel::SetDefaultSink( - rtc::VideoSinkInterface* sink) {} -bool FakeVideoMediaChannel::HasSink(uint32_t 
ssrc) const { - return sinks_.find(ssrc) != sinks_.end() && sinks_.at(ssrc) != nullptr; -} -bool FakeVideoMediaChannel::SetSend(bool send) { +bool FakeVideoMediaSendChannel::SetSend(bool send) { return set_sending(send); } -bool FakeVideoMediaChannel::SetVideoSend( +bool FakeVideoMediaSendChannel::SetVideoSend( uint32_t ssrc, const VideoOptions* options, rtc::VideoSourceInterface* source) { @@ -353,62 +384,15 @@ bool FakeVideoMediaChannel::SetVideoSend( sources_[ssrc] = source; return true; } -bool FakeVideoMediaChannel::HasSource(uint32_t ssrc) const { +bool FakeVideoMediaSendChannel::HasSource(uint32_t ssrc) const { return sources_.find(ssrc) != sources_.end() && sources_.at(ssrc) != nullptr; } -bool FakeVideoMediaChannel::AddRecvStream(const StreamParams& sp) { - if (!RtpHelper::AddRecvStream(sp)) - return false; - sinks_[sp.first_ssrc()] = NULL; - output_delays_[sp.first_ssrc()] = 0; - return true; -} -bool FakeVideoMediaChannel::RemoveRecvStream(uint32_t ssrc) { - if (!RtpHelper::RemoveRecvStream(ssrc)) - return false; - sinks_.erase(ssrc); - output_delays_.erase(ssrc); - return true; -} -void FakeVideoMediaChannel::FillBitrateInfo(BandwidthEstimationInfo* bwe_info) { -} -bool FakeVideoMediaChannel::GetSendStats(VideoMediaSendInfo* info) { +void FakeVideoMediaSendChannel::FillBitrateInfo( + BandwidthEstimationInfo* bwe_info) {} +bool FakeVideoMediaSendChannel::GetStats(VideoMediaSendInfo* info) { return false; } -bool FakeVideoMediaChannel::GetReceiveStats(VideoMediaReceiveInfo* info) { - return false; -} -std::vector FakeVideoMediaChannel::GetSources( - uint32_t ssrc) const { - return {}; -} -bool FakeVideoMediaChannel::SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, - int delay_ms) { - if (output_delays_.find(ssrc) == output_delays_.end()) { - return false; - } else { - output_delays_[ssrc] = delay_ms; - return true; - } -} -absl::optional FakeVideoMediaChannel::GetBaseMinimumPlayoutDelayMs( - uint32_t ssrc) const { - const auto it = output_delays_.find(ssrc); - if (it != output_delays_.end()) { - return it->second; - } - return absl::nullopt; -} -bool FakeVideoMediaChannel::SetRecvCodecs( - const std::vector& codecs) { - if (fail_set_recv_codecs()) { - // Fake the failure in SetRecvCodecs. - return false; - } - recv_codecs_ = codecs; - return true; -} -bool FakeVideoMediaChannel::SetSendCodecs( +bool FakeVideoMediaSendChannel::SetSendCodecs( const std::vector& codecs) { if (fail_set_send_codecs()) { // Fake the failure in SetSendCodecs. 
@@ -418,27 +402,137 @@ bool FakeVideoMediaChannel::SetSendCodecs( return true; } -bool FakeVideoMediaChannel::SetOptions(const VideoOptions& options) { +bool FakeVideoMediaSendChannel::SetOptions(const VideoOptions& options) { options_ = options; return true; } -bool FakeVideoMediaChannel::SetMaxSendBandwidth(int bps) { +bool FakeVideoMediaSendChannel::SetMaxSendBandwidth(int bps) { + max_bps_ = bps; + return true; +} +void FakeVideoMediaSendChannel::GenerateSendKeyFrame( + uint32_t ssrc, + const std::vector& rids) {} + +FakeVideoMediaReceiveChannel::FakeVideoMediaReceiveChannel( + const VideoOptions& options, + TaskQueueBase* network_thread) + : RtpReceiveChannelHelper( + network_thread), + max_bps_(-1) { + SetOptions(options); +} +FakeVideoMediaReceiveChannel::~FakeVideoMediaReceiveChannel() = default; +const std::vector& FakeVideoMediaReceiveChannel::recv_codecs() + const { + return recv_codecs_; +} +bool FakeVideoMediaReceiveChannel::rendering() const { + return playout(); +} +const VideoOptions& FakeVideoMediaReceiveChannel::options() const { + return options_; +} +const std::map*>& +FakeVideoMediaReceiveChannel::sinks() const { + return sinks_; +} +int FakeVideoMediaReceiveChannel::max_bps() const { + return max_bps_; +} +bool FakeVideoMediaReceiveChannel::SetRecvParameters( + const VideoReceiverParameters& params) { + set_recv_rtcp_parameters(params.rtcp); + return (SetRecvCodecs(params.codecs) && + SetRecvRtpHeaderExtensions(params.extensions)); +} +bool FakeVideoMediaReceiveChannel::SetSink( + uint32_t ssrc, + rtc::VideoSinkInterface* sink) { + auto it = sinks_.find(ssrc); + if (it == sinks_.end()) { + return false; + } + it->second = sink; + return true; +} +void FakeVideoMediaReceiveChannel::SetDefaultSink( + rtc::VideoSinkInterface* sink) {} +bool FakeVideoMediaReceiveChannel::HasSink(uint32_t ssrc) const { + return sinks_.find(ssrc) != sinks_.end() && sinks_.at(ssrc) != nullptr; +} +bool FakeVideoMediaReceiveChannel::HasSource(uint32_t ssrc) const { + return sources_.find(ssrc) != sources_.end() && sources_.at(ssrc) != nullptr; +} +bool FakeVideoMediaReceiveChannel::AddRecvStream(const StreamParams& sp) { + if (!RtpReceiveChannelHelper< + VideoMediaReceiveChannelInterface>::AddRecvStream(sp)) + return false; + sinks_[sp.first_ssrc()] = NULL; + output_delays_[sp.first_ssrc()] = 0; + return true; +} +bool FakeVideoMediaReceiveChannel::RemoveRecvStream(uint32_t ssrc) { + if (!RtpReceiveChannelHelper< + VideoMediaReceiveChannelInterface>::RemoveRecvStream(ssrc)) + return false; + sinks_.erase(ssrc); + output_delays_.erase(ssrc); + return true; +} +std::vector FakeVideoMediaReceiveChannel::GetSources( + uint32_t ssrc) const { + return {}; +} +bool FakeVideoMediaReceiveChannel::SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, + int delay_ms) { + if (output_delays_.find(ssrc) == output_delays_.end()) { + return false; + } else { + output_delays_[ssrc] = delay_ms; + return true; + } +} +absl::optional FakeVideoMediaReceiveChannel::GetBaseMinimumPlayoutDelayMs( + uint32_t ssrc) const { + const auto it = output_delays_.find(ssrc); + if (it != output_delays_.end()) { + return it->second; + } + return absl::nullopt; +} +bool FakeVideoMediaReceiveChannel::SetRecvCodecs( + const std::vector& codecs) { + if (fail_set_recv_codecs()) { + // Fake the failure in SetRecvCodecs. 
+ return false; + } + recv_codecs_ = codecs; + return true; +} +bool FakeVideoMediaReceiveChannel::SetOptions(const VideoOptions& options) { + options_ = options; + return true; +} + +bool FakeVideoMediaReceiveChannel::SetMaxSendBandwidth(int bps) { max_bps_ = bps; return true; } -void FakeVideoMediaChannel::SetRecordableEncodedFrameCallback( +void FakeVideoMediaReceiveChannel::SetRecordableEncodedFrameCallback( uint32_t ssrc, std::function callback) {} -void FakeVideoMediaChannel::ClearRecordableEncodedFrameCallback(uint32_t ssrc) { -} +void FakeVideoMediaReceiveChannel::ClearRecordableEncodedFrameCallback( + uint32_t ssrc) {} -void FakeVideoMediaChannel::RequestRecvKeyFrame(uint32_t ssrc) {} -void FakeVideoMediaChannel::GenerateSendKeyFrame( - uint32_t ssrc, - const std::vector& rids) {} +void FakeVideoMediaReceiveChannel::RequestRecvKeyFrame(uint32_t ssrc) {} + +bool FakeVideoMediaReceiveChannel::GetStats(VideoMediaReceiveInfo* info) { + return false; +} FakeVoiceEngine::FakeVoiceEngine() : fail_create_channel_(false) { // Add a fake audio codec. Note that the name must not be "" as there are @@ -449,57 +543,29 @@ void FakeVoiceEngine::Init() {} rtc::scoped_refptr FakeVoiceEngine::GetAudioState() const { return rtc::scoped_refptr(); } -VoiceMediaChannel* FakeVoiceEngine::CreateMediaChannel( - MediaChannel::Role role, +std::unique_ptr +FakeVoiceEngine::CreateSendChannel(webrtc::Call* call, + const MediaConfig& config, + const AudioOptions& options, + const webrtc::CryptoOptions& crypto_options, + webrtc::AudioCodecPairId codec_pair_id) { + std::unique_ptr ch = + std::make_unique(options, + call->network_thread()); + return ch; +} +std::unique_ptr +FakeVoiceEngine::CreateReceiveChannel( webrtc::Call* call, const MediaConfig& config, const AudioOptions& options, const webrtc::CryptoOptions& crypto_options, webrtc::AudioCodecPairId codec_pair_id) { - if (fail_create_channel_) { - return nullptr; - } - - FakeVoiceMediaChannel* ch = - new FakeVoiceMediaChannel(role, this, options, call->network_thread()); - switch (role) { - case MediaChannel::Role::kSend: - send_channels_.push_back(ch); - break; - case MediaChannel::Role::kReceive: - receive_channels_.push_back(ch); - break; - case MediaChannel::Role::kBoth: - send_channels_.push_back(ch); - receive_channels_.push_back(ch); - break; - default: - RTC_CHECK_NOTREACHED(); - } + std::unique_ptr ch = + std::make_unique(options, + call->network_thread()); return ch; } -FakeVoiceMediaChannel* FakeVoiceEngine::GetSendChannel(size_t index) { - return (send_channels_.size() > index) ? send_channels_[index] : NULL; -} -FakeVoiceMediaChannel* FakeVoiceEngine::GetReceiveChannel(size_t index) { - return (receive_channels_.size() > index) ? 
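The fake_media_engine.cc hunk above replaces the single role-parameterized CreateMediaChannel() with separate CreateSendChannel()/CreateReceiveChannel() factories on FakeVoiceEngine. A rough usage sketch follows; it is not part of the patch, and the CreateFakeVoiceChannels wrapper plus the fixture-supplied call/config/options/crypto_options/codec_pair_id arguments are assumptions, while the factory signatures and SetSend/SetPlayout calls mirror the code in the hunk.

#include <memory>

#include "media/base/fake_media_engine.h"

// Illustrative sketch: a test obtains independent fake send and receive voice
// channels instead of one channel created with a MediaChannel::Role.
void CreateFakeVoiceChannels(webrtc::Call* call,
                             const cricket::MediaConfig& config,
                             const cricket::AudioOptions& options,
                             const webrtc::CryptoOptions& crypto_options,
                             webrtc::AudioCodecPairId codec_pair_id) {
  cricket::FakeVoiceEngine engine;
  // Dedicated send channel...
  std::unique_ptr<cricket::VoiceMediaSendChannelInterface> send =
      engine.CreateSendChannel(call, config, options, crypto_options,
                               codec_pair_id);
  // ...and dedicated receive channel, each configured on its own.
  std::unique_ptr<cricket::VoiceMediaReceiveChannelInterface> recv =
      engine.CreateReceiveChannel(call, config, options, crypto_options,
                                  codec_pair_id);
  send->SetSend(true);
  recv->SetPlayout(true);
}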
receive_channels_[index] : NULL; -} -void FakeVoiceEngine::UnregisterChannel(VoiceMediaChannel* channel) { - switch (channel->role()) { - case MediaChannel::Role::kSend: - send_channels_.erase(absl::c_find(send_channels_, channel)); - break; - case MediaChannel::Role::kReceive: - receive_channels_.erase(absl::c_find(receive_channels_, channel)); - break; - case MediaChannel::Role::kBoth: - send_channels_.erase(absl::c_find(send_channels_, channel)); - receive_channels_.erase(absl::c_find(receive_channels_, channel)); - break; - default: - RTC_CHECK_NOTREACHED(); - } -} const std::vector& FakeVoiceEngine::send_codecs() const { return send_codecs_; } @@ -550,8 +616,8 @@ bool FakeVideoEngine::SetOptions(const VideoOptions& options) { options_ = options; return true; } -VideoMediaChannel* FakeVideoEngine::CreateMediaChannel( - MediaChannel::Role role, +std::unique_ptr +FakeVideoEngine::CreateSendChannel( webrtc::Call* call, const MediaConfig& config, const VideoOptions& options, @@ -561,54 +627,25 @@ VideoMediaChannel* FakeVideoEngine::CreateMediaChannel( return nullptr; } - FakeVideoMediaChannel* ch = - new FakeVideoMediaChannel(role, this, options, call->network_thread()); - switch (role) { - case MediaChannel::Role::kSend: - send_channels_.emplace_back(ch); - break; - case MediaChannel::Role::kReceive: - receive_channels_.emplace_back(ch); - break; - case MediaChannel::Role::kBoth: - send_channels_.push_back(ch); - receive_channels_.push_back(ch); - break; - default: - RTC_CHECK_NOTREACHED(); - } + std::unique_ptr ch = + std::make_unique(options, + call->network_thread()); return ch; } -FakeVideoMediaChannel* FakeVideoEngine::GetSendChannel(size_t index) { - return (send_channels_.size() > index) ? send_channels_[index] : nullptr; -} -FakeVideoMediaChannel* FakeVideoEngine::GetReceiveChannel(size_t index) { - return (receive_channels_.size() > index) ? 
receive_channels_[index] - : nullptr; -} -void FakeVideoEngine::UnregisterChannel(VideoMediaChannel* channel) { - switch (channel->role()) { - case MediaChannel::Role::kSend: { - auto it = absl::c_find(send_channels_, channel); - RTC_DCHECK(it != send_channels_.end()); - send_channels_.erase(it); - } break; - case MediaChannel::Role::kReceive: { - auto it = absl::c_find(receive_channels_, channel); - RTC_DCHECK(it != receive_channels_.end()); - receive_channels_.erase(it); - } break; - case MediaChannel::Role::kBoth: { - auto it = absl::c_find(send_channels_, channel); - RTC_DCHECK(it != send_channels_.end()); - send_channels_.erase(it); - it = absl::c_find(receive_channels_, channel); - RTC_DCHECK(it != receive_channels_.end()); - receive_channels_.erase(it); - } break; - default: - RTC_CHECK_NOTREACHED(); +std::unique_ptr +FakeVideoEngine::CreateReceiveChannel( + webrtc::Call* call, + const MediaConfig& config, + const VideoOptions& options, + const webrtc::CryptoOptions& crypto_options) { + if (fail_create_channel_) { + return nullptr; } + + std::unique_ptr ch = + std::make_unique(options, + call->network_thread()); + return ch; } std::vector FakeVideoEngine::send_codecs(bool use_rtx) const { return send_codecs_; @@ -660,20 +697,6 @@ void FakeMediaEngine::SetVideoCodecs(const std::vector& codecs) { video_->SetSendCodecs(codecs); video_->SetRecvCodecs(codecs); } - -FakeVoiceMediaChannel* FakeMediaEngine::GetVoiceSendChannel(size_t index) { - return voice_->GetSendChannel(index); -} -FakeVideoMediaChannel* FakeMediaEngine::GetVideoSendChannel(size_t index) { - return video_->GetSendChannel(index); -} -FakeVoiceMediaChannel* FakeMediaEngine::GetVoiceReceiveChannel(size_t index) { - return voice_->GetReceiveChannel(index); -} -FakeVideoMediaChannel* FakeMediaEngine::GetVideoReceiveChannel(size_t index) { - return video_->GetReceiveChannel(index); -} - void FakeMediaEngine::set_fail_create_channel(bool fail) { voice_->fail_create_channel_ = fail; video_->fail_create_channel_ = fail; diff --git a/third_party/libwebrtc/media/base/fake_media_engine.h b/third_party/libwebrtc/media/base/fake_media_engine.h index 26fe27666cfa..8525d0bfcfe5 100644 --- a/third_party/libwebrtc/media/base/fake_media_engine.h +++ b/third_party/libwebrtc/media/base/fake_media_engine.h @@ -18,11 +18,16 @@ #include #include #include +#include #include #include "absl/algorithm/container.h" +#include "absl/functional/any_invocable.h" #include "api/call/audio_sink.h" +#include "api/media_types.h" #include "media/base/audio_source.h" +#include "media/base/media_channel.h" +#include "media/base/media_channel_impl.h" #include "media/base/media_engine.h" #include "media/base/rtp_utils.h" #include "media/base/stream_params.h" @@ -43,41 +48,22 @@ class FakeVoiceEngine; // A common helper class that handles sending and receiving RTP/RTCP packets. 
template -class RtpHelper : public Base { +class RtpReceiveChannelHelper : public Base, public MediaChannelUtil { public: - explicit RtpHelper(MediaChannel::Role role, - webrtc::TaskQueueBase* network_thread) - : Base(role, network_thread), - sending_(false), + explicit RtpReceiveChannelHelper(webrtc::TaskQueueBase* network_thread) + : MediaChannelUtil(network_thread), playout_(false), - fail_set_send_codecs_(false), fail_set_recv_codecs_(false), - send_ssrc_(0), - ready_to_send_(false), transport_overhead_per_packet_(0), num_network_route_changes_(0) {} - virtual ~RtpHelper() = default; + virtual ~RtpReceiveChannelHelper() = default; const std::vector& recv_extensions() { return recv_extensions_; } - const std::vector& send_extensions() { - return send_extensions_; - } - bool sending() const { return sending_; } bool playout() const { return playout_; } const std::list& rtp_packets() const { return rtp_packets_; } const std::list& rtcp_packets() const { return rtcp_packets_; } - bool SendRtp(const void* data, - size_t len, - const rtc::PacketOptions& options) { - if (!sending_) { - return false; - } - rtc::CopyOnWriteBuffer packet(reinterpret_cast(data), len, - kMaxRtpPacketLen); - return Base::SendPacket(&packet, options); - } bool SendRtcp(const void* data, size_t len) { rtc::CopyOnWriteBuffer packet(reinterpret_cast(data), len, kMaxRtpPacketLen); @@ -104,37 +90,18 @@ class RtpHelper : public Base { } bool CheckNoRtp() { return rtp_packets_.empty(); } bool CheckNoRtcp() { return rtcp_packets_.empty(); } - void set_fail_set_send_codecs(bool fail) { fail_set_send_codecs_ = fail; } void set_fail_set_recv_codecs(bool fail) { fail_set_recv_codecs_ = fail; } - virtual bool AddSendStream(const StreamParams& sp) { - if (absl::c_linear_search(send_streams_, sp)) { - return false; - } - send_streams_.push_back(sp); - rtp_send_parameters_[sp.first_ssrc()] = - CreateRtpParametersWithEncodings(sp); - return true; - } - virtual bool RemoveSendStream(uint32_t ssrc) { - auto parameters_iterator = rtp_send_parameters_.find(ssrc); - if (parameters_iterator != rtp_send_parameters_.end()) { - rtp_send_parameters_.erase(parameters_iterator); - } - return RemoveStreamBySsrc(&send_streams_, ssrc); - } - virtual void ResetUnsignaledRecvStream() {} - virtual absl::optional GetUnsignaledSsrc() const { + void ResetUnsignaledRecvStream() override {} + absl::optional GetUnsignaledSsrc() const override { return absl::nullopt; } void ChooseReceiverReportSsrc(const std::set& choices) override {} - void SetSsrcListChangedCallback( - absl::AnyInvocable&)> callback) override {} virtual bool SetLocalSsrc(const StreamParams& sp) { return true; } - virtual void OnDemuxerCriteriaUpdatePending() {} - virtual void OnDemuxerCriteriaUpdateComplete() {} + void OnDemuxerCriteriaUpdatePending() override {} + void OnDemuxerCriteriaUpdateComplete() override {} - virtual bool AddRecvStream(const StreamParams& sp) { + bool AddRecvStream(const StreamParams& sp) override { if (absl::c_linear_search(receive_streams_, sp)) { return false; } @@ -143,11 +110,7 @@ class RtpHelper : public Base { CreateRtpParametersWithEncodings(sp); return true; } - virtual bool AddDefaultRecvStreamForTesting(const StreamParams& sp) { - RTC_CHECK_NOTREACHED(); - return false; - } - virtual bool RemoveRecvStream(uint32_t ssrc) { + bool RemoveRecvStream(uint32_t ssrc) override { auto parameters_iterator = rtp_receive_parameters_.find(ssrc); if (parameters_iterator != rtp_receive_parameters_.end()) { rtp_receive_parameters_.erase(parameters_iterator); @@ -155,17 
+118,191 @@ class RtpHelper : public Base { return RemoveStreamBySsrc(&receive_streams_, ssrc); } - virtual webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const { + webrtc::RtpParameters GetRtpReceiveParameters(uint32_t ssrc) const override { + auto parameters_iterator = rtp_receive_parameters_.find(ssrc); + if (parameters_iterator != rtp_receive_parameters_.end()) { + return parameters_iterator->second; + } + return webrtc::RtpParameters(); + } + webrtc::RtpParameters GetDefaultRtpReceiveParameters() const override { + return webrtc::RtpParameters(); + } + + const std::vector& recv_streams() const { + return receive_streams_; + } + bool HasRecvStream(uint32_t ssrc) const { + return GetStreamBySsrc(receive_streams_, ssrc) != nullptr; + } + + const RtcpParameters& recv_rtcp_parameters() { return recv_rtcp_parameters_; } + + int transport_overhead_per_packet() const { + return transport_overhead_per_packet_; + } + + rtc::NetworkRoute last_network_route() const { return last_network_route_; } + int num_network_route_changes() const { return num_network_route_changes_; } + void set_num_network_route_changes(int changes) { + num_network_route_changes_ = changes; + } + + void OnRtcpPacketReceived(rtc::CopyOnWriteBuffer* packet, + int64_t packet_time_us) { + rtcp_packets_.push_back(std::string(packet->cdata(), packet->size())); + } + + void SetFrameDecryptor(uint32_t ssrc, + rtc::scoped_refptr + frame_decryptor) override {} + + void SetDepacketizerToDecoderFrameTransformer( + uint32_t ssrc, + rtc::scoped_refptr frame_transformer) + override {} + + void SetInterface(MediaChannelNetworkInterface* iface) override { + network_interface_ = iface; + MediaChannelUtil::SetInterface(iface); + } + + protected: + void set_playout(bool playout) { playout_ = playout; } + bool SetRecvRtpHeaderExtensions(const std::vector& extensions) { + recv_extensions_ = extensions; + return true; + } + void set_recv_rtcp_parameters(const RtcpParameters& params) { + recv_rtcp_parameters_ = params; + } + void OnPacketReceived(const webrtc::RtpPacketReceived& packet) override { + rtp_packets_.push_back( + std::string(packet.Buffer().cdata(), packet.size())); + } + bool fail_set_recv_codecs() const { return fail_set_recv_codecs_; } + + private: + bool playout_; + std::vector recv_extensions_; + std::list rtp_packets_; + std::list rtcp_packets_; + std::vector receive_streams_; + RtcpParameters recv_rtcp_parameters_; + std::map rtp_receive_parameters_; + bool fail_set_recv_codecs_; + std::string rtcp_cname_; + int transport_overhead_per_packet_; + rtc::NetworkRoute last_network_route_; + int num_network_route_changes_; + MediaChannelNetworkInterface* network_interface_ = nullptr; +}; + +// A common helper class that handles sending and receiving RTP/RTCP packets. 
+template +class RtpSendChannelHelper : public Base, public MediaChannelUtil { + public: + explicit RtpSendChannelHelper(webrtc::TaskQueueBase* network_thread) + : MediaChannelUtil(network_thread), + sending_(false), + fail_set_send_codecs_(false), + send_ssrc_(0), + ready_to_send_(false), + transport_overhead_per_packet_(0), + num_network_route_changes_(0) {} + virtual ~RtpSendChannelHelper() = default; + const std::vector& send_extensions() { + return send_extensions_; + } + bool sending() const { return sending_; } + const std::list& rtp_packets() const { return rtp_packets_; } + const std::list& rtcp_packets() const { return rtcp_packets_; } + + bool SendPacket(const void* data, + size_t len, + const rtc::PacketOptions& options) { + if (!sending_) { + return false; + } + rtc::CopyOnWriteBuffer packet(reinterpret_cast(data), len, + kMaxRtpPacketLen); + return MediaChannelUtil::SendPacket(&packet, options); + } + bool SendRtcp(const void* data, size_t len) { + rtc::CopyOnWriteBuffer packet(reinterpret_cast(data), len, + kMaxRtpPacketLen); + return MediaChannelUtil::SendRtcp(&packet, rtc::PacketOptions()); + } + + bool CheckRtp(const void* data, size_t len) { + bool success = !rtp_packets_.empty(); + if (success) { + std::string packet = rtp_packets_.front(); + rtp_packets_.pop_front(); + success = (packet == std::string(static_cast(data), len)); + } + return success; + } + bool CheckRtcp(const void* data, size_t len) { + bool success = !rtcp_packets_.empty(); + if (success) { + std::string packet = rtcp_packets_.front(); + rtcp_packets_.pop_front(); + success = (packet == std::string(static_cast(data), len)); + } + return success; + } + bool CheckNoRtp() { return rtp_packets_.empty(); } + bool CheckNoRtcp() { return rtcp_packets_.empty(); } + void set_fail_set_send_codecs(bool fail) { fail_set_send_codecs_ = fail; } + bool AddSendStream(const StreamParams& sp) override { + if (absl::c_linear_search(send_streams_, sp)) { + return false; + } + send_streams_.push_back(sp); + rtp_send_parameters_[sp.first_ssrc()] = + CreateRtpParametersWithEncodings(sp); + + if (ssrc_list_changed_callback_) { + std::set ssrcs_in_use; + for (const auto& send_stream : send_streams_) { + ssrcs_in_use.insert(send_stream.first_ssrc()); + } + ssrc_list_changed_callback_(ssrcs_in_use); + } + + return true; + } + bool RemoveSendStream(uint32_t ssrc) override { + auto parameters_iterator = rtp_send_parameters_.find(ssrc); + if (parameters_iterator != rtp_send_parameters_.end()) { + rtp_send_parameters_.erase(parameters_iterator); + } + return RemoveStreamBySsrc(&send_streams_, ssrc); + } + void SetSsrcListChangedCallback( + absl::AnyInvocable&)> callback) override { + ssrc_list_changed_callback_ = std::move(callback); + } + + void SetExtmapAllowMixed(bool extmap_allow_mixed) override { + return MediaChannelUtil::SetExtmapAllowMixed(extmap_allow_mixed); + } + bool ExtmapAllowMixed() const override { + return MediaChannelUtil::ExtmapAllowMixed(); + } + + webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const override { auto parameters_iterator = rtp_send_parameters_.find(ssrc); if (parameters_iterator != rtp_send_parameters_.end()) { return parameters_iterator->second; } return webrtc::RtpParameters(); } - virtual webrtc::RTCError SetRtpSendParameters( + webrtc::RTCError SetRtpSendParameters( uint32_t ssrc, const webrtc::RtpParameters& parameters, - webrtc::SetParametersCallback callback) { + webrtc::SetParametersCallback callback) override { auto parameters_iterator = rtp_send_parameters_.find(ssrc); if 
(parameters_iterator != rtp_send_parameters_.end()) { auto result = CheckRtpParametersInvalidModificationAndValues( @@ -185,17 +322,6 @@ class RtpHelper : public Base { callback, webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR)); } - virtual webrtc::RtpParameters GetRtpReceiveParameters(uint32_t ssrc) const { - auto parameters_iterator = rtp_receive_parameters_.find(ssrc); - if (parameters_iterator != rtp_receive_parameters_.end()) { - return parameters_iterator->second; - } - return webrtc::RtpParameters(); - } - virtual webrtc::RtpParameters GetDefaultRtpReceiveParameters() const { - return webrtc::RtpParameters(); - } - bool IsStreamMuted(uint32_t ssrc) const { bool ret = muted_streams_.find(ssrc) != muted_streams_.end(); // If |ssrc = 0| check if the first send stream is muted. @@ -208,12 +334,6 @@ class RtpHelper : public Base { const std::vector& send_streams() const { return send_streams_; } - const std::vector& recv_streams() const { - return receive_streams_; - } - bool HasRecvStream(uint32_t ssrc) const { - return GetStreamBySsrc(receive_streams_, ssrc) != nullptr; - } bool HasSendStream(uint32_t ssrc) const { return GetStreamBySsrc(send_streams_, ssrc) != nullptr; } @@ -225,15 +345,7 @@ class RtpHelper : public Base { return send_streams_[0].first_ssrc(); } - // TODO(perkj): This is to support legacy unit test that only check one - // sending stream. - const std::string rtcp_cname() { - if (send_streams_.empty()) - return ""; - return send_streams_[0].cname; - } const RtcpParameters& send_rtcp_parameters() { return send_rtcp_parameters_; } - const RtcpParameters& recv_rtcp_parameters() { return recv_rtcp_parameters_; } bool ready_to_send() const { return ready_to_send_; } @@ -261,18 +373,9 @@ class RtpHelper : public Base { rtc::scoped_refptr frame_transformer) override {} - void SetFrameDecryptor(uint32_t ssrc, - rtc::scoped_refptr - frame_decryptor) override {} - - void SetDepacketizerToDecoderFrameTransformer( - uint32_t ssrc, - rtc::scoped_refptr frame_transformer) - override {} - void SetInterface(MediaChannelNetworkInterface* iface) override { network_interface_ = iface; - MediaChannel::SetInterface(iface); + MediaChannelUtil::SetInterface(iface); } bool HasNetworkInterface() const override { return network_interface_ != nullptr; @@ -294,17 +397,6 @@ class RtpHelper : public Base { sending_ = send; return true; } - void set_playout(bool playout) { playout_ = playout; } - bool SetRecvRtpHeaderExtensions(const std::vector& extensions) { - recv_extensions_ = extensions; - return true; - } - bool SetSendExtmapAllowMixed(bool extmap_allow_mixed) { - if (Base::ExtmapAllowMixed() != extmap_allow_mixed) { - Base::SetExtmapAllowMixed(extmap_allow_mixed); - } - return true; - } bool SetSendRtpHeaderExtensions(const std::vector& extensions) { send_extensions_ = extensions; return true; @@ -312,13 +404,6 @@ class RtpHelper : public Base { void set_send_rtcp_parameters(const RtcpParameters& params) { send_rtcp_parameters_ = params; } - void set_recv_rtcp_parameters(const RtcpParameters& params) { - recv_rtcp_parameters_ = params; - } - void OnPacketReceived(const webrtc::RtpPacketReceived& packet) override { - rtp_packets_.push_back( - std::string(packet.Buffer().cdata(), packet.size())); - } void OnPacketSent(const rtc::SentPacket& sent_packet) override {} void OnReadyToSend(bool ready) override { ready_to_send_ = ready; } void OnNetworkRouteChanged(absl::string_view transport_name, @@ -328,27 +413,20 @@ class RtpHelper : public Base { transport_overhead_per_packet_ = 
network_route.packet_overhead; } bool fail_set_send_codecs() const { return fail_set_send_codecs_; } - bool fail_set_recv_codecs() const { return fail_set_recv_codecs_; } private: // TODO(bugs.webrtc.org/12783): This flag is used from more than one thread. // As a workaround for tsan, it's currently std::atomic but that might not // be the appropriate fix. std::atomic sending_; - bool playout_; - std::vector recv_extensions_; std::vector send_extensions_; std::list rtp_packets_; std::list rtcp_packets_; std::vector send_streams_; - std::vector receive_streams_; RtcpParameters send_rtcp_parameters_; - RtcpParameters recv_rtcp_parameters_; std::set muted_streams_; std::map rtp_send_parameters_; - std::map rtp_receive_parameters_; bool fail_set_send_codecs_; - bool fail_set_recv_codecs_; uint32_t send_ssrc_; std::string rtcp_cname_; bool ready_to_send_; @@ -356,9 +434,12 @@ class RtpHelper : public Base { rtc::NetworkRoute last_network_route_; int num_network_route_changes_; MediaChannelNetworkInterface* network_interface_ = nullptr; + absl::AnyInvocable&)> + ssrc_list_changed_callback_ = nullptr; }; -class FakeVoiceMediaChannel : public RtpHelper { +class FakeVoiceMediaReceiveChannel + : public RtpReceiveChannelHelper { public: struct DtmfInfo { DtmfInfo(uint32_t ssrc, int event_code, int duration); @@ -366,36 +447,34 @@ class FakeVoiceMediaChannel : public RtpHelper { int event_code; int duration; }; - FakeVoiceMediaChannel(MediaChannel::Role role, - FakeVoiceEngine* engine, - const AudioOptions& options, - webrtc::TaskQueueBase* network_thread); - ~FakeVoiceMediaChannel(); + FakeVoiceMediaReceiveChannel(const AudioOptions& options, + webrtc::TaskQueueBase* network_thread); + virtual ~FakeVoiceMediaReceiveChannel(); + + // Test methods const std::vector& recv_codecs() const; - const std::vector& send_codecs() const; - const std::vector& codecs() const; const std::vector& dtmf_info_queue() const; const AudioOptions& options() const; int max_bps() const; - bool SetSendParameters(const AudioSendParameters& params) override; - - bool SetRecvParameters(const AudioRecvParameters& params) override; - - void SetPlayout(bool playout) override; - void SetSend(bool send) override; - bool SetAudioSend(uint32_t ssrc, - bool enable, - const AudioOptions* options, - AudioSource* source) override; - bool HasSource(uint32_t ssrc) const; + // Overrides + VideoMediaReceiveChannelInterface* AsVideoReceiveChannel() override { + return nullptr; + } + VoiceMediaReceiveChannelInterface* AsVoiceReceiveChannel() override { + return this; + } + cricket::MediaType media_type() const override { + return cricket::MEDIA_TYPE_AUDIO; + } + + bool SetRecvParameters(const AudioReceiverParameters& params) override; + void SetPlayout(bool playout) override; + bool AddRecvStream(const StreamParams& sp) override; bool RemoveRecvStream(uint32_t ssrc) override; - bool CanInsertDtmf() override; - bool InsertDtmf(uint32_t ssrc, int event_code, int duration) override; - bool SetOutputVolume(uint32_t ssrc, double volume) override; bool SetDefaultOutputVolume(double volume) override; @@ -405,9 +484,8 @@ class FakeVoiceMediaChannel : public RtpHelper { absl::optional GetBaseMinimumPlayoutDelayMs( uint32_t ssrc) const override; - bool GetSendStats(VoiceMediaSendInfo* info) override; - bool GetReceiveStats(VoiceMediaReceiveInfo* info, - bool get_and_clear_legacy_stats) override; + bool GetStats(VoiceMediaReceiveInfo* info, + bool get_and_clear_legacy_stats) override; void SetRawAudioSink( uint32_t ssrc, @@ -416,13 +494,8 @@ class 
FakeVoiceMediaChannel : public RtpHelper { std::unique_ptr sink) override; std::vector GetSources(uint32_t ssrc) const override; - bool SenderNackEnabled() const override { return false; } - bool SenderNonSenderRttEnabled() const override { return false; } - void SetReceiveNackEnabled(bool enabled) {} - void SetReceiveNonSenderRttEnabled(bool enabled) {} - bool SendCodecHasNack() const override { return false; } - void SetSendCodecChangedCallback( - absl::AnyInvocable callback) override {} + void SetReceiveNackEnabled(bool enabled) override {} + void SetReceiveNonSenderRttEnabled(bool enabled) override {} private: class VoiceChannelAudioSink : public AudioSource::Sink { @@ -444,14 +517,10 @@ class FakeVoiceMediaChannel : public RtpHelper { }; bool SetRecvCodecs(const std::vector& codecs); - bool SetSendCodecs(const std::vector& codecs); bool SetMaxSendBandwidth(int bps); bool SetOptions(const AudioOptions& options); - bool SetLocalSource(uint32_t ssrc, AudioSource* source); - FakeVoiceEngine* engine_; std::vector recv_codecs_; - std::vector send_codecs_; std::map output_scalings_; std::map output_delays_; std::vector dtmf_info_queue_; @@ -461,56 +530,134 @@ class FakeVoiceMediaChannel : public RtpHelper { int max_bps_; }; +class FakeVoiceMediaSendChannel + : public RtpSendChannelHelper { + public: + struct DtmfInfo { + DtmfInfo(uint32_t ssrc, int event_code, int duration); + uint32_t ssrc; + int event_code; + int duration; + }; + FakeVoiceMediaSendChannel(const AudioOptions& options, + webrtc::TaskQueueBase* network_thread); + ~FakeVoiceMediaSendChannel() override; + + const std::vector& send_codecs() const; + const std::vector& dtmf_info_queue() const; + const AudioOptions& options() const; + int max_bps() const; + bool HasSource(uint32_t ssrc) const; + bool GetOutputVolume(uint32_t ssrc, double* volume); + + // Overrides + VideoMediaSendChannelInterface* AsVideoSendChannel() override { + return nullptr; + } + VoiceMediaSendChannelInterface* AsVoiceSendChannel() override { return this; } + cricket::MediaType media_type() const override { + return cricket::MEDIA_TYPE_AUDIO; + } + + bool SetSendParameters(const AudioSenderParameter& params) override; + void SetSend(bool send) override; + bool SetAudioSend(uint32_t ssrc, + bool enable, + const AudioOptions* options, + AudioSource* source) override; + + bool CanInsertDtmf() override; + bool InsertDtmf(uint32_t ssrc, int event_code, int duration) override; + + bool SenderNackEnabled() const override { return false; } + bool SenderNonSenderRttEnabled() const override { return false; } + void SetReceiveNackEnabled(bool enabled) {} + void SetReceiveNonSenderRttEnabled(bool enabled) {} + bool SendCodecHasNack() const override { return false; } + void SetSendCodecChangedCallback( + absl::AnyInvocable callback) override {} + absl::optional GetSendCodec() const override; + + bool GetStats(VoiceMediaSendInfo* stats) override; + + private: + class VoiceChannelAudioSink : public AudioSource::Sink { + public: + explicit VoiceChannelAudioSink(AudioSource* source); + ~VoiceChannelAudioSink() override; + void OnData(const void* audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames, + absl::optional absolute_capture_timestamp_ms) override; + void OnClose() override; + int NumPreferredChannels() const override { return -1; } + AudioSource* source() const; + + private: + AudioSource* source_; + }; + + bool SetSendCodecs(const std::vector& codecs); + bool SetMaxSendBandwidth(int bps); + bool 
SetOptions(const AudioOptions& options); + bool SetLocalSource(uint32_t ssrc, AudioSource* source); + + std::vector send_codecs_; + std::map output_scalings_; + std::map output_delays_; + std::vector dtmf_info_queue_; + AudioOptions options_; + std::map> local_sinks_; + int max_bps_; +}; + // A helper function to compare the FakeVoiceMediaChannel::DtmfInfo. -bool CompareDtmfInfo(const FakeVoiceMediaChannel::DtmfInfo& info, +bool CompareDtmfInfo(const FakeVoiceMediaSendChannel::DtmfInfo& info, uint32_t ssrc, int event_code, int duration); -class FakeVideoMediaChannel : public RtpHelper { +class FakeVideoMediaReceiveChannel + : public RtpReceiveChannelHelper { public: - FakeVideoMediaChannel(MediaChannel::Role role, - FakeVideoEngine* engine, - const VideoOptions& options, - webrtc::TaskQueueBase* network_thread); + FakeVideoMediaReceiveChannel(const VideoOptions& options, + webrtc::TaskQueueBase* network_thread); - ~FakeVideoMediaChannel(); + virtual ~FakeVideoMediaReceiveChannel(); + + VideoMediaReceiveChannelInterface* AsVideoReceiveChannel() override { + return this; + } + VoiceMediaReceiveChannelInterface* AsVoiceReceiveChannel() override { + return nullptr; + } + cricket::MediaType media_type() const override { + return cricket::MEDIA_TYPE_VIDEO; + } const std::vector& recv_codecs() const; const std::vector& send_codecs() const; - const std::vector& codecs() const; bool rendering() const; const VideoOptions& options() const; const std::map*>& sinks() const; int max_bps() const; - bool SetSendParameters(const VideoSendParameters& params) override; - bool SetRecvParameters(const VideoRecvParameters& params) override; - bool AddSendStream(const StreamParams& sp) override; - bool RemoveSendStream(uint32_t ssrc) override; + bool SetRecvParameters(const VideoReceiverParameters& params) override; - absl::optional GetSendCodec() override; bool SetSink(uint32_t ssrc, rtc::VideoSinkInterface* sink) override; void SetDefaultSink( rtc::VideoSinkInterface* sink) override; bool HasSink(uint32_t ssrc) const; - bool SetSend(bool send) override; void SetReceive(bool receive) override {} - bool SetVideoSend( - uint32_t ssrc, - const VideoOptions* options, - rtc::VideoSourceInterface* source) override; bool HasSource(uint32_t ssrc) const; bool AddRecvStream(const StreamParams& sp) override; bool RemoveRecvStream(uint32_t ssrc) override; - void FillBitrateInfo(BandwidthEstimationInfo* bwe_info) override; - bool GetSendStats(VideoMediaSendInfo* info) override; - bool GetReceiveStats(VideoMediaReceiveInfo* info) override; - std::vector GetSources(uint32_t ssrc) const override; bool SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, int delay_ms) override; @@ -523,6 +670,67 @@ class FakeVideoMediaChannel : public RtpHelper { override; void ClearRecordableEncodedFrameCallback(uint32_t ssrc) override; void RequestRecvKeyFrame(uint32_t ssrc) override; + void SetReceiverFeedbackParameters(bool lntf_enabled, + bool nack_enabled, + webrtc::RtcpMode rtcp_mode, + absl::optional rtx_time) override {} + bool GetStats(VideoMediaReceiveInfo* info) override; + + bool AddDefaultRecvStreamForTesting(const StreamParams& sp) override { + RTC_CHECK_NOTREACHED(); + return false; + } + + private: + bool SetRecvCodecs(const std::vector& codecs); + bool SetSendCodecs(const std::vector& codecs); + bool SetOptions(const VideoOptions& options); + bool SetMaxSendBandwidth(int bps); + + std::vector recv_codecs_; + std::map*> sinks_; + std::map*> sources_; + std::map output_delays_; + VideoOptions options_; + int max_bps_; +}; + +class 
FakeVideoMediaSendChannel + : public RtpSendChannelHelper { + public: + FakeVideoMediaSendChannel(const VideoOptions& options, + webrtc::TaskQueueBase* network_thread); + + virtual ~FakeVideoMediaSendChannel(); + + VideoMediaSendChannelInterface* AsVideoSendChannel() override { return this; } + VoiceMediaSendChannelInterface* AsVoiceSendChannel() override { + return nullptr; + } + cricket::MediaType media_type() const override { + return cricket::MEDIA_TYPE_VIDEO; + } + + const std::vector& send_codecs() const; + const std::vector& codecs() const; + const VideoOptions& options() const; + const std::map*>& + sinks() const; + int max_bps() const; + bool SetSendParameters(const VideoSenderParameters& params) override; + + absl::optional GetSendCodec() const override; + + bool SetSend(bool send) override; + bool SetVideoSend( + uint32_t ssrc, + const VideoOptions* options, + rtc::VideoSourceInterface* source) override; + + bool HasSource(uint32_t ssrc) const; + + void FillBitrateInfo(BandwidthEstimationInfo* bwe_info) override; + void GenerateSendKeyFrame(uint32_t ssrc, const std::vector& rids) override; webrtc::RtcpMode SendCodecRtcpMode() const override { @@ -533,28 +741,21 @@ class FakeVideoMediaChannel : public RtpHelper { void SetSsrcListChangedCallback( absl::AnyInvocable&)> callback) override {} + void SetVideoCodecSwitchingEnabled(bool enabled) override {} bool SendCodecHasLntf() const override { return false; } bool SendCodecHasNack() const override { return false; } absl::optional SendCodecRtxTime() const override { return absl::nullopt; } - void SetReceiverFeedbackParameters(bool lntf_enabled, - bool nack_enabled, - webrtc::RtcpMode rtcp_mode, - absl::optional rtx_time) override {} + bool GetStats(VideoMediaSendInfo* info) override; private: - bool SetRecvCodecs(const std::vector& codecs); bool SetSendCodecs(const std::vector& codecs); bool SetOptions(const VideoOptions& options); bool SetMaxSendBandwidth(int bps); - FakeVideoEngine* engine_; - std::vector recv_codecs_; std::vector send_codecs_; - std::map*> sinks_; std::map*> sources_; - std::map output_delays_; VideoOptions options_; int max_bps_; }; @@ -565,16 +766,18 @@ class FakeVoiceEngine : public VoiceEngineInterface { void Init() override; rtc::scoped_refptr GetAudioState() const override; - VoiceMediaChannel* CreateMediaChannel( - MediaChannel::Role role, + std::unique_ptr CreateSendChannel( + webrtc::Call* call, + const MediaConfig& config, + const AudioOptions& options, + const webrtc::CryptoOptions& crypto_options, + webrtc::AudioCodecPairId codec_pair_id) override; + std::unique_ptr CreateReceiveChannel( webrtc::Call* call, const MediaConfig& config, const AudioOptions& options, const webrtc::CryptoOptions& crypto_options, webrtc::AudioCodecPairId codec_pair_id) override; - FakeVoiceMediaChannel* GetSendChannel(size_t index); - FakeVoiceMediaChannel* GetReceiveChannel(size_t index); - void UnregisterChannel(VoiceMediaChannel* channel); // TODO(ossu): For proper testing, These should either individually settable // or the voice engine should reference mockable factories. 
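The hunks above and below replace the single role-based CreateMediaChannel() factory with separate CreateSendChannel()/CreateReceiveChannel() factories that return independently owned send and receive halves. The following is a rough usage sketch of the new FakeVoiceEngine entry points, not part of the vendored patch; the return types (VoiceMediaSendChannelInterface / VoiceMediaReceiveChannelInterface) and exact parameter types are inferred, since the angle-bracket template arguments are not preserved in this extract.

#include <memory>

#include "api/audio_codecs/audio_codec_pair_id.h"
#include "media/base/fake_media_engine.h"

// Sketch only: how a test might create the split send/receive fake voice
// channels after this refactor. Signatures are inferred from the hunks
// above, not quoted verbatim from the tree.
void CreateFakeVoiceChannelPair(cricket::FakeVoiceEngine& engine,
                                webrtc::Call* call,
                                const cricket::MediaConfig& config,
                                const cricket::AudioOptions& options,
                                const webrtc::CryptoOptions& crypto_options,
                                webrtc::AudioCodecPairId codec_pair_id) {
  // Previously one channel was created per MediaChannel::Role
  // (kSend / kReceive / kBoth); now each direction is created and owned
  // separately.
  std::unique_ptr<cricket::VoiceMediaSendChannelInterface> send_channel =
      engine.CreateSendChannel(call, config, options, crypto_options,
                               codec_pair_id);
  std::unique_ptr<cricket::VoiceMediaReceiveChannelInterface> receive_channel =
      engine.CreateReceiveChannel(call, config, options, crypto_options,
                                  codec_pair_id);
  // The two halves are then wired into the voice channel under test.
}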
@@ -594,8 +797,6 @@ class FakeVoiceEngine : public VoiceEngineInterface {
       std::vector header_extensions);
  private:
-  std::vector send_channels_;
-  std::vector receive_channels_;
   std::vector recv_codecs_;
   std::vector send_codecs_;
   bool fail_create_channel_;
@@ -608,17 +809,21 @@ class FakeVideoEngine : public VideoEngineInterface {
  public:
   FakeVideoEngine();
   bool SetOptions(const VideoOptions& options);
-  VideoMediaChannel* CreateMediaChannel(
-      MediaChannel::Role role,
+  std::unique_ptr CreateSendChannel(
       webrtc::Call* call,
       const MediaConfig& config,
       const VideoOptions& options,
       const webrtc::CryptoOptions& crypto_options,
       webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) override;
-  FakeVideoMediaChannel* GetSendChannel(size_t index);
-  FakeVideoMediaChannel* GetReceiveChannel(size_t index);
-  void UnregisterChannel(VideoMediaChannel* channel);
+  std::unique_ptr CreateReceiveChannel(
+      webrtc::Call* call,
+      const MediaConfig& config,
+      const VideoOptions& options,
+      const webrtc::CryptoOptions& crypto_options) override;
+  FakeVideoMediaSendChannel* GetSendChannel(size_t index);
+  FakeVideoMediaReceiveChannel* GetReceiveChannel(size_t index);
+
   std::vector send_codecs() const override { return send_codecs(true); }
@@ -636,8 +841,6 @@ class FakeVideoEngine : public VideoEngineInterface {
       std::vector header_extensions);
  private:
-  std::vector send_channels_;
-  std::vector receive_channels_;
   std::vector send_codecs_;
   std::vector recv_codecs_;
   bool capture_;
@@ -659,13 +862,11 @@ class FakeMediaEngine : public CompositeMediaEngine {
   void SetAudioSendCodecs(const std::vector& codecs);
   void SetVideoCodecs(const std::vector& codecs);
-  FakeVoiceMediaChannel* GetVoiceSendChannel(size_t index);
-  FakeVideoMediaChannel* GetVideoSendChannel(size_t index);
-  FakeVoiceMediaChannel* GetVoiceReceiveChannel(size_t index);
-  FakeVideoMediaChannel* GetVideoReceiveChannel(size_t index);
-  void set_fail_create_channel(bool fail);
+  FakeVoiceEngine* fake_voice_engine() { return voice_; }
+  FakeVideoEngine* fake_video_engine() { return video_; }
+
  private:
   FakeVoiceEngine* const voice_;
   FakeVideoEngine* const video_;
diff --git a/third_party/libwebrtc/media/base/fake_network_interface.h b/third_party/libwebrtc/media/base/fake_network_interface.h
index 993c6e1aff5f..d0763fe53302 100644
--- a/third_party/libwebrtc/media/base/fake_network_interface.h
+++ b/third_party/libwebrtc/media/base/fake_network_interface.h
@@ -43,7 +43,7 @@ class FakeNetworkInterface : public MediaChannelNetworkInterface {
         recvbuf_size_(-1),
         dscp_(rtc::DSCP_NO_CHANGE) {}
-  void SetDestination(MediaChannel* dest) { dest_ = dest; }
+  void SetDestination(MediaReceiveChannelInterface* dest) { dest_ = dest; }
   // Conference mode is a mode where instead of simply forwarding the packets,
   // the transport will send multiple copies of the packet with the specified
@@ -207,7 +207,7 @@ class FakeNetworkInterface : public MediaChannelNetworkInterface {
   }
   webrtc::TaskQueueBase* thread_;
-  MediaChannel* dest_;
+  MediaReceiveChannelInterface* dest_;
   bool conf_;
   // The ssrcs used in sending out packets in conference mode.
std::vector conf_sent_ssrcs_; diff --git a/third_party/libwebrtc/media/base/media_channel.h b/third_party/libwebrtc/media/base/media_channel.h index 79ae2a3935bb..0d0967023914 100644 --- a/third_party/libwebrtc/media/base/media_channel.h +++ b/third_party/libwebrtc/media/base/media_channel.h @@ -43,7 +43,6 @@ #include "call/video_receive_stream.h" #include "common_video/include/quality_limitation_reason.h" #include "media/base/codec.h" -#include "media/base/delayable.h" #include "media/base/media_constants.h" #include "media/base/stream_params.h" #include "modules/audio_processing/include/audio_processing_statistics.h" @@ -71,7 +70,6 @@ class VideoFrame; namespace cricket { class AudioSource; -class MediaChannel; // TODO(bugs.webrtc.org/13931): Delete when irrelevant class VideoCapturer; struct RtpHeader; struct VideoFormat; @@ -189,6 +187,9 @@ class MediaSendChannelInterface { virtual VoiceMediaSendChannelInterface* AsVoiceSendChannel() = 0; virtual cricket::MediaType media_type() const = 0; + // Gets the currently set codecs/payload types to be used for outgoing media. + virtual absl::optional GetSendCodec() const = 0; + // Creates a new outgoing media stream with SSRCs and CNAME as described // by sp. virtual bool AddSendStream(const StreamParams& sp) = 0; @@ -253,7 +254,7 @@ class MediaSendChannelInterface { absl::AnyInvocable callback) = 0; }; -class MediaReceiveChannelInterface : public Delayable { +class MediaReceiveChannelInterface { public: virtual ~MediaReceiveChannelInterface() = default; @@ -305,6 +306,16 @@ class MediaReceiveChannelInterface : public Delayable { uint32_t ssrc, rtc::scoped_refptr frame_transformer) = 0; + + // Set base minimum delay of the receive stream with specified ssrc. + // Base minimum delay sets lower bound on minimum delay value which + // determines minimum delay until audio playout. + // Returns false if there is no stream with given ssrc. + virtual bool SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, int delay_ms) = 0; + + // Returns current value of base minimum delay in milliseconds. + virtual absl::optional GetBaseMinimumPlayoutDelayMs( + uint32_t ssrc) const = 0; }; // The stats information is structured as follows: @@ -455,6 +466,12 @@ struct MediaReceiverInfo { absl::optional codec_payload_type; std::vector local_stats; std::vector remote_stats; + // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-fecpacketsreceived + absl::optional fec_packets_received; + // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-fecpacketsdiscarded + absl::optional fec_packets_discarded; + // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-fecbytesreceived + absl::optional fec_bytes_received; }; struct VoiceSenderInfo : public MediaSenderInfo { @@ -489,8 +506,6 @@ struct VoiceReceiverInfo : public MediaReceiverInfo { uint64_t concealment_events = 0; uint64_t inserted_samples_for_deceleration = 0; uint64_t removed_samples_for_acceleration = 0; - uint64_t fec_packets_received = 0; - uint64_t fec_packets_discarded = 0; // Stats below correspond to similarly-named fields in the WebRTC stats spec. 
// https://w3c.github.io/webrtc-stats/#dom-rtcreceivedrtpstreamstats uint64_t packets_discarded = 0; @@ -548,7 +563,7 @@ struct VideoSenderInfo : public MediaSenderInfo { VideoSenderInfo(); ~VideoSenderInfo(); std::vector ssrc_groups; - std::string encoder_implementation_name; + absl::optional encoder_implementation_name; int firs_received = 0; int plis_received = 0; int send_frame_width = 0; @@ -592,7 +607,7 @@ struct VideoReceiverInfo : public MediaReceiverInfo { VideoReceiverInfo(); ~VideoReceiverInfo(); std::vector ssrc_groups; - std::string decoder_implementation_name; + absl::optional decoder_implementation_name; absl::optional power_efficient_decoder; int packets_concealed = 0; int firs_sent = 0; @@ -773,7 +788,7 @@ struct VideoMediaInfo { send_codecs.clear(); receive_codecs.clear(); } - // Each sender info represents one "outbound-rtp" stream.In non - simulcast, + // Each sender info represents one "outbound-rtp" stream. In non-simulcast, // this means one info per RtpSender but if simulcast is used this means // one info per simulcast layer. std::vector senders; @@ -792,9 +807,8 @@ struct RtcpParameters { bool remote_estimate = false; }; -template -struct RtpParameters { - virtual ~RtpParameters() = default; +struct MediaChannelParameters { + virtual ~MediaChannelParameters() = default; std::vector codecs; std::vector extensions; @@ -824,10 +838,7 @@ struct RtpParameters { } }; -// TODO(deadbeef): Rename to RtpSenderParameters, since they're intended to -// encapsulate all the parameters needed for an RtpSender. -template -struct RtpSendParameters : RtpParameters { +struct SenderParameters : MediaChannelParameters { int max_bandwidth_bps = -1; // This is the value to be sent in the MID RTP header extension (if the header // extension in included in the list of extensions). @@ -836,7 +847,7 @@ struct RtpSendParameters : RtpParameters { protected: std::map ToStringMap() const override { - auto params = RtpParameters::ToStringMap(); + auto params = MediaChannelParameters::ToStringMap(); params["max_bandwidth_bps"] = rtc::ToString(max_bandwidth_bps); params["mid"] = (mid.empty() ? "" : mid); params["extmap-allow-mixed"] = extmap_allow_mixed ? "true" : "false"; @@ -844,20 +855,20 @@ struct RtpSendParameters : RtpParameters { } }; -struct AudioSendParameters : RtpSendParameters { - AudioSendParameters(); - ~AudioSendParameters() override; +struct AudioSenderParameter : SenderParameters { + AudioSenderParameter(); + ~AudioSenderParameter() override; AudioOptions options; protected: std::map ToStringMap() const override; }; -struct AudioRecvParameters : RtpParameters {}; +struct AudioReceiverParameters : MediaChannelParameters {}; class VoiceMediaSendChannelInterface : public MediaSendChannelInterface { public: - virtual bool SetSendParameters(const AudioSendParameters& params) = 0; + virtual bool SetSendParameters(const AudioSenderParameter& params) = 0; // Starts or stops sending (and potentially capture) of local audio. virtual void SetSend(bool send) = 0; // Configure stream for sending. @@ -879,7 +890,7 @@ class VoiceMediaSendChannelInterface : public MediaSendChannelInterface { class VoiceMediaReceiveChannelInterface : public MediaReceiveChannelInterface { public: - virtual bool SetRecvParameters(const AudioRecvParameters& params) = 0; + virtual bool SetRecvParameters(const AudioReceiverParameters& params) = 0; // Get the receive parameters for the incoming stream identified by `ssrc`. 
virtual webrtc::RtpParameters GetRtpReceiveParameters( uint32_t ssrc) const = 0; @@ -903,11 +914,9 @@ class VoiceMediaReceiveChannelInterface : public MediaReceiveChannelInterface { virtual void SetReceiveNonSenderRttEnabled(bool enabled) = 0; }; -// TODO(deadbeef): Rename to VideoSenderParameters, since they're intended to -// encapsulate all the parameters needed for a video RtpSender. -struct VideoSendParameters : RtpSendParameters { - VideoSendParameters(); - ~VideoSendParameters() override; +struct VideoSenderParameters : SenderParameters { + VideoSenderParameters(); + ~VideoSenderParameters() override; // Use conference mode? This flag comes from the remote // description's SDP line 'a=x-google-flag:conference', copied over // by VideoChannel::SetRemoteContent_w, and ultimately used by @@ -920,15 +929,11 @@ struct VideoSendParameters : RtpSendParameters { std::map ToStringMap() const override; }; -// TODO(deadbeef): Rename to VideoReceiverParameters, since they're intended to -// encapsulate all the parameters needed for a video RtpReceiver. -struct VideoRecvParameters : RtpParameters {}; +struct VideoReceiverParameters : MediaChannelParameters {}; class VideoMediaSendChannelInterface : public MediaSendChannelInterface { public: - virtual bool SetSendParameters(const VideoSendParameters& params) = 0; - // Gets the currently set codecs/payload types to be used for outgoing media. - virtual absl::optional GetSendCodec() = 0; + virtual bool SetSendParameters(const VideoSenderParameters& params) = 0; // Starts or stops transmission (and potentially capture) of local video. virtual bool SetSend(bool send) = 0; // Configure stream for sending and register a source. @@ -943,6 +948,14 @@ class VideoMediaSendChannelInterface : public MediaSendChannelInterface { // Enable network condition based codec switching. virtual void SetVideoCodecSwitchingEnabled(bool enabled) = 0; virtual bool GetStats(VideoMediaSendInfo* stats) = 0; + // This fills the "bitrate parts" (rtx, video bitrate) of the + // BandwidthEstimationInfo, since that part that isn't possible to get + // through webrtc::Call::GetStats, as they are statistics of the send + // streams. + // TODO(holmer): We should change this so that either BWE graphs doesn't + // need access to bitrates of the streams, or change the (RTC)StatsCollector + // so that it's getting the send stream stats separately by calling + // GetStats(), and merges with BandwidthEstimationInfo by itself. virtual void FillBitrateInfo(BandwidthEstimationInfo* bwe_info) = 0; // Information queries to support SetReceiverFeedbackParameters virtual webrtc::RtcpMode SendCodecRtcpMode() const = 0; @@ -952,7 +965,7 @@ class VideoMediaSendChannelInterface : public MediaSendChannelInterface { class VideoMediaReceiveChannelInterface : public MediaReceiveChannelInterface { public: - virtual bool SetRecvParameters(const VideoRecvParameters& params) = 0; + virtual bool SetRecvParameters(const VideoReceiverParameters& params) = 0; // Get the receive parameters for the incoming stream identified by `ssrc`. 
virtual webrtc::RtpParameters GetRtpReceiveParameters( uint32_t ssrc) const = 0; diff --git a/third_party/libwebrtc/media/base/media_channel_impl.cc b/third_party/libwebrtc/media/base/media_channel_impl.cc index 16980af5c6d5..5fee8a2fb05d 100644 --- a/third_party/libwebrtc/media/base/media_channel_impl.cc +++ b/third_party/libwebrtc/media/base/media_channel_impl.cc @@ -46,24 +46,12 @@ VideoOptions::~VideoOptions() = default; MediaChannelUtil::MediaChannelUtil(TaskQueueBase* network_thread, bool enable_dscp) - : enable_dscp_(enable_dscp), - network_safety_(PendingTaskSafetyFlag::CreateDetachedInactive()), - network_thread_(network_thread) {} + : transport_(network_thread, enable_dscp) {} -MediaChannel::MediaChannel(Role role, - TaskQueueBase* network_thread, - bool enable_dscp) - : MediaChannelUtil(network_thread, enable_dscp), role_(role) {} - -MediaChannelUtil::~MediaChannelUtil() { - RTC_DCHECK(!network_interface_); -} +MediaChannelUtil::~MediaChannelUtil() {} void MediaChannelUtil::SetInterface(MediaChannelNetworkInterface* iface) { - RTC_DCHECK_RUN_ON(network_thread_); - iface ? network_safety_->SetAlive() : network_safety_->SetNotAlive(); - network_interface_ = iface; - UpdateDscp(); + transport_.SetInterface(iface); } int MediaChannelUtil::GetRtpSendTimeExtnId() const { @@ -84,19 +72,18 @@ void MediaChannelUtil::SetFrameDecryptor( bool MediaChannelUtil::SendPacket(rtc::CopyOnWriteBuffer* packet, const rtc::PacketOptions& options) { - return DoSendPacket(packet, false, options); + return transport_.DoSendPacket(packet, false, options); } bool MediaChannelUtil::SendRtcp(rtc::CopyOnWriteBuffer* packet, const rtc::PacketOptions& options) { - return DoSendPacket(packet, true, options); + return transport_.DoSendPacket(packet, true, options); } int MediaChannelUtil::SetOption(MediaChannelNetworkInterface::SocketType type, rtc::Socket::Option opt, int option) { - RTC_DCHECK_RUN_ON(network_thread_); - return SetOptionLocked(type, opt, option); + return transport_.SetOption(type, opt, option); } // Corresponds to the SDP attribute extmap-allow-mixed, see RFC8285. @@ -112,8 +99,7 @@ bool MediaChannelUtil::ExtmapAllowMixed() const { } bool MediaChannelUtil::HasNetworkInterface() const { - RTC_DCHECK_RUN_ON(network_thread_); - return network_interface_ != nullptr; + return transport_.HasNetworkInterface(); } void MediaChannelUtil::SetEncoderToPacketizerFrameTransformer( @@ -124,119 +110,12 @@ void MediaChannelUtil::SetDepacketizerToDecoderFrameTransformer( uint32_t ssrc, rtc::scoped_refptr frame_transformer) {} -int MediaChannelUtil::SetOptionLocked( - MediaChannelNetworkInterface::SocketType type, - rtc::Socket::Option opt, - int option) { - if (!network_interface_) - return -1; - return network_interface_->SetOption(type, opt, option); -} - bool MediaChannelUtil::DscpEnabled() const { - return enable_dscp_; -} - -// This is the DSCP value used for both RTP and RTCP channels if DSCP is -// enabled. It can be changed at any time via `SetPreferredDscp`. -rtc::DiffServCodePoint MediaChannelUtil::PreferredDscp() const { - RTC_DCHECK_RUN_ON(network_thread_); - return preferred_dscp_; + return transport_.DscpEnabled(); } void MediaChannelUtil::SetPreferredDscp(rtc::DiffServCodePoint new_dscp) { - if (!network_thread_->IsCurrent()) { - // This is currently the common path as the derived channel classes - // get called on the worker thread. There are still some tests though - // that call directly on the network thread. 
-    network_thread_->PostTask(SafeTask(
-        network_safety_, [this, new_dscp]() { SetPreferredDscp(new_dscp); }));
-    return;
-  }
-
-  RTC_DCHECK_RUN_ON(network_thread_);
-  if (new_dscp == preferred_dscp_)
-    return;
-
-  preferred_dscp_ = new_dscp;
-  UpdateDscp();
-}
-
-rtc::scoped_refptr MediaChannelUtil::network_safety() {
-  return network_safety_;
-}
-
-void MediaChannelUtil::UpdateDscp() {
-  rtc::DiffServCodePoint value =
-      enable_dscp_ ? preferred_dscp_ : rtc::DSCP_DEFAULT;
-  int ret = SetOptionLocked(MediaChannelNetworkInterface::ST_RTP,
-                            rtc::Socket::OPT_DSCP, value);
-  if (ret == 0)
-    SetOptionLocked(MediaChannelNetworkInterface::ST_RTCP,
-                    rtc::Socket::OPT_DSCP, value);
-}
-
-bool MediaChannelUtil::DoSendPacket(rtc::CopyOnWriteBuffer* packet,
-                                    bool rtcp,
-                                    const rtc::PacketOptions& options) {
-  RTC_DCHECK_RUN_ON(network_thread_);
-  if (!network_interface_)
-    return false;
-
-  return (!rtcp) ? network_interface_->SendPacket(packet, options)
-                 : network_interface_->SendRtcp(packet, options);
-}
-
-void MediaChannelUtil::SendRtp(const uint8_t* data,
-                               size_t len,
-                               const webrtc::PacketOptions& options) {
-  auto send =
-      [this, packet_id = options.packet_id,
-       included_in_feedback = options.included_in_feedback,
-       included_in_allocation = options.included_in_allocation,
-       batchable = options.batchable,
-       last_packet_in_batch = options.last_packet_in_batch,
-       packet = rtc::CopyOnWriteBuffer(data, len, kMaxRtpPacketLen)]() mutable {
-        rtc::PacketOptions rtc_options;
-        rtc_options.packet_id = packet_id;
-        if (DscpEnabled()) {
-          rtc_options.dscp = PreferredDscp();
-        }
-        rtc_options.info_signaled_after_sent.included_in_feedback =
-            included_in_feedback;
-        rtc_options.info_signaled_after_sent.included_in_allocation =
-            included_in_allocation;
-        rtc_options.batchable = batchable;
-        rtc_options.last_packet_in_batch = last_packet_in_batch;
-        SendPacket(&packet, rtc_options);
-      };
-
-  // TODO(bugs.webrtc.org/11993): ModuleRtpRtcpImpl2 and related classes (e.g.
-  // RTCPSender) aren't aware of the network thread and may trigger calls to
-  // this function from different threads. Update those classes to keep
-  // network traffic on the network thread.
- if (network_thread_->IsCurrent()) { - send(); - } else { - network_thread_->PostTask(SafeTask(network_safety_, std::move(send))); - } -} - -void MediaChannelUtil::SendRtcp(const uint8_t* data, size_t len) { - auto send = [this, packet = rtc::CopyOnWriteBuffer( - data, len, kMaxRtpPacketLen)]() mutable { - rtc::PacketOptions rtc_options; - if (DscpEnabled()) { - rtc_options.dscp = PreferredDscp(); - } - SendRtcp(&packet, rtc_options); - }; - - if (network_thread_->IsCurrent()) { - send(); - } else { - network_thread_->PostTask(SafeTask(network_safety_, std::move(send))); - } + transport_.SetPreferredDscp(new_dscp); } MediaSenderInfo::MediaSenderInfo() = default; @@ -275,32 +154,169 @@ VideoMediaReceiveInfo::~VideoMediaReceiveInfo() = default; VoiceMediaReceiveInfo::VoiceMediaReceiveInfo() = default; VoiceMediaReceiveInfo::~VoiceMediaReceiveInfo() = default; -AudioSendParameters::AudioSendParameters() = default; -AudioSendParameters::~AudioSendParameters() = default; +AudioSenderParameter::AudioSenderParameter() = default; +AudioSenderParameter::~AudioSenderParameter() = default; -std::map AudioSendParameters::ToStringMap() const { - auto params = RtpSendParameters::ToStringMap(); +std::map AudioSenderParameter::ToStringMap() const { + auto params = SenderParameters::ToStringMap(); params["options"] = options.ToString(); return params; } -cricket::MediaType VoiceMediaChannel::media_type() const { - return cricket::MediaType::MEDIA_TYPE_AUDIO; -} +VideoSenderParameters::VideoSenderParameters() = default; +VideoSenderParameters::~VideoSenderParameters() = default; -VideoSendParameters::VideoSendParameters() = default; -VideoSendParameters::~VideoSendParameters() = default; - -std::map VideoSendParameters::ToStringMap() const { - auto params = RtpSendParameters::ToStringMap(); +std::map VideoSenderParameters::ToStringMap() const { + auto params = SenderParameters::ToStringMap(); params["conference_mode"] = (conference_mode ? 
"yes" : "no"); return params; } -cricket::MediaType VideoMediaChannel::media_type() const { - return cricket::MediaType::MEDIA_TYPE_VIDEO; +// --------------------- MediaChannelUtil::TransportForMediaChannels ----- + +MediaChannelUtil::TransportForMediaChannels::TransportForMediaChannels( + webrtc::TaskQueueBase* network_thread, + bool enable_dscp) + : network_safety_(webrtc::PendingTaskSafetyFlag::CreateDetachedInactive()), + network_thread_(network_thread), + + enable_dscp_(enable_dscp) {} + +MediaChannelUtil::TransportForMediaChannels::~TransportForMediaChannels() { + RTC_DCHECK(!network_interface_); } -void VideoMediaChannel::SetVideoCodecSwitchingEnabled(bool enabled) {} +bool MediaChannelUtil::TransportForMediaChannels::SendRtcp(const uint8_t* data, + size_t len) { + return SendRtcp(rtc::MakeArrayView(data, len)); +} + +bool MediaChannelUtil::TransportForMediaChannels::SendRtcp( + rtc::ArrayView packet) { + auto send = [this, packet = rtc::CopyOnWriteBuffer( + packet, kMaxRtpPacketLen)]() mutable { + rtc::PacketOptions rtc_options; + if (DscpEnabled()) { + rtc_options.dscp = PreferredDscp(); + } + DoSendPacket(&packet, true, rtc_options); + }; + + if (network_thread_->IsCurrent()) { + send(); + } else { + network_thread_->PostTask(SafeTask(network_safety_, std::move(send))); + } + return true; +} + +bool MediaChannelUtil::TransportForMediaChannels::SendRtp( + const uint8_t* data, + size_t len, + const webrtc::PacketOptions& options) { + return SendRtp(rtc::ArrayView(data, len), options); +} + +bool MediaChannelUtil::TransportForMediaChannels::SendRtp( + rtc::ArrayView packet, + const webrtc::PacketOptions& options) { + auto send = + [this, packet_id = options.packet_id, + included_in_feedback = options.included_in_feedback, + included_in_allocation = options.included_in_allocation, + batchable = options.batchable, + last_packet_in_batch = options.last_packet_in_batch, + packet = rtc::CopyOnWriteBuffer(packet, kMaxRtpPacketLen)]() mutable { + rtc::PacketOptions rtc_options; + rtc_options.packet_id = packet_id; + if (DscpEnabled()) { + rtc_options.dscp = PreferredDscp(); + } + rtc_options.info_signaled_after_sent.included_in_feedback = + included_in_feedback; + rtc_options.info_signaled_after_sent.included_in_allocation = + included_in_allocation; + rtc_options.batchable = batchable; + rtc_options.last_packet_in_batch = last_packet_in_batch; + DoSendPacket(&packet, false, rtc_options); + }; + + // TODO(bugs.webrtc.org/11993): ModuleRtpRtcpImpl2 and related classes (e.g. + // RTCPSender) aren't aware of the network thread and may trigger calls to + // this function from different threads. Update those classes to keep + // network traffic on the network thread. + if (network_thread_->IsCurrent()) { + send(); + } else { + network_thread_->PostTask(SafeTask(network_safety_, std::move(send))); + } + return true; +} + +void MediaChannelUtil::TransportForMediaChannels::SetInterface( + MediaChannelNetworkInterface* iface) { + RTC_DCHECK_RUN_ON(network_thread_); + iface ? network_safety_->SetAlive() : network_safety_->SetNotAlive(); + network_interface_ = iface; + UpdateDscp(); +} + +void MediaChannelUtil::TransportForMediaChannels::UpdateDscp() { + rtc::DiffServCodePoint value = + enable_dscp_ ? 
preferred_dscp_ : rtc::DSCP_DEFAULT; + int ret = SetOptionLocked(MediaChannelNetworkInterface::ST_RTP, + rtc::Socket::OPT_DSCP, value); + if (ret == 0) + SetOptionLocked(MediaChannelNetworkInterface::ST_RTCP, + rtc::Socket::OPT_DSCP, value); +} + +bool MediaChannelUtil::TransportForMediaChannels::DoSendPacket( + rtc::CopyOnWriteBuffer* packet, + bool rtcp, + const rtc::PacketOptions& options) { + RTC_DCHECK_RUN_ON(network_thread_); + if (!network_interface_) + return false; + + return (!rtcp) ? network_interface_->SendPacket(packet, options) + : network_interface_->SendRtcp(packet, options); +} + +int MediaChannelUtil::TransportForMediaChannels::SetOption( + MediaChannelNetworkInterface::SocketType type, + rtc::Socket::Option opt, + int option) { + RTC_DCHECK_RUN_ON(network_thread_); + return SetOptionLocked(type, opt, option); +} + +int MediaChannelUtil::TransportForMediaChannels::SetOptionLocked( + MediaChannelNetworkInterface::SocketType type, + rtc::Socket::Option opt, + int option) { + if (!network_interface_) + return -1; + return network_interface_->SetOption(type, opt, option); +} + +void MediaChannelUtil::TransportForMediaChannels::SetPreferredDscp( + rtc::DiffServCodePoint new_dscp) { + if (!network_thread_->IsCurrent()) { + // This is currently the common path as the derived channel classes + // get called on the worker thread. There are still some tests though + // that call directly on the network thread. + network_thread_->PostTask(SafeTask( + network_safety_, [this, new_dscp]() { SetPreferredDscp(new_dscp); })); + return; + } + + RTC_DCHECK_RUN_ON(network_thread_); + if (new_dscp == preferred_dscp_) + return; + + preferred_dscp_ = new_dscp; + UpdateDscp(); +} } // namespace cricket diff --git a/third_party/libwebrtc/media/base/media_channel_impl.h b/third_party/libwebrtc/media/base/media_channel_impl.h index 33007d07ee5b..1e7fa649d08e 100644 --- a/third_party/libwebrtc/media/base/media_channel_impl.h +++ b/third_party/libwebrtc/media/base/media_channel_impl.h @@ -59,16 +59,12 @@ #include "rtc_base/socket.h" #include "rtc_base/thread_annotations.h" // This file contains the base classes for classes that implement -// the MediaChannel interfaces. +// the channel interfaces. // These implementation classes used to be the exposed interface names, // but this is in the process of being changed. -// TODO(bugs.webrtc.org/13931): Remove the MediaChannel class. namespace cricket { -class VoiceMediaChannel; -class VideoMediaChannel; - // The `MediaChannelUtil` class provides functionality that is used by // multiple MediaChannel-like objects, of both sending and receiving // types. @@ -79,7 +75,11 @@ class MediaChannelUtil { virtual ~MediaChannelUtil(); // Returns the absolute sendtime extension id value from media channel. virtual int GetRtpSendTimeExtnId() const; - // Base method to send packet using MediaChannelNetworkInterface. + + webrtc::Transport* transport() { return &transport_; } + + // Base methods to send packet using MediaChannelNetworkInterface. + // These methods are used by some tests only. bool SendPacket(rtc::CopyOnWriteBuffer* packet, const rtc::PacketOptions& options); @@ -121,260 +121,77 @@ class MediaChannelUtil { rtc::scoped_refptr frame_transformer); protected: - int SetOptionLocked(MediaChannelNetworkInterface::SocketType type, - rtc::Socket::Option opt, - int option) RTC_RUN_ON(network_thread_); - bool DscpEnabled() const; - // This is the DSCP value used for both RTP and RTCP channels if DSCP is - // enabled. 
It can be changed at any time via `SetPreferredDscp`. - rtc::DiffServCodePoint PreferredDscp() const; void SetPreferredDscp(rtc::DiffServCodePoint new_dscp); - rtc::scoped_refptr network_safety(); - - // Utility implementation for derived classes (video/voice) that applies - // the packet options and passes the data onwards to `SendPacket`. - void SendRtp(const uint8_t* data, - size_t len, - const webrtc::PacketOptions& options); - - void SendRtcp(const uint8_t* data, size_t len); - private: - // Apply the preferred DSCP setting to the underlying network interface RTP - // and RTCP channels. If DSCP is disabled, then apply the default DSCP value. - void UpdateDscp() RTC_RUN_ON(network_thread_); + // Implementation of the webrtc::Transport interface required + // by Call(). + class TransportForMediaChannels : public webrtc::Transport { + public: + TransportForMediaChannels(webrtc::TaskQueueBase* network_thread, + bool enable_dscp); - bool DoSendPacket(rtc::CopyOnWriteBuffer* packet, - bool rtcp, - const rtc::PacketOptions& options); + virtual ~TransportForMediaChannels(); - const bool enable_dscp_; - const rtc::scoped_refptr network_safety_ - RTC_PT_GUARDED_BY(network_thread_); - webrtc::TaskQueueBase* const network_thread_; - MediaChannelNetworkInterface* network_interface_ - RTC_GUARDED_BY(network_thread_) = nullptr; - rtc::DiffServCodePoint preferred_dscp_ RTC_GUARDED_BY(network_thread_) = - rtc::DSCP_DEFAULT; - bool extmap_allow_mixed_ = false; -}; + // Implementation of webrtc::Transport + bool SendRtp(const uint8_t* packet, + size_t length, + const webrtc::PacketOptions& options) override; + bool SendRtcp(const uint8_t* packet, size_t length) override; + bool SendRtp(rtc::ArrayView packet, + const webrtc::PacketOptions& options) override; + bool SendRtcp(rtc::ArrayView packet) override; -// The `MediaChannel` class implements both the SendChannel and -// ReceiveChannel interface. It is used in legacy code that does not -// use the split interfaces. -class MediaChannel : public MediaChannelUtil, - public MediaSendChannelInterface, - public MediaReceiveChannelInterface { - public: - // Role of the channel. Used to describe which interface it supports. - // This is temporary until we stop using the same implementation for both - // interfaces. - enum class Role { - kSend, - kReceive, - kBoth // Temporary value for non-converted test and downstream code - // TODO(bugs.webrtc.org/13931): Remove kBoth when usage is removed. + // Not implementation of webrtc::Transport + void SetInterface(MediaChannelNetworkInterface* iface); + + int SetOption(MediaChannelNetworkInterface::SocketType type, + rtc::Socket::Option opt, + int option); + + bool DoSendPacket(rtc::CopyOnWriteBuffer* packet, + bool rtcp, + const rtc::PacketOptions& options); + + bool HasNetworkInterface() const { + RTC_DCHECK_RUN_ON(network_thread_); + return network_interface_ != nullptr; + } + bool DscpEnabled() const { return enable_dscp_; } + + void SetPreferredDscp(rtc::DiffServCodePoint new_dscp); + + private: + // This is the DSCP value used for both RTP and RTCP channels if DSCP is + // enabled. It can be changed at any time via `SetPreferredDscp`. + rtc::DiffServCodePoint PreferredDscp() const { + RTC_DCHECK_RUN_ON(network_thread_); + return preferred_dscp_; + } + + // Apply the preferred DSCP setting to the underlying network interface RTP + // and RTCP channels. If DSCP is disabled, then apply the default DSCP + // value. 
+ void UpdateDscp() RTC_RUN_ON(network_thread_); + + int SetOptionLocked(MediaChannelNetworkInterface::SocketType type, + rtc::Socket::Option opt, + int option) RTC_RUN_ON(network_thread_); + + const rtc::scoped_refptr network_safety_ + RTC_PT_GUARDED_BY(network_thread_); + webrtc::TaskQueueBase* const network_thread_; + const bool enable_dscp_; + MediaChannelNetworkInterface* network_interface_ + RTC_GUARDED_BY(network_thread_) = nullptr; + rtc::DiffServCodePoint preferred_dscp_ RTC_GUARDED_BY(network_thread_) = + rtc::DSCP_DEFAULT; }; - explicit MediaChannel(Role role, - webrtc::TaskQueueBase* network_thread, - bool enable_dscp = false); - virtual ~MediaChannel() = default; - - Role role() const { return role_; } - - // Downcasting to the subclasses. - virtual VideoMediaChannel* AsVideoChannel() { - RTC_CHECK_NOTREACHED(); - return nullptr; - } - - virtual VoiceMediaChannel* AsVoiceChannel() { - RTC_CHECK_NOTREACHED(); - return nullptr; - } - // Must declare the methods inherited from the base interface template, - // even when abstract, to tell the compiler that all instances of the name - // referred to by subclasses of this share the same implementation. - cricket::MediaType media_type() const override = 0; - void OnPacketReceived(const webrtc::RtpPacketReceived& packet) override = 0; - void OnPacketSent(const rtc::SentPacket& sent_packet) override = 0; - void OnReadyToSend(bool ready) override = 0; - void OnNetworkRouteChanged(absl::string_view transport_name, - const rtc::NetworkRoute& network_route) override = - 0; - void SetSendCodecChangedCallback( - absl::AnyInvocable callback) override = 0; - - // Methods from the APIs that are implemented in MediaChannelUtil - using MediaChannelUtil::ExtmapAllowMixed; - using MediaChannelUtil::HasNetworkInterface; - using MediaChannelUtil::SetExtmapAllowMixed; - using MediaChannelUtil::SetInterface; - - private: - const Role role_; -}; - -// Base class for implementation classes - -class VideoMediaChannel : public MediaChannel, - public VideoMediaSendChannelInterface, - public VideoMediaReceiveChannelInterface { - public: - explicit VideoMediaChannel(MediaChannel::Role role, - webrtc::TaskQueueBase* network_thread, - bool enable_dscp = false) - : MediaChannel(role, network_thread, enable_dscp) {} - ~VideoMediaChannel() override {} - - // Downcasting to the implemented interfaces. - VideoMediaSendChannelInterface* AsVideoSendChannel() override { return this; } - VoiceMediaSendChannelInterface* AsVoiceSendChannel() override { - RTC_CHECK_NOTREACHED(); - return nullptr; - } - - VideoMediaReceiveChannelInterface* AsVideoReceiveChannel() override { - return this; - } - VoiceMediaReceiveChannelInterface* AsVoiceReceiveChannel() override { - RTC_CHECK_NOTREACHED(); - return nullptr; - } - cricket::MediaType media_type() const override; - - // Downcasting to the subclasses. 
- VideoMediaChannel* AsVideoChannel() override { return this; } - - void SetExtmapAllowMixed(bool mixed) override { - MediaChannel::SetExtmapAllowMixed(mixed); - } - bool ExtmapAllowMixed() const override { - return MediaChannel::ExtmapAllowMixed(); - } - void SetInterface(MediaChannelNetworkInterface* iface) override { - return MediaChannel::SetInterface(iface); - } - // Declared here in order to avoid "found by multiple paths" compile error - bool AddSendStream(const StreamParams& sp) override = 0; - void ChooseReceiverReportSsrc(const std::set& choices) override = 0; - void SetSsrcListChangedCallback( - absl::AnyInvocable&)> callback) override = - 0; - bool AddRecvStream(const StreamParams& sp) override = 0; - void OnPacketReceived(const webrtc::RtpPacketReceived& packet) override = 0; - void SetEncoderSelector(uint32_t ssrc, - webrtc::VideoEncoderFactory::EncoderSelectorInterface* - encoder_selector) override {} - - // This fills the "bitrate parts" (rtx, video bitrate) of the - // BandwidthEstimationInfo, since that part that isn't possible to get - // through webrtc::Call::GetStats, as they are statistics of the send - // streams. - // TODO(holmer): We should change this so that either BWE graphs doesn't - // need access to bitrates of the streams, or change the (RTC)StatsCollector - // so that it's getting the send stream stats separately by calling - // GetStats(), and merges with BandwidthEstimationInfo by itself. - void FillBitrateInfo(BandwidthEstimationInfo* bwe_info) override = 0; - // Gets quality stats for the channel. - virtual bool GetSendStats(VideoMediaSendInfo* info) = 0; - virtual bool GetReceiveStats(VideoMediaReceiveInfo* info) = 0; - bool GetStats(VideoMediaSendInfo* info) override { - return GetSendStats(info); - } - bool GetStats(VideoMediaReceiveInfo* info) override { - return GetReceiveStats(info); - } - - // TODO(bugs.webrtc.org/13931): Remove when configuration is more sensible - void SetSendCodecChangedCallback( - absl::AnyInvocable callback) override = 0; - // Enable network condition based codec switching. - // Note: should have been pure virtual. - void SetVideoCodecSwitchingEnabled(bool enabled) override; - - private: - // Functions not implemented on this interface - bool HasNetworkInterface() const override { - return MediaChannel::HasNetworkInterface(); - } -}; - -// Base class for implementation classes -class VoiceMediaChannel : public MediaChannel, - public VoiceMediaSendChannelInterface, - public VoiceMediaReceiveChannelInterface { - public: - MediaType media_type() const override; - VoiceMediaChannel(MediaChannel::Role role, - webrtc::TaskQueueBase* network_thread, - bool enable_dscp = false) - : MediaChannel(role, network_thread, enable_dscp) {} - ~VoiceMediaChannel() override {} - - // Downcasting to the implemented interfaces. 
- VoiceMediaSendChannelInterface* AsVoiceSendChannel() override { return this; } - - VoiceMediaReceiveChannelInterface* AsVoiceReceiveChannel() override { - return this; - } - - VoiceMediaChannel* AsVoiceChannel() override { return this; } - - VideoMediaSendChannelInterface* AsVideoSendChannel() override { - RTC_CHECK_NOTREACHED(); - return nullptr; - } - VideoMediaReceiveChannelInterface* AsVideoReceiveChannel() override { - RTC_CHECK_NOTREACHED(); - return nullptr; - } - - // Declared here in order to avoid "found by multiple paths" compile error - bool AddSendStream(const StreamParams& sp) override = 0; - bool AddRecvStream(const StreamParams& sp) override = 0; - void OnPacketReceived(const webrtc::RtpPacketReceived& packet) override = 0; - void SetEncoderSelector(uint32_t ssrc, - webrtc::VideoEncoderFactory::EncoderSelectorInterface* - encoder_selector) override {} - void ChooseReceiverReportSsrc(const std::set& choices) override = 0; - void SetSsrcListChangedCallback( - absl::AnyInvocable&)> callback) override = - 0; - webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const override = 0; - webrtc::RTCError SetRtpSendParameters( - uint32_t ssrc, - const webrtc::RtpParameters& parameters, - webrtc::SetParametersCallback callback = nullptr) override = 0; - - void SetExtmapAllowMixed(bool mixed) override { - MediaChannel::SetExtmapAllowMixed(mixed); - } - bool ExtmapAllowMixed() const override { - return MediaChannel::ExtmapAllowMixed(); - } - void SetInterface(MediaChannelNetworkInterface* iface) override { - return MediaChannel::SetInterface(iface); - } - bool HasNetworkInterface() const override { - return MediaChannel::HasNetworkInterface(); - } - - // Gets quality stats for the channel. - virtual bool GetSendStats(VoiceMediaSendInfo* info) = 0; - virtual bool GetReceiveStats(VoiceMediaReceiveInfo* info, - bool get_and_clear_legacy_stats) = 0; - bool GetStats(VoiceMediaSendInfo* info) override { - return GetSendStats(info); - } - bool GetStats(VoiceMediaReceiveInfo* info, - bool get_and_clear_legacy_stats) override { - return GetReceiveStats(info, get_and_clear_legacy_stats); - } + bool extmap_allow_mixed_ = false; + TransportForMediaChannels transport_; }; } // namespace cricket diff --git a/third_party/libwebrtc/media/base/media_channel_shim.cc b/third_party/libwebrtc/media/base/media_channel_shim.cc deleted file mode 100644 index adc749ff9da0..000000000000 --- a/third_party/libwebrtc/media/base/media_channel_shim.cc +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2023 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "media/base/media_channel_shim.h" - -namespace cricket { - -// Note: The VideoMediaChannel default implementations are not used here, and -// should be removed from that interface. -// TODO(bugs.webrtc.org/13931): Remove them. 
-VoiceMediaShimChannel::VoiceMediaShimChannel( - std::unique_ptr send_impl, - std::unique_ptr receive_impl) - : VoiceMediaChannel(MediaChannel::Role::kBoth, nullptr, false), - send_impl_(std::move(send_impl)), - receive_impl_(std::move(receive_impl)) { - if (send_impl_ && receive_impl_) { - send_impl_->SetSsrcListChangedCallback( - [this](const std::set& choices) { - receive_impl_->ChooseReceiverReportSsrc(choices); - }); - send_impl_->SetSendCodecChangedCallback([this]() { - receive_impl_->SetReceiveNackEnabled(send_impl_->SendCodecHasNack()); - receive_impl_->SetReceiveNonSenderRttEnabled( - send_impl_->SenderNonSenderRttEnabled()); - }); - } -} - -VideoMediaShimChannel::VideoMediaShimChannel( - std::unique_ptr send_impl, - std::unique_ptr receive_impl) - : VideoMediaChannel(MediaChannel::Role::kBoth, nullptr, false), - send_impl_(std::move(send_impl)), - receive_impl_(std::move(receive_impl)) { - if (send_impl_ && receive_impl_) { - send_impl_->SetSendCodecChangedCallback([this]() { - // Adjust receive streams based on send codec. - receive_impl_->SetReceiverFeedbackParameters( - send_impl_->SendCodecHasLntf(), send_impl_->SendCodecHasNack(), - send_impl_->SendCodecRtcpMode(), send_impl_->SendCodecRtxTime()); - }); - send_impl_->SetSsrcListChangedCallback( - [this](const std::set& choices) { - receive_impl_->ChooseReceiverReportSsrc(choices); - }); - } -} - -} // namespace cricket diff --git a/third_party/libwebrtc/media/base/media_channel_shim.h b/third_party/libwebrtc/media/base/media_channel_shim.h deleted file mode 100644 index e4dbee700dea..000000000000 --- a/third_party/libwebrtc/media/base/media_channel_shim.h +++ /dev/null @@ -1,572 +0,0 @@ -/* - * Copyright 2023 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MEDIA_BASE_MEDIA_CHANNEL_SHIM_H_ -#define MEDIA_BASE_MEDIA_CHANNEL_SHIM_H_ - -#include - -#include -#include -#include -#include -#include -#include - -#include "absl/functional/any_invocable.h" -#include "absl/strings/string_view.h" -#include "absl/types/optional.h" -#include "api/crypto/frame_decryptor_interface.h" -#include "api/crypto/frame_encryptor_interface.h" -#include "api/frame_transformer_interface.h" -#include "api/media_types.h" -#include "api/rtc_error.h" -#include "api/rtp_headers.h" -#include "api/rtp_parameters.h" -#include "api/rtp_sender_interface.h" -#include "api/scoped_refptr.h" -#include "api/transport/rtp/rtp_source.h" -#include "api/video/recordable_encoded_frame.h" -#include "api/video/video_frame.h" -#include "api/video/video_sink_interface.h" -#include "api/video/video_source_interface.h" -#include "api/video_codecs/video_encoder_factory.h" -#include "media/base/codec.h" -#include "media/base/media_channel.h" -#include "media/base/media_channel_impl.h" -#include "media/base/stream_params.h" -#include "modules/rtp_rtcp/source/rtp_packet_received.h" -#include "rtc_base/checks.h" -#include "rtc_base/network/sent_packet.h" -#include "rtc_base/network_route.h" - -namespace cricket { - -// The VideoMediaShimChannel is replacing the VideoMediaChannel -// interface. 
-// If called with both send_impl and receive_impl, it operates in kBoth -// mode; if called with only one, it will shim that one and DCHECK if one -// tries to do functions belonging to the other. -class VoiceMediaShimChannel : public VoiceMediaChannel { - public: - VoiceMediaShimChannel( - std::unique_ptr send_impl, - std::unique_ptr receive_impl); - - VoiceMediaSendChannelInterface* AsVoiceSendChannel() override { return this; } - VoiceMediaReceiveChannelInterface* AsVoiceReceiveChannel() override { - return this; - } - VideoMediaSendChannelInterface* AsVideoSendChannel() override { - RTC_CHECK_NOTREACHED(); - return nullptr; - } - VideoMediaReceiveChannelInterface* AsVideoReceiveChannel() override { - RTC_CHECK_NOTREACHED(); - return nullptr; - } - - // SetInterface needs to run on both send and receive channels. - void SetInterface(MediaChannelNetworkInterface* iface) override { - if (send_impl_) { - send_impl()->SetInterface(iface); - } - if (receive_impl_) { - receive_impl()->SetInterface(iface); - } - } - - // Implementation of MediaBaseChannelInterface - cricket::MediaType media_type() const override { return MEDIA_TYPE_AUDIO; } - - // Implementation of MediaSendChannelInterface - void OnPacketSent(const rtc::SentPacket& sent_packet) override { - send_impl()->OnPacketSent(sent_packet); - } - void OnReadyToSend(bool ready) override { send_impl()->OnReadyToSend(ready); } - void OnNetworkRouteChanged(absl::string_view transport_name, - const rtc::NetworkRoute& network_route) override { - send_impl()->OnNetworkRouteChanged(transport_name, network_route); - } - void SetExtmapAllowMixed(bool extmap_allow_mixed) override { - send_impl()->SetExtmapAllowMixed(extmap_allow_mixed); - } - bool HasNetworkInterface() const override { - return send_impl()->HasNetworkInterface(); - } - bool ExtmapAllowMixed() const override { - return send_impl()->ExtmapAllowMixed(); - } - - bool AddSendStream(const StreamParams& sp) override { - return send_impl()->AddSendStream(sp); - } - bool RemoveSendStream(uint32_t ssrc) override { - return send_impl()->RemoveSendStream(ssrc); - } - void SetFrameEncryptor(uint32_t ssrc, - rtc::scoped_refptr - frame_encryptor) override { - send_impl()->SetFrameEncryptor(ssrc, frame_encryptor); - } - webrtc::RTCError SetRtpSendParameters( - uint32_t ssrc, - const webrtc::RtpParameters& parameters, - webrtc::SetParametersCallback callback = nullptr) override { - return send_impl()->SetRtpSendParameters(ssrc, parameters, - std::move(callback)); - } - - void SetEncoderToPacketizerFrameTransformer( - uint32_t ssrc, - rtc::scoped_refptr frame_transformer) - override { - return send_impl()->SetEncoderToPacketizerFrameTransformer( - ssrc, frame_transformer); - } - webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const override { - return send_impl()->GetRtpSendParameters(ssrc); - } - // Implementation of MediaReceiveChannelInterface - void OnPacketReceived(const webrtc::RtpPacketReceived& packet) override { - receive_impl()->OnPacketReceived(packet); - } - bool AddRecvStream(const StreamParams& sp) override { - return receive_impl()->AddRecvStream(sp); - } - bool RemoveRecvStream(uint32_t ssrc) override { - return receive_impl()->RemoveRecvStream(ssrc); - } - void ResetUnsignaledRecvStream() override { - return receive_impl()->ResetUnsignaledRecvStream(); - } - absl::optional GetUnsignaledSsrc() const override { - return receive_impl()->GetUnsignaledSsrc(); - } - void ChooseReceiverReportSsrc(const std::set& choices) override { - return 
receive_impl()->ChooseReceiverReportSsrc(choices); - } - void OnDemuxerCriteriaUpdatePending() override { - receive_impl()->OnDemuxerCriteriaUpdatePending(); - } - void OnDemuxerCriteriaUpdateComplete() override { - receive_impl()->OnDemuxerCriteriaUpdateComplete(); - } - void SetFrameDecryptor(uint32_t ssrc, - rtc::scoped_refptr - frame_decryptor) override { - receive_impl()->SetFrameDecryptor(ssrc, frame_decryptor); - } - void SetDepacketizerToDecoderFrameTransformer( - uint32_t ssrc, - rtc::scoped_refptr frame_transformer) - override { - receive_impl()->SetDepacketizerToDecoderFrameTransformer(ssrc, - frame_transformer); - } - bool SendCodecHasNack() const override { - return send_impl()->SendCodecHasNack(); - } - void SetSendCodecChangedCallback( - absl::AnyInvocable callback) override { - send_impl()->SetSendCodecChangedCallback(std::move(callback)); - } - // Implementation of VoiceMediaSendChannel - bool SetSendParameters(const AudioSendParameters& params) override { - return send_impl()->SetSendParameters(params); - } - void SetSend(bool send) override { return send_impl()->SetSend(send); } - bool SetAudioSend(uint32_t ssrc, - bool enable, - const AudioOptions* options, - AudioSource* source) override { - return send_impl()->SetAudioSend(ssrc, enable, options, source); - } - bool CanInsertDtmf() override { return send_impl()->CanInsertDtmf(); } - bool InsertDtmf(uint32_t ssrc, int event, int duration) override { - return send_impl()->InsertDtmf(ssrc, event, duration); - } - bool GetStats(VoiceMediaSendInfo* info) override { - return send_impl()->GetStats(info); - } - bool SenderNackEnabled() const override { - return send_impl()->SenderNackEnabled(); - } - bool SenderNonSenderRttEnabled() const override { - return send_impl()->SenderNonSenderRttEnabled(); - } - // Implementation of VoiceMediaReceiveChannelInterface - bool SetRecvParameters(const AudioRecvParameters& params) override { - return receive_impl()->SetRecvParameters(params); - } - webrtc::RtpParameters GetRtpReceiveParameters(uint32_t ssrc) const override { - return receive_impl()->GetRtpReceiveParameters(ssrc); - } - std::vector GetSources(uint32_t ssrc) const override { - return receive_impl()->GetSources(ssrc); - } - webrtc::RtpParameters GetDefaultRtpReceiveParameters() const override { - return receive_impl()->GetDefaultRtpReceiveParameters(); - } - void SetPlayout(bool playout) override { - return receive_impl()->SetPlayout(playout); - } - bool SetOutputVolume(uint32_t ssrc, double volume) override { - return receive_impl()->SetOutputVolume(ssrc, volume); - } - bool SetDefaultOutputVolume(double volume) override { - return receive_impl()->SetDefaultOutputVolume(volume); - } - void SetRawAudioSink( - uint32_t ssrc, - std::unique_ptr sink) override { - return receive_impl()->SetRawAudioSink(ssrc, std::move(sink)); - } - void SetDefaultRawAudioSink( - std::unique_ptr sink) override { - return receive_impl()->SetDefaultRawAudioSink(std::move(sink)); - } - bool GetStats(VoiceMediaReceiveInfo* info, bool reset_legacy) override { - return receive_impl_->GetStats(info, reset_legacy); - } - void SetReceiveNackEnabled(bool enabled) override { - receive_impl_->SetReceiveNackEnabled(enabled); - } - void SetReceiveNonSenderRttEnabled(bool enabled) override { - receive_impl_->SetReceiveNonSenderRttEnabled(enabled); - } - void SetSsrcListChangedCallback( - absl::AnyInvocable&)> callback) override { - send_impl()->SetSsrcListChangedCallback(std::move(callback)); - } - // Implementation of Delayable - bool 
SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, int delay_ms) override { - return receive_impl()->SetBaseMinimumPlayoutDelayMs(ssrc, delay_ms); - } - absl::optional GetBaseMinimumPlayoutDelayMs( - uint32_t ssrc) const override { - return receive_impl()->GetBaseMinimumPlayoutDelayMs(ssrc); - } - bool GetSendStats(VoiceMediaSendInfo* info) override { - return send_impl()->GetStats(info); - } - bool GetReceiveStats(VoiceMediaReceiveInfo* info, - bool reset_legacy) override { - return receive_impl()->GetStats(info, reset_legacy); - } - - // Only for testing of implementations - these will be used to static_cast the - // pointers to the implementations, so can only be safely used in conjunction - // with the corresponding create functions. - VoiceMediaSendChannelInterface* SendImplForTesting() { - return send_impl_.get(); - } - VoiceMediaReceiveChannelInterface* ReceiveImplForTesting() { - return receive_impl_.get(); - } - - private: - VoiceMediaSendChannelInterface* send_impl() { return send_impl_.get(); } - VoiceMediaReceiveChannelInterface* receive_impl() { - RTC_DCHECK(receive_impl_); - return receive_impl_.get(); - } - const VoiceMediaSendChannelInterface* send_impl() const { - RTC_DCHECK(send_impl_); - return send_impl_.get(); - } - const VoiceMediaReceiveChannelInterface* receive_impl() const { - return receive_impl_.get(); - } - - std::unique_ptr send_impl_; - std::unique_ptr receive_impl_; -}; - -// The VideoMediaShimChannel is replacing the VideoMediaChannel -// interface. -// If called with both send_impl and receive_impl, it operates in kBoth -// mode; if called with only one, it will shim that one and DCHECK if one -// tries to do functions belonging to the other. - -class VideoMediaShimChannel : public VideoMediaChannel { - public: - VideoMediaShimChannel( - std::unique_ptr send_impl, - std::unique_ptr receive_impl); - - VideoMediaSendChannelInterface* AsVideoSendChannel() override { return this; } - VideoMediaReceiveChannelInterface* AsVideoReceiveChannel() override { - return this; - } - VoiceMediaSendChannelInterface* AsVoiceSendChannel() override { - RTC_CHECK_NOTREACHED(); - return nullptr; - } - VoiceMediaReceiveChannelInterface* AsVoiceReceiveChannel() override { - RTC_CHECK_NOTREACHED(); - return nullptr; - } - - // SetInterface needs to run on both send and receive channels. 
- void SetInterface(MediaChannelNetworkInterface* iface) override { - if (send_impl_) { - send_impl()->SetInterface(iface); - } - if (receive_impl_) { - receive_impl()->SetInterface(iface); - } - } - - // Implementation of MediaBaseChannelInterface - cricket::MediaType media_type() const override { return MEDIA_TYPE_VIDEO; } - - // Implementation of MediaSendChannelInterface - void OnPacketSent(const rtc::SentPacket& sent_packet) override { - send_impl()->OnPacketSent(sent_packet); - } - void OnReadyToSend(bool ready) override { send_impl()->OnReadyToSend(ready); } - void OnNetworkRouteChanged(absl::string_view transport_name, - const rtc::NetworkRoute& network_route) override { - send_impl()->OnNetworkRouteChanged(transport_name, network_route); - } - void SetExtmapAllowMixed(bool extmap_allow_mixed) override { - send_impl()->SetExtmapAllowMixed(extmap_allow_mixed); - } - bool HasNetworkInterface() const override { - return send_impl()->HasNetworkInterface(); - } - bool ExtmapAllowMixed() const override { - return send_impl()->ExtmapAllowMixed(); - } - - bool AddSendStream(const StreamParams& sp) override { - return send_impl()->AddSendStream(sp); - } - bool RemoveSendStream(uint32_t ssrc) override { - return send_impl()->RemoveSendStream(ssrc); - } - void SetFrameEncryptor(uint32_t ssrc, - rtc::scoped_refptr - frame_encryptor) override { - send_impl()->SetFrameEncryptor(ssrc, frame_encryptor); - } - webrtc::RTCError SetRtpSendParameters( - uint32_t ssrc, - const webrtc::RtpParameters& parameters, - webrtc::SetParametersCallback callback = nullptr) override { - return send_impl()->SetRtpSendParameters(ssrc, parameters, - std::move(callback)); - } - - void SetEncoderToPacketizerFrameTransformer( - uint32_t ssrc, - rtc::scoped_refptr frame_transformer) - override { - return send_impl()->SetEncoderToPacketizerFrameTransformer( - ssrc, frame_transformer); - } - void SetEncoderSelector(uint32_t ssrc, - webrtc::VideoEncoderFactory::EncoderSelectorInterface* - encoder_selector) override { - send_impl()->SetEncoderSelector(ssrc, encoder_selector); - } - webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const override { - return send_impl()->GetRtpSendParameters(ssrc); - } - // Send_Implementation of VideoMediaSendChannelInterface - bool SetSendParameters(const VideoSendParameters& params) override { - return send_impl()->SetSendParameters(params); - } - absl::optional GetSendCodec() override { - return send_impl()->GetSendCodec(); - } - bool SetSend(bool send) override { return send_impl()->SetSend(send); } - bool SetVideoSend( - uint32_t ssrc, - const VideoOptions* options, - rtc::VideoSourceInterface* source) override { - return send_impl()->SetVideoSend(ssrc, options, source); - } - void GenerateSendKeyFrame(uint32_t ssrc, - const std::vector& rids) override { - return send_impl()->GenerateSendKeyFrame(ssrc, rids); - } - void SetVideoCodecSwitchingEnabled(bool enabled) override { - return send_impl()->SetVideoCodecSwitchingEnabled(enabled); - } - bool GetStats(VideoMediaSendInfo* info) override { - return send_impl_->GetStats(info); - } - bool GetSendStats(VideoMediaSendInfo* info) override { - return send_impl_->GetStats(info); - } - void FillBitrateInfo(BandwidthEstimationInfo* bwe_info) override { - return send_impl_->FillBitrateInfo(bwe_info); - } - // Information queries to support SetReceiverFeedbackParameters - webrtc::RtcpMode SendCodecRtcpMode() const override { - return send_impl()->SendCodecRtcpMode(); - } - bool SendCodecHasLntf() const override { - return 
send_impl()->SendCodecHasLntf(); - } - bool SendCodecHasNack() const override { - return send_impl()->SendCodecHasNack(); - } - absl::optional SendCodecRtxTime() const override { - return send_impl()->SendCodecRtxTime(); - } - void SetSsrcListChangedCallback( - absl::AnyInvocable&)> callback) override { - send_impl()->SetSsrcListChangedCallback(std::move(callback)); - } - void SetSendCodecChangedCallback( - absl::AnyInvocable callback) override { - // This callback is used internally by the shim, so should not be called by - // users for the "both" case. - if (send_impl_ && receive_impl_) { - RTC_CHECK_NOTREACHED(); - } - send_impl()->SetSendCodecChangedCallback(std::move(callback)); - } - - // Implementation of Delayable - bool SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, int delay_ms) override { - return receive_impl()->SetBaseMinimumPlayoutDelayMs(ssrc, delay_ms); - } - absl::optional GetBaseMinimumPlayoutDelayMs( - uint32_t ssrc) const override { - return receive_impl()->GetBaseMinimumPlayoutDelayMs(ssrc); - } - // Implementation of MediaReceiveChannelInterface - void OnPacketReceived(const webrtc::RtpPacketReceived& packet) override { - receive_impl()->OnPacketReceived(packet); - } - bool AddRecvStream(const StreamParams& sp) override { - return receive_impl()->AddRecvStream(sp); - } - bool RemoveRecvStream(uint32_t ssrc) override { - return receive_impl()->RemoveRecvStream(ssrc); - } - void ResetUnsignaledRecvStream() override { - return receive_impl()->ResetUnsignaledRecvStream(); - } - absl::optional GetUnsignaledSsrc() const override { - return receive_impl()->GetUnsignaledSsrc(); - } - void ChooseReceiverReportSsrc(const std::set& choices) override { - return receive_impl()->ChooseReceiverReportSsrc(choices); - } - void OnDemuxerCriteriaUpdatePending() override { - receive_impl()->OnDemuxerCriteriaUpdatePending(); - } - void OnDemuxerCriteriaUpdateComplete() override { - receive_impl()->OnDemuxerCriteriaUpdateComplete(); - } - void SetFrameDecryptor(uint32_t ssrc, - rtc::scoped_refptr - frame_decryptor) override { - receive_impl()->SetFrameDecryptor(ssrc, frame_decryptor); - } - void SetDepacketizerToDecoderFrameTransformer( - uint32_t ssrc, - rtc::scoped_refptr frame_transformer) - override { - receive_impl()->SetDepacketizerToDecoderFrameTransformer(ssrc, - frame_transformer); - } - // Implementation of VideoMediaReceiveChannelInterface - bool SetRecvParameters(const VideoRecvParameters& params) override { - return receive_impl()->SetRecvParameters(params); - } - webrtc::RtpParameters GetRtpReceiveParameters(uint32_t ssrc) const override { - return receive_impl()->GetRtpReceiveParameters(ssrc); - } - webrtc::RtpParameters GetDefaultRtpReceiveParameters() const override { - return receive_impl()->GetDefaultRtpReceiveParameters(); - } - bool SetSink(uint32_t ssrc, - rtc::VideoSinkInterface* sink) override { - return receive_impl()->SetSink(ssrc, sink); - } - void SetDefaultSink( - rtc::VideoSinkInterface* sink) override { - return receive_impl()->SetDefaultSink(sink); - } - void RequestRecvKeyFrame(uint32_t ssrc) override { - return receive_impl()->RequestRecvKeyFrame(ssrc); - } - std::vector GetSources(uint32_t ssrc) const override { - return receive_impl()->GetSources(ssrc); - } - // Set recordable encoded frame callback for `ssrc` - void SetRecordableEncodedFrameCallback( - uint32_t ssrc, - std::function callback) - override { - return receive_impl()->SetRecordableEncodedFrameCallback( - ssrc, std::move(callback)); - } - // Clear recordable encoded frame callback for `ssrc` - 
void ClearRecordableEncodedFrameCallback(uint32_t ssrc) override { - receive_impl()->ClearRecordableEncodedFrameCallback(ssrc); - } - bool GetStats(VideoMediaReceiveInfo* info) override { - return receive_impl()->GetStats(info); - } - bool GetReceiveStats(VideoMediaReceiveInfo* info) override { - return receive_impl()->GetStats(info); - } - void SetReceiverFeedbackParameters(bool lntf_enabled, - bool nack_enabled, - webrtc::RtcpMode rtcp_mode, - absl::optional rtx_time) override { - receive_impl()->SetReceiverFeedbackParameters(lntf_enabled, nack_enabled, - rtcp_mode, rtx_time); - } - void SetReceive(bool receive) override { - receive_impl()->SetReceive(receive); - } - bool AddDefaultRecvStreamForTesting(const StreamParams& sp) override { - return receive_impl()->AddDefaultRecvStreamForTesting(sp); - } - - // Only for testing of implementations - these will be used to static_cast the - // pointers to the implementations, so can only be safely used in conjunction - // with the corresponding create functions. - VideoMediaSendChannelInterface* SendImplForTesting() { - return send_impl_.get(); - } - VideoMediaReceiveChannelInterface* ReceiveImplForTesting() { - return receive_impl_.get(); - } - - private: - VideoMediaSendChannelInterface* send_impl() { return send_impl_.get(); } - VideoMediaReceiveChannelInterface* receive_impl() { - RTC_DCHECK(receive_impl_); - return receive_impl_.get(); - } - const VideoMediaSendChannelInterface* send_impl() const { - RTC_DCHECK(send_impl_); - return send_impl_.get(); - } - const VideoMediaReceiveChannelInterface* receive_impl() const { - return receive_impl_.get(); - } - - std::unique_ptr send_impl_; - std::unique_ptr receive_impl_; -}; - -} // namespace cricket - -#endif // MEDIA_BASE_MEDIA_CHANNEL_SHIM_H_ diff --git a/third_party/libwebrtc/media/base/media_engine.h b/third_party/libwebrtc/media/base/media_engine.h index 0d10248ed0b8..b3d74911656b 100644 --- a/third_party/libwebrtc/media/base/media_engine.h +++ b/third_party/libwebrtc/media/base/media_engine.h @@ -38,9 +38,6 @@ class Call; namespace cricket { -class VideoMediaChannel; -class VoiceMediaChannel; - // Checks that the scalability_mode value of each encoding is supported by at // least one video codec of the list. If the list is empty, no check is done. webrtc::RTCError CheckScalabilityModeValues( @@ -120,42 +117,6 @@ class VoiceEngineInterface : public RtpHeaderExtensionQueryInterface { return nullptr; } - // MediaChannel creation - // Creates a voice media channel. Returns NULL on failure. - virtual VoiceMediaChannel* CreateMediaChannel( - MediaChannel::Role role, - webrtc::Call* call, - const MediaConfig& config, - const AudioOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::AudioCodecPairId codec_pair_id) { - // For the case where a subclass overrides the deprecated method - // but not the replacement method, call the deprecated method. - // TODO(bugs.webrtc.org/13931): Remove default implementation - // when downstream has migrated to new API. 
- RTC_CHECK(!recursion_guard_); -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Wdeprecated-declarations" - RTC_LOG(LS_ERROR) - << "Override of deprecated declaration detected - please update!"; - return CreateMediaChannel(call, config, options, crypto_options); -#pragma clang diagnostic pop - } - - // Backwards compatible version - [[deprecated("Use version with role parameter")]] virtual VoiceMediaChannel* - CreateMediaChannel(webrtc::Call* call, - const MediaConfig& config, - const AudioOptions& options, - const webrtc::CryptoOptions& crypto_options) { - recursion_guard_ = true; - auto new_channel = - CreateMediaChannel(MediaChannel::Role::kBoth, call, config, options, - crypto_options, webrtc::AudioCodecPairId::Create()); - recursion_guard_ = false; - return new_channel; - } - virtual const std::vector& send_codecs() const = 0; virtual const std::vector& recv_codecs() const = 0; @@ -170,11 +131,6 @@ class VoiceEngineInterface : public RtpHeaderExtensionQueryInterface { virtual absl::optional GetAudioDeviceStats() = 0; - - private: - // Workaround variable for avoiding recursion between old and new APIs. - // TODO(bugs.webrtc.org/13931): Remove when old interface is gone. - bool recursion_guard_ = false; }; class VideoEngineInterface : public RtpHeaderExtensionQueryInterface { @@ -206,47 +162,6 @@ class VideoEngineInterface : public RtpHeaderExtensionQueryInterface { return nullptr; } - // Creates a video media channel. - // Returns NULL on failure. - virtual VideoMediaChannel* CreateMediaChannel( - MediaChannel::Role role, - webrtc::Call* call, - const MediaConfig& config, - const VideoOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) { - // For the case where a subclass overrides the deprecated method - // but not the replacement method, call the deprecated method. - // TODO(bugs.webrtc.org/13931): Remove default implementation - // when downstream has migrated to new API. - RTC_CHECK(!recursion_guard_); -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Wdeprecated-declarations" - RTC_LOG(LS_ERROR) - << "Override of deprecated declaration detected - please update!"; - return CreateMediaChannel(call, config, options, crypto_options, - video_bitrate_allocator_factory); -#pragma clang diagnostic pop - } - - // Creates a video media channel. - // Returns NULL on failure. - // TODO(bugs.webrtc.org/13931): Stop downstream usage of this function. - [[deprecated("Please specify the role")]] virtual VideoMediaChannel* - CreateMediaChannel( - webrtc::Call* call, - const MediaConfig& config, - const VideoOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) { - recursion_guard_ = true; - auto new_channel = - CreateMediaChannel(MediaChannel::Role::kBoth, call, config, options, - crypto_options, video_bitrate_allocator_factory); - recursion_guard_ = false; - return new_channel; - } - // Retrieve list of supported codecs. virtual std::vector send_codecs() const = 0; virtual std::vector recv_codecs() const = 0; @@ -261,11 +176,6 @@ class VideoEngineInterface : public RtpHeaderExtensionQueryInterface { RTC_DCHECK(include_rtx); return recv_codecs(); } - - private: - // Workaround variable for avoiding recursion between old and new APIs. - // TODO(bugs.webrtc.org/13931): Remove when old interface is gone. 
- bool recursion_guard_ = false; }; // MediaEngineInterface is an abstraction of a media engine which can be diff --git a/third_party/libwebrtc/media/base/media_engine_unittest.cc b/third_party/libwebrtc/media/base/media_engine_unittest.cc index 23203d110e57..b8db32a2d56f 100644 --- a/third_party/libwebrtc/media/base/media_engine_unittest.cc +++ b/third_party/libwebrtc/media/base/media_engine_unittest.cc @@ -83,68 +83,4 @@ class MostlyMockVoiceEngineInterface : public VoiceEngineInterface { (override)); }; -class OldStyleVoiceEngineInterface : public MostlyMockVoiceEngineInterface { - public: - using MostlyMockVoiceEngineInterface::CreateMediaChannel; - // Old style overrides the deprecated API only. - VoiceMediaChannel* CreateMediaChannel( - webrtc::Call* call, - const MediaConfig& config, - const AudioOptions& options, - const webrtc::CryptoOptions& crypto_options) override { - ++call_count; - return nullptr; - } - int call_count = 0; -}; - -class NewStyleVoiceEngineInterface : public MostlyMockVoiceEngineInterface { - // New style overrides the non-deprecated API. - VoiceMediaChannel* CreateMediaChannel( - MediaChannel::Role role, - webrtc::Call* call, - const MediaConfig& config, - const AudioOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::AudioCodecPairId codec_pair_id) override { - return nullptr; - } -}; - -TEST(MediaEngineTest, NewStyleApiCallsOldIfOverridden) { - OldStyleVoiceEngineInterface implementation_under_test; - MediaConfig config; - AudioOptions options; - webrtc::CryptoOptions crypto_options; - // Calling the old-style interface. - implementation_under_test.CreateMediaChannel(nullptr, config, options, - crypto_options); - EXPECT_EQ(implementation_under_test.call_count, 1); - // Calling the new-style interface redirects to the old-style interface. 
- implementation_under_test.CreateMediaChannel( - MediaChannel::Role::kBoth, nullptr, config, options, crypto_options, - webrtc::AudioCodecPairId::Create()); - EXPECT_EQ(implementation_under_test.call_count, 2); -} - -#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -TEST(MediaEngineTest, NoOverrideOfCreateCausesCrash) { - MostlyMockVoiceEngineInterface implementation_under_test; - MediaConfig config; - AudioOptions options; - webrtc::CryptoOptions crypto_options; -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Wdeprecated-declarations" - - EXPECT_DEATH(implementation_under_test.CreateMediaChannel( - nullptr, config, options, crypto_options), - "Check failed: !recursion_guard_"); -#pragma clang diagnostic pop - EXPECT_DEATH(implementation_under_test.CreateMediaChannel( - MediaChannel::Role::kBoth, nullptr, config, options, - crypto_options, webrtc::AudioCodecPairId::Create()), - "Check failed: !recursion_guard_"); -} -#endif - } // namespace cricket diff --git a/third_party/libwebrtc/media/base/stream_params.cc b/third_party/libwebrtc/media/base/stream_params.cc index 0fe1be6ac7e4..ac9daee200ea 100644 --- a/third_party/libwebrtc/media/base/stream_params.cc +++ b/third_party/libwebrtc/media/base/stream_params.cc @@ -183,16 +183,23 @@ void StreamParams::GetPrimarySsrcs(std::vector<uint32_t>* ssrcs) const { } } -void StreamParams::GetFidSsrcs(const std::vector<uint32_t>& primary_ssrcs, - std::vector<uint32_t>* fid_ssrcs) const { +void StreamParams::GetSecondarySsrcs( + const std::string& semantics, + const std::vector<uint32_t>& primary_ssrcs, + std::vector<uint32_t>* secondary_ssrcs) const { for (uint32_t primary_ssrc : primary_ssrcs) { - uint32_t fid_ssrc; - if (GetFidSsrc(primary_ssrc, &fid_ssrc)) { - fid_ssrcs->push_back(fid_ssrc); + uint32_t secondary_ssrc; + if (GetSecondarySsrc(semantics, primary_ssrc, &secondary_ssrc)) { + secondary_ssrcs->push_back(secondary_ssrc); } } } +void StreamParams::GetFidSsrcs(const std::vector<uint32_t>& primary_ssrcs, + std::vector<uint32_t>* fid_ssrcs) const { + return GetSecondarySsrcs(kFidSsrcGroupSemantics, primary_ssrcs, fid_ssrcs); +} + bool StreamParams::AddSecondarySsrc(const std::string& semantics, uint32_t primary_ssrc, uint32_t secondary_ssrc) { diff --git a/third_party/libwebrtc/media/base/stream_params.h b/third_party/libwebrtc/media/base/stream_params.h index 60c67a1a1cdc..89fc1554cc12 100644 --- a/third_party/libwebrtc/media/base/stream_params.h +++ b/third_party/libwebrtc/media/base/stream_params.h @@ -166,6 +166,14 @@ struct StreamParams { // the first SSRC otherwise. void GetPrimarySsrcs(std::vector<uint32_t>* ssrcs) const; + // Convenience to get all the secondary SSRCs for the given primary ssrcs + // of a particular semantic. + // If a given primary SSRC does not have a secondary SSRC, the list of + // secondary SSRCS will be smaller than the list of primary SSRCs. + void GetSecondarySsrcs(const std::string& semantic, + const std::vector<uint32_t>& primary_ssrcs, + std::vector<uint32_t>* fid_ssrcs) const; + // Convenience to get all the FID SSRCs for the given primary ssrcs. // If a given primary SSRC does not have a FID SSRC, the list of FID // SSRCS will be smaller than the list of primary SSRCs.
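As an illustrative sketch (not part of the vendored patch; it assumes only the cricket::StreamParams API declared in the stream_params hunk above, and the SSRC values are arbitrary placeholders), the generalized secondary-SSRC helpers can be exercised like this:

// Sketch only: pair one primary SSRC with an RTX (FID) and a FlexFEC (FEC-FR)
// SSRC, then read the pairs back through the new semantic-agnostic helper.
#include <cstdint>
#include <vector>
#include "media/base/stream_params.h"

void SecondarySsrcSketch() {
  cricket::StreamParams sp = cricket::StreamParams::CreateLegacy(11111);
  sp.AddFidSsrc(11111, 22222);    // RTX retransmission SSRC (FID group)
  sp.AddFecFrSsrc(11111, 33333);  // FlexFEC SSRC (FEC-FR group)

  std::vector<uint32_t> primary_ssrcs;
  sp.GetPrimarySsrcs(&primary_ssrcs);

  // GetFidSsrcs is now a thin wrapper over GetSecondarySsrcs with the FID
  // semantic.
  std::vector<uint32_t> rtx_ssrcs;
  sp.GetSecondarySsrcs(cricket::kFidSsrcGroupSemantics, primary_ssrcs,
                       &rtx_ssrcs);

  // The same helper serves FEC-FR groups, which is what the updated
  // ValidateStreamParams in webrtc_video_engine.cc iterates over as well.
  std::vector<uint32_t> fec_ssrcs;
  if (sp.has_ssrc_group(cricket::kFecFrSsrcGroupSemantics)) {
    sp.GetSecondarySsrcs(cricket::kFecFrSsrcGroupSemantics, primary_ssrcs,
                         &fec_ssrcs);
  }
}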
diff --git a/third_party/libwebrtc/media/base/test_utils.cc b/third_party/libwebrtc/media/base/test_utils.cc index a6d5f61c17d6..1b288735be3f 100644 --- a/third_party/libwebrtc/media/base/test_utils.cc +++ b/third_party/libwebrtc/media/base/test_utils.cc @@ -35,26 +35,20 @@ cricket::StreamParams CreateSimWithRtxStreamParams( const std::vector& rtx_ssrcs) { cricket::StreamParams sp = CreateSimStreamParams(cname, ssrcs); for (size_t i = 0; i < ssrcs.size(); ++i) { - sp.ssrcs.push_back(rtx_ssrcs[i]); - std::vector fid_ssrcs; - fid_ssrcs.push_back(ssrcs[i]); - fid_ssrcs.push_back(rtx_ssrcs[i]); - cricket::SsrcGroup fid_group(cricket::kFidSsrcGroupSemantics, fid_ssrcs); - sp.ssrc_groups.push_back(fid_group); + sp.AddFidSsrc(ssrcs[i], rtx_ssrcs[i]); } return sp; } +// There should be one fec ssrc per ssrc. cricket::StreamParams CreatePrimaryWithFecFrStreamParams( const std::string& cname, uint32_t primary_ssrc, uint32_t flexfec_ssrc) { cricket::StreamParams sp; - cricket::SsrcGroup sg(cricket::kFecFrSsrcGroupSemantics, - {primary_ssrc, flexfec_ssrc}); sp.ssrcs = {primary_ssrc}; - sp.ssrc_groups.push_back(sg); sp.cname = cname; + sp.AddFecFrSsrc(primary_ssrc, flexfec_ssrc); return sp; } diff --git a/third_party/libwebrtc/media/codec_gn/moz.build b/third_party/libwebrtc/media/codec_gn/moz.build index 44eb62e22127..77cff5bc362a 100644 --- a/third_party/libwebrtc/media/codec_gn/moz.build +++ b/third_party/libwebrtc/media/codec_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/media/engine/fake_webrtc_call.h b/third_party/libwebrtc/media/engine/fake_webrtc_call.h index 7c8b93dde6b5..92ff1b84d7d2 100644 --- a/third_party/libwebrtc/media/engine/fake_webrtc_call.h +++ b/third_party/libwebrtc/media/engine/fake_webrtc_call.h @@ -348,6 +348,8 @@ class FakeFlexfecReceiveStream final : public webrtc::FlexfecReceiveStream { uint32_t remote_ssrc() const { return config_.rtp.remote_ssrc; } + const webrtc::ReceiveStatistics* GetStats() const override { return nullptr; } + private: void OnRtpPacket(const webrtc::RtpPacketReceived& packet) override; diff --git a/third_party/libwebrtc/media/engine/null_webrtc_video_engine.h b/third_party/libwebrtc/media/engine/null_webrtc_video_engine.h index 5efefd498f57..f94cb43e750c 100644 --- a/third_party/libwebrtc/media/engine/null_webrtc_video_engine.h +++ b/third_party/libwebrtc/media/engine/null_webrtc_video_engine.h @@ -24,8 +24,6 @@ class Call; namespace cricket { -class VideoMediaChannel; - // Video engine implementation that does nothing and can be used in // CompositeMediaEngine. 
class NullWebRtcVideoEngine : public VideoEngineInterface { @@ -49,17 +47,6 @@ class NullWebRtcVideoEngine : public VideoEngineInterface { const override { return {}; } - - VideoMediaChannel* CreateMediaChannel( - MediaChannel::Role role, - webrtc::Call* call, - const MediaConfig& config, - const VideoOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) - override { - return nullptr; - } }; } // namespace cricket diff --git a/third_party/libwebrtc/media/engine/webrtc_media_engine_unittest.cc b/third_party/libwebrtc/media/engine/webrtc_media_engine_unittest.cc index 79efea4e9c96..4615f03deb6a 100644 --- a/third_party/libwebrtc/media/engine/webrtc_media_engine_unittest.cc +++ b/third_party/libwebrtc/media/engine/webrtc_media_engine_unittest.cc @@ -11,6 +11,7 @@ #include "media/engine/webrtc_media_engine.h" #include +#include #include #include "media/engine/webrtc_media_engine_defaults.h" diff --git a/third_party/libwebrtc/media/engine/webrtc_video_engine.cc b/third_party/libwebrtc/media/engine/webrtc_video_engine.cc index bbda10dbd6b3..f041dce5efc7 100644 --- a/third_party/libwebrtc/media/engine/webrtc_video_engine.cc +++ b/third_party/libwebrtc/media/engine/webrtc_video_engine.cc @@ -51,6 +51,7 @@ #include "media/base/rid_description.h" #include "media/base/rtp_utils.h" #include "media/engine/webrtc_media_engine.h" +#include "modules/rtp_rtcp/include/receive_statistics.h" #include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/include/rtcp_statistics.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" @@ -192,13 +193,9 @@ std::vector GetPayloadTypesAndDefaultCodecs( supported_formats.push_back(webrtc::SdpVideoFormat(kRedCodecName)); supported_formats.push_back(webrtc::SdpVideoFormat(kUlpfecCodecName)); - // flexfec-03 is supported as - // - receive codec unless WebRTC-FlexFEC-03-Advertised is disabled - // - send codec if WebRTC-FlexFEC-03-Advertised is enabled - if ((is_decoder_factory && - !IsDisabled(trials, "WebRTC-FlexFEC-03-Advertised")) || - (!is_decoder_factory && - IsEnabled(trials, "WebRTC-FlexFEC-03-Advertised"))) { + // flexfec-03 is always supported as receive codec and as send codec + // only if WebRTC-FlexFEC-03-Advertised is enabled + if (is_decoder_factory || IsEnabled(trials, "WebRTC-FlexFEC-03-Advertised")) { webrtc::SdpVideoFormat flexfec_format(kFlexfecCodecName); // This value is currently arbitrarily set to 10 seconds. (The unit // is microseconds.) 
This parameter MUST be present in the SDP, but @@ -310,30 +307,37 @@ static bool ValidateStreamParams(const StreamParams& sp) { std::vector primary_ssrcs; sp.GetPrimarySsrcs(&primary_ssrcs); - std::vector rtx_ssrcs; - sp.GetFidSsrcs(primary_ssrcs, &rtx_ssrcs); - for (uint32_t rtx_ssrc : rtx_ssrcs) { - bool rtx_ssrc_present = false; - for (uint32_t sp_ssrc : sp.ssrcs) { - if (sp_ssrc == rtx_ssrc) { - rtx_ssrc_present = true; - break; + for (const auto& semantic : + {kFidSsrcGroupSemantics, kFecFrSsrcGroupSemantics}) { + if (!sp.has_ssrc_group(semantic)) { + continue; + } + std::vector secondary_ssrcs; + sp.GetSecondarySsrcs(semantic, primary_ssrcs, &secondary_ssrcs); + for (uint32_t secondary_ssrc : secondary_ssrcs) { + bool secondary_ssrc_present = false; + for (uint32_t sp_ssrc : sp.ssrcs) { + if (sp_ssrc == secondary_ssrc) { + secondary_ssrc_present = true; + break; + } + } + if (!secondary_ssrc_present) { + RTC_LOG(LS_ERROR) << "SSRC '" << secondary_ssrc + << "' missing from StreamParams ssrcs with semantics " + << semantic << ": " << sp.ToString(); + return false; } } - if (!rtx_ssrc_present) { - RTC_LOG(LS_ERROR) << "RTX SSRC '" << rtx_ssrc - << "' missing from StreamParams ssrcs: " - << sp.ToString(); + if (!secondary_ssrcs.empty() && + primary_ssrcs.size() != secondary_ssrcs.size()) { + RTC_LOG(LS_ERROR) + << semantic + << " secondary SSRCs exist, but don't cover all SSRCs (unsupported): " + << sp.ToString(); return false; } } - if (!rtx_ssrcs.empty() && primary_ssrcs.size() != rtx_ssrcs.size()) { - RTC_LOG(LS_ERROR) - << "RTX SSRCs exist, but don't cover all SSRCs (unsupported): " - << sp.ToString(); - return false; - } - return true; } @@ -348,18 +352,6 @@ bool IsCodecDisabledForSimulcast(bool legacy_scalability_mode, return false; } -// Returns its smallest positive argument. If neither argument is positive, -// returns an arbitrary nonpositive value. -int MinPositive(int a, int b) { - if (a <= 0) { - return b; - } - if (b <= 0) { - return a; - } - return std::min(a, b); -} - bool IsLayerActive(const webrtc::RtpEncodingParameters& layer) { return layer.active && (!layer.max_bitrate_bps || *layer.max_bitrate_bps > 0) && @@ -754,29 +746,6 @@ WebRtcVideoEngine::CreateReceiveChannel( call, config, options, crypto_options, decoder_factory_.get()); } -VideoMediaChannel* WebRtcVideoEngine::CreateMediaChannel( - MediaChannel::Role role, - webrtc::Call* call, - const MediaConfig& config, - const VideoOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) { - RTC_LOG(LS_INFO) << "CreateMediaChannel. 
Options: " << options.ToString(); - std::unique_ptr send_channel; - std::unique_ptr receive_channel; - if (role == MediaChannel::Role::kSend || role == MediaChannel::Role::kBoth) { - send_channel = CreateSendChannel(call, config, options, crypto_options, - video_bitrate_allocator_factory); - } - if (role == MediaChannel::Role::kReceive || - role == MediaChannel::Role::kBoth) { - receive_channel = - CreateReceiveChannel(call, config, options, crypto_options); - } - return new VideoMediaShimChannel(std::move(send_channel), - std::move(receive_channel)); -} - std::vector WebRtcVideoEngine::send_codecs(bool include_rtx) const { return GetPayloadTypesAndDefaultCodecs(encoder_factory_.get(), /*is_decoder_factory=*/false, @@ -1003,7 +972,7 @@ std::vector WebRtcVideoSendChannel::SelectSendVideoCodecs( } bool WebRtcVideoSendChannel::GetChangedSendParameters( - const VideoSendParameters& params, + const VideoSenderParameters& params, ChangedSendParameters* changed_params) const { if (!ValidateCodecFormats(params.codecs) || !ValidateRtpExtensions(params.extensions, send_rtp_extensions_)) { @@ -1076,7 +1045,7 @@ bool WebRtcVideoSendChannel::GetChangedSendParameters( } bool WebRtcVideoSendChannel::SetSendParameters( - const VideoSendParameters& params) { + const VideoSenderParameters& params) { RTC_DCHECK_RUN_ON(&thread_checker_); TRACE_EVENT0("webrtc", "WebRtcVideoSendChannel::SetSendParameters"); RTC_LOG(LS_INFO) << "SetSendParameters: " << params.ToString(); @@ -1300,7 +1269,7 @@ webrtc::RTCError WebRtcVideoSendChannel::SetRtpSendParameters( return it->second->SetRtpParameters(parameters, std::move(callback)); } -absl::optional WebRtcVideoSendChannel::GetSendCodec() { +absl::optional WebRtcVideoSendChannel::GetSendCodec() const { RTC_DCHECK_RUN_ON(&thread_checker_); if (!send_codec()) { RTC_LOG(LS_VERBOSE) << "GetSendCodec: No send codec set."; @@ -1370,7 +1339,7 @@ bool WebRtcVideoSendChannel::AddSendStream(const StreamParams& sp) { for (uint32_t used_ssrc : sp.ssrcs) send_ssrcs_.insert(used_ssrc); - webrtc::VideoSendStream::Config config(this); + webrtc::VideoSendStream::Config config(transport()); for (const RidDescription& rid : sp.rids()) { config.rtp.rids.push_back(rid.rid); @@ -1602,18 +1571,6 @@ void WebRtcVideoSendChannel::SetVideoCodecSwitchingEnabled(bool enabled) { } } -bool WebRtcVideoSendChannel::SendRtp(const uint8_t* data, - size_t len, - const webrtc::PacketOptions& options) { - MediaChannelUtil::SendRtp(data, len, options); - return true; -} - -bool WebRtcVideoSendChannel::SendRtcp(const uint8_t* data, size_t len) { - MediaChannelUtil::SendRtcp(data, len); - return true; -} - WebRtcVideoSendChannel::WebRtcVideoSendStream::VideoSendStreamParameters:: VideoSendStreamParameters( webrtc::VideoSendStream::Config config, @@ -1637,7 +1594,7 @@ WebRtcVideoSendChannel::WebRtcVideoSendStream::WebRtcVideoSendStream( const absl::optional>& rtp_extensions, // TODO(deadbeef): Don't duplicate information between send_params, // rtp_extensions, options, etc. - const VideoSendParameters& send_params) + const VideoSenderParameters& send_params) : worker_thread_(call->worker_thread()), ssrcs_(sp.ssrcs), ssrc_groups_(sp.ssrc_groups), @@ -2073,29 +2030,23 @@ WebRtcVideoSendChannel::WebRtcVideoSendStream::CreateVideoEncoderConfig( } // parameters_.max_bitrate comes from the max bitrate set at the SDP - // (m-section) level with the attribute "b=AS." Note that we override this - // value below if the RtpParameters max bitrate set with - // RtpSender::SetParameters has a lower value. 
+ // (m-section) level with the attribute "b=AS." Note that stream max bitrate + // is the RtpSender's max bitrate, but each individual encoding may also have + // its own max bitrate specified by SetParameters. int stream_max_bitrate = parameters_.max_bitrate_bps; - // When simulcast is enabled (when there are multiple encodings), - // encodings[i].max_bitrate_bps will be enforced by - // encoder_config.simulcast_layers[i].max_bitrate_bps. Otherwise, it's - // enforced by stream_max_bitrate, taking the minimum of the two maximums - // (one coming from SDP, the other coming from RtpParameters). - if (rtp_parameters_.encodings[0].max_bitrate_bps && - rtp_parameters_.encodings.size() == 1) { - stream_max_bitrate = - MinPositive(*(rtp_parameters_.encodings[0].max_bitrate_bps), - parameters_.max_bitrate_bps); - } - // The codec max bitrate comes from the "x-google-max-bitrate" parameter - // attribute set in the SDP for a specific codec. As done in - // WebRtcVideoSendChannel::SetSendParameters, this value does not override the - // stream max_bitrate set above. + // attribute set in the SDP for a specific codec. It only has an effect if + // max bitrate is not specified through other means. + bool encodings_has_max_bitrate = false; + for (const auto& encoding : rtp_parameters_.encodings) { + if (encoding.active && encoding.max_bitrate_bps.value_or(0) > 0) { + encodings_has_max_bitrate = true; + break; + } + } int codec_max_bitrate_kbps; if (codec.GetParam(kCodecParamMaxBitrate, &codec_max_bitrate_kbps) && - stream_max_bitrate == -1) { + stream_max_bitrate == -1 && !encodings_has_max_bitrate) { stream_max_bitrate = codec_max_bitrate_kbps * 1000; } encoder_config.max_bitrate_bps = stream_max_bitrate; @@ -2197,14 +2148,9 @@ void WebRtcVideoSendChannel::WebRtcVideoSendStream::ReconfigureEncoder( // layers specified by `scalability_mode`), the number of streams can change. bool num_streams_changed = parameters_.encoder_config.number_of_streams != encoder_config.number_of_streams; - bool scalability_mode_used = !codec_settings.codec.scalability_modes.empty(); - bool scalability_modes = absl::c_any_of( - rtp_parameters_.encodings, - [](const auto& e) { return e.scalability_mode.has_value(); }); - parameters_.encoder_config = std::move(encoder_config); - if (num_streams_changed && (scalability_mode_used != scalability_modes)) { + if (num_streams_changed) { // The app is switching between legacy and standard modes, recreate instead // of reconfiguring to avoid number of streams not matching in lower layers. 
RecreateWebRtcStream(); @@ -2608,7 +2554,7 @@ WebRtcVideoReceiveChannel::GetDefaultRtpReceiveParameters() const { } bool WebRtcVideoReceiveChannel::GetChangedRecvParameters( - const VideoRecvParameters& params, + const VideoReceiverParameters& params, ChangedRecvParameters* changed_params) const { if (!ValidateCodecFormats(params.codecs) || !ValidateRtpExtensions(params.extensions, recv_rtp_extensions_)) { @@ -2663,7 +2609,7 @@ bool WebRtcVideoReceiveChannel::GetChangedRecvParameters( } bool WebRtcVideoReceiveChannel::SetRecvParameters( - const VideoRecvParameters& params) { + const VideoReceiverParameters& params) { RTC_DCHECK_RUN_ON(&thread_checker_); TRACE_EVENT0("webrtc", "WebRtcVideoReceiveChannel::SetRecvParameters"); RTC_LOG(LS_INFO) << "SetRecvParameters: " << params.ToString(); @@ -2797,8 +2743,9 @@ bool WebRtcVideoReceiveChannel::AddRecvStream(const StreamParams& sp, for (uint32_t used_ssrc : sp.ssrcs) receive_ssrcs_.insert(used_ssrc); - webrtc::VideoReceiveStreamInterface::Config config(this, decoder_factory_); - webrtc::FlexfecReceiveStream::Config flexfec_config(this); + webrtc::VideoReceiveStreamInterface::Config config(transport(), + decoder_factory_); + webrtc::FlexfecReceiveStream::Config flexfec_config(transport()); ConfigureReceiverRtp(&config, &flexfec_config, sp); config.crypto_options = crypto_options_; @@ -3652,6 +3599,29 @@ WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::GetVideoReceiverInfo( info.packets_received += stats.rtx_rtp_stats->packet_counter.packets; } + if (flexfec_stream_) { + const webrtc::ReceiveStatistics* fec_stats = flexfec_stream_->GetStats(); + if (fec_stats) { + const webrtc::StreamStatistician* statistican = + fec_stats->GetStatistician(flexfec_config_.rtp.remote_ssrc); + if (statistican) { + const webrtc::RtpReceiveStats fec_rtp_stats = statistican->GetStats(); + info.fec_packets_received = fec_rtp_stats.packet_counter.packets; + // TODO(bugs.webrtc.org/15250): implement fecPacketsDiscarded. + info.fec_bytes_received = fec_rtp_stats.packet_counter.payload_bytes; + // FEC information gets added to primary counters. 
+ info.payload_bytes_received += + fec_rtp_stats.packet_counter.payload_bytes; + info.header_and_padding_bytes_received += + fec_rtp_stats.packet_counter.header_bytes + + fec_rtp_stats.packet_counter.padding_bytes; + info.packets_received += fec_rtp_stats.packet_counter.packets; + } else { + info.fec_packets_received = 0; + } + } + } + if (log_stats) RTC_LOG(LS_INFO) << stats.ToString(rtc::TimeMillis()); diff --git a/third_party/libwebrtc/media/engine/webrtc_video_engine.h b/third_party/libwebrtc/media/engine/webrtc_video_engine.h index e210f040bf1d..4ad6a3ac700c 100644 --- a/third_party/libwebrtc/media/engine/webrtc_video_engine.h +++ b/third_party/libwebrtc/media/engine/webrtc_video_engine.h @@ -59,7 +59,6 @@ #include "media/base/codec.h" #include "media/base/media_channel.h" #include "media/base/media_channel_impl.h" -#include "media/base/media_channel_shim.h" #include "media/base/media_config.h" #include "media/base/media_engine.h" #include "media/base/stream_params.h" @@ -117,15 +116,6 @@ class WebRtcVideoEngine : public VideoEngineInterface { const VideoOptions& options, const webrtc::CryptoOptions& crypto_options) override; - VideoMediaChannel* CreateMediaChannel( - MediaChannel::Role role, - webrtc::Call* call, - const MediaConfig& config, - const VideoOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) - override; - std::vector send_codecs() const override { return send_codecs(true); } @@ -167,7 +157,6 @@ struct VideoCodecSettings { class WebRtcVideoSendChannel : public MediaChannelUtil, public VideoMediaSendChannelInterface, - public webrtc::Transport, public webrtc::EncoderSwitchRequestCallback { public: WebRtcVideoSendChannel( @@ -201,13 +190,13 @@ class WebRtcVideoSendChannel : public MediaChannelUtil, // Common functions between sender and receiver void SetInterface(MediaChannelNetworkInterface* iface) override; // VideoMediaSendChannelInterface implementation - bool SetSendParameters(const VideoSendParameters& params) override; + bool SetSendParameters(const VideoSenderParameters& params) override; webrtc::RTCError SetRtpSendParameters( uint32_t ssrc, const webrtc::RtpParameters& parameters, webrtc::SetParametersCallback callback) override; webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const override; - absl::optional GetSendCodec() override; + absl::optional GetSendCodec() const override; bool SetSend(bool send) override; bool SetVideoSend( uint32_t ssrc, @@ -319,7 +308,7 @@ class WebRtcVideoSendChannel : public MediaChannelUtil, absl::optional rtcp_mode; }; - bool GetChangedSendParameters(const VideoSendParameters& params, + bool GetChangedSendParameters(const VideoSenderParameters& params, ChangedSendParameters* changed_params) const RTC_EXCLUSIVE_LOCKS_REQUIRED(thread_checker_); bool ApplyChangedParams(const ChangedSendParameters& changed_params); @@ -347,7 +336,7 @@ class WebRtcVideoSendChannel : public MediaChannelUtil, int max_bitrate_bps, const absl::optional& codec_settings, const absl::optional>& rtp_extensions, - const VideoSendParameters& send_params); + const VideoSenderParameters& send_params); ~WebRtcVideoSendStream(); void SetSendParameters(const ChangedSendParameters& send_params); @@ -452,11 +441,6 @@ class WebRtcVideoSendChannel : public MediaChannelUtil, void Construct(webrtc::Call* call, WebRtcVideoEngine* engine); - bool SendRtp(const uint8_t* data, - size_t len, - const webrtc::PacketOptions& options) override; - bool SendRtcp(const uint8_t* data, size_t 
len) override; - // Get all codecs that are compatible with the receiver. std::vector SelectSendVideoCodecs( const std::vector& remote_mapped_codecs) const @@ -546,9 +530,9 @@ class WebRtcVideoSendChannel : public MediaChannelUtil, webrtc::BitrateConstraints bitrate_config_ RTC_GUARDED_BY(thread_checker_); // TODO(deadbeef): Don't duplicate information between // send_params/recv_params, rtp_extensions, options, etc. - VideoSendParameters send_params_ RTC_GUARDED_BY(thread_checker_); + VideoSenderParameters send_params_ RTC_GUARDED_BY(thread_checker_); VideoOptions default_send_options_ RTC_GUARDED_BY(thread_checker_); - VideoRecvParameters recv_params_ RTC_GUARDED_BY(thread_checker_); + VideoReceiverParameters recv_params_ RTC_GUARDED_BY(thread_checker_); int64_t last_send_stats_log_ms_ RTC_GUARDED_BY(thread_checker_); int64_t last_receive_stats_log_ms_ RTC_GUARDED_BY(thread_checker_); const bool discard_unknown_ssrc_packets_ RTC_GUARDED_BY(thread_checker_); @@ -584,8 +568,7 @@ class WebRtcVideoSendChannel : public MediaChannelUtil, }; class WebRtcVideoReceiveChannel : public MediaChannelUtil, - public VideoMediaReceiveChannelInterface, - public webrtc::Transport { + public VideoMediaReceiveChannelInterface { public: WebRtcVideoReceiveChannel(webrtc::Call* call, const MediaConfig& config, @@ -603,22 +586,11 @@ class WebRtcVideoReceiveChannel : public MediaChannelUtil, RTC_CHECK_NOTREACHED(); return nullptr; } - // Functions imported from MediaChannelUtil - bool SendRtp(const uint8_t* data, - size_t len, - const webrtc::PacketOptions& options) override { - MediaChannelUtil::SendRtp(data, len, options); - return true; - } - bool SendRtcp(const uint8_t* data, size_t len) override { - MediaChannelUtil::SendRtcp(data, len); - return true; - } // Common functions between sender and receiver void SetInterface(MediaChannelNetworkInterface* iface) override; // VideoMediaReceiveChannelInterface implementation - bool SetRecvParameters(const VideoRecvParameters& params) override; + bool SetRecvParameters(const VideoReceiverParameters& params) override; webrtc::RtpParameters GetRtpReceiveParameters(uint32_t ssrc) const override; webrtc::RtpParameters GetDefaultRtpReceiveParameters() const override; void SetReceive(bool receive) override; @@ -814,7 +786,7 @@ class WebRtcVideoReceiveChannel : public MediaChannelUtil, RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker thread_checker_; bool receiving_ RTC_GUARDED_BY(&thread_checker_); }; - bool GetChangedRecvParameters(const VideoRecvParameters& params, + bool GetChangedRecvParameters(const VideoReceiverParameters& params, ChangedRecvParameters* changed_params) const RTC_EXCLUSIVE_LOCKS_REQUIRED(thread_checker_); @@ -890,9 +862,9 @@ class WebRtcVideoReceiveChannel : public MediaChannelUtil, webrtc::BitrateConstraints bitrate_config_ RTC_GUARDED_BY(thread_checker_); // TODO(deadbeef): Don't duplicate information between // send_params/recv_params, rtp_extensions, options, etc. 
- VideoSendParameters send_params_ RTC_GUARDED_BY(thread_checker_); + VideoSenderParameters send_params_ RTC_GUARDED_BY(thread_checker_); VideoOptions default_send_options_ RTC_GUARDED_BY(thread_checker_); - VideoRecvParameters recv_params_ RTC_GUARDED_BY(thread_checker_); + VideoReceiverParameters recv_params_ RTC_GUARDED_BY(thread_checker_); int64_t last_receive_stats_log_ms_ RTC_GUARDED_BY(thread_checker_); const bool discard_unknown_ssrc_packets_ RTC_GUARDED_BY(thread_checker_); // This is a stream param that comes from the remote description, but wasn't diff --git a/third_party/libwebrtc/media/engine/webrtc_video_engine_unittest.cc b/third_party/libwebrtc/media/engine/webrtc_video_engine_unittest.cc index 3f6cd2572c83..be47a5afa49a 100644 --- a/third_party/libwebrtc/media/engine/webrtc_video_engine_unittest.cc +++ b/third_party/libwebrtc/media/engine/webrtc_video_engine_unittest.cc @@ -55,6 +55,7 @@ #include "media/base/fake_frame_source.h" #include "media/base/fake_network_interface.h" #include "media/base/fake_video_renderer.h" +#include "media/base/media_channel.h" #include "media/base/media_constants.h" #include "media/base/rtp_utils.h" #include "media/base/test_utils.h" @@ -382,10 +383,13 @@ class WebRtcVideoEngineTest : public ::testing::Test { // present. cricket::VideoCodec GetEngineCodec(const std::string& name) const; void AddSupportedVideoCodecType(const std::string& name); - VideoMediaChannel* SetSendParamsWithAllSupportedCodecs(); + std::unique_ptr + SetSendParamsWithAllSupportedCodecs(); - VideoMediaChannel* SetRecvParamsWithSupportedCodecs( - const std::vector& codecs); + std::unique_ptr + SetRecvParamsWithAllSupportedCodecs(); + std::unique_ptr + SetRecvParamsWithSupportedCodecs(const std::vector& codecs); void ExpectRtpCapabilitySupport(const char* uri, bool supported) const; @@ -518,14 +522,12 @@ TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionBeforeCapturer) { AddSupportedVideoCodecType("VP8"); - std::unique_ptr channel( - SetSendParamsWithAllSupportedCodecs()); - auto send_channel = channel->AsVideoSendChannel(); + auto send_channel = SetSendParamsWithAllSupportedCodecs(); EXPECT_TRUE(send_channel->AddSendStream(StreamParams::CreateLegacy(kSsrc))); // Add CVO extension. const int id = 1; - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.extensions.push_back( RtpExtension(RtpExtension::kVideoRotationUri, id)); @@ -556,12 +558,10 @@ TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionBeforeAddSendStream) { AddSupportedVideoCodecType("VP8"); - std::unique_ptr channel( - SetSendParamsWithAllSupportedCodecs()); - auto send_channel = channel->AsVideoSendChannel(); + auto send_channel = SetSendParamsWithAllSupportedCodecs(); // Add CVO extension. 
const int id = 1; - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.extensions.push_back( RtpExtension(RtpExtension::kVideoRotationUri, id)); @@ -572,7 +572,7 @@ TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionBeforeAddSendStream) { EXPECT_CALL( video_source, AddOrUpdateSink(_, Field(&rtc::VideoSinkWants::rotation_applied, false))); - EXPECT_TRUE(channel->SetVideoSend(kSsrc, nullptr, &video_source)); + EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, nullptr, &video_source)); } TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionAfterCapturer) { @@ -581,9 +581,7 @@ TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionAfterCapturer) { AddSupportedVideoCodecType("VP8"); AddSupportedVideoCodecType("VP9"); - std::unique_ptr channel( - SetSendParamsWithAllSupportedCodecs()); - auto send_channel = channel->AsVideoSendChannel(); + auto send_channel = SetSendParamsWithAllSupportedCodecs(); EXPECT_TRUE(send_channel->AddSendStream(StreamParams::CreateLegacy(kSsrc))); @@ -598,7 +596,7 @@ TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionAfterCapturer) { // Add CVO extension. const int id = 1; - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("VP9")); parameters.extensions.push_back( @@ -624,11 +622,10 @@ TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionAfterCapturer) { TEST_F(WebRtcVideoEngineTest, SetSendFailsBeforeSettingCodecs) { AddSupportedVideoCodecType("VP8"); - std::unique_ptr channel(engine_.CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, call_.get(), GetMediaConfig(), - VideoOptions(), webrtc::CryptoOptions(), - video_bitrate_allocator_factory_.get())); - auto send_channel = channel->AsVideoSendChannel(); + std::unique_ptr send_channel = + engine_.CreateSendChannel(call_.get(), GetMediaConfig(), VideoOptions(), + webrtc::CryptoOptions(), + video_bitrate_allocator_factory_.get()); EXPECT_TRUE(send_channel->AddSendStream(StreamParams::CreateLegacy(123))); @@ -638,28 +635,29 @@ TEST_F(WebRtcVideoEngineTest, SetSendFailsBeforeSettingCodecs) { << "Channel should be stoppable even without set codecs."; } -TEST_F(WebRtcVideoEngineTest, GetStatsWithoutSendCodecsSetDoesNotCrash) { +TEST_F(WebRtcVideoEngineTest, GetStatsWithoutCodecsSetDoesNotCrash) { AddSupportedVideoCodecType("VP8"); - std::unique_ptr channel(engine_.CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, call_.get(), GetMediaConfig(), - VideoOptions(), webrtc::CryptoOptions(), - video_bitrate_allocator_factory_.get())); - auto send_channel = channel->AsVideoSendChannel(); - + std::unique_ptr send_channel = + engine_.CreateSendChannel(call_.get(), GetMediaConfig(), VideoOptions(), + webrtc::CryptoOptions(), + video_bitrate_allocator_factory_.get()); EXPECT_TRUE(send_channel->AddSendStream(StreamParams::CreateLegacy(123))); VideoMediaSendInfo send_info; + send_channel->GetStats(&send_info); + + std::unique_ptr receive_channel = + engine_.CreateReceiveChannel(call_.get(), GetMediaConfig(), + VideoOptions(), webrtc::CryptoOptions()); + EXPECT_TRUE(receive_channel->AddRecvStream(StreamParams::CreateLegacy(123))); VideoMediaReceiveInfo receive_info; - channel->GetSendStats(&send_info); - channel->GetReceiveStats(&receive_info); + receive_channel->GetStats(&receive_info); } TEST_F(WebRtcVideoEngineTest, UseFactoryForVp8WhenSupported) { AddSupportedVideoCodecType("VP8"); - std::unique_ptr channel( 
- SetSendParamsWithAllSupportedCodecs()); - auto send_channel = channel->AsVideoSendChannel(); + auto send_channel = SetSendParamsWithAllSupportedCodecs(); send_channel->OnReadyToSend(true); @@ -682,7 +680,7 @@ TEST_F(WebRtcVideoEngineTest, UseFactoryForVp8WhenSupported) { // Setting codecs of the same type should not reallocate any encoders // (expecting a no-op). - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); EXPECT_TRUE(send_channel->SetSendParameters(parameters)); EXPECT_EQ(num_created_encoders, encoder_factory_->GetNumCreatedEncoders()); @@ -738,9 +736,7 @@ TEST_F(WebRtcVideoEngineTest, RtxCodecAddedForH264Codec) { TEST_F(WebRtcVideoEngineTest, CanConstructDecoderForVp9EncoderFactory) { AddSupportedVideoCodecType("VP9"); - std::unique_ptr channel( - SetSendParamsWithAllSupportedCodecs()); - auto receive_channel = channel->AsVideoReceiveChannel(); + auto receive_channel = SetRecvParamsWithAllSupportedCodecs(); EXPECT_TRUE(receive_channel->AddRecvStream( cricket::StreamParams::CreateLegacy(kSsrc))); @@ -751,9 +747,7 @@ TEST_F(WebRtcVideoEngineTest, PropagatesInputFrameTimestamp) { AddSupportedVideoCodecType("VP8"); FakeCall* fake_call = new FakeCall(); call_.reset(fake_call); - std::unique_ptr channel( - SetSendParamsWithAllSupportedCodecs()); - auto send_channel = channel->AsVideoSendChannel(); + auto send_channel = SetSendParamsWithAllSupportedCodecs(); EXPECT_TRUE( send_channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc))); @@ -762,7 +756,7 @@ TEST_F(WebRtcVideoEngineTest, PropagatesInputFrameTimestamp) { cricket::FakeFrameSource frame_source(1280, 720, rtc::kNumMicrosecsPerSec / 60); EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, nullptr, &frame_forwarder)); - channel->SetSend(true); + send_channel->SetSend(true); FakeVideoSendStream* stream = fake_call->GetVideoSendStreams()[0]; @@ -866,13 +860,13 @@ void WebRtcVideoEngineTest::AddSupportedVideoCodecType( decoder_factory_->AddSupportedVideoCodecType(name); } -VideoMediaChannel* +std::unique_ptr WebRtcVideoEngineTest::SetSendParamsWithAllSupportedCodecs() { - VideoMediaChannel* channel = engine_.CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, call_.get(), GetMediaConfig(), - VideoOptions(), webrtc::CryptoOptions(), - video_bitrate_allocator_factory_.get()); - cricket::VideoSendParameters parameters; + std::unique_ptr channel = + engine_.CreateSendChannel(call_.get(), GetMediaConfig(), VideoOptions(), + webrtc::CryptoOptions(), + video_bitrate_allocator_factory_.get()); + cricket::VideoSenderParameters parameters; // We need to look up the codec in the engine to get the correct payload type. 
for (const webrtc::SdpVideoFormat& format : encoder_factory_->GetSupportedFormats()) { @@ -887,19 +881,33 @@ WebRtcVideoEngineTest::SetSendParamsWithAllSupportedCodecs() { return channel; } -VideoMediaChannel* WebRtcVideoEngineTest::SetRecvParamsWithSupportedCodecs( +std::unique_ptr +WebRtcVideoEngineTest::SetRecvParamsWithSupportedCodecs( const std::vector& codecs) { - VideoMediaChannel* channel = engine_.CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, call_.get(), GetMediaConfig(), - VideoOptions(), webrtc::CryptoOptions(), - video_bitrate_allocator_factory_.get()); - cricket::VideoRecvParameters parameters; + std::unique_ptr channel = + engine_.CreateReceiveChannel(call_.get(), GetMediaConfig(), + VideoOptions(), webrtc::CryptoOptions()); + cricket::VideoReceiverParameters parameters; parameters.codecs = codecs; EXPECT_TRUE(channel->SetRecvParameters(parameters)); return channel; } +std::unique_ptr +WebRtcVideoEngineTest::SetRecvParamsWithAllSupportedCodecs() { + std::vector codecs; + for (const webrtc::SdpVideoFormat& format : + decoder_factory_->GetSupportedFormats()) { + cricket::VideoCodec engine_codec = GetEngineCodec(format.name); + if (!absl::c_linear_search(codecs, engine_codec)) { + codecs.push_back(engine_codec); + } + } + + return SetRecvParamsWithSupportedCodecs(codecs); +} + void WebRtcVideoEngineTest::ExpectRtpCapabilitySupport(const char* uri, bool supported) const { const std::vector header_extensions = @@ -927,18 +935,23 @@ TEST_F(WebRtcVideoEngineTest, SendsFeedbackAfterUnsignaledRtxPacket) { ASSERT_TRUE(rtcp_parser.Parse(packet->cdata(), packet->size())); }), Return(true))); - std::unique_ptr channel(engine_.CreateMediaChannel( - MediaChannel::Role::kBoth, call_.get(), GetMediaConfig(), VideoOptions(), - webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get())); - cricket::VideoRecvParameters parameters; + std::unique_ptr send_channel = + engine_.CreateSendChannel(call_.get(), GetMediaConfig(), VideoOptions(), + webrtc::CryptoOptions(), + video_bitrate_allocator_factory_.get()); + std::unique_ptr receive_channel = + engine_.CreateReceiveChannel(call_.get(), GetMediaConfig(), + VideoOptions(), webrtc::CryptoOptions()); + cricket::VideoReceiverParameters parameters; parameters.codecs = supported_codecs; const int kTransportSeqExtensionId = 1; parameters.extensions.push_back(RtpExtension( RtpExtension::kTransportSequenceNumberUri, kTransportSeqExtensionId)); - ASSERT_TRUE(channel->SetRecvParameters(parameters)); - channel->SetInterface(&network); - channel->AsVideoSendChannel()->OnReadyToSend(true); - channel->AsVideoReceiveChannel()->SetReceive(true); + ASSERT_TRUE(receive_channel->SetRecvParameters(parameters)); + send_channel->SetInterface(&network); + receive_channel->SetInterface(&network); + send_channel->OnReadyToSend(true); + receive_channel->SetReceive(true); // Inject a RTX packet. webrtc::RtpHeaderExtensionMap extension_map(parameters.extensions); @@ -949,13 +962,14 @@ TEST_F(WebRtcVideoEngineTest, SendsFeedbackAfterUnsignaledRtxPacket) { packet.SetExtension(7); uint8_t* buf_ptr = packet.AllocatePayload(11); memset(buf_ptr, 0, 11); // Pass MSAN (don't care about bytes 1-9) - channel->AsVideoReceiveChannel()->OnPacketReceived(packet); + receive_channel->OnPacketReceived(packet); // Expect that feedback is sent after a while. 
time_controller_.AdvanceTime(webrtc::TimeDelta::Seconds(1)); EXPECT_GT(rtcp_parser.transport_feedback()->num_packets(), 0); - channel->SetInterface(nullptr); + send_channel->SetInterface(nullptr); + receive_channel->SetInterface(nullptr); } TEST_F(WebRtcVideoEngineTest, UpdatesUnsignaledRtxSsrcAndRecoversPayload) { @@ -967,20 +981,20 @@ TEST_F(WebRtcVideoEngineTest, UpdatesUnsignaledRtxSsrcAndRecoversPayload) { ASSERT_EQ(supported_codecs[1].name, "rtx"); int rtx_payload_type = supported_codecs[1].id; - std::unique_ptr channel(engine_.CreateMediaChannel( - MediaChannel::Role::kBoth, call_.get(), GetMediaConfig(), VideoOptions(), - webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get())); - cricket::VideoRecvParameters parameters; + std::unique_ptr receive_channel = + engine_.CreateReceiveChannel(call_.get(), GetMediaConfig(), + VideoOptions(), webrtc::CryptoOptions()); + cricket::VideoReceiverParameters parameters; parameters.codecs = supported_codecs; - ASSERT_TRUE(channel->SetRecvParameters(parameters)); - channel->AsVideoReceiveChannel()->SetReceive(true); + ASSERT_TRUE(receive_channel->SetRecvParameters(parameters)); + receive_channel->SetReceive(true); // Receive a normal payload packet. It is not a complete frame since the // marker bit is not set. RtpPacketReceived packet_1 = BuildVp8KeyFrame(/*ssrc*/ 123, supported_codecs[0].id); packet_1.SetMarker(false); - channel->AsVideoReceiveChannel()->OnPacketReceived(packet_1); + receive_channel->OnPacketReceived(packet_1); time_controller_.AdvanceTime(webrtc::TimeDelta::Millis(100)); // No complete frame received. No decoder created yet. @@ -995,7 +1009,7 @@ TEST_F(WebRtcVideoEngineTest, UpdatesUnsignaledRtxSsrcAndRecoversPayload) { RtpPacketReceived rtx_packet = BuildRtxPacket(345, rtx_payload_type, packet_2); - channel->AsVideoReceiveChannel()->OnPacketReceived(rtx_packet); + receive_channel->OnPacketReceived(rtx_packet); time_controller_.AdvanceTime(webrtc::TimeDelta::Millis(0)); ASSERT_THAT(decoder_factory_->decoders(), Not(IsEmpty())); @@ -1005,20 +1019,19 @@ TEST_F(WebRtcVideoEngineTest, UpdatesUnsignaledRtxSsrcAndRecoversPayload) { TEST_F(WebRtcVideoEngineTest, UsesSimulcastAdapterForVp8Factories) { AddSupportedVideoCodecType("VP8"); - std::unique_ptr channel( - SetSendParamsWithAllSupportedCodecs()); - auto send_channel = channel->AsVideoSendChannel(); + auto send_channel = SetSendParamsWithAllSupportedCodecs(); std::vector ssrcs = MAKE_VECTOR(kSsrcs3); EXPECT_TRUE( send_channel->AddSendStream(CreateSimStreamParams("cname", ssrcs))); - EXPECT_TRUE(channel->SetSend(true)); + EXPECT_TRUE(send_channel->SetSend(true)); webrtc::test::FrameForwarder frame_forwarder; cricket::FakeFrameSource frame_source(1280, 720, rtc::kNumMicrosecsPerSec / 60); - EXPECT_TRUE(channel->SetVideoSend(ssrcs.front(), nullptr, &frame_forwarder)); + EXPECT_TRUE( + send_channel->SetVideoSend(ssrcs.front(), nullptr, &frame_forwarder)); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); time_controller_.AdvanceTime(webrtc::TimeDelta::Zero()); ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(2)); @@ -1035,9 +1048,9 @@ TEST_F(WebRtcVideoEngineTest, UsesSimulcastAdapterForVp8Factories) { prev_width = codec_settings.width; } - EXPECT_TRUE(channel->SetVideoSend(ssrcs.front(), nullptr, nullptr)); + EXPECT_TRUE(send_channel->SetVideoSend(ssrcs.front(), nullptr, nullptr)); - channel.reset(); + send_channel.reset(); ASSERT_EQ(0u, encoder_factory_->encoders().size()); } @@ -1050,27 +1063,26 @@ TEST_F(WebRtcVideoEngineTest, 
ChannelWithH264CanChangeToVp8) { cricket::FakeFrameSource frame_source(1280, 720, rtc::kNumMicrosecsPerSec / 30); - std::unique_ptr channel(engine_.CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, call_.get(), GetMediaConfig(), - VideoOptions(), webrtc::CryptoOptions(), - video_bitrate_allocator_factory_.get())); - auto send_channel = channel->AsVideoSendChannel(); - cricket::VideoSendParameters parameters; + std::unique_ptr send_channel = + engine_.CreateSendChannel(call_.get(), GetMediaConfig(), VideoOptions(), + webrtc::CryptoOptions(), + video_bitrate_allocator_factory_.get()); + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("H264")); - EXPECT_TRUE(channel->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel->SetSendParameters(parameters)); EXPECT_TRUE( send_channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc))); - EXPECT_TRUE(channel->SetVideoSend(kSsrc, nullptr, &frame_forwarder)); + EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, nullptr, &frame_forwarder)); // Sending one frame will have allocate the encoder. frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); time_controller_.AdvanceTime(webrtc::TimeDelta::Zero()); ASSERT_EQ(1u, encoder_factory_->encoders().size()); - cricket::VideoSendParameters new_parameters; + cricket::VideoSenderParameters new_parameters; new_parameters.codecs.push_back(GetEngineCodec("VP8")); - EXPECT_TRUE(channel->SetSendParameters(new_parameters)); + EXPECT_TRUE(send_channel->SetSendParameters(new_parameters)); // Sending one frame will switch encoder. frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -1084,20 +1096,19 @@ TEST_F(WebRtcVideoEngineTest, AddSupportedVideoCodecType("VP8"); AddSupportedVideoCodecType("H264"); - std::unique_ptr channel(engine_.CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, call_.get(), GetMediaConfig(), - VideoOptions(), webrtc::CryptoOptions(), - video_bitrate_allocator_factory_.get())); - auto send_channel = channel->AsVideoSendChannel(); - cricket::VideoSendParameters parameters; + std::unique_ptr send_channel = + engine_.CreateSendChannel(call_.get(), GetMediaConfig(), VideoOptions(), + webrtc::CryptoOptions(), + video_bitrate_allocator_factory_.get()); + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); - EXPECT_TRUE(channel->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel->SetSendParameters(parameters)); std::vector ssrcs = MAKE_VECTOR(kSsrcs3); EXPECT_TRUE( send_channel->AddSendStream(CreateSimStreamParams("cname", ssrcs))); - EXPECT_TRUE(channel->SetSend(true)); + EXPECT_TRUE(send_channel->SetSend(true)); // Send a fake frame, or else the media engine will configure the simulcast // encoder adapter at a low-enough size that it'll only create a single @@ -1105,7 +1116,8 @@ TEST_F(WebRtcVideoEngineTest, webrtc::test::FrameForwarder frame_forwarder; cricket::FakeFrameSource frame_source(1280, 720, rtc::kNumMicrosecsPerSec / 30); - EXPECT_TRUE(channel->SetVideoSend(ssrcs.front(), nullptr, &frame_forwarder)); + EXPECT_TRUE( + send_channel->SetVideoSend(ssrcs.front(), nullptr, &frame_forwarder)); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); time_controller_.AdvanceTime(webrtc::TimeDelta::Zero()); @@ -1114,7 +1126,7 @@ TEST_F(WebRtcVideoEngineTest, EXPECT_EQ(webrtc::kVideoCodecVP8, encoder_factory_->encoders()[0]->GetCodecSettings().codecType); - channel.reset(); + send_channel.reset(); // Make sure DestroyVideoEncoder was called on the factory. 
EXPECT_EQ(0u, encoder_factory_->encoders().size()); } @@ -1124,14 +1136,13 @@ TEST_F(WebRtcVideoEngineTest, AddSupportedVideoCodecType("VP8"); AddSupportedVideoCodecType("H264"); - std::unique_ptr channel(engine_.CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, call_.get(), GetMediaConfig(), - VideoOptions(), webrtc::CryptoOptions(), - video_bitrate_allocator_factory_.get())); - auto send_channel = channel->AsVideoSendChannel(); - cricket::VideoSendParameters parameters; + std::unique_ptr send_channel = + engine_.CreateSendChannel(call_.get(), GetMediaConfig(), VideoOptions(), + webrtc::CryptoOptions(), + video_bitrate_allocator_factory_.get()); + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("H264")); - EXPECT_TRUE(channel->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel->SetSendParameters(parameters)); EXPECT_TRUE( send_channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc))); @@ -1140,7 +1151,7 @@ TEST_F(WebRtcVideoEngineTest, webrtc::test::FrameForwarder frame_forwarder; cricket::FakeFrameSource frame_source(1280, 720, rtc::kNumMicrosecsPerSec / 30); - EXPECT_TRUE(channel->SetVideoSend(kSsrc, nullptr, &frame_forwarder)); + EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, nullptr, &frame_forwarder)); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); time_controller_.AdvanceTime(webrtc::TimeDelta::Zero()); ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(1)); @@ -1149,7 +1160,7 @@ TEST_F(WebRtcVideoEngineTest, EXPECT_EQ(webrtc::kVideoCodecH264, encoder_factory_->encoders()[0]->GetCodecSettings().codecType); - channel.reset(); + send_channel.reset(); // Make sure DestroyVideoEncoder was called on the factory. ASSERT_EQ(0u, encoder_factory_->encoders().size()); } @@ -1157,15 +1168,14 @@ TEST_F(WebRtcVideoEngineTest, TEST_F(WebRtcVideoEngineTest, SimulcastEnabledForH264) { AddSupportedVideoCodecType("H264"); - std::unique_ptr channel(engine_.CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, call_.get(), GetMediaConfig(), - VideoOptions(), webrtc::CryptoOptions(), - video_bitrate_allocator_factory_.get())); - auto send_channel = channel->AsVideoSendChannel(); + std::unique_ptr send_channel = + engine_.CreateSendChannel(call_.get(), GetMediaConfig(), VideoOptions(), + webrtc::CryptoOptions(), + video_bitrate_allocator_factory_.get()); - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("H264")); - EXPECT_TRUE(channel->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel->SetSendParameters(parameters)); const std::vector ssrcs = MAKE_VECTOR(kSsrcs3); EXPECT_TRUE(send_channel->AddSendStream( @@ -1175,7 +1185,7 @@ TEST_F(WebRtcVideoEngineTest, SimulcastEnabledForH264) { webrtc::test::FrameForwarder frame_forwarder; cricket::FakeFrameSource frame_source(1280, 720, rtc::kNumMicrosecsPerSec / 30); - EXPECT_TRUE(channel->SetVideoSend(ssrcs[0], nullptr, &frame_forwarder)); + EXPECT_TRUE(send_channel->SetVideoSend(ssrcs[0], nullptr, &frame_forwarder)); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); time_controller_.AdvanceTime(webrtc::TimeDelta::Zero()); @@ -1185,7 +1195,7 @@ TEST_F(WebRtcVideoEngineTest, SimulcastEnabledForH264) { ASSERT_TRUE(encoder_factory_->encoders()[0]->WaitForInitEncode()); EXPECT_EQ(webrtc::kVideoCodecH264, encoder->GetCodecSettings().codecType); EXPECT_LT(1u, encoder->GetCodecSettings().numberOfSimulcastStreams); - EXPECT_TRUE(channel->SetVideoSend(ssrcs[0], nullptr, nullptr)); 
+ EXPECT_TRUE(send_channel->SetVideoSend(ssrcs[0], nullptr, nullptr)); } // Test that FlexFEC is not supported as a send video codec by default. @@ -1202,20 +1212,6 @@ TEST_F(WebRtcVideoEngineTest, Flexfec03SendCodecEnablesWithFieldTrial) { EXPECT_THAT(engine_.send_codecs(), Contains(flexfec)); } -// Test that FlexFEC is supported as a receive video codec by default. -// Disabling field trial should prevent advertising FlexFEC receive codec. -TEST_F(WebRtcVideoEngineTest, Flexfec03ReceiveCodecDisablesWithFieldTrial) { - decoder_factory_->AddSupportedVideoCodecType("VP8"); - - auto flexfec = Field("name", &VideoCodec::name, "flexfec-03"); - - EXPECT_THAT(engine_.recv_codecs(), Contains(flexfec)); - - webrtc::test::ScopedKeyValueConfig override_field_trials( - field_trials_, "WebRTC-FlexFEC-03-Advertised/Disabled/"); - EXPECT_THAT(engine_.recv_codecs(), Not(Contains(flexfec))); -} - // Test that the FlexFEC "codec" gets assigned to the lower payload type range TEST_F(WebRtcVideoEngineTest, Flexfec03LowerPayloadTypeRange) { encoder_factory_->AddSupportedVideoCodecType("VP8"); @@ -1281,12 +1277,10 @@ TEST_F(WebRtcVideoEngineTest, ReportRtxForExternalCodec) { TEST_F(WebRtcVideoEngineTest, RegisterDecodersIfSupported) { AddSupportedVideoCodecType("VP8"); - cricket::VideoRecvParameters parameters; + cricket::VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); - std::unique_ptr channel( - SetRecvParamsWithSupportedCodecs(parameters.codecs)); - auto receive_channel = channel->AsVideoReceiveChannel(); + auto receive_channel = SetRecvParamsWithSupportedCodecs(parameters.codecs); EXPECT_TRUE(receive_channel->AddRecvStream( cricket::StreamParams::CreateLegacy(kSsrc))); @@ -1313,9 +1307,7 @@ TEST_F(WebRtcVideoEngineTest, RegisterH264DecoderIfSupported) { std::vector codecs; codecs.push_back(GetEngineCodec("H264")); - std::unique_ptr channel( - SetRecvParamsWithSupportedCodecs(codecs)); - auto receive_channel = channel->AsVideoReceiveChannel(); + auto receive_channel = SetRecvParamsWithSupportedCodecs(codecs); EXPECT_TRUE(receive_channel->AddRecvStream( cricket::StreamParams::CreateLegacy(kSsrc))); @@ -1329,17 +1321,16 @@ TEST_F(WebRtcVideoEngineTest, RegisterH264DecoderIfSupported) { TEST_F(WebRtcVideoEngineTest, GetSourcesWithNonExistingSsrc) { // Setup an recv stream with `kSsrc`. AddSupportedVideoCodecType("VP8"); - cricket::VideoRecvParameters parameters; + cricket::VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); - std::unique_ptr channel( - SetRecvParamsWithSupportedCodecs(parameters.codecs)); - auto receive_channel = channel->AsVideoReceiveChannel(); + auto receive_channel = SetRecvParamsWithSupportedCodecs(parameters.codecs); EXPECT_TRUE(receive_channel->AddRecvStream( cricket::StreamParams::CreateLegacy(kSsrc))); // Call GetSources with |kSsrc + 1| which doesn't exist. - std::vector sources = channel->GetSources(kSsrc + 1); + std::vector sources = + receive_channel->GetSources(kSsrc + 1); EXPECT_EQ(0u, sources.size()); } @@ -1456,17 +1447,17 @@ TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, Vp8) { // Create send channel. 
const int send_ssrc = 123; - std::unique_ptr send_channel(engine.CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, call.get(), GetMediaConfig(), - VideoOptions(), webrtc::CryptoOptions(), rate_allocator_factory.get())); - auto send_send_channel = send_channel->AsVideoSendChannel(); + std::unique_ptr send_channel = + engine.CreateSendChannel(call.get(), GetMediaConfig(), VideoOptions(), + webrtc::CryptoOptions(), + rate_allocator_factory.get()); - cricket::VideoSendParameters send_parameters; + cricket::VideoSenderParameters send_parameters; send_parameters.codecs.push_back(engine_codecs.at(0)); EXPECT_TRUE(send_channel->SetSendParameters(send_parameters)); - send_send_channel->OnReadyToSend(true); + send_channel->OnReadyToSend(true); EXPECT_TRUE( - send_send_channel->AddSendStream(StreamParams::CreateLegacy(send_ssrc))); + send_channel->AddSendStream(StreamParams::CreateLegacy(send_ssrc))); EXPECT_TRUE(send_channel->SetSend(true)); // Set capturer. @@ -1480,12 +1471,11 @@ TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, Vp8) { // Create recv channel. const int recv_ssrc = 321; - std::unique_ptr recv_channel(engine.CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, call.get(), GetMediaConfig(), - VideoOptions(), webrtc::CryptoOptions(), rate_allocator_factory.get())); - auto receive_channel = recv_channel->AsVideoReceiveChannel(); + std::unique_ptr receive_channel = + engine.CreateReceiveChannel(call.get(), GetMediaConfig(), VideoOptions(), + webrtc::CryptoOptions()); - cricket::VideoRecvParameters recv_parameters; + cricket::VideoReceiverParameters recv_parameters; recv_parameters.codecs.push_back(engine_codecs.at(0)); EXPECT_TRUE(receive_channel->SetRecvParameters(recv_parameters)); EXPECT_TRUE(receive_channel->AddRecvStream( @@ -1495,44 +1485,42 @@ TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, Vp8) { EXPECT_CALL(*encoder_factory, Die()); EXPECT_CALL(*decoder_factory, Die()); EXPECT_CALL(*rate_allocator_factory, Die()); - EXPECT_TRUE(send_send_channel->RemoveSendStream(send_ssrc)); + EXPECT_TRUE(send_channel->RemoveSendStream(send_ssrc)); EXPECT_TRUE(receive_channel->RemoveRecvStream(recv_ssrc)); } TEST_F(WebRtcVideoEngineTest, DISABLED_RecreatesEncoderOnContentTypeChange) { encoder_factory_->AddSupportedVideoCodecType("VP8"); std::unique_ptr fake_call(new FakeCall()); - std::unique_ptr channel( - SetSendParamsWithAllSupportedCodecs()); - auto send_channel = channel->AsVideoSendChannel(); + auto send_channel = SetSendParamsWithAllSupportedCodecs(); ASSERT_TRUE( send_channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc))); cricket::VideoCodec codec = GetEngineCodec("VP8"); - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(codec); send_channel->OnReadyToSend(true); - channel->SetSend(true); - ASSERT_TRUE(channel->SetSendParameters(parameters)); + send_channel->SetSend(true); + ASSERT_TRUE(send_channel->SetSendParameters(parameters)); webrtc::test::FrameForwarder frame_forwarder; cricket::FakeFrameSource frame_source(1280, 720, rtc::kNumMicrosecsPerSec / 30); VideoOptions options; - EXPECT_TRUE(channel->SetVideoSend(kSsrc, &options, &frame_forwarder)); + EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, &options, &frame_forwarder)); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(1)); EXPECT_EQ(webrtc::VideoCodecMode::kRealtimeVideo, encoder_factory_->encoders().back()->GetCodecSettings().mode); - 
EXPECT_TRUE(channel->SetVideoSend(kSsrc, &options, &frame_forwarder)); + EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, &options, &frame_forwarder)); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); // No change in content type, keep current encoder. EXPECT_EQ(1, encoder_factory_->GetNumCreatedEncoders()); options.is_screencast.emplace(true); - EXPECT_TRUE(channel->SetVideoSend(kSsrc, &options, &frame_forwarder)); + EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, &options, &frame_forwarder)); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); // Change to screen content, recreate encoder. For the simulcast encoder // adapter case, this will result in two calls since InitEncode triggers a @@ -1541,14 +1529,14 @@ TEST_F(WebRtcVideoEngineTest, DISABLED_RecreatesEncoderOnContentTypeChange) { EXPECT_EQ(webrtc::VideoCodecMode::kScreensharing, encoder_factory_->encoders().back()->GetCodecSettings().mode); - EXPECT_TRUE(channel->SetVideoSend(kSsrc, &options, &frame_forwarder)); + EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, &options, &frame_forwarder)); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); // Still screen content, no need to update encoder. EXPECT_EQ(2, encoder_factory_->GetNumCreatedEncoders()); options.is_screencast.emplace(false); options.video_noise_reduction.emplace(false); - EXPECT_TRUE(channel->SetVideoSend(kSsrc, &options, &frame_forwarder)); + EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, &options, &frame_forwarder)); // Change back to regular video content, update encoder. Also change // a non `is_screencast` option just to verify it doesn't affect recreation. frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -1607,31 +1595,32 @@ class WebRtcVideoChannelEncodedFrameCallbackTest : public ::testing::Test { std::make_unique( []() { return std::make_unique(); }, kSdpVideoFormats), - field_trials_), - channel_(absl::WrapUnique(engine_.CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, - call_.get(), - cricket::MediaConfig(), - cricket::VideoOptions(), - webrtc::CryptoOptions(), - video_bitrate_allocator_factory_.get()))) { - send_channel_ = channel_->AsVideoSendChannel(); - receive_channel_ = channel_->AsVideoReceiveChannel(); + field_trials_) { + send_channel_ = engine_.CreateSendChannel( + call_.get(), cricket::MediaConfig(), cricket::VideoOptions(), + webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get()); + receive_channel_ = engine_.CreateReceiveChannel( + call_.get(), cricket::MediaConfig(), cricket::VideoOptions(), + webrtc::CryptoOptions()); - network_interface_.SetDestination(channel_.get()); - channel_->SetInterface(&network_interface_); - cricket::VideoRecvParameters parameters; + network_interface_.SetDestination(receive_channel_.get()); + send_channel_->SetInterface(&network_interface_); + receive_channel_->SetInterface(&network_interface_); + cricket::VideoReceiverParameters parameters; parameters.codecs = engine_.recv_codecs(); - channel_->SetRecvParameters(parameters); + receive_channel_->SetRecvParameters(parameters); receive_channel_->SetReceive(true); } ~WebRtcVideoChannelEncodedFrameCallbackTest() override { - channel_->SetInterface(nullptr); + send_channel_->SetInterface(nullptr); + receive_channel_->SetInterface(nullptr); + send_channel_.reset(); + receive_channel_.reset(); } void DeliverKeyFrame(uint32_t ssrc) { - channel_->OnPacketReceived(BuildVp8KeyFrame(ssrc, 96)); + receive_channel_->OnPacketReceived(BuildVp8KeyFrame(ssrc, 96)); } void DeliverKeyFrameAndWait(uint32_t ssrc) { @@ 
-1650,9 +1639,8 @@ class WebRtcVideoChannelEncodedFrameCallbackTest : public ::testing::Test { std::unique_ptr video_bitrate_allocator_factory_; WebRtcVideoEngine engine_; - std::unique_ptr channel_; - VideoMediaSendChannelInterface* send_channel_; - VideoMediaReceiveChannelInterface* receive_channel_; + std::unique_ptr send_channel_; + std::unique_ptr receive_channel_; cricket::FakeNetworkInterface network_interface_; cricket::FakeVideoRenderer renderer_; }; @@ -1665,11 +1653,11 @@ TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, SetEncodedFrameBufferFunction_DefaultStream) { testing::MockFunction callback; EXPECT_CALL(callback, Call); - EXPECT_TRUE(channel_->AddDefaultRecvStreamForTesting( + EXPECT_TRUE(receive_channel_->AddDefaultRecvStreamForTesting( cricket::StreamParams::CreateLegacy(kSsrc))); - channel_->SetRecordableEncodedFrameCallback(/*ssrc=*/0, - callback.AsStdFunction()); - EXPECT_TRUE(channel_->SetSink(kSsrc, &renderer_)); + receive_channel_->SetRecordableEncodedFrameCallback(/*ssrc=*/0, + callback.AsStdFunction()); + EXPECT_TRUE(receive_channel_->SetSink(kSsrc, &renderer_)); DeliverKeyFrame(kSsrc); time_controller_.AdvanceTime(kFrameDuration); EXPECT_EQ(renderer_.num_rendered_frames(), 1); @@ -1680,10 +1668,11 @@ TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, SetEncodedFrameBufferFunction_MatchSsrcWithDefaultStream) { testing::MockFunction callback; EXPECT_CALL(callback, Call); - EXPECT_TRUE(channel_->AddDefaultRecvStreamForTesting( + EXPECT_TRUE(receive_channel_->AddDefaultRecvStreamForTesting( cricket::StreamParams::CreateLegacy(kSsrc))); - EXPECT_TRUE(channel_->SetSink(kSsrc, &renderer_)); - channel_->SetRecordableEncodedFrameCallback(kSsrc, callback.AsStdFunction()); + EXPECT_TRUE(receive_channel_->SetSink(kSsrc, &renderer_)); + receive_channel_->SetRecordableEncodedFrameCallback(kSsrc, + callback.AsStdFunction()); DeliverKeyFrame(kSsrc); time_controller_.AdvanceTime(kFrameDuration); EXPECT_EQ(renderer_.num_rendered_frames(), 1); @@ -1694,10 +1683,11 @@ TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, SetEncodedFrameBufferFunction_MatchSsrc) { testing::MockFunction callback; EXPECT_CALL(callback, Call); - EXPECT_TRUE( - channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrc))); - EXPECT_TRUE(channel_->SetSink(kSsrc, &renderer_)); - channel_->SetRecordableEncodedFrameCallback(kSsrc, callback.AsStdFunction()); + EXPECT_TRUE(receive_channel_->AddRecvStream( + cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(receive_channel_->SetSink(kSsrc, &renderer_)); + receive_channel_->SetRecordableEncodedFrameCallback(kSsrc, + callback.AsStdFunction()); DeliverKeyFrame(kSsrc); time_controller_.AdvanceTime(kFrameDuration); EXPECT_EQ(renderer_.num_rendered_frames(), 1); @@ -1709,10 +1699,11 @@ TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, testing::StrictMock< testing::MockFunction> callback; - EXPECT_TRUE( - channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrc + 1))); - EXPECT_TRUE(channel_->SetSink(kSsrc + 1, &renderer_)); - channel_->SetRecordableEncodedFrameCallback(kSsrc, callback.AsStdFunction()); + EXPECT_TRUE(receive_channel_->AddRecvStream( + cricket::StreamParams::CreateLegacy(kSsrc + 1))); + EXPECT_TRUE(receive_channel_->SetSink(kSsrc + 1, &renderer_)); + receive_channel_->SetRecordableEncodedFrameCallback(kSsrc, + callback.AsStdFunction()); DeliverKeyFrame(kSsrc); // Expected to not cause function to fire. 
DeliverKeyFrameAndWait(kSsrc + 1); receive_channel_->RemoveRecvStream(kSsrc + 1); @@ -1723,11 +1714,12 @@ TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, testing::StrictMock< testing::MockFunction> callback; - EXPECT_TRUE(channel_->AddDefaultRecvStreamForTesting( + EXPECT_TRUE(receive_channel_->AddDefaultRecvStreamForTesting( cricket::StreamParams::CreateLegacy(kSsrc + 1))); - EXPECT_TRUE(channel_->SetSink(kSsrc + 1, &renderer_)); - channel_->SetRecordableEncodedFrameCallback(kSsrc, callback.AsStdFunction()); - channel_->SetDefaultSink(&renderer_); + EXPECT_TRUE(receive_channel_->SetSink(kSsrc + 1, &renderer_)); + receive_channel_->SetRecordableEncodedFrameCallback(kSsrc, + callback.AsStdFunction()); + receive_channel_->SetDefaultSink(&renderer_); DeliverKeyFrame(kSsrc); // Expected to not cause function to fire. DeliverKeyFrameAndWait(kSsrc + 1); receive_channel_->RemoveRecvStream(kSsrc + 1); @@ -1736,11 +1728,11 @@ TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, DoesNotDecodeWhenDisabled) { testing::MockFunction callback; EXPECT_CALL(callback, Call); - EXPECT_TRUE(channel_->AddDefaultRecvStreamForTesting( + EXPECT_TRUE(receive_channel_->AddDefaultRecvStreamForTesting( cricket::StreamParams::CreateLegacy(kSsrc))); - channel_->SetRecordableEncodedFrameCallback(/*ssrc=*/0, - callback.AsStdFunction()); - EXPECT_TRUE(channel_->SetSink(kSsrc, &renderer_)); + receive_channel_->SetRecordableEncodedFrameCallback(/*ssrc=*/0, + callback.AsStdFunction()); + EXPECT_TRUE(receive_channel_->SetSink(kSsrc, &renderer_)); receive_channel_->SetReceive(false); DeliverKeyFrame(kSsrc); time_controller_.AdvanceTime(kFrameDuration); @@ -1791,34 +1783,33 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test { // needs to be disabled, otherwise, tests which check the size of received // frames become flaky. 
media_config.video.enable_cpu_adaptation = false; - channel_.reset(engine_.CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, call_.get(), media_config, - cricket::VideoOptions(), webrtc::CryptoOptions(), - video_bitrate_allocator_factory_.get())); - send_channel_ = channel_->AsVideoSendChannel(); - receive_channel_ = channel_->AsVideoReceiveChannel(); + send_channel_ = engine_.CreateSendChannel( + call_.get(), media_config, cricket::VideoOptions(), + webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get()); + receive_channel_ = engine_.CreateReceiveChannel(call_.get(), media_config, + cricket::VideoOptions(), + webrtc::CryptoOptions()); send_channel_->OnReadyToSend(true); receive_channel_->SetReceive(true); - EXPECT_TRUE(channel_.get() != NULL); - network_interface_.SetDestination(channel_.get()); - channel_->SetInterface(&network_interface_); - cricket::VideoRecvParameters parameters; + network_interface_.SetDestination(receive_channel_.get()); + send_channel_->SetInterface(&network_interface_); + receive_channel_->SetInterface(&network_interface_); + cricket::VideoReceiverParameters parameters; parameters.codecs = engine_.send_codecs(); - channel_->SetRecvParameters(parameters); + receive_channel_->SetRecvParameters(parameters); EXPECT_TRUE(send_channel_->AddSendStream(DefaultSendStreamParams())); frame_forwarder_ = std::make_unique(); frame_source_ = std::make_unique( 640, 480, rtc::kNumMicrosecsPerSec / kFramerate); - EXPECT_TRUE(channel_->SetVideoSend(kSsrc, nullptr, frame_forwarder_.get())); + EXPECT_TRUE( + send_channel_->SetVideoSend(kSsrc, nullptr, frame_forwarder_.get())); } // Returns pointer to implementation of the send channel. WebRtcVideoSendChannel* SendImpl() { // Note that this function requires intimate knowledge of how the channel // was created. - return static_cast( - static_cast(channel_.get()) - ->SendImplForTesting()); + return static_cast(send_channel_.get()); } // Utility method to setup an additional stream to send and receive video. @@ -1829,7 +1820,7 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test { EXPECT_TRUE(receive_channel_->AddRecvStream( cricket::StreamParams::CreateLegacy(kSsrc + 2))); // Make the second renderer available for use by a new stream. - EXPECT_TRUE(channel_->SetSink(kSsrc + 2, &renderer2_)); + EXPECT_TRUE(receive_channel_->SetSink(kSsrc + 2, &renderer2_)); } // Setup an additional stream just to send video. Defer add recv stream. @@ -1838,7 +1829,7 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test { // SetUp() already added kSsrc make sure duplicate SSRCs cant be added. EXPECT_TRUE(receive_channel_->AddRecvStream( cricket::StreamParams::CreateLegacy(kSsrc))); - EXPECT_TRUE(channel_->SetSink(kSsrc, &renderer_)); + EXPECT_TRUE(receive_channel_->SetSink(kSsrc, &renderer_)); EXPECT_FALSE(send_channel_->AddSendStream( cricket::StreamParams::CreateLegacy(kSsrc))); EXPECT_TRUE(send_channel_->AddSendStream( @@ -1847,13 +1838,15 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test { // Setup the receive and renderer for second stream after send. 
frame_forwarder_2_ = std::make_unique(); - EXPECT_TRUE( - channel_->SetVideoSend(kSsrc + 2, nullptr, frame_forwarder_2_.get())); + EXPECT_TRUE(send_channel_->SetVideoSend(kSsrc + 2, nullptr, + frame_forwarder_2_.get())); } void TearDown() override { - channel_->SetInterface(nullptr); - channel_.reset(); + send_channel_->SetInterface(nullptr); + receive_channel_->SetInterface(nullptr); + send_channel_.reset(); + receive_channel_.reset(); } void ResetTest() { @@ -1870,16 +1863,16 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test { bool sending = SendImpl()->sending(); bool success = SetSend(false); if (success) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(codec); - success = channel_->SetSendParameters(parameters); + success = send_channel_->SetSendParameters(parameters); } if (success) { success = SetSend(sending); } return success; } - bool SetSend(bool send) { return channel_->SetSend(send); } + bool SetSend(bool send) { return send_channel_->SetSend(send); } void SendFrame() { if (frame_forwarder_2_) { frame_forwarder_2_->IncomingCapturedFrame(frame_source_->GetFrame()); @@ -1914,7 +1907,7 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test { void SendAndReceive(const cricket::VideoCodec& codec) { EXPECT_TRUE(SetOneCodec(codec)); EXPECT_TRUE(SetSend(true)); - channel_->SetDefaultSink(&renderer_); + receive_channel_->SetDefaultSink(&renderer_); EXPECT_EQ(0, renderer_.num_rendered_frames()); SendFrame(); EXPECT_FRAME(1, kVideoWidth, kVideoHeight); @@ -1926,7 +1919,7 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test { int fps) { EXPECT_TRUE(SetOneCodec(codec)); EXPECT_TRUE(SetSend(true)); - channel_->SetDefaultSink(&renderer_); + receive_channel_->SetDefaultSink(&renderer_); EXPECT_EQ(0, renderer_.num_rendered_frames()); for (int i = 0; i < duration_sec; ++i) { for (int frame = 1; frame <= fps; ++frame) { @@ -1939,13 +1932,13 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test { cricket::VideoSenderInfo GetSenderStats(size_t i) { VideoMediaSendInfo send_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); return send_info.senders[i]; } cricket::VideoReceiverInfo GetReceiverStats(size_t i) { cricket::VideoMediaReceiveInfo info; - EXPECT_TRUE(channel_->GetReceiveStats(&info)); + EXPECT_TRUE(receive_channel_->GetStats(&info)); return info.receivers[i]; } @@ -1993,9 +1986,8 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test { std::unique_ptr frame_forwarder_; std::unique_ptr frame_forwarder_2_; - std::unique_ptr channel_; - VideoMediaSendChannelInterface* send_channel_; - VideoMediaReceiveChannelInterface* receive_channel_; + std::unique_ptr send_channel_; + std::unique_ptr receive_channel_; cricket::FakeNetworkInterface network_interface_; cricket::FakeVideoRenderer renderer_; @@ -2006,7 +1998,8 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test { // Test that SetSend works. 
TEST_F(WebRtcVideoChannelBaseTest, SetSend) { EXPECT_FALSE(SendImpl()->sending()); - EXPECT_TRUE(channel_->SetVideoSend(kSsrc, nullptr, frame_forwarder_.get())); + EXPECT_TRUE( + send_channel_->SetVideoSend(kSsrc, nullptr, frame_forwarder_.get())); EXPECT_TRUE(SetOneCodec(DefaultCodec())); EXPECT_FALSE(SendImpl()->sending()); EXPECT_TRUE(SetSend(true)); @@ -2041,8 +2034,8 @@ TEST_F(WebRtcVideoChannelBaseTest, GetStats) { cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); ASSERT_EQ(1U, send_info.senders.size()); // TODO(whyuan): bytes_sent and bytes_received are different. Are both @@ -2098,17 +2091,17 @@ TEST_F(WebRtcVideoChannelBaseTest, GetStats) { TEST_F(WebRtcVideoChannelBaseTest, GetStatsMultipleRecvStreams) { cricket::FakeVideoRenderer renderer1, renderer2; EXPECT_TRUE(SetOneCodec(DefaultCodec())); - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(DefaultCodec()); parameters.conference_mode = true; - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); EXPECT_TRUE(SetSend(true)); EXPECT_TRUE( receive_channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1))); EXPECT_TRUE( receive_channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(2))); - EXPECT_TRUE(channel_->SetSink(1, &renderer1)); - EXPECT_TRUE(channel_->SetSink(2, &renderer2)); + EXPECT_TRUE(receive_channel_->SetSink(1, &renderer1)); + EXPECT_TRUE(receive_channel_->SetSink(2, &renderer2)); EXPECT_EQ(0, renderer1.num_rendered_frames()); EXPECT_EQ(0, renderer2.num_rendered_frames()); std::vector ssrcs; @@ -2119,12 +2112,12 @@ TEST_F(WebRtcVideoChannelBaseTest, GetStatsMultipleRecvStreams) { EXPECT_FRAME_ON_RENDERER(renderer1, 1, kVideoWidth, kVideoHeight); EXPECT_FRAME_ON_RENDERER(renderer2, 1, kVideoWidth, kVideoHeight); - EXPECT_TRUE(channel_->SetSend(false)); + EXPECT_TRUE(send_channel_->SetSend(false)); cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); ASSERT_EQ(1U, send_info.senders.size()); // TODO(whyuan): bytes_sent and bytes_received are different. Are both @@ -2152,13 +2145,13 @@ TEST_F(WebRtcVideoChannelBaseTest, GetStatsMultipleSendStreams) { // Normal setup; note that we set the SSRC explicitly to ensure that // it will come first in the senders map. 
EXPECT_TRUE(SetOneCodec(DefaultCodec())); - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(DefaultCodec()); parameters.conference_mode = true; - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); EXPECT_TRUE(receive_channel_->AddRecvStream( cricket::StreamParams::CreateLegacy(kSsrc))); - EXPECT_TRUE(channel_->SetSink(kSsrc, &renderer_)); + EXPECT_TRUE(receive_channel_->SetSink(kSsrc, &renderer_)); EXPECT_TRUE(SetSend(true)); SendFrame(); EXPECT_GT(NumRtpPackets(), 0); @@ -2173,17 +2166,17 @@ TEST_F(WebRtcVideoChannelBaseTest, GetStatsMultipleSendStreams) { rtc::kNumMicrosecsPerSec / 5); EXPECT_TRUE( send_channel_->AddSendStream(cricket::StreamParams::CreateLegacy(5678))); - EXPECT_TRUE(channel_->SetVideoSend(5678, nullptr, &frame_forwarder)); + EXPECT_TRUE(send_channel_->SetVideoSend(5678, nullptr, &frame_forwarder)); EXPECT_TRUE(receive_channel_->AddRecvStream( cricket::StreamParams::CreateLegacy(5678))); - EXPECT_TRUE(channel_->SetSink(5678, &renderer2)); + EXPECT_TRUE(receive_channel_->SetSink(5678, &renderer2)); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); time_controller_.AdvanceTime(kFrameDuration); EXPECT_FRAME_ON_RENDERER(renderer2, 1, kTestWidth, kTestHeight); // Get stats, and make sure they are correct for two senders cricket::VideoMediaSendInfo send_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); ASSERT_EQ(2U, send_info.senders.size()); @@ -2198,17 +2191,17 @@ TEST_F(WebRtcVideoChannelBaseTest, GetStatsMultipleSendStreams) { EXPECT_EQ(kTestWidth, send_info.senders[1].send_frame_width); EXPECT_EQ(kTestHeight, send_info.senders[1].send_frame_height); // The capturer must be unregistered here as it runs out of it's scope next. - channel_->SetVideoSend(5678, nullptr, nullptr); + send_channel_->SetVideoSend(5678, nullptr, nullptr); } // Test that we can set the bandwidth. TEST_F(WebRtcVideoChannelBaseTest, SetSendBandwidth) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(DefaultCodec()); parameters.max_bandwidth_bps = -1; // <= 0 means unlimited. - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); parameters.max_bandwidth_bps = 128 * 1024; - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); } // Test that we can set the SSRC for the default send source. 
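Every fixture and test in this file is migrated along the same lines: the combined channel from engine_.CreateMediaChannel(cricket::MediaChannel::Role::kBoth, ...) is split into a dedicated send channel and a dedicated receive channel, and cricket::VideoSendParameters / VideoRecvParameters become cricket::VideoSenderParameters / VideoReceiverParameters. The condensed sketch below only restates calls that appear in this patch; the fixture members (engine_, call_, network_interface_, video_bitrate_allocator_factory_) and the unique_ptr element types (inferred from the interface pointers the patch removes) are illustrative, not new API.

  // Set-up: one channel per direction instead of a Role::kBoth channel.
  std::unique_ptr<cricket::VideoMediaSendChannelInterface> send_channel =
      engine_.CreateSendChannel(call_.get(), GetMediaConfig(), cricket::VideoOptions(),
                                webrtc::CryptoOptions(),
                                video_bitrate_allocator_factory_.get());
  std::unique_ptr<cricket::VideoMediaReceiveChannelInterface> receive_channel =
      engine_.CreateReceiveChannel(call_.get(), GetMediaConfig(), cricket::VideoOptions(),
                                   webrtc::CryptoOptions());

  // Both halves need the network interface; each half takes its own parameter struct.
  send_channel->SetInterface(&network_interface_);
  receive_channel->SetInterface(&network_interface_);

  cricket::VideoSenderParameters send_parameters;    // was cricket::VideoSendParameters
  send_parameters.codecs = engine_.send_codecs();
  EXPECT_TRUE(send_channel->SetSendParameters(send_parameters));

  cricket::VideoReceiverParameters recv_parameters;  // was cricket::VideoRecvParameters
  recv_parameters.codecs = engine_.recv_codecs();
  EXPECT_TRUE(receive_channel->SetRecvParameters(recv_parameters));

  send_channel->OnReadyToSend(true);
  receive_channel->SetReceive(true);

  // Tear-down: clear the interface on both halves before destroying them.
  send_channel->SetInterface(nullptr);
  receive_channel->SetInterface(nullptr);
  send_channel.reset();
  receive_channel.reset();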
@@ -2237,7 +2230,8 @@ TEST_F(WebRtcVideoChannelBaseTest, SetSendSsrcAfterSetCodecs) { EXPECT_TRUE(SetDefaultCodec()); EXPECT_TRUE( send_channel_->AddSendStream(cricket::StreamParams::CreateLegacy(999))); - EXPECT_TRUE(channel_->SetVideoSend(999u, nullptr, frame_forwarder_.get())); + EXPECT_TRUE( + send_channel_->SetVideoSend(999u, nullptr, frame_forwarder_.get())); EXPECT_TRUE(SetSend(true)); EXPECT_TRUE(WaitAndSendFrame(0)); EXPECT_GT(NumRtpPackets(), 0); @@ -2258,12 +2252,12 @@ TEST_F(WebRtcVideoChannelBaseTest, SetSendSsrcAfterSetCodecs) { TEST_F(WebRtcVideoChannelBaseTest, SetSink) { RtpPacketReceived packet; packet.SetSsrc(kSsrc); - channel_->SetDefaultSink(NULL); + receive_channel_->SetDefaultSink(NULL); EXPECT_TRUE(SetDefaultCodec()); EXPECT_TRUE(SetSend(true)); EXPECT_EQ(0, renderer_.num_rendered_frames()); - channel_->SetDefaultSink(&renderer_); - channel_->OnPacketReceived(packet); + receive_channel_->SetDefaultSink(&renderer_); + receive_channel_->OnPacketReceived(packet); SendFrame(); EXPECT_FRAME(1, kVideoWidth, kVideoHeight); } @@ -2272,7 +2266,7 @@ TEST_F(WebRtcVideoChannelBaseTest, SetSink) { TEST_F(WebRtcVideoChannelBaseTest, AddRemoveSendStreams) { EXPECT_TRUE(SetOneCodec(DefaultCodec())); EXPECT_TRUE(SetSend(true)); - channel_->SetDefaultSink(&renderer_); + receive_channel_->SetDefaultSink(&renderer_); SendFrame(); EXPECT_FRAME(1, kVideoWidth, kVideoHeight); EXPECT_GT(NumRtpPackets(), 0); @@ -2287,7 +2281,8 @@ TEST_F(WebRtcVideoChannelBaseTest, AddRemoveSendStreams) { EXPECT_TRUE( send_channel_->AddSendStream(cricket::StreamParams::CreateLegacy(789u))); - EXPECT_TRUE(channel_->SetVideoSend(789u, nullptr, frame_forwarder_.get())); + EXPECT_TRUE( + send_channel_->SetVideoSend(789u, nullptr, frame_forwarder_.get())); EXPECT_EQ(rtp_packets, NumRtpPackets()); // Wait 30ms to guarantee the engine does not drop the frame. EXPECT_TRUE(WaitAndSendFrame(30)); @@ -2302,17 +2297,17 @@ TEST_F(WebRtcVideoChannelBaseTest, AddRemoveSendStreams) { TEST_F(WebRtcVideoChannelBaseTest, SimulateConference) { cricket::FakeVideoRenderer renderer1, renderer2; EXPECT_TRUE(SetDefaultCodec()); - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(DefaultCodec()); parameters.conference_mode = true; - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); EXPECT_TRUE(SetSend(true)); EXPECT_TRUE( receive_channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1))); EXPECT_TRUE( receive_channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(2))); - EXPECT_TRUE(channel_->SetSink(1, &renderer1)); - EXPECT_TRUE(channel_->SetSink(2, &renderer2)); + EXPECT_TRUE(receive_channel_->SetSink(1, &renderer1)); + EXPECT_TRUE(receive_channel_->SetSink(2, &renderer2)); EXPECT_EQ(0, renderer1.num_rendered_frames()); EXPECT_EQ(0, renderer2.num_rendered_frames()); std::vector ssrcs; @@ -2343,7 +2338,7 @@ TEST_F(WebRtcVideoChannelBaseTest, DISABLED_AddRemoveCapturer) { const int time_between_send_ms = VideoFormat::FpsToInterval(kFramerate); EXPECT_TRUE(SetOneCodec(codec)); EXPECT_TRUE(SetSend(true)); - channel_->SetDefaultSink(&renderer_); + receive_channel_->SetDefaultSink(&renderer_); EXPECT_EQ(0, renderer_.num_rendered_frames()); SendFrame(); EXPECT_FRAME(1, kVideoWidth, kVideoHeight); @@ -2357,11 +2352,11 @@ TEST_F(WebRtcVideoChannelBaseTest, DISABLED_AddRemoveCapturer) { // test which is related to screencast logic. 
VideoOptions video_options; video_options.is_screencast = true; - channel_->SetVideoSend(kSsrc, &video_options, nullptr); + send_channel_->SetVideoSend(kSsrc, &video_options, nullptr); int captured_frames = 1; for (int iterations = 0; iterations < 2; ++iterations) { - EXPECT_TRUE(channel_->SetVideoSend(kSsrc, nullptr, &frame_forwarder)); + EXPECT_TRUE(send_channel_->SetVideoSend(kSsrc, nullptr, &frame_forwarder)); time_controller_.AdvanceTime(TimeDelta::Millis(time_between_send_ms)); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); @@ -2375,7 +2370,7 @@ TEST_F(WebRtcVideoChannelBaseTest, DISABLED_AddRemoveCapturer) { EXPECT_EQ(360, renderer_.height()); captured_frames = renderer_.num_rendered_frames() + 1; EXPECT_FALSE(renderer_.black_frame()); - EXPECT_TRUE(channel_->SetVideoSend(kSsrc, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(kSsrc, nullptr, nullptr)); // Make sure a black frame was generated. // The black frame should have the resolution of the previous frame to // prevent expensive encoder reconfigurations. @@ -2401,7 +2396,7 @@ TEST_F(WebRtcVideoChannelBaseTest, DISABLED_AddRemoveCapturer) { TEST_F(WebRtcVideoChannelBaseTest, RemoveCapturerWithoutAdd) { EXPECT_TRUE(SetOneCodec(DefaultCodec())); EXPECT_TRUE(SetSend(true)); - channel_->SetDefaultSink(&renderer_); + receive_channel_->SetDefaultSink(&renderer_); EXPECT_EQ(0, renderer_.num_rendered_frames()); SendFrame(); EXPECT_FRAME(1, kVideoWidth, kVideoHeight); @@ -2409,10 +2404,10 @@ TEST_F(WebRtcVideoChannelBaseTest, RemoveCapturerWithoutAdd) { // tightly. time_controller_.AdvanceTime(kFrameDuration); // Remove the capturer. - EXPECT_TRUE(channel_->SetVideoSend(kSsrc, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(kSsrc, nullptr, nullptr)); // No capturer was added, so this SetVideoSend shouldn't do anything. - EXPECT_TRUE(channel_->SetVideoSend(kSsrc, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(kSsrc, nullptr, nullptr)); time_controller_.AdvanceTime(TimeDelta::Millis(300)); // Verify no more frames were sent. EXPECT_EQ(1, renderer_.num_rendered_frames()); @@ -2423,16 +2418,16 @@ TEST_F(WebRtcVideoChannelBaseTest, AddRemoveCapturerMultipleSources) { // Set up the stream associated with the engine. EXPECT_TRUE(receive_channel_->AddRecvStream( cricket::StreamParams::CreateLegacy(kSsrc))); - EXPECT_TRUE(channel_->SetSink(kSsrc, &renderer_)); + EXPECT_TRUE(receive_channel_->SetSink(kSsrc, &renderer_)); cricket::VideoFormat capture_format( kVideoWidth, kVideoHeight, cricket::VideoFormat::FpsToInterval(kFramerate), cricket::FOURCC_I420); // Set up additional stream 1. cricket::FakeVideoRenderer renderer1; - EXPECT_FALSE(channel_->SetSink(1, &renderer1)); + EXPECT_FALSE(receive_channel_->SetSink(1, &renderer1)); EXPECT_TRUE( receive_channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1))); - EXPECT_TRUE(channel_->SetSink(1, &renderer1)); + EXPECT_TRUE(receive_channel_->SetSink(1, &renderer1)); EXPECT_TRUE( send_channel_->AddSendStream(cricket::StreamParams::CreateLegacy(1))); @@ -2442,10 +2437,10 @@ TEST_F(WebRtcVideoChannelBaseTest, AddRemoveCapturerMultipleSources) { // Set up additional stream 2. 
cricket::FakeVideoRenderer renderer2; - EXPECT_FALSE(channel_->SetSink(2, &renderer2)); + EXPECT_FALSE(receive_channel_->SetSink(2, &renderer2)); EXPECT_TRUE( receive_channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(2))); - EXPECT_TRUE(channel_->SetSink(2, &renderer2)); + EXPECT_TRUE(receive_channel_->SetSink(2, &renderer2)); EXPECT_TRUE( send_channel_->AddSendStream(cricket::StreamParams::CreateLegacy(2))); webrtc::test::FrameForwarder frame_forwarder2; @@ -2455,8 +2450,8 @@ TEST_F(WebRtcVideoChannelBaseTest, AddRemoveCapturerMultipleSources) { // A limitation in the lmi implementation requires that SetVideoSend() is // called after SetOneCodec(). // TODO(hellner): this seems like an unnecessary constraint, fix it. - EXPECT_TRUE(channel_->SetVideoSend(1, nullptr, &frame_forwarder1)); - EXPECT_TRUE(channel_->SetVideoSend(2, nullptr, &frame_forwarder2)); + EXPECT_TRUE(send_channel_->SetVideoSend(1, nullptr, &frame_forwarder1)); + EXPECT_TRUE(send_channel_->SetVideoSend(2, nullptr, &frame_forwarder2)); EXPECT_TRUE(SetSend(true)); // Test capturer associated with engine. const int kTestWidth = 160; @@ -2473,11 +2468,11 @@ TEST_F(WebRtcVideoChannelBaseTest, AddRemoveCapturerMultipleSources) { time_controller_.AdvanceTime(kFrameDuration); EXPECT_FRAME_ON_RENDERER(renderer2, 1, kTestWidth, kTestHeight); // Successfully remove the capturer. - EXPECT_TRUE(channel_->SetVideoSend(kSsrc, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(kSsrc, nullptr, nullptr)); // The capturers must be unregistered here as it runs out of it's scope // next. - EXPECT_TRUE(channel_->SetVideoSend(1, nullptr, nullptr)); - EXPECT_TRUE(channel_->SetVideoSend(2, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(1, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(2, nullptr, nullptr)); } // Tests empty StreamParams is rejected. @@ -2534,12 +2529,12 @@ TEST_F(WebRtcVideoChannelBaseTest, TwoStreamsSendAndReceive) { #if defined(RTC_ENABLE_VP9) TEST_F(WebRtcVideoChannelBaseTest, RequestEncoderFallback) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP9")); parameters.codecs.push_back(GetEngineCodec("VP8")); - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); - absl::optional codec = channel_->GetSendCodec(); + absl::optional codec = send_channel_->GetSendCodec(); ASSERT_TRUE(codec); EXPECT_EQ("VP9", codec->name); @@ -2547,25 +2542,25 @@ TEST_F(WebRtcVideoChannelBaseTest, RequestEncoderFallback) { // the current thread), hence the ProcessMessages call. SendImpl()->RequestEncoderFallback(); time_controller_.AdvanceTime(kFrameDuration); - codec = channel_->GetSendCodec(); + codec = send_channel_->GetSendCodec(); ASSERT_TRUE(codec); EXPECT_EQ("VP8", codec->name); // No other codec to fall back to, keep using VP8. 
SendImpl()->RequestEncoderFallback(); time_controller_.AdvanceTime(kFrameDuration); - codec = channel_->GetSendCodec(); + codec = send_channel_->GetSendCodec(); ASSERT_TRUE(codec); EXPECT_EQ("VP8", codec->name); } TEST_F(WebRtcVideoChannelBaseTest, RequestEncoderSwitchDefaultFallback) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP9")); parameters.codecs.push_back(GetEngineCodec("VP8")); - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); - absl::optional codec = channel_->GetSendCodec(); + absl::optional codec = send_channel_->GetSendCodec(); ASSERT_TRUE(codec); EXPECT_EQ("VP9", codec->name); @@ -2577,7 +2572,7 @@ TEST_F(WebRtcVideoChannelBaseTest, RequestEncoderSwitchDefaultFallback) { // Requested encoder is not available. Default fallback is allowed. Switch to // the next negotiated codec, VP8. - codec = channel_->GetSendCodec(); + codec = send_channel_->GetSendCodec(); ASSERT_TRUE(codec); EXPECT_EQ("VP8", codec->name); } @@ -2586,12 +2581,12 @@ TEST_F(WebRtcVideoChannelBaseTest, RequestEncoderSwitchStrictPreference) { VideoCodec vp9 = GetEngineCodec("VP9"); vp9.params["profile-id"] = "0"; - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(vp9); - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); - absl::optional codec = channel_->GetSendCodec(); + absl::optional codec = send_channel_->GetSendCodec(); ASSERT_TRUE(codec); EXPECT_EQ("VP8", codec->name); @@ -2602,7 +2597,7 @@ TEST_F(WebRtcVideoChannelBaseTest, RequestEncoderSwitchStrictPreference) { // VP9 profile_id=1 is not available. Default fallback is not allowed. Switch // is not performed. - codec = channel_->GetSendCodec(); + codec = send_channel_->GetSendCodec(); ASSERT_TRUE(codec); EXPECT_EQ("VP8", codec->name); @@ -2612,17 +2607,17 @@ TEST_F(WebRtcVideoChannelBaseTest, RequestEncoderSwitchStrictPreference) { time_controller_.AdvanceTime(kFrameDuration); // VP9 profile_id=0 is available. Switch encoder. 
- codec = channel_->GetSendCodec(); + codec = send_channel_->GetSendCodec(); ASSERT_TRUE(codec); EXPECT_EQ("VP9", codec->name); } TEST_F(WebRtcVideoChannelBaseTest, SendCodecIsMovedToFrontInRtpParameters) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP9")); parameters.codecs.push_back(GetEngineCodec("VP8")); - EXPECT_TRUE(channel_->SetSendParameters(parameters)); - channel_->SetVideoCodecSwitchingEnabled(true); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); + send_channel_->SetVideoCodecSwitchingEnabled(true); auto send_codecs = send_channel_->GetRtpSendParameters(kSsrc).codecs; ASSERT_EQ(send_codecs.size(), 2u); @@ -2655,23 +2650,36 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { #endif fake_call_.reset(new FakeCall(&field_trials_)); - channel_.reset(engine_.CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, fake_call_.get(), GetMediaConfig(), - VideoOptions(), webrtc::CryptoOptions(), - video_bitrate_allocator_factory_.get())); - send_channel_ = channel_->AsVideoSendChannel(); - receive_channel_ = channel_->AsVideoReceiveChannel(); + send_channel_ = engine_.CreateSendChannel( + fake_call_.get(), GetMediaConfig(), VideoOptions(), + webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get()); + receive_channel_ = + engine_.CreateReceiveChannel(fake_call_.get(), GetMediaConfig(), + VideoOptions(), webrtc::CryptoOptions()); + send_channel_->SetSsrcListChangedCallback( + [receive_channel = + receive_channel_.get()](const std::set& choices) { + receive_channel->ChooseReceiverReportSsrc(choices); + }); + send_channel_->SetSendCodecChangedCallback([this]() { + receive_channel_->SetReceiverFeedbackParameters( + send_channel_->SendCodecHasLntf(), send_channel_->SendCodecHasNack(), + send_channel_->SendCodecRtcpMode(), + send_channel_->SendCodecRtxTime()); + }); send_channel_->OnReadyToSend(true); receive_channel_->SetReceive(true); last_ssrc_ = 123; send_parameters_.codecs = engine_.send_codecs(); recv_parameters_.codecs = engine_.recv_codecs(); - ASSERT_TRUE(channel_->SetSendParameters(send_parameters_)); + ASSERT_TRUE(send_channel_->SetSendParameters(send_parameters_)); } void TearDown() override { - channel_->SetInterface(nullptr); - channel_ = nullptr; + send_channel_->SetInterface(nullptr); + receive_channel_->SetInterface(nullptr); + send_channel_.reset(); + receive_channel_.reset(); fake_call_ = nullptr; } @@ -2684,17 +2692,13 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { WebRtcVideoSendChannel* SendImpl() { // Note that this function requires intimate knowledge of how the channel // was created. - return static_cast( - static_cast(channel_.get()) - ->SendImplForTesting()); + return static_cast(send_channel_.get()); } // Casts a shim channel to a webrtc::Transport. Used once. 
- webrtc::Transport* ChannelImplAsTransport(VideoMediaChannel* channel) { - return static_cast( - static_cast( - static_cast(channel) - ->SendImplForTesting())); + webrtc::Transport* ChannelImplAsTransport( + cricket::VideoMediaSendChannelInterface* channel) { + return static_cast(channel)->transport(); } cricket::VideoCodec GetEngineCodec(const std::string& name) { @@ -2763,7 +2767,7 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { codecs[0].params[kCodecParamMinBitrate] = min_bitrate_kbps; codecs[0].params[kCodecParamStartBitrate] = start_bitrate_kbps; codecs[0].params[kCodecParamMaxBitrate] = max_bitrate_kbps; - EXPECT_TRUE(channel_->SetSendParameters(send_parameters_)); + EXPECT_TRUE(send_channel_->SetSendParameters(send_parameters_)); } void ExpectSetBitrateParameters(int min_bitrate_bps, @@ -2789,7 +2793,7 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { EXPECT_TRUE(send_channel_->AddSendStream( cricket::StreamParams::CreateLegacy(kSsrc))); send_parameters_.extmap_allow_mixed = extmap_allow_mixed; - EXPECT_TRUE(channel_->SetSendParameters(send_parameters_)); + EXPECT_TRUE(send_channel_->SetSendParameters(send_parameters_)); const webrtc::VideoSendStream::Config& config = fake_call_->GetVideoSendStreams()[0]->GetConfig(); EXPECT_EQ(extmap_allow_mixed, config.rtp.extmap_allow_mixed); @@ -2809,9 +2813,9 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { void TestSetSendRtpHeaderExtensions(const std::string& ext_uri) { // Enable extension. const int id = 1; - cricket::VideoSendParameters parameters = send_parameters_; + cricket::VideoSenderParameters parameters = send_parameters_; parameters.extensions.push_back(RtpExtension(ext_uri, id)); - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); FakeVideoSendStream* send_stream = AddSendStream(cricket::StreamParams::CreateLegacy(123)); @@ -2820,17 +2824,17 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { EXPECT_EQ(id, send_stream->GetConfig().rtp.extensions[0].id); EXPECT_EQ(ext_uri, send_stream->GetConfig().rtp.extensions[0].uri); // Verify call with same set of extensions returns true. - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); // Verify that existing RTP header extensions can be removed. - EXPECT_TRUE(channel_->SetSendParameters(send_parameters_)); + EXPECT_TRUE(send_channel_->SetSendParameters(send_parameters_)); ASSERT_EQ(1u, fake_call_->GetVideoSendStreams().size()); send_stream = fake_call_->GetVideoSendStreams()[0]; EXPECT_TRUE(send_stream->GetConfig().rtp.extensions.empty()); // Verify that adding receive RTP header extensions adds them for existing // streams. - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); send_stream = fake_call_->GetVideoSendStreams()[0]; ASSERT_EQ(1u, send_stream->GetConfig().rtp.extensions.size()); EXPECT_EQ(id, send_stream->GetConfig().rtp.extensions[0].id); @@ -2840,16 +2844,17 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { void TestSetRecvRtpHeaderExtensions(const std::string& ext_uri) { // Enable extension. 
const int id = 1; - cricket::VideoRecvParameters parameters = recv_parameters_; + cricket::VideoReceiverParameters parameters = recv_parameters_; parameters.extensions.push_back(RtpExtension(ext_uri, id)); - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); AddRecvStream(cricket::StreamParams::CreateLegacy(123)); - EXPECT_THAT(channel_->GetRtpReceiveParameters(123).header_extensions, - ElementsAre(RtpExtension(ext_uri, id))); + EXPECT_THAT( + receive_channel_->GetRtpReceiveParameters(123).header_extensions, + ElementsAre(RtpExtension(ext_uri, id))); // Verify call with same set of extensions returns true. - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); // Verify that SetRecvRtpHeaderExtensions doesn't implicitly add them for // senders. @@ -2858,14 +2863,15 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { .rtp.extensions.empty()); // Verify that existing RTP header extensions can be removed. - EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_)); - EXPECT_THAT(channel_->GetRtpReceiveParameters(123).header_extensions, - IsEmpty()); + EXPECT_TRUE(receive_channel_->SetRecvParameters(recv_parameters_)); + EXPECT_THAT( + receive_channel_->GetRtpReceiveParameters(123).header_extensions, + IsEmpty()); // Verify that adding receive RTP header extensions adds them for existing // streams. - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); - EXPECT_EQ(channel_->GetRtpReceiveParameters(123).header_extensions, + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); + EXPECT_EQ(receive_channel_->GetRtpReceiveParameters(123).header_extensions, parameters.extensions); } @@ -2873,10 +2879,10 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { AssignDefaultCodec(); VerifyCodecHasDefaultFeedbackParams(*default_codec_, expect_lntf_enabled); - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs = engine_.send_codecs(); - EXPECT_TRUE(channel_->SetSendParameters(parameters)); - EXPECT_TRUE(channel_->SetSend(true)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSend(true)); // Send side. FakeVideoSendStream* send_stream = @@ -2891,7 +2897,7 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { void TestExtensionFilter(const std::vector& extensions, const std::string& expected_extension) { - cricket::VideoSendParameters parameters = send_parameters_; + cricket::VideoSenderParameters parameters = send_parameters_; int expected_id = -1; int id = 1; for (const std::string& extension : extensions) { @@ -2899,7 +2905,7 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { expected_id = id; parameters.extensions.push_back(RtpExtension(extension, id++)); } - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); FakeVideoSendStream* send_stream = AddSendStream(cricket::StreamParams::CreateLegacy(123)); @@ -2925,7 +2931,7 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { bool enabled) { cricket::VideoOptions options; options.video_noise_reduction = enabled; - EXPECT_TRUE(channel_->SetVideoSend(ssrc, &options, frame_forwarder)); + EXPECT_TRUE(send_channel_->SetVideoSend(ssrc, &options, frame_forwarder)); // Options only take effect on the next frame. 
frame_forwarder->IncomingCapturedFrame(frame_source_.GetFrame()); @@ -2964,9 +2970,9 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { void SetAndExpectMaxBitrate(int global_max, int stream_max, int expected_encoder_bitrate) { - VideoSendParameters limited_send_params = send_parameters_; + VideoSenderParameters limited_send_params = send_parameters_; limited_send_params.max_bandwidth_bps = global_max; - EXPECT_TRUE(channel_->SetSendParameters(limited_send_params)); + EXPECT_TRUE(send_channel_->SetSendParameters(limited_send_params)); webrtc::RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); @@ -2998,11 +3004,10 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { cricket::FakeFrameSource frame_source_; std::unique_ptr fake_call_; - std::unique_ptr channel_; - VideoMediaSendChannelInterface* send_channel_; - VideoMediaReceiveChannelInterface* receive_channel_; - cricket::VideoSendParameters send_parameters_; - cricket::VideoRecvParameters recv_parameters_; + std::unique_ptr send_channel_; + std::unique_ptr receive_channel_; + cricket::VideoSenderParameters send_parameters_; + cricket::VideoReceiverParameters recv_parameters_; uint32_t last_ssrc_; }; @@ -3021,12 +3026,12 @@ TEST_F(WebRtcVideoChannelTest, SetsSyncGroupFromSyncLabel) { } TEST_F(WebRtcVideoChannelTest, RecvStreamWithSimAndRtx) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs = engine_.send_codecs(); - EXPECT_TRUE(channel_->SetSendParameters(parameters)); - EXPECT_TRUE(channel_->SetSend(true)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSend(true)); parameters.conference_mode = true; - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); // Send side. const std::vector ssrcs = MAKE_VECTOR(kSsrcs1); @@ -3160,7 +3165,7 @@ TEST_F(WebRtcVideoChannelTest, IdenticalSendExtensionsDoesntRecreateStream) { send_parameters_.extensions.push_back( RtpExtension(RtpExtension::kVideoRotationUri, kVideoRotationId)); - EXPECT_TRUE(channel_->SetSendParameters(send_parameters_)); + EXPECT_TRUE(send_channel_->SetSendParameters(send_parameters_)); FakeVideoSendStream* send_stream = AddSendStream(cricket::StreamParams::CreateLegacy(123)); @@ -3170,13 +3175,13 @@ TEST_F(WebRtcVideoChannelTest, IdenticalSendExtensionsDoesntRecreateStream) { // Setting the same extensions (even if in different order) shouldn't // reallocate the stream. absl::c_reverse(send_parameters_.extensions); - EXPECT_TRUE(channel_->SetSendParameters(send_parameters_)); + EXPECT_TRUE(send_channel_->SetSendParameters(send_parameters_)); EXPECT_EQ(1, fake_call_->GetNumCreatedSendStreams()); // Setting different extensions should recreate the stream. 
send_parameters_.extensions.resize(1); - EXPECT_TRUE(channel_->SetSendParameters(send_parameters_)); + EXPECT_TRUE(send_channel_->SetSendParameters(send_parameters_)); EXPECT_EQ(2, fake_call_->GetNumCreatedSendStreams()); } @@ -3190,7 +3195,7 @@ TEST_F(WebRtcVideoChannelTest, RtpExtension(kUnsupportedExtensionName, kUnsupportedId)); send_parameters_.extensions.push_back( RtpExtension(RtpExtension::kTimestampOffsetUri, kTOffsetId)); - EXPECT_TRUE(channel_->SetSendParameters(send_parameters_)); + EXPECT_TRUE(send_channel_->SetSendParameters(send_parameters_)); FakeVideoSendStream* send_stream = AddSendStream(cricket::StreamParams::CreateLegacy(123)); @@ -3210,7 +3215,7 @@ TEST_F(WebRtcVideoChannelTest, RtpExtension(kUnsupportedExtensionName, kUnsupportedId)); recv_parameters_.extensions.push_back( RtpExtension(RtpExtension::kTimestampOffsetUri, kTOffsetId)); - EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(recv_parameters_)); AddRecvStream(cricket::StreamParams::CreateLegacy(123)); // Only timestamp offset extension is set to receive stream, @@ -3228,7 +3233,7 @@ TEST_F(WebRtcVideoChannelTest, SetSendRtpHeaderExtensionsRejectsIncorrectIds) { for (size_t i = 0; i < arraysize(kIncorrectIds); ++i) { send_parameters_.extensions.push_back( RtpExtension(RtpExtension::kTimestampOffsetUri, kIncorrectIds[i])); - EXPECT_FALSE(channel_->SetSendParameters(send_parameters_)) + EXPECT_FALSE(send_channel_->SetSendParameters(send_parameters_)) << "Bad extension id '" << kIncorrectIds[i] << "' accepted."; } } @@ -3238,7 +3243,7 @@ TEST_F(WebRtcVideoChannelTest, SetRecvRtpHeaderExtensionsRejectsIncorrectIds) { for (size_t i = 0; i < arraysize(kIncorrectIds); ++i) { recv_parameters_.extensions.push_back( RtpExtension(RtpExtension::kTimestampOffsetUri, kIncorrectIds[i])); - EXPECT_FALSE(channel_->SetRecvParameters(recv_parameters_)) + EXPECT_FALSE(receive_channel_->SetRecvParameters(recv_parameters_)) << "Bad extension id '" << kIncorrectIds[i] << "' accepted."; } } @@ -3249,14 +3254,14 @@ TEST_F(WebRtcVideoChannelTest, SetSendRtpHeaderExtensionsRejectsDuplicateIds) { RtpExtension(RtpExtension::kTimestampOffsetUri, id)); send_parameters_.extensions.push_back( RtpExtension(RtpExtension::kAbsSendTimeUri, id)); - EXPECT_FALSE(channel_->SetSendParameters(send_parameters_)); + EXPECT_FALSE(send_channel_->SetSendParameters(send_parameters_)); // Duplicate entries are also not supported. send_parameters_.extensions.clear(); send_parameters_.extensions.push_back( RtpExtension(RtpExtension::kTimestampOffsetUri, id)); send_parameters_.extensions.push_back(send_parameters_.extensions.back()); - EXPECT_FALSE(channel_->SetSendParameters(send_parameters_)); + EXPECT_FALSE(send_channel_->SetSendParameters(send_parameters_)); } TEST_F(WebRtcVideoChannelTest, SetRecvRtpHeaderExtensionsRejectsDuplicateIds) { @@ -3265,21 +3270,21 @@ TEST_F(WebRtcVideoChannelTest, SetRecvRtpHeaderExtensionsRejectsDuplicateIds) { RtpExtension(RtpExtension::kTimestampOffsetUri, id)); recv_parameters_.extensions.push_back( RtpExtension(RtpExtension::kAbsSendTimeUri, id)); - EXPECT_FALSE(channel_->SetRecvParameters(recv_parameters_)); + EXPECT_FALSE(receive_channel_->SetRecvParameters(recv_parameters_)); // Duplicate entries are also not supported. 
recv_parameters_.extensions.clear(); recv_parameters_.extensions.push_back( RtpExtension(RtpExtension::kTimestampOffsetUri, id)); recv_parameters_.extensions.push_back(recv_parameters_.extensions.back()); - EXPECT_FALSE(channel_->SetRecvParameters(recv_parameters_)); + EXPECT_FALSE(receive_channel_->SetRecvParameters(recv_parameters_)); } TEST_F(WebRtcVideoChannelTest, OnPacketReceivedIdentifiesExtensions) { - cricket::VideoRecvParameters parameters = recv_parameters_; + cricket::VideoReceiverParameters parameters = recv_parameters_; parameters.extensions.push_back( RtpExtension(RtpExtension::kVideoRotationUri, /*id=*/1)); - ASSERT_TRUE(channel_->SetRecvParameters(parameters)); + ASSERT_TRUE(receive_channel_->SetRecvParameters(parameters)); webrtc::RtpHeaderExtensionMap extension_map(parameters.extensions); RtpPacketReceived reference_packet(&extension_map); reference_packet.SetExtension( @@ -3327,10 +3332,10 @@ TEST_F(WebRtcVideoChannelTest, LossNotificationCanBeEnabledAndDisabled) { VerifyCodecHasDefaultFeedbackParams(*default_codec_, true); { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs = engine_.send_codecs(); - EXPECT_TRUE(channel_->SetSendParameters(parameters)); - EXPECT_TRUE(channel_->SetSend(true)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSend(true)); } // Start with LNTF enabled. @@ -3342,10 +3347,10 @@ TEST_F(WebRtcVideoChannelTest, LossNotificationCanBeEnabledAndDisabled) { ASSERT_TRUE(recv_stream->GetConfig().rtp.lntf.enabled); // Verify that LNTF is turned off when send(!) codecs without LNTF are set. - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(RemoveFeedbackParams(GetEngineCodec("VP8"))); EXPECT_TRUE(parameters.codecs[0].feedback_params.params().empty()); - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); recv_stream = fake_call_->GetVideoReceiveStreams()[0]; EXPECT_FALSE(recv_stream->GetConfig().rtp.lntf.enabled); send_stream = fake_call_->GetVideoSendStreams()[0]; @@ -3353,7 +3358,7 @@ TEST_F(WebRtcVideoChannelTest, LossNotificationCanBeEnabledAndDisabled) { // Setting the default codecs again, including VP8, turns LNTF back on. parameters.codecs = engine_.send_codecs(); - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); recv_stream = fake_call_->GetVideoReceiveStreams()[0]; EXPECT_TRUE(recv_stream->GetConfig().rtp.lntf.enabled); send_stream = fake_call_->GetVideoSendStreams()[0]; @@ -3364,10 +3369,10 @@ TEST_F(WebRtcVideoChannelTest, NackIsEnabledByDefault) { AssignDefaultCodec(); VerifyCodecHasDefaultFeedbackParams(*default_codec_, false); - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs = engine_.send_codecs(); - EXPECT_TRUE(channel_->SetSendParameters(parameters)); - EXPECT_TRUE(channel_->SetSend(true)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSend(true)); // Send side. FakeVideoSendStream* send_stream = @@ -3392,10 +3397,10 @@ TEST_F(WebRtcVideoChannelTest, NackCanBeEnabledAndDisabled) { EXPECT_GT(send_stream->GetConfig().rtp.nack.rtp_history_ms, 0); // Verify that NACK is turned off when send(!) codecs without NACK are set. 
- cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(RemoveFeedbackParams(GetEngineCodec("VP8"))); EXPECT_TRUE(parameters.codecs[0].feedback_params.params().empty()); - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); recv_stream = fake_call_->GetVideoReceiveStreams()[0]; EXPECT_EQ(0, recv_stream->GetConfig().rtp.nack.rtp_history_ms); send_stream = fake_call_->GetVideoSendStreams()[0]; @@ -3404,7 +3409,7 @@ TEST_F(WebRtcVideoChannelTest, NackCanBeEnabledAndDisabled) { // Verify that NACK is turned on when setting default codecs since the // default codecs have NACK enabled. parameters.codecs = engine_.send_codecs(); - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); recv_stream = fake_call_->GetVideoReceiveStreams()[0]; EXPECT_GT(recv_stream->GetConfig().rtp.nack.rtp_history_ms, 0); send_stream = fake_call_->GetVideoSendStreams()[0]; @@ -3417,10 +3422,10 @@ TEST_F(WebRtcVideoChannelTest, NackCanBeEnabledAndDisabled) { // same source that will be sent later, which just means that we're ready // earlier. TEST_F(WebRtcVideoChannelTest, ReconfiguresEncodersWhenNotSending) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); - channel_->SetSend(false); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); + send_channel_->SetSend(false); FakeVideoSendStream* stream = AddSendStream(); @@ -3433,7 +3438,8 @@ TEST_F(WebRtcVideoChannelTest, ReconfiguresEncodersWhenNotSending) { cricket::FakeFrameSource frame_source(1280, 720, rtc::kNumMicrosecsPerSec / 30); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); // Frame entered, should be reconfigured to new dimensions. 
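The hunks above all follow the same mechanical pattern: the combined channel_ member is replaced by separate send_channel_ and receive_channel_ objects, cricket::VideoSendParameters becomes cricket::VideoSenderParameters, and cricket::VideoRecvParameters becomes cricket::VideoReceiverParameters. A minimal sketch of the resulting calling convention, reusing only the fixture members and helpers visible in these hunks (send_channel_, receive_channel_, GetEngineCodec):

    // Illustrative sketch only; assumes the WebRtcVideoChannelTest fixture
    // members shown in the hunks above.
    cricket::VideoSenderParameters send_params;
    send_params.codecs.push_back(GetEngineCodec("VP8"));
    EXPECT_TRUE(send_channel_->SetSendParameters(send_params));
    EXPECT_TRUE(send_channel_->SetSend(true));

    cricket::VideoReceiverParameters recv_params;
    recv_params.codecs.push_back(GetEngineCodec("VP8"));
    EXPECT_TRUE(receive_channel_->SetRecvParameters(recv_params));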
@@ -3441,15 +3447,15 @@ TEST_F(WebRtcVideoChannelTest, ReconfiguresEncodersWhenNotSending) { EXPECT_EQ(rtc::checked_cast(1280), streams[0].width); EXPECT_EQ(rtc::checked_cast(720), streams[0].height); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } TEST_F(WebRtcVideoChannelTest, UsesCorrectSettingsForScreencast) { static const int kScreenshareMinBitrateKbps = 800; cricket::VideoCodec codec = GetEngineCodec("VP8"); - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(codec); - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); AddSendStream(); webrtc::test::FrameForwarder frame_forwarder; @@ -3457,10 +3463,10 @@ TEST_F(WebRtcVideoChannelTest, UsesCorrectSettingsForScreencast) { rtc::kNumMicrosecsPerSec / 30); VideoOptions min_bitrate_options; min_bitrate_options.screencast_min_bitrate_kbps = kScreenshareMinBitrateKbps; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &min_bitrate_options, - &frame_forwarder)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, &min_bitrate_options, + &frame_forwarder)); - EXPECT_TRUE(channel_->SetSend(true)); + EXPECT_TRUE(send_channel_->SetSend(true)); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); ASSERT_EQ(1u, fake_call_->GetVideoSendStreams().size()); @@ -3479,12 +3485,12 @@ TEST_F(WebRtcVideoChannelTest, UsesCorrectSettingsForScreencast) { EXPECT_EQ(0, encoder_config.min_transmit_bitrate_bps) << "Non-screenshare shouldn't use min-transmit bitrate."; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); EXPECT_EQ(1, send_stream->GetNumberOfSwappedFrames()); VideoOptions screencast_options; screencast_options.is_screencast = true; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &screencast_options, - &frame_forwarder)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, &screencast_options, + &frame_forwarder)); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); // Send stream recreated after option change. 
ASSERT_EQ(2, fake_call_->GetNumCreatedSendStreams()); @@ -3502,14 +3508,14 @@ TEST_F(WebRtcVideoChannelTest, UsesCorrectSettingsForScreencast) { EXPECT_EQ(rtc::checked_cast(1280), streams.front().width); EXPECT_EQ(rtc::checked_cast(720), streams.front().height); EXPECT_FALSE(streams[0].num_temporal_layers.has_value()); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } TEST_F(WebRtcVideoChannelTest, ConferenceModeScreencastConfiguresTemporalLayer) { static const int kConferenceScreencastTemporalBitrateBps = 200 * 1000; send_parameters_.conference_mode = true; - channel_->SetSendParameters(send_parameters_); + send_channel_->SetSendParameters(send_parameters_); AddSendStream(); VideoOptions options; @@ -3517,8 +3523,9 @@ TEST_F(WebRtcVideoChannelTest, webrtc::test::FrameForwarder frame_forwarder; cricket::FakeFrameSource frame_source(1280, 720, rtc::kNumMicrosecsPerSec / 30); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); - EXPECT_TRUE(channel_->SetSend(true)); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + EXPECT_TRUE(send_channel_->SetSend(true)); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); ASSERT_EQ(1u, fake_call_->GetVideoSendStreams().size()); @@ -3538,7 +3545,7 @@ TEST_F(WebRtcVideoChannelTest, EXPECT_EQ(kConferenceScreencastTemporalBitrateBps, streams[0].target_bitrate_bps); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } TEST_F(WebRtcVideoChannelTest, SuspendBelowMinBitrateDisabledByDefault) { @@ -3550,29 +3557,27 @@ TEST_F(WebRtcVideoChannelTest, SetMediaConfigSuspendBelowMinBitrate) { MediaConfig media_config = GetMediaConfig(); media_config.video.suspend_below_min_bitrate = true; - channel_.reset(engine_.CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, fake_call_.get(), media_config, - VideoOptions(), webrtc::CryptoOptions(), - video_bitrate_allocator_factory_.get())); - send_channel_ = channel_->AsVideoSendChannel(); - receive_channel_ = channel_->AsVideoReceiveChannel(); + send_channel_ = engine_.CreateSendChannel( + fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions(), + video_bitrate_allocator_factory_.get()); + receive_channel_ = engine_.CreateReceiveChannel( + fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions()); send_channel_->OnReadyToSend(true); - channel_->SetSendParameters(send_parameters_); + send_channel_->SetSendParameters(send_parameters_); FakeVideoSendStream* stream = AddSendStream(); EXPECT_TRUE(stream->GetConfig().suspend_below_min_bitrate); media_config.video.suspend_below_min_bitrate = false; - channel_.reset(engine_.CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, fake_call_.get(), media_config, - VideoOptions(), webrtc::CryptoOptions(), - video_bitrate_allocator_factory_.get())); - send_channel_ = channel_->AsVideoSendChannel(); - receive_channel_ = channel_->AsVideoReceiveChannel(); + send_channel_ = engine_.CreateSendChannel( + fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions(), + video_bitrate_allocator_factory_.get()); + receive_channel_ = engine_.CreateReceiveChannel( + fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions()); send_channel_->OnReadyToSend(true); - channel_->SetSendParameters(send_parameters_); + send_channel_->SetSendParameters(send_parameters_); stream = 
AddSendStream(); EXPECT_FALSE(stream->GetConfig().suspend_below_min_bitrate); @@ -3586,9 +3591,9 @@ TEST_F(WebRtcVideoChannelTest, Vp8DenoisingEnabledByDefault) { } TEST_F(WebRtcVideoChannelTest, VerifyVp8SpecificSettings) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); // Single-stream settings should apply with RTX as well (verifies that we // check number of regular SSRCs and not StreamParams::ssrcs which contains @@ -3596,8 +3601,9 @@ TEST_F(WebRtcVideoChannelTest, VerifyVp8SpecificSettings) { FakeVideoSendStream* stream = SetUpSimulcast(false, /*with_rtx=*/true); webrtc::test::FrameForwarder frame_forwarder; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); - channel_->SetSend(true); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); + send_channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); @@ -3620,10 +3626,11 @@ TEST_F(WebRtcVideoChannelTest, VerifyVp8SpecificSettings) { EXPECT_TRUE(vp8_settings.automaticResizeOn); EXPECT_TRUE(stream->GetEncoderConfig().frame_drop_enabled); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); stream = SetUpSimulcast(true, /*with_rtx=*/false); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); - channel_->SetSend(true); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); + send_channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); EXPECT_EQ(3u, stream->GetVideoStreams().size()); @@ -3635,7 +3642,8 @@ TEST_F(WebRtcVideoChannelTest, VerifyVp8SpecificSettings) { // In screen-share mode, denoising is forced off. 
VideoOptions options; options.is_screencast = true; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); stream = SetDenoisingOption(last_ssrc_, &frame_forwarder, false); @@ -3653,7 +3661,7 @@ TEST_F(WebRtcVideoChannelTest, VerifyVp8SpecificSettings) { EXPECT_FALSE(vp8_settings.automaticResizeOn); EXPECT_TRUE(stream->GetEncoderConfig().frame_drop_enabled); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } // Test that setting the same options doesn't result in the encoder being @@ -3663,27 +3671,31 @@ TEST_F(WebRtcVideoChannelTest, SetIdenticalOptionsDoesntReconfigureEncoder) { webrtc::test::FrameForwarder frame_forwarder; AddSendStream(); - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front(); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Expect 1 reconfigurations at this point from the initial configuration. EXPECT_EQ(1, send_stream->num_encoder_reconfigurations()); // Set the options one more time and expect no additional reconfigurations. - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); EXPECT_EQ(1, send_stream->num_encoder_reconfigurations()); // Change `options` and expect 2 reconfigurations. options.video_noise_reduction = true; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); EXPECT_EQ(2, send_stream->num_encoder_reconfigurations()); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } class Vp9SettingsTest : public WebRtcVideoChannelTest { @@ -3698,8 +3710,8 @@ class Vp9SettingsTest : public WebRtcVideoChannelTest { protected: void TearDown() override { // Remove references to encoder_factory_ since this will be destroyed - // before channel_ and engine_. - ASSERT_TRUE(channel_->SetSendParameters(send_parameters_)); + // before send_channel_ and engine_. 
+ ASSERT_TRUE(send_channel_->SetSendParameters(send_parameters_)); } }; @@ -3708,15 +3720,16 @@ TEST_F(Vp9SettingsTest, VerifyVp9SpecificSettings) { webrtc::SdpVideoFormat("VP9", webrtc::SdpVideoFormat::Parameters(), {ScalabilityMode::kL1T1, ScalabilityMode::kL2T1})); - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP9")); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); FakeVideoSendStream* stream = SetUpSimulcast(false, /*with_rtx=*/false); webrtc::test::FrameForwarder frame_forwarder; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); - channel_->SetSend(true); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); + send_channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); @@ -3779,7 +3792,8 @@ TEST_F(Vp9SettingsTest, VerifyVp9SpecificSettings) { // In screen-share mode, denoising is forced off. VideoOptions options; options.is_screencast = true; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); stream = SetDenoisingOption(last_ssrc_, &frame_forwarder, false); @@ -3797,13 +3811,13 @@ TEST_F(Vp9SettingsTest, VerifyVp9SpecificSettings) { EXPECT_TRUE(stream->GetEncoderConfig().frame_drop_enabled); EXPECT_FALSE(vp9_settings.automaticResizeOn); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } TEST_F(Vp9SettingsTest, MultipleSsrcsEnablesSvc) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP9")); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); std::vector ssrcs = MAKE_VECTOR(kSsrcs3); @@ -3813,8 +3827,8 @@ TEST_F(Vp9SettingsTest, MultipleSsrcsEnablesSvc) { webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); webrtc::test::FrameForwarder frame_forwarder; - EXPECT_TRUE(channel_->SetVideoSend(ssrcs[0], nullptr, &frame_forwarder)); - channel_->SetSend(true); + EXPECT_TRUE(send_channel_->SetVideoSend(ssrcs[0], nullptr, &frame_forwarder)); + send_channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); @@ -3826,13 +3840,13 @@ TEST_F(Vp9SettingsTest, MultipleSsrcsEnablesSvc) { EXPECT_EQ(vp9_settings.numberOfSpatialLayers, kNumSpatialLayers); EXPECT_EQ(vp9_settings.numberOfTemporalLayers, kNumTemporalLayers); - EXPECT_TRUE(channel_->SetVideoSend(ssrcs[0], nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(ssrcs[0], nullptr, nullptr)); } TEST_F(Vp9SettingsTest, SvcModeCreatesSingleRtpStream) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP9")); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); std::vector ssrcs = MAKE_VECTOR(kSsrcs3); @@ -3845,8 +3859,8 @@ TEST_F(Vp9SettingsTest, SvcModeCreatesSingleRtpStream) { EXPECT_EQ(1u, config.rtp.ssrcs.size()); webrtc::test::FrameForwarder frame_forwarder; - EXPECT_TRUE(channel_->SetVideoSend(ssrcs[0], nullptr, &frame_forwarder)); - channel_->SetSend(true); + EXPECT_TRUE(send_channel_->SetVideoSend(ssrcs[0], nullptr, &frame_forwarder)); + 
send_channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); @@ -3856,13 +3870,13 @@ TEST_F(Vp9SettingsTest, SvcModeCreatesSingleRtpStream) { const size_t kNumSpatialLayers = ssrcs.size(); EXPECT_EQ(vp9_settings.numberOfSpatialLayers, kNumSpatialLayers); - EXPECT_TRUE(channel_->SetVideoSend(ssrcs[0], nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(ssrcs[0], nullptr, nullptr)); } TEST_F(Vp9SettingsTest, AllEncodingParametersCopied) { - cricket::VideoSendParameters send_parameters; + cricket::VideoSenderParameters send_parameters; send_parameters.codecs.push_back(GetEngineCodec("VP9")); - ASSERT_TRUE(channel_->SetSendParameters(send_parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(send_parameters)); const size_t kNumSpatialLayers = 3; std::vector ssrcs = MAKE_VECTOR(kSsrcs3); @@ -3893,9 +3907,9 @@ TEST_F(Vp9SettingsTest, AllEncodingParametersCopied) { } TEST_F(Vp9SettingsTest, MaxBitrateDeterminedBySvcResolutions) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP9")); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); std::vector ssrcs = MAKE_VECTOR(kSsrcs3); @@ -3905,8 +3919,8 @@ TEST_F(Vp9SettingsTest, MaxBitrateDeterminedBySvcResolutions) { webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); webrtc::test::FrameForwarder frame_forwarder; - EXPECT_TRUE(channel_->SetVideoSend(ssrcs[0], nullptr, &frame_forwarder)); - channel_->SetSend(true); + EXPECT_TRUE(send_channel_->SetVideoSend(ssrcs[0], nullptr, &frame_forwarder)); + send_channel_->SetSend(true); // Send frame at 1080p@30fps. frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame( @@ -3921,7 +3935,7 @@ TEST_F(Vp9SettingsTest, MaxBitrateDeterminedBySvcResolutions) { EXPECT_EQ(vp9_settings.numberOfSpatialLayers, kNumSpatialLayers); EXPECT_EQ(vp9_settings.numberOfTemporalLayers, kNumTemporalLayers); - EXPECT_TRUE(channel_->SetVideoSend(ssrcs[0], nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(ssrcs[0], nullptr, nullptr)); // VideoStream max bitrate should be more than legacy 2.5Mbps default stream // cap. @@ -3932,16 +3946,16 @@ TEST_F(Vp9SettingsTest, MaxBitrateDeterminedBySvcResolutions) { // Update send parameters to 2Mbps, this should cap the max bitrate of the // stream. parameters.max_bandwidth_bps = 2000000; - channel_->SetSendParameters(parameters); + send_channel_->SetSendParameters(parameters); EXPECT_THAT( stream->GetVideoStreams(), ElementsAre(Field(&webrtc::VideoStream::max_bitrate_bps, Eq(2000000)))); } TEST_F(Vp9SettingsTest, Vp9SvcTargetBitrateCappedByMax) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP9")); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); std::vector ssrcs = MAKE_VECTOR(kSsrcs3); @@ -3951,8 +3965,8 @@ TEST_F(Vp9SettingsTest, Vp9SvcTargetBitrateCappedByMax) { webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); webrtc::test::FrameForwarder frame_forwarder; - EXPECT_TRUE(channel_->SetVideoSend(ssrcs[0], nullptr, &frame_forwarder)); - channel_->SetSend(true); + EXPECT_TRUE(send_channel_->SetVideoSend(ssrcs[0], nullptr, &frame_forwarder)); + send_channel_->SetSend(true); // Set up 3 spatial layers with 720p, which should result in a max bitrate of // 2084 kbps. 
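Several hunks also replace the old construction path, engine_.CreateMediaChannel(cricket::MediaChannel::Role::kBoth, ...) followed by AsVideoSendChannel()/AsVideoReceiveChannel(), with direct construction of the two halves, as in the SetMediaConfigSuspendBelowMinBitrate hunk above and the adaptation tests below. A sketch of that pattern, with argument values mirroring the fixture:

    // Sketch of the split construction used by the updated tests; assumes the
    // fixture's fake_call_, video_bitrate_allocator_factory_ and GetMediaConfig().
    MediaConfig media_config = GetMediaConfig();
    send_channel_ = engine_.CreateSendChannel(
        fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions(),
        video_bitrate_allocator_factory_.get());
    receive_channel_ = engine_.CreateReceiveChannel(
        fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions());
    send_channel_->OnReadyToSend(true);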
@@ -3968,7 +3982,7 @@ TEST_F(Vp9SettingsTest, Vp9SvcTargetBitrateCappedByMax) { EXPECT_EQ(vp9_settings.numberOfSpatialLayers, kNumSpatialLayers); EXPECT_EQ(vp9_settings.numberOfTemporalLayers, kNumTemporalLayers); - EXPECT_TRUE(channel_->SetVideoSend(ssrcs[0], nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(ssrcs[0], nullptr, nullptr)); // VideoStream both min and max bitrate should be lower than legacy 2.5Mbps // default stream cap. @@ -3992,15 +4006,16 @@ class Vp9SettingsTestWithFieldTrial void VerifySettings(int num_spatial_layers, int num_temporal_layers, webrtc::InterLayerPredMode interLayerPred) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP9")); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); FakeVideoSendStream* stream = SetUpSimulcast(false, /*with_rtx=*/false); webrtc::test::FrameForwarder frame_forwarder; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); - channel_->SetSend(true); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); + send_channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); @@ -4010,7 +4025,7 @@ class Vp9SettingsTestWithFieldTrial EXPECT_EQ(num_temporal_layers, vp9_settings.numberOfTemporalLayers); EXPECT_EQ(inter_layer_pred_mode_, vp9_settings.interLayerPred); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } const uint8_t num_spatial_layers_; @@ -4103,28 +4118,27 @@ TEST_F(WebRtcVideoChannelTest, DoesNotAdaptOnOveruseWhenScreensharing) { TEST_F(WebRtcVideoChannelTest, PreviousAdaptationDoesNotApplyToScreenshare) { cricket::VideoCodec codec = GetEngineCodec("VP8"); - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(codec); MediaConfig media_config = GetMediaConfig(); media_config.video.enable_cpu_adaptation = true; - channel_.reset(engine_.CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, fake_call_.get(), media_config, - VideoOptions(), webrtc::CryptoOptions(), - video_bitrate_allocator_factory_.get())); - send_channel_ = channel_->AsVideoSendChannel(); - receive_channel_ = channel_->AsVideoReceiveChannel(); + send_channel_ = engine_.CreateSendChannel( + fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions(), + video_bitrate_allocator_factory_.get()); + receive_channel_ = engine_.CreateReceiveChannel( + fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions()); send_channel_->OnReadyToSend(true); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); AddSendStream(); webrtc::test::FrameForwarder frame_forwarder; - ASSERT_TRUE(channel_->SetSend(true)); + ASSERT_TRUE(send_channel_->SetSend(true)); cricket::VideoOptions camera_options; camera_options.is_screencast = false; - channel_->SetVideoSend(last_ssrc_, &camera_options, &frame_forwarder); + send_channel_->SetVideoSend(last_ssrc_, &camera_options, &frame_forwarder); ASSERT_EQ(1u, fake_call_->GetVideoSendStreams().size()); FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front(); @@ -4136,20 +4150,21 @@ TEST_F(WebRtcVideoChannelTest, PreviousAdaptationDoesNotApplyToScreenshare) { // Switch to screen share. Expect no resolution scaling. 
cricket::VideoOptions screenshare_options; screenshare_options.is_screencast = true; - channel_->SetVideoSend(last_ssrc_, &screenshare_options, &frame_forwarder); + send_channel_->SetVideoSend(last_ssrc_, &screenshare_options, + &frame_forwarder); ASSERT_EQ(2, fake_call_->GetNumCreatedSendStreams()); send_stream = fake_call_->GetVideoSendStreams().front(); EXPECT_FALSE(send_stream->resolution_scaling_enabled()); // Switch back to the normal capturer. Expect resolution scaling to be // reenabled. - channel_->SetVideoSend(last_ssrc_, &camera_options, &frame_forwarder); + send_channel_->SetVideoSend(last_ssrc_, &camera_options, &frame_forwarder); send_stream = fake_call_->GetVideoSendStreams().front(); ASSERT_EQ(3, fake_call_->GetNumCreatedSendStreams()); send_stream = fake_call_->GetVideoSendStreams().front(); EXPECT_TRUE(send_stream->resolution_scaling_enabled()); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } // TODO(asapersson): Remove this test when the balanced field trial is removed. @@ -4157,65 +4172,65 @@ void WebRtcVideoChannelTest::TestDegradationPreference( bool resolution_scaling_enabled, bool fps_scaling_enabled) { cricket::VideoCodec codec = GetEngineCodec("VP8"); - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(codec); MediaConfig media_config = GetMediaConfig(); media_config.video.enable_cpu_adaptation = true; - channel_.reset(engine_.CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, fake_call_.get(), media_config, - VideoOptions(), webrtc::CryptoOptions(), - video_bitrate_allocator_factory_.get())); - send_channel_ = channel_->AsVideoSendChannel(); - receive_channel_ = channel_->AsVideoReceiveChannel(); + send_channel_ = engine_.CreateSendChannel( + fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions(), + video_bitrate_allocator_factory_.get()); + receive_channel_ = engine_.CreateReceiveChannel( + fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions()); send_channel_->OnReadyToSend(true); - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); AddSendStream(); webrtc::test::FrameForwarder frame_forwarder; VideoOptions options; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); - EXPECT_TRUE(channel_->SetSend(true)); + EXPECT_TRUE(send_channel_->SetSend(true)); FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front(); EXPECT_EQ(resolution_scaling_enabled, send_stream->resolution_scaling_enabled()); EXPECT_EQ(fps_scaling_enabled, send_stream->framerate_scaling_enabled()); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } void WebRtcVideoChannelTest::TestCpuAdaptation(bool enable_overuse, bool is_screenshare) { cricket::VideoCodec codec = GetEngineCodec("VP8"); - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(codec); MediaConfig media_config = GetMediaConfig(); if (enable_overuse) { media_config.video.enable_cpu_adaptation = true; } - channel_.reset(engine_.CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, fake_call_.get(), media_config, - VideoOptions(), webrtc::CryptoOptions(), - 
video_bitrate_allocator_factory_.get())); - send_channel_ = channel_->AsVideoSendChannel(); - receive_channel_ = channel_->AsVideoReceiveChannel(); + send_channel_ = engine_.CreateSendChannel( + fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions(), + video_bitrate_allocator_factory_.get()); + receive_channel_ = engine_.CreateReceiveChannel( + fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions()); send_channel_->OnReadyToSend(true); - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); AddSendStream(); webrtc::test::FrameForwarder frame_forwarder; VideoOptions options; options.is_screencast = is_screenshare; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); - EXPECT_TRUE(channel_->SetSend(true)); + EXPECT_TRUE(send_channel_->SetSend(true)); FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front(); @@ -4229,7 +4244,7 @@ void WebRtcVideoChannelTest::TestCpuAdaptation(bool enable_overuse, EXPECT_TRUE(send_stream->resolution_scaling_enabled()); EXPECT_FALSE(send_stream->framerate_scaling_enabled()); } - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } TEST_F(WebRtcVideoChannelTest, EstimatesNtpStartTimeCorrectly) { @@ -4237,11 +4252,11 @@ TEST_F(WebRtcVideoChannelTest, EstimatesNtpStartTimeCorrectly) { static const uint32_t kInitialTimestamp = 0xFFFFFFFFu; static const int64_t kInitialNtpTimeMs = 1247891230; static const int kFrameOffsetMs = 20; - EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(recv_parameters_)); FakeVideoReceiveStream* stream = AddRecvStream(); cricket::FakeVideoRenderer renderer; - EXPECT_TRUE(channel_->SetSink(last_ssrc_, &renderer)); + EXPECT_TRUE(receive_channel_->SetSink(last_ssrc_, &renderer)); webrtc::VideoFrame video_frame = webrtc::VideoFrame::Builder() @@ -4269,8 +4284,8 @@ TEST_F(WebRtcVideoChannelTest, EstimatesNtpStartTimeCorrectly) { // Verify that NTP time has been correctly deduced. 
cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); ASSERT_EQ(1u, receive_info.receivers.size()); EXPECT_EQ(kInitialNtpTimeMs, @@ -4279,9 +4294,9 @@ TEST_F(WebRtcVideoChannelTest, EstimatesNtpStartTimeCorrectly) { TEST_F(WebRtcVideoChannelTest, SetDefaultSendCodecs) { AssignDefaultAptRtxTypes(); - ASSERT_TRUE(channel_->SetSendParameters(send_parameters_)); + ASSERT_TRUE(send_channel_->SetSendParameters(send_parameters_)); - absl::optional codec = channel_->GetSendCodec(); + absl::optional codec = send_channel_->GetSendCodec(); ASSERT_TRUE(codec); EXPECT_TRUE(codec->Matches(engine_.send_codecs()[0], &field_trials_)); @@ -4304,9 +4319,9 @@ TEST_F(WebRtcVideoChannelTest, SetDefaultSendCodecs) { } TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithoutPacketization) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); FakeVideoSendStream* stream = AddSendStream(); const webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); @@ -4314,10 +4329,10 @@ TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithoutPacketization) { } TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithPacketization) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.back().packetization = kPacketizationParamRaw; - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); FakeVideoSendStream* stream = AddSendStream(); const webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); @@ -4426,9 +4441,9 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetDefaultRecvCodecsWithSsrc) { // existing video stream instance. TEST_F(WebRtcVideoChannelFlexfecRecvTest, EnablingFlexfecDoesNotRecreateVideoReceiveStream) { - cricket::VideoRecvParameters recv_parameters; + cricket::VideoReceiverParameters recv_parameters; recv_parameters.codecs.push_back(GetEngineCodec("VP8")); - ASSERT_TRUE(channel_->SetRecvParameters(recv_parameters)); + ASSERT_TRUE(receive_channel_->SetRecvParameters(recv_parameters)); AddRecvStream( CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc)); @@ -4444,7 +4459,7 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, // Enable FlexFEC. recv_parameters.codecs.push_back(GetEngineCodec("flexfec-03")); - ASSERT_TRUE(channel_->SetRecvParameters(recv_parameters)); + ASSERT_TRUE(receive_channel_->SetRecvParameters(recv_parameters)); // The count of created streams will remain 2 despite the creation of a new // flexfec stream. The existing receive stream will have been reconfigured @@ -4467,10 +4482,10 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, // stream will be set/cleared as dictated by the configuration change. 
TEST_F(WebRtcVideoChannelFlexfecRecvTest, DisablingFlexfecDoesNotRecreateVideoReceiveStream) { - cricket::VideoRecvParameters recv_parameters; + cricket::VideoReceiverParameters recv_parameters; recv_parameters.codecs.push_back(GetEngineCodec("VP8")); recv_parameters.codecs.push_back(GetEngineCodec("flexfec-03")); - ASSERT_TRUE(channel_->SetRecvParameters(recv_parameters)); + ASSERT_TRUE(receive_channel_->SetRecvParameters(recv_parameters)); AddRecvStream( CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc)); @@ -4488,7 +4503,7 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, // Disable FlexFEC. recv_parameters.codecs.clear(); recv_parameters.codecs.push_back(GetEngineCodec("VP8")); - ASSERT_TRUE(channel_->SetRecvParameters(recv_parameters)); + ASSERT_TRUE(receive_channel_->SetRecvParameters(recv_parameters)); // The count of created streams should remain 2 since the video stream will // have been reconfigured to not reference flexfec and not recreated on // account of the flexfec stream being deleted. @@ -4507,13 +4522,13 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, TEST_F(WebRtcVideoChannelFlexfecRecvTest, DuplicateFlexfecCodecIsDropped) { constexpr int kUnusedPayloadType1 = 127; - cricket::VideoRecvParameters recv_parameters; + cricket::VideoReceiverParameters recv_parameters; recv_parameters.codecs.push_back(GetEngineCodec("VP8")); recv_parameters.codecs.push_back(GetEngineCodec("flexfec-03")); cricket::VideoCodec duplicate = GetEngineCodec("flexfec-03"); duplicate.id = kUnusedPayloadType1; recv_parameters.codecs.push_back(duplicate); - ASSERT_TRUE(channel_->SetRecvParameters(recv_parameters)); + ASSERT_TRUE(receive_channel_->SetRecvParameters(recv_parameters)); AddRecvStream( CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc)); @@ -4558,9 +4573,9 @@ TEST_F(WebRtcVideoChannelFlexfecSendRecvTest, SetDefaultSendCodecsWithSsrc) { } TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithoutFec) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); FakeVideoSendStream* stream = AddSendStream(); webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); @@ -4570,9 +4585,9 @@ TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithoutFec) { } TEST_F(WebRtcVideoChannelFlexfecSendRecvTest, SetSendCodecsWithoutFec) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); FakeVideoSendStream* stream = AddSendStream(); webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); @@ -4584,10 +4599,10 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetRecvCodecsWithFec) { AddRecvStream( CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc)); - cricket::VideoRecvParameters recv_parameters; + cricket::VideoReceiverParameters recv_parameters; recv_parameters.codecs.push_back(GetEngineCodec("VP8")); recv_parameters.codecs.push_back(GetEngineCodec("flexfec-03")); - ASSERT_TRUE(channel_->SetRecvParameters(recv_parameters)); + ASSERT_TRUE(receive_channel_->SetRecvParameters(recv_parameters)); const std::vector& flexfec_streams = fake_call_->GetFlexfecReceiveStreams(); @@ -4618,10 +4633,10 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetRecvCodecsWithFec) { // 
TODO(brandtr): Remove when FlexFEC is enabled by default. TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetSendCodecsWithoutSsrcWithFecDoesNotEnableFec) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("flexfec-03")); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); FakeVideoSendStream* stream = AddSendStream(); webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); @@ -4633,10 +4648,10 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetSendCodecsWithSsrcWithFecDoesNotEnableFec) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("flexfec-03")); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); FakeVideoSendStream* stream = AddSendStream( CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc)); @@ -4652,11 +4667,11 @@ TEST_F(WebRtcVideoChannelTest, const int kUnusedPayloadType = 127; EXPECT_FALSE(FindCodecById(engine_.send_codecs(), kUnusedPayloadType)); - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; cricket::VideoCodec rtx_codec = cricket::CreateVideoCodec(kUnusedPayloadType, "rtx"); parameters.codecs.push_back(rtx_codec); - EXPECT_FALSE(channel_->SetSendParameters(parameters)) + EXPECT_FALSE(send_channel_->SetSendParameters(parameters)) << "RTX codec without associated payload type should be rejected."; } @@ -4669,18 +4684,18 @@ TEST_F(WebRtcVideoChannelTest, { cricket::VideoCodec rtx_codec = cricket::CreateVideoRtxCodec( kUnusedPayloadType1, GetEngineCodec("VP8").id); - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(rtx_codec); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); } { cricket::VideoCodec rtx_codec = cricket::CreateVideoRtxCodec(kUnusedPayloadType1, kUnusedPayloadType2); - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(rtx_codec); - EXPECT_FALSE(channel_->SetSendParameters(parameters)) + EXPECT_FALSE(send_channel_->SetSendParameters(parameters)) << "RTX without matching video codec should be rejected."; } } @@ -4698,13 +4713,13 @@ TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithChangedRtxPayloadType) { AddSendStream(params); // Original payload type for RTX. - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); cricket::VideoCodec rtx_codec = cricket::CreateVideoCodec(kUnusedPayloadType1, "rtx"); rtx_codec.SetParam("apt", GetEngineCodec("VP8").id); parameters.codecs.push_back(rtx_codec); - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); ASSERT_EQ(1U, fake_call_->GetVideoSendStreams().size()); const webrtc::VideoSendStream::Config& config_before = fake_call_->GetVideoSendStreams()[0]->GetConfig(); @@ -4714,7 +4729,7 @@ TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithChangedRtxPayloadType) { // Change payload type for RTX. 
parameters.codecs[1].id = kUnusedPayloadType2; - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); ASSERT_EQ(1U, fake_call_->GetVideoSendStreams().size()); const webrtc::VideoSendStream::Config& config_after = fake_call_->GetVideoSendStreams()[0]->GetConfig(); @@ -4724,10 +4739,10 @@ TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithChangedRtxPayloadType) { } TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithoutFecDisablesFec) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("ulpfec")); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); FakeVideoSendStream* stream = AddSendStream(); webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); @@ -4735,7 +4750,7 @@ TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithoutFecDisablesFec) { EXPECT_EQ(GetEngineCodec("ulpfec").id, config.rtp.ulpfec.ulpfec_payload_type); parameters.codecs.pop_back(); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); stream = fake_call_->GetVideoSendStreams()[0]; ASSERT_TRUE(stream != nullptr); config = stream->GetConfig().Copy(); @@ -4745,10 +4760,10 @@ TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithoutFecDisablesFec) { TEST_F(WebRtcVideoChannelFlexfecSendRecvTest, SetSendCodecsWithoutFecDisablesFec) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("flexfec-03")); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); FakeVideoSendStream* stream = AddSendStream( CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc)); @@ -4760,7 +4775,7 @@ TEST_F(WebRtcVideoChannelFlexfecSendRecvTest, EXPECT_EQ(kSsrcs1[0], config.rtp.flexfec.protected_media_ssrcs[0]); parameters.codecs.pop_back(); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); stream = fake_call_->GetVideoSendStreams()[0]; ASSERT_TRUE(stream != nullptr); config = stream->GetConfig().Copy(); @@ -4769,17 +4784,18 @@ TEST_F(WebRtcVideoChannelFlexfecSendRecvTest, } TEST_F(WebRtcVideoChannelTest, SetSendCodecsChangesExistingStreams) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; cricket::VideoCodec codec = cricket::CreateVideoCodec(100, "VP8"); codec.SetParam(kCodecParamMaxQuantization, kDefaultQpMax); parameters.codecs.push_back(codec); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); - channel_->SetSend(true); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); + send_channel_->SetSend(true); FakeVideoSendStream* stream = AddSendStream(); webrtc::test::FrameForwarder frame_forwarder; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); std::vector streams = stream->GetVideoStreams(); EXPECT_EQ(kDefaultQpMax, streams[0].max_qp); @@ -4787,10 +4803,10 @@ TEST_F(WebRtcVideoChannelTest, SetSendCodecsChangesExistingStreams) { parameters.codecs.clear(); codec.SetParam(kCodecParamMaxQuantization, kDefaultQpMax + 1); parameters.codecs.push_back(codec); - 
ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); streams = fake_call_->GetVideoSendStreams()[0]->GetVideoStreams(); EXPECT_EQ(kDefaultQpMax + 1, streams[0].max_qp); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithBitrates) { @@ -4817,7 +4833,7 @@ TEST_F(WebRtcVideoChannelTest, SetSendCodecsCapsMinAndStartBitrate) { TEST_F(WebRtcVideoChannelTest, SetSendCodecsRejectsMaxLessThanMinBitrate) { send_parameters_.codecs[0].params[kCodecParamMinBitrate] = "300"; send_parameters_.codecs[0].params[kCodecParamMaxBitrate] = "200"; - EXPECT_FALSE(channel_->SetSendParameters(send_parameters_)); + EXPECT_FALSE(send_channel_->SetSendParameters(send_parameters_)); } // Test that when both the codec-specific bitrate params and max_bandwidth_bps @@ -4829,24 +4845,24 @@ TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithBitratesAndMaxSendBandwidth) { send_parameters_.max_bandwidth_bps = 400000; // We expect max_bandwidth_bps to take priority, if set. ExpectSetBitrateParameters(100000, 200000, 400000); - EXPECT_TRUE(channel_->SetSendParameters(send_parameters_)); + EXPECT_TRUE(send_channel_->SetSendParameters(send_parameters_)); // Since the codec isn't changing, start_bitrate_bps should be -1. ExpectSetBitrateParameters(100000, -1, 350000); // Decrease max_bandwidth_bps. send_parameters_.max_bandwidth_bps = 350000; - EXPECT_TRUE(channel_->SetSendParameters(send_parameters_)); + EXPECT_TRUE(send_channel_->SetSendParameters(send_parameters_)); // Now try again with the values flipped around. send_parameters_.codecs[0].params[kCodecParamMaxBitrate] = "400"; send_parameters_.max_bandwidth_bps = 300000; ExpectSetBitrateParameters(100000, 200000, 300000); - EXPECT_TRUE(channel_->SetSendParameters(send_parameters_)); + EXPECT_TRUE(send_channel_->SetSendParameters(send_parameters_)); // If we change the codec max, max_bandwidth_bps should still apply. send_parameters_.codecs[0].params[kCodecParamMaxBitrate] = "350"; ExpectSetBitrateParameters(100000, 200000, 300000); - EXPECT_TRUE(channel_->SetSendParameters(send_parameters_)); + EXPECT_TRUE(send_channel_->SetSendParameters(send_parameters_)); } TEST_F(WebRtcVideoChannelTest, SetMaxSendBandwidthShouldPreserveOtherBitrates) { @@ -4856,36 +4872,36 @@ TEST_F(WebRtcVideoChannelTest, SetMaxSendBandwidthShouldPreserveOtherBitrates) { // Setting max bitrate should keep previous min bitrate. // Setting max bitrate should not reset start bitrate. ExpectSetBitrateParameters(100000, -1, 300000); - EXPECT_TRUE(channel_->SetSendParameters(send_parameters_)); + EXPECT_TRUE(send_channel_->SetSendParameters(send_parameters_)); } TEST_F(WebRtcVideoChannelTest, SetMaxSendBandwidthShouldBeRemovable) { send_parameters_.max_bandwidth_bps = 300000; ExpectSetMaxBitrate(300000); - EXPECT_TRUE(channel_->SetSendParameters(send_parameters_)); + EXPECT_TRUE(send_channel_->SetSendParameters(send_parameters_)); // -1 means to disable max bitrate (set infinite). 
send_parameters_.max_bandwidth_bps = -1; ExpectSetMaxBitrate(-1); - EXPECT_TRUE(channel_->SetSendParameters(send_parameters_)); + EXPECT_TRUE(send_channel_->SetSendParameters(send_parameters_)); } TEST_F(WebRtcVideoChannelTest, SetMaxSendBandwidthAndAddSendStream) { send_parameters_.max_bandwidth_bps = 99999; FakeVideoSendStream* stream = AddSendStream(); ExpectSetMaxBitrate(send_parameters_.max_bandwidth_bps); - ASSERT_TRUE(channel_->SetSendParameters(send_parameters_)); + ASSERT_TRUE(send_channel_->SetSendParameters(send_parameters_)); ASSERT_EQ(1u, stream->GetVideoStreams().size()); EXPECT_EQ(send_parameters_.max_bandwidth_bps, stream->GetVideoStreams()[0].max_bitrate_bps); send_parameters_.max_bandwidth_bps = 77777; ExpectSetMaxBitrate(send_parameters_.max_bandwidth_bps); - ASSERT_TRUE(channel_->SetSendParameters(send_parameters_)); + ASSERT_TRUE(send_channel_->SetSendParameters(send_parameters_)); EXPECT_EQ(send_parameters_.max_bandwidth_bps, stream->GetVideoStreams()[0].max_bitrate_bps); } -// Tests that when the codec specific max bitrate and VideoSendParameters +// Tests that when the codec specific max bitrate and VideoSenderParameters // max_bandwidth_bps are used, that it sets the VideoStream's max bitrate // appropriately. TEST_F(WebRtcVideoChannelTest, @@ -4896,7 +4912,7 @@ TEST_F(WebRtcVideoChannelTest, send_parameters_.max_bandwidth_bps = -1; AddSendStream(); ExpectSetMaxBitrate(300000); - ASSERT_TRUE(channel_->SetSendParameters(send_parameters_)); + ASSERT_TRUE(send_channel_->SetSendParameters(send_parameters_)); std::vector video_send_streams = GetFakeSendStreams(); ASSERT_EQ(1u, video_send_streams.size()); @@ -4906,10 +4922,10 @@ TEST_F(WebRtcVideoChannelTest, EXPECT_EQ(300000, video_send_streams[0]->GetVideoStreams()[0].max_bitrate_bps); - // The VideoSendParameters max bitrate overrides the codec's. + // The VideoSenderParameters max bitrate overrides the codec's. 
send_parameters_.max_bandwidth_bps = 500000; ExpectSetMaxBitrate(send_parameters_.max_bandwidth_bps); - ASSERT_TRUE(channel_->SetSendParameters(send_parameters_)); + ASSERT_TRUE(send_channel_->SetSendParameters(send_parameters_)); ASSERT_EQ(1u, video_send_stream->GetVideoStreams().size()); EXPECT_EQ(500000, video_send_stream->GetVideoStreams()[0].max_bitrate_bps); } @@ -4925,7 +4941,7 @@ TEST_F(WebRtcVideoChannelTest, send_parameters_.max_bandwidth_bps = -1; AddSendStream(); ExpectSetMaxBitrate(300000); - ASSERT_TRUE(channel_->SetSendParameters(send_parameters_)); + ASSERT_TRUE(send_channel_->SetSendParameters(send_parameters_)); std::vector video_send_streams = GetFakeSendStreams(); ASSERT_EQ(1u, video_send_streams.size()); @@ -4950,7 +4966,7 @@ TEST_F(WebRtcVideoChannelTest, send_parameters_.max_bandwidth_bps = 99999; FakeVideoSendStream* stream = AddSendStream(); ExpectSetMaxBitrate(send_parameters_.max_bandwidth_bps); - ASSERT_TRUE(channel_->SetSendParameters(send_parameters_)); + ASSERT_TRUE(send_channel_->SetSendParameters(send_parameters_)); ASSERT_EQ(1u, stream->GetVideoStreams().size()); EXPECT_EQ(send_parameters_.max_bandwidth_bps, stream->GetVideoStreams()[0].max_bitrate_bps); @@ -4972,42 +4988,44 @@ TEST_F(WebRtcVideoChannelTest, } TEST_F(WebRtcVideoChannelTest, SetMaxSendBitrateCanIncreaseSenderBitrate) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); - channel_->SetSend(true); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); + send_channel_->SetSend(true); FakeVideoSendStream* stream = AddSendStream(); webrtc::test::FrameForwarder frame_forwarder; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); std::vector streams = stream->GetVideoStreams(); int initial_max_bitrate_bps = streams[0].max_bitrate_bps; EXPECT_GT(initial_max_bitrate_bps, 0); parameters.max_bandwidth_bps = initial_max_bitrate_bps * 2; - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); // Insert a frame to update the encoder config. frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); streams = stream->GetVideoStreams(); EXPECT_EQ(initial_max_bitrate_bps * 2, streams[0].max_bitrate_bps); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } TEST_F(WebRtcVideoChannelTest, SetMaxSendBitrateCanIncreaseSimulcastSenderBitrate) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); - channel_->SetSend(true); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); + send_channel_->SetSend(true); FakeVideoSendStream* stream = AddSendStream( cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs3))); // Send a frame to make sure this scales up to >1 stream (simulcast). 
webrtc::test::FrameForwarder frame_forwarder; - EXPECT_TRUE(channel_->SetVideoSend(kSsrcs3[0], nullptr, &frame_forwarder)); + EXPECT_TRUE( + send_channel_->SetVideoSend(kSsrcs3[0], nullptr, &frame_forwarder)); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); std::vector<webrtc::VideoStream> streams = stream->GetVideoStreams(); @@ -5017,26 +5035,26 @@ TEST_F(WebRtcVideoChannelTest, EXPECT_GT(initial_max_bitrate_bps, 0); parameters.max_bandwidth_bps = initial_max_bitrate_bps * 2; - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); // Insert a frame to update the encoder config. frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); streams = stream->GetVideoStreams(); int increased_max_bitrate_bps = GetTotalMaxBitrate(streams).bps(); EXPECT_EQ(initial_max_bitrate_bps * 2, increased_max_bitrate_bps); - EXPECT_TRUE(channel_->SetVideoSend(kSsrcs3[0], nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(kSsrcs3[0], nullptr, nullptr)); } TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithMaxQuantization) { static const char* kMaxQuantization = "21"; - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs[0].params[kCodecParamMaxQuantization] = kMaxQuantization; - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); EXPECT_EQ(atoi(kMaxQuantization), AddSendStream()->GetVideoStreams().back().max_qp); - absl::optional codec = channel_->GetSendCodec(); + absl::optional codec = send_channel_->GetSendCodec(); ASSERT_TRUE(codec); EXPECT_EQ(kMaxQuantization, codec->params[kCodecParamMaxQuantization]); } @@ -5044,21 +5062,21 @@ TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithMaxQuantization) { TEST_F(WebRtcVideoChannelTest, SetSendCodecsRejectBadPayloadTypes) { // TODO(pbos): Should we only allow the dynamic range? static const int kIncorrectPayloads[] = {-2, -1, 128, 129}; - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); for (size_t i = 0; i < arraysize(kIncorrectPayloads); ++i) { parameters.codecs[0].id = kIncorrectPayloads[i]; - EXPECT_FALSE(channel_->SetSendParameters(parameters)) + EXPECT_FALSE(send_channel_->SetSendParameters(parameters)) << "Bad payload type '" << kIncorrectPayloads[i] << "' accepted."; } } TEST_F(WebRtcVideoChannelTest, SetSendCodecsAcceptAllValidPayloadTypes) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); for (int payload_type = 96; payload_type <= 127; ++payload_type) { parameters.codecs[0].id = payload_type; - EXPECT_TRUE(channel_->SetSendParameters(parameters)) + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)) << "Payload type '" << payload_type << "' rejected."; } } @@ -5069,24 +5087,24 @@ TEST_F(WebRtcVideoChannelTest, SetSendCodecsAcceptAllValidPayloadTypes) { // result of one of the codecs being rejected.
TEST_F(WebRtcVideoChannelTest, SetSendCodecsIdenticalFirstCodecDoesntRecreateStream) { - cricket::VideoSendParameters parameters1; + cricket::VideoSenderParameters parameters1; parameters1.codecs.push_back(GetEngineCodec("VP8")); parameters1.codecs.push_back(GetEngineCodec("VP9")); - EXPECT_TRUE(channel_->SetSendParameters(parameters1)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters1)); AddSendStream(); EXPECT_EQ(1, fake_call_->GetNumCreatedSendStreams()); - cricket::VideoSendParameters parameters2; + cricket::VideoSenderParameters parameters2; parameters2.codecs.push_back(GetEngineCodec("VP8")); - EXPECT_TRUE(channel_->SetSendParameters(parameters2)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters2)); EXPECT_EQ(1, fake_call_->GetNumCreatedSendStreams()); } TEST_F(WebRtcVideoChannelTest, SetRecvCodecsWithOnlyVp8) { - cricket::VideoRecvParameters parameters; + cricket::VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); } // Test that we set our inbound RTX codecs properly. @@ -5096,27 +5114,27 @@ TEST_F(WebRtcVideoChannelTest, SetRecvCodecsWithRtx) { EXPECT_FALSE(FindCodecById(engine_.recv_codecs(), kUnusedPayloadType1)); EXPECT_FALSE(FindCodecById(engine_.recv_codecs(), kUnusedPayloadType2)); - cricket::VideoRecvParameters parameters; + cricket::VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); cricket::VideoCodec rtx_codec = cricket::CreateVideoCodec(kUnusedPayloadType1, "rtx"); parameters.codecs.push_back(rtx_codec); - EXPECT_FALSE(channel_->SetRecvParameters(parameters)) + EXPECT_FALSE(receive_channel_->SetRecvParameters(parameters)) << "RTX codec without associated payload should be rejected."; parameters.codecs[1].SetParam("apt", kUnusedPayloadType2); - EXPECT_FALSE(channel_->SetRecvParameters(parameters)) + EXPECT_FALSE(receive_channel_->SetRecvParameters(parameters)) << "RTX codec with invalid associated payload type should be rejected."; parameters.codecs[1].SetParam("apt", GetEngineCodec("VP8").id); - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); cricket::VideoCodec rtx_codec2 = cricket::CreateVideoCodec(kUnusedPayloadType2, "rtx"); rtx_codec2.SetParam("apt", rtx_codec.id); parameters.codecs.push_back(rtx_codec2); - EXPECT_FALSE(channel_->SetRecvParameters(parameters)) + EXPECT_FALSE(receive_channel_->SetRecvParameters(parameters)) << "RTX codec with another RTX as associated payload type should be " "rejected."; } @@ -5125,9 +5143,9 @@ TEST_F(WebRtcVideoChannelTest, SetRecvCodecsWithPacketization) { cricket::VideoCodec vp8_codec = GetEngineCodec("VP8"); vp8_codec.packetization = kPacketizationParamRaw; - cricket::VideoRecvParameters parameters; + cricket::VideoReceiverParameters parameters; parameters.codecs = {vp8_codec, GetEngineCodec("VP9")}; - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); const cricket::StreamParams params = cricket::StreamParams::CreateLegacy(kSsrcs1[0]); @@ -5141,10 +5159,10 @@ TEST_F(WebRtcVideoChannelTest, SetRecvCodecsWithPacketization) { } TEST_F(WebRtcVideoChannelTest, SetRecvCodecsWithPacketizationRecreatesStream) { - cricket::VideoRecvParameters parameters; + cricket::VideoReceiverParameters parameters; parameters.codecs = {GetEngineCodec("VP8"), GetEngineCodec("VP9")}; 
parameters.codecs.back().packetization = kPacketizationParamRaw; - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); const cricket::StreamParams params = cricket::StreamParams::CreateLegacy(kSsrcs1[0]); @@ -5153,7 +5171,7 @@ TEST_F(WebRtcVideoChannelTest, SetRecvCodecsWithPacketizationRecreatesStream) { EXPECT_EQ(fake_call_->GetNumCreatedReceiveStreams(), 1); parameters.codecs.back().packetization.reset(); - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); EXPECT_EQ(fake_call_->GetNumCreatedReceiveStreams(), 2); } @@ -5161,13 +5179,13 @@ TEST_F(WebRtcVideoChannelTest, DuplicateUlpfecCodecIsDropped) { constexpr int kFirstUlpfecPayloadType = 126; constexpr int kSecondUlpfecPayloadType = 127; - cricket::VideoRecvParameters parameters; + cricket::VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(cricket::CreateVideoCodec( kFirstUlpfecPayloadType, cricket::kUlpfecCodecName)); parameters.codecs.push_back(cricket::CreateVideoCodec( kSecondUlpfecPayloadType, cricket::kUlpfecCodecName)); - ASSERT_TRUE(channel_->SetRecvParameters(parameters)); + ASSERT_TRUE(receive_channel_->SetRecvParameters(parameters)); FakeVideoReceiveStream* recv_stream = AddRecvStream(); EXPECT_EQ(kFirstUlpfecPayloadType, @@ -5178,13 +5196,13 @@ TEST_F(WebRtcVideoChannelTest, DuplicateRedCodecIsDropped) { constexpr int kFirstRedPayloadType = 126; constexpr int kSecondRedPayloadType = 127; - cricket::VideoRecvParameters parameters; + cricket::VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back( cricket::CreateVideoCodec(kFirstRedPayloadType, cricket::kRedCodecName)); parameters.codecs.push_back( cricket::CreateVideoCodec(kSecondRedPayloadType, cricket::kRedCodecName)); - ASSERT_TRUE(channel_->SetRecvParameters(parameters)); + ASSERT_TRUE(receive_channel_->SetRecvParameters(parameters)); FakeVideoReceiveStream* recv_stream = AddRecvStream(); EXPECT_EQ(kFirstRedPayloadType, @@ -5204,13 +5222,13 @@ TEST_F(WebRtcVideoChannelTest, SetRecvCodecsWithChangedRtxPayloadType) { AddRecvStream(params); // Original payload type for RTX. - cricket::VideoRecvParameters parameters; + cricket::VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); cricket::VideoCodec rtx_codec = cricket::CreateVideoCodec(kUnusedPayloadType1, "rtx"); rtx_codec.SetParam("apt", GetEngineCodec("VP8").id); parameters.codecs.push_back(rtx_codec); - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); ASSERT_EQ(1U, fake_call_->GetVideoReceiveStreams().size()); const webrtc::VideoReceiveStreamInterface::Config& config_before = fake_call_->GetVideoReceiveStreams()[0]->GetConfig(); @@ -5223,7 +5241,7 @@ TEST_F(WebRtcVideoChannelTest, SetRecvCodecsWithChangedRtxPayloadType) { // Change payload type for RTX. parameters.codecs[1].id = kUnusedPayloadType2; - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); ASSERT_EQ(1U, fake_call_->GetVideoReceiveStreams().size()); const webrtc::VideoReceiveStreamInterface::Config& config_after = fake_call_->GetVideoReceiveStreams()[0]->GetConfig(); @@ -5248,13 +5266,13 @@ TEST_F(WebRtcVideoChannelTest, SetRecvCodecsRtxWithRtxTime) { AddRecvStream(params); // Payload type for RTX. 
- cricket::VideoRecvParameters parameters; + cricket::VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); cricket::VideoCodec rtx_codec = cricket::CreateVideoCodec(kUnusedPayloadType1, "rtx"); rtx_codec.SetParam("apt", GetEngineCodec("VP8").id); parameters.codecs.push_back(rtx_codec); - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); ASSERT_EQ(1U, fake_call_->GetVideoReceiveStreams().size()); const webrtc::VideoReceiveStreamInterface::Config& config = fake_call_->GetVideoReceiveStreams()[0]->GetConfig(); @@ -5267,7 +5285,7 @@ TEST_F(WebRtcVideoChannelTest, SetRecvCodecsRtxWithRtxTime) { // Set rtx-time. parameters.codecs[1].SetParam(kCodecParamRtxTime, kRtxTime); - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); EXPECT_EQ(fake_call_->GetVideoReceiveStreams()[0] ->GetConfig() .rtp.nack.rtp_history_ms, @@ -5275,7 +5293,7 @@ TEST_F(WebRtcVideoChannelTest, SetRecvCodecsRtxWithRtxTime) { // Negative values are ignored so the default value applies. parameters.codecs[1].SetParam(kCodecParamRtxTime, -1); - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); EXPECT_NE(fake_call_->GetVideoReceiveStreams()[0] ->GetConfig() .rtp.nack.rtp_history_ms, @@ -5287,7 +5305,7 @@ TEST_F(WebRtcVideoChannelTest, SetRecvCodecsRtxWithRtxTime) { // 0 is ignored so the default applies. parameters.codecs[1].SetParam(kCodecParamRtxTime, 0); - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); EXPECT_NE(fake_call_->GetVideoReceiveStreams()[0] ->GetConfig() .rtp.nack.rtp_history_ms, @@ -5299,7 +5317,7 @@ TEST_F(WebRtcVideoChannelTest, SetRecvCodecsRtxWithRtxTime) { // Values larger than the default are clamped to the default. 
parameters.codecs[1].SetParam(kCodecParamRtxTime, default_history_ms + 100); - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); EXPECT_EQ(fake_call_->GetVideoReceiveStreams()[0] ->GetConfig() .rtp.nack.rtp_history_ms, @@ -5307,16 +5325,16 @@ TEST_F(WebRtcVideoChannelTest, SetRecvCodecsRtxWithRtxTime) { } TEST_F(WebRtcVideoChannelTest, SetRecvCodecsDifferentPayloadType) { - cricket::VideoRecvParameters parameters; + cricket::VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs[0].id = 99; - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); } TEST_F(WebRtcVideoChannelTest, SetRecvCodecsAcceptDefaultCodecs) { - cricket::VideoRecvParameters parameters; + cricket::VideoReceiverParameters parameters; parameters.codecs = engine_.recv_codecs(); - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); FakeVideoReceiveStream* stream = AddRecvStream(); const webrtc::VideoReceiveStreamInterface::Config& config = @@ -5327,34 +5345,34 @@ TEST_F(WebRtcVideoChannelTest, SetRecvCodecsAcceptDefaultCodecs) { } TEST_F(WebRtcVideoChannelTest, SetRecvCodecsRejectUnsupportedCodec) { - cricket::VideoRecvParameters parameters; + cricket::VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(cricket::CreateVideoCodec(101, "WTF3")); - EXPECT_FALSE(channel_->SetRecvParameters(parameters)); + EXPECT_FALSE(receive_channel_->SetRecvParameters(parameters)); } TEST_F(WebRtcVideoChannelTest, SetRecvCodecsAcceptsMultipleVideoCodecs) { - cricket::VideoRecvParameters parameters; + cricket::VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("VP9")); - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); } TEST_F(WebRtcVideoChannelTest, SetRecvCodecsWithoutFecDisablesFec) { - cricket::VideoSendParameters send_parameters; + cricket::VideoSenderParameters send_parameters; send_parameters.codecs.push_back(GetEngineCodec("VP8")); send_parameters.codecs.push_back(GetEngineCodec("red")); send_parameters.codecs.push_back(GetEngineCodec("ulpfec")); - ASSERT_TRUE(channel_->SetSendParameters(send_parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(send_parameters)); FakeVideoReceiveStream* stream = AddRecvStream(); EXPECT_EQ(GetEngineCodec("ulpfec").id, stream->GetConfig().rtp.ulpfec_payload_type); - cricket::VideoRecvParameters recv_parameters; + cricket::VideoReceiverParameters recv_parameters; recv_parameters.codecs.push_back(GetEngineCodec("VP8")); - ASSERT_TRUE(channel_->SetRecvParameters(recv_parameters)); + ASSERT_TRUE(receive_channel_->SetRecvParameters(recv_parameters)); stream = fake_call_->GetVideoReceiveStreams()[0]; ASSERT_TRUE(stream != nullptr); EXPECT_EQ(-1, stream->GetConfig().rtp.ulpfec_payload_type) @@ -5374,9 +5392,9 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetRecvParamsWithoutFecDisablesFec) { ASSERT_EQ(1U, stream->GetConfig().protected_media_ssrcs.size()); EXPECT_EQ(kSsrcs1[0], stream->GetConfig().protected_media_ssrcs[0]); - cricket::VideoRecvParameters recv_parameters; + cricket::VideoReceiverParameters recv_parameters; recv_parameters.codecs.push_back(GetEngineCodec("VP8")); - ASSERT_TRUE(channel_->SetRecvParameters(recv_parameters)); + 
ASSERT_TRUE(receive_channel_->SetRecvParameters(recv_parameters)); EXPECT_TRUE(streams.empty()) << "SetSendCodec without FlexFEC should disable current FlexFEC."; } @@ -5386,22 +5404,22 @@ TEST_F(WebRtcVideoChannelTest, SetSendParamsWithFecEnablesFec) { EXPECT_EQ(GetEngineCodec("ulpfec").id, stream->GetConfig().rtp.ulpfec_payload_type); - cricket::VideoRecvParameters recv_parameters; + cricket::VideoReceiverParameters recv_parameters; recv_parameters.codecs.push_back(GetEngineCodec("VP8")); recv_parameters.codecs.push_back(GetEngineCodec("red")); recv_parameters.codecs.push_back(GetEngineCodec("ulpfec")); - ASSERT_TRUE(channel_->SetRecvParameters(recv_parameters)); + ASSERT_TRUE(receive_channel_->SetRecvParameters(recv_parameters)); stream = fake_call_->GetVideoReceiveStreams()[0]; ASSERT_TRUE(stream != nullptr); EXPECT_EQ(GetEngineCodec("ulpfec").id, stream->GetConfig().rtp.ulpfec_payload_type) << "ULPFEC should be enabled on the receive stream."; - cricket::VideoSendParameters send_parameters; + cricket::VideoSenderParameters send_parameters; send_parameters.codecs.push_back(GetEngineCodec("VP8")); send_parameters.codecs.push_back(GetEngineCodec("red")); send_parameters.codecs.push_back(GetEngineCodec("ulpfec")); - ASSERT_TRUE(channel_->SetSendParameters(send_parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(send_parameters)); stream = fake_call_->GetVideoReceiveStreams()[0]; EXPECT_EQ(GetEngineCodec("ulpfec").id, stream->GetConfig().rtp.ulpfec_payload_type) @@ -5415,10 +5433,10 @@ TEST_F(WebRtcVideoChannelFlexfecSendRecvTest, const std::vector<FakeFlexfecReceiveStream*>& streams = fake_call_->GetFlexfecReceiveStreams(); - cricket::VideoRecvParameters recv_parameters; + cricket::VideoReceiverParameters recv_parameters; recv_parameters.codecs.push_back(GetEngineCodec("VP8")); recv_parameters.codecs.push_back(GetEngineCodec("flexfec-03")); - ASSERT_TRUE(channel_->SetRecvParameters(recv_parameters)); + ASSERT_TRUE(receive_channel_->SetRecvParameters(recv_parameters)); ASSERT_EQ(1U, streams.size()); const FakeFlexfecReceiveStream* stream_with_recv_params = streams.front(); EXPECT_EQ(GetEngineCodec("flexfec-03").id, @@ -5429,10 +5447,10 @@ TEST_F(WebRtcVideoChannelFlexfecSendRecvTest, EXPECT_EQ(kSsrcs1[0], stream_with_recv_params->GetConfig().protected_media_ssrcs[0]); - cricket::VideoSendParameters send_parameters; + cricket::VideoSenderParameters send_parameters; send_parameters.codecs.push_back(GetEngineCodec("VP8")); send_parameters.codecs.push_back(GetEngineCodec("flexfec-03")); - ASSERT_TRUE(channel_->SetSendParameters(send_parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(send_parameters)); ASSERT_EQ(1U, streams.size()); const FakeFlexfecReceiveStream* stream_with_send_params = streams.front(); EXPECT_EQ(GetEngineCodec("flexfec-03").id, @@ -5445,55 +5463,55 @@ TEST_F(WebRtcVideoChannelFlexfecSendRecvTest, } TEST_F(WebRtcVideoChannelTest, SetRecvCodecsRejectDuplicateFecPayloads) { - cricket::VideoRecvParameters parameters; + cricket::VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("red")); parameters.codecs[1].id = parameters.codecs[0].id; - EXPECT_FALSE(channel_->SetRecvParameters(parameters)); + EXPECT_FALSE(receive_channel_->SetRecvParameters(parameters)); } TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetRecvCodecsRejectDuplicateFecPayloads) { - cricket::VideoRecvParameters parameters; + cricket::VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8"))
parameters.codecs.push_back(GetEngineCodec("flexfec-03")); parameters.codecs[1].id = parameters.codecs[0].id; - EXPECT_FALSE(channel_->SetRecvParameters(parameters)); + EXPECT_FALSE(receive_channel_->SetRecvParameters(parameters)); } TEST_F(WebRtcVideoChannelTest, SetRecvCodecsRejectDuplicateCodecPayloads) { - cricket::VideoRecvParameters parameters; + cricket::VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("VP9")); parameters.codecs[1].id = parameters.codecs[0].id; - EXPECT_FALSE(channel_->SetRecvParameters(parameters)); + EXPECT_FALSE(receive_channel_->SetRecvParameters(parameters)); } TEST_F(WebRtcVideoChannelTest, SetRecvCodecsAcceptSameCodecOnMultiplePayloadTypes) { - cricket::VideoRecvParameters parameters; + cricket::VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs[1].id += 1; - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); } // Test that setting the same codecs but with a different order // doesn't result in the stream being recreated. TEST_F(WebRtcVideoChannelTest, SetRecvCodecsDifferentOrderDoesntRecreateStream) { - cricket::VideoRecvParameters parameters1; + cricket::VideoReceiverParameters parameters1; parameters1.codecs.push_back(GetEngineCodec("VP8")); parameters1.codecs.push_back(GetEngineCodec("red")); - EXPECT_TRUE(channel_->SetRecvParameters(parameters1)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters1)); AddRecvStream(cricket::StreamParams::CreateLegacy(123)); EXPECT_EQ(1, fake_call_->GetNumCreatedReceiveStreams()); - cricket::VideoRecvParameters parameters2; + cricket::VideoReceiverParameters parameters2; parameters2.codecs.push_back(GetEngineCodec("red")); parameters2.codecs.push_back(GetEngineCodec("VP8")); - EXPECT_TRUE(channel_->SetRecvParameters(parameters2)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters2)); EXPECT_EQ(1, fake_call_->GetNumCreatedReceiveStreams()); } @@ -5510,19 +5528,19 @@ TEST_F(WebRtcVideoChannelTest, SetSend) { EXPECT_FALSE(stream->IsSending()); // false->true - EXPECT_TRUE(channel_->SetSend(true)); + EXPECT_TRUE(send_channel_->SetSend(true)); EXPECT_TRUE(stream->IsSending()); // true->true - EXPECT_TRUE(channel_->SetSend(true)); + EXPECT_TRUE(send_channel_->SetSend(true)); EXPECT_TRUE(stream->IsSending()); // true->false - EXPECT_TRUE(channel_->SetSend(false)); + EXPECT_TRUE(send_channel_->SetSend(false)); EXPECT_FALSE(stream->IsSending()); // false->false - EXPECT_TRUE(channel_->SetSend(false)); + EXPECT_TRUE(send_channel_->SetSend(false)); EXPECT_FALSE(stream->IsSending()); - EXPECT_TRUE(channel_->SetSend(true)); + EXPECT_TRUE(send_channel_->SetSend(true)); FakeVideoSendStream* new_stream = AddSendStream(); EXPECT_TRUE(new_stream->IsSending()) << "Send stream created after SetSend(true) not sending initially."; @@ -5533,28 +5551,25 @@ TEST_F(WebRtcVideoChannelTest, TestSetDscpOptions) { std::unique_ptr network_interface( new cricket::FakeNetworkInterface); MediaConfig config; - std::unique_ptr channel; - cricket::VideoMediaSendChannelInterface* send_channel; + std::unique_ptr send_channel; webrtc::RtpParameters parameters; - channel.reset(engine_.CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, call_.get(), config, VideoOptions(), - webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get())); - send_channel = 
channel->AsVideoSendChannel(); + send_channel = engine_.CreateSendChannel( + call_.get(), config, VideoOptions(), webrtc::CryptoOptions(), + video_bitrate_allocator_factory_.get()); - channel->SetInterface(network_interface.get()); + send_channel->SetInterface(network_interface.get()); // Default value when DSCP is disabled should be DSCP_DEFAULT. EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface->dscp()); - channel->SetInterface(nullptr); + send_channel->SetInterface(nullptr); // Default value when DSCP is enabled is also DSCP_DEFAULT, until it is set // through rtp parameters. config.enable_dscp = true; - channel.reset(engine_.CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, call_.get(), config, VideoOptions(), - webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get())); - send_channel = channel->AsVideoSendChannel(); - channel->SetInterface(network_interface.get()); + send_channel = engine_.CreateSendChannel( + call_.get(), config, VideoOptions(), webrtc::CryptoOptions(), + video_bitrate_allocator_factory_.get()); + send_channel->SetInterface(network_interface.get()); EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface->dscp()); // Create a send stream to configure @@ -5574,20 +5589,19 @@ TEST_F(WebRtcVideoChannelTest, TestSetDscpOptions) { // Packets should also self-identify their dscp in PacketOptions. const uint8_t kData[10] = {0}; - EXPECT_TRUE( - ChannelImplAsTransport(channel.get())->SendRtcp(kData, sizeof(kData))); + EXPECT_TRUE(ChannelImplAsTransport(send_channel.get())->SendRtcp(kData)); EXPECT_EQ(rtc::DSCP_CS1, network_interface->options().dscp); - channel->SetInterface(nullptr); + send_channel->SetInterface(nullptr); // Verify that setting the option to false resets the // DiffServCodePoint. config.enable_dscp = false; - channel.reset(engine_.CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, call_.get(), config, VideoOptions(), - webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get())); - channel->SetInterface(network_interface.get()); + send_channel = engine_.CreateSendChannel( + call_.get(), config, VideoOptions(), webrtc::CryptoOptions(), + video_bitrate_allocator_factory_.get()); + send_channel->SetInterface(network_interface.get()); EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface->dscp()); - channel->SetInterface(nullptr); + send_channel->SetInterface(nullptr); } // This test verifies that the RTCP reduced size mode is properly applied to @@ -5602,7 +5616,7 @@ TEST_F(WebRtcVideoChannelTest, TestSetSendRtcpReducedSize) { // Now enable reduced size mode. send_parameters_.rtcp.reduced_size = true; - EXPECT_TRUE(channel_->SetSendParameters(send_parameters_)); + EXPECT_TRUE(send_channel_->SetSendParameters(send_parameters_)); stream1 = fake_call_->GetVideoSendStreams()[0]; EXPECT_EQ(webrtc::RtcpMode::kReducedSize, stream1->GetConfig().rtp.rtcp_mode); rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); @@ -5624,7 +5638,7 @@ TEST_F(WebRtcVideoChannelTest, TestSetRecvRtcpReducedSize) { // TODO(deadbeef): Once "recv_parameters" becomes "receiver_parameters", // the reduced_size flag should come from that. 
send_parameters_.rtcp.reduced_size = true; - EXPECT_TRUE(channel_->SetSendParameters(send_parameters_)); + EXPECT_TRUE(send_channel_->SetSendParameters(send_parameters_)); stream1 = fake_call_->GetVideoReceiveStreams()[0]; EXPECT_EQ(webrtc::RtcpMode::kReducedSize, stream1->GetConfig().rtp.rtcp_mode); @@ -5653,16 +5667,16 @@ TEST_F(WebRtcVideoChannelTest, OnReadyToSendSignalsNetworkState) { } TEST_F(WebRtcVideoChannelTest, GetStatsReportsSentCodecName) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); AddSendStream(); cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); EXPECT_EQ("VP8", send_info.senders[0].codec_name); } @@ -5675,8 +5689,8 @@ TEST_F(WebRtcVideoChannelTest, GetStatsReportsEncoderImplementationName) { cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); EXPECT_EQ(stats.encoder_implementation_name, send_info.senders[0].encoder_implementation_name); @@ -5690,8 +5704,8 @@ TEST_F(WebRtcVideoChannelTest, GetStatsReportsPowerEfficientEncoder) { cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); EXPECT_TRUE(send_info.senders[0].power_efficient_encoder); } @@ -5705,8 +5719,8 @@ TEST_F(WebRtcVideoChannelTest, GetStatsReportsCpuOveruseMetrics) { cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); EXPECT_EQ(stats.avg_encode_time_ms, send_info.senders[0].avg_encode_ms); EXPECT_EQ(stats.encode_usage_percent, @@ -5721,8 +5735,8 @@ TEST_F(WebRtcVideoChannelTest, GetStatsReportsFramesEncoded) { cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); EXPECT_EQ(stats.frames_encoded, send_info.senders[0].frames_encoded); } @@ -5736,8 +5750,8 @@ TEST_F(WebRtcVideoChannelTest, GetStatsReportsKeyFramesEncoded) { cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); EXPECT_EQ(send_info.senders.size(), 2u); EXPECT_EQ(10u, send_info.senders[0].key_frames_encoded); @@ -5754,8 +5768,8 @@ TEST_F(WebRtcVideoChannelTest, 
GetStatsReportsPerLayerQpSum) { cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); EXPECT_EQ(send_info.senders.size(), 2u); EXPECT_EQ(stats.substreams[123].qp_sum, send_info.senders[0].qp_sum); @@ -5806,8 +5820,8 @@ TEST_F(WebRtcVideoChannelTest, GetAggregatedStatsReportWithoutSubStreams) { stream->SetStats(stats); cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); EXPECT_EQ(send_info.aggregated_senders.size(), 1u); auto& sender = send_info.aggregated_senders[0]; @@ -5919,8 +5933,8 @@ TEST_F(WebRtcVideoChannelTest, GetAggregatedStatsReportForSubStreams) { cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); EXPECT_EQ(send_info.aggregated_senders.size(), 1u); auto& sender = send_info.aggregated_senders[0]; @@ -6043,8 +6057,8 @@ TEST_F(WebRtcVideoChannelTest, GetPerLayerStatsReportForSubStreams) { cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); EXPECT_EQ(send_info.senders.size(), 2u); auto& sender = send_info.senders[0]; @@ -6149,8 +6163,8 @@ TEST_F(WebRtcVideoChannelTest, // GetStats() and ensure `active` matches `encodings` for each SSRC. cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); ASSERT_EQ(send_info.senders.size(), 2u); ASSERT_TRUE(send_info.senders[0].active.has_value()); @@ -6160,9 +6174,9 @@ TEST_F(WebRtcVideoChannelTest, } TEST_F(WebRtcVideoChannelTest, OutboundRtpIsActiveComesFromAnyEncodingInSvc) { - cricket::VideoSendParameters send_parameters; + cricket::VideoSenderParameters send_parameters; send_parameters.codecs.push_back(GetEngineCodec("VP9")); - ASSERT_TRUE(channel_->SetSendParameters(send_parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(send_parameters)); constexpr uint32_t kSsrc1 = 123u; constexpr uint32_t kSsrc2 = 456u; @@ -6195,8 +6209,8 @@ TEST_F(WebRtcVideoChannelTest, OutboundRtpIsActiveComesFromAnyEncodingInSvc) { // GetStats() and ensure `active` is true if ANY encoding is active. cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); ASSERT_EQ(send_info.senders.size(), 1u); // Middle layer is active. 
@@ -6209,8 +6223,8 @@ TEST_F(WebRtcVideoChannelTest, OutboundRtpIsActiveComesFromAnyEncodingInSvc) { parameters.encodings[1].active = false; parameters.encodings[2].active = false; send_channel_->SetRtpSendParameters(kSsrc1, parameters); - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); ASSERT_EQ(send_info.senders.size(), 1u); // No layer is active. @@ -6236,8 +6250,8 @@ TEST_F(WebRtcVideoChannelTest, MediaSubstreamMissingProducesEmpyStats) { cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); EXPECT_TRUE(send_info.senders.empty()); } @@ -6255,8 +6269,8 @@ TEST_F(WebRtcVideoChannelTest, GetStatsReportsUpperResolution) { cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); ASSERT_EQ(1u, send_info.aggregated_senders.size()); ASSERT_EQ(3u, send_info.senders.size()); @@ -6279,8 +6293,8 @@ TEST_F(WebRtcVideoChannelTest, GetStatsReportsCpuAdaptationStats) { cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); ASSERT_EQ(1U, send_info.senders.size()); EXPECT_EQ(WebRtcVideoChannel::ADAPTREASON_CPU, @@ -6299,8 +6313,8 @@ TEST_F(WebRtcVideoChannelTest, GetStatsReportsAdaptationAndBandwidthStats) { cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); ASSERT_EQ(1U, send_info.senders.size()); EXPECT_EQ(WebRtcVideoChannel::ADAPTREASON_CPU | @@ -6474,8 +6488,8 @@ TEST_F(WebRtcVideoChannelTest, cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); EXPECT_EQ(send_info.senders.size(), 2u); EXPECT_EQ(15u, send_info.senders[0].header_and_padding_bytes_sent); @@ -6500,8 +6514,8 @@ TEST_F(WebRtcVideoChannelTest, cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); ASSERT_EQ(1U, send_info.senders.size()); EXPECT_EQ(WebRtcVideoChannel::ADAPTREASON_BANDWIDTH, @@ -6523,8 +6537,8 @@ TEST_F(WebRtcVideoChannelTest, GetStatsTranslatesSendRtcpPacketTypesCorrectly) { cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - 
EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); EXPECT_EQ(2, send_info.senders[0].firs_received); EXPECT_EQ(3u, send_info.senders[0].nacks_received); @@ -6550,8 +6564,8 @@ TEST_F(WebRtcVideoChannelTest, cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); EXPECT_EQ( stats.rtcp_packet_type_counts.fir_packets, @@ -6597,8 +6611,8 @@ TEST_F(WebRtcVideoChannelTest, GetStatsTranslatesDecodeStatsCorrectly) { cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); EXPECT_EQ(stats.decoder_implementation_name, receive_info.receivers[0].decoder_implementation_name); @@ -6651,8 +6665,8 @@ TEST_F(WebRtcVideoChannelTest, cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); EXPECT_EQ(stats.total_inter_frame_delay, receive_info.receivers[0].total_inter_frame_delay); @@ -6672,8 +6686,8 @@ TEST_F(WebRtcVideoChannelTest, GetStatsTranslatesReceivePacketStatsCorrectly) { cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); EXPECT_EQ(stats.rtp_stats.packet_counter.payload_bytes, rtc::checked_cast( @@ -6694,8 +6708,8 @@ TEST_F(WebRtcVideoChannelTest, TranslatesCallStatsCorrectly) { cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); ASSERT_EQ(2u, send_info.senders.size()); EXPECT_EQ(stats.rtt_ms, send_info.senders[0].rtt_ms); @@ -6725,13 +6739,13 @@ TEST_F(WebRtcVideoChannelTest, TranslatesSenderBitrateStatsCorrectly) { cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); ASSERT_EQ(2u, send_info.aggregated_senders.size()); ASSERT_EQ(4u, send_info.senders.size()); BandwidthEstimationInfo bwe_info; - channel_->FillBitrateInfo(&bwe_info); + send_channel_->FillBitrateInfo(&bwe_info); // Assuming stream and stream2 corresponds to senders[0] and [1] respectively // is OK as std::maps are sorted and AddSendStream() gives increasing SSRCs. 
EXPECT_EQ(stats.media_bitrate_bps, @@ -6749,7 +6763,7 @@ TEST_F(WebRtcVideoChannelTest, TranslatesSenderBitrateStatsCorrectly) { } TEST_F(WebRtcVideoChannelTest, DefaultReceiveStreamReconfiguresToUseRtx) { - EXPECT_TRUE(channel_->SetSendParameters(send_parameters_)); + EXPECT_TRUE(send_channel_->SetSendParameters(send_parameters_)); const std::vector<uint32_t> ssrcs = MAKE_VECTOR(kSsrcs1); const std::vector<uint32_t> rtx_ssrcs = MAKE_VECTOR(kRtxSsrcs1); @@ -6781,7 +6795,7 @@ TEST_F(WebRtcVideoChannelTest, DefaultReceiveStreamReconfiguresToUseRtx) { } TEST_F(WebRtcVideoChannelTest, RejectsAddingStreamsWithMissingSsrcsForRtx) { - EXPECT_TRUE(channel_->SetSendParameters(send_parameters_)); + EXPECT_TRUE(send_channel_->SetSendParameters(send_parameters_)); const std::vector<uint32_t> ssrcs = MAKE_VECTOR(kSsrcs1); const std::vector<uint32_t> rtx_ssrcs = MAKE_VECTOR(kRtxSsrcs1); @@ -6795,7 +6809,7 @@ TEST_F(WebRtcVideoChannelTest, RejectsAddingStreamsWithMissingSsrcsForRtx) { } TEST_F(WebRtcVideoChannelTest, RejectsAddingStreamsWithOverlappingRtxSsrcs) { - EXPECT_TRUE(channel_->SetSendParameters(send_parameters_)); + EXPECT_TRUE(send_channel_->SetSendParameters(send_parameters_)); const std::vector<uint32_t> ssrcs = MAKE_VECTOR(kSsrcs1); const std::vector<uint32_t> rtx_ssrcs = MAKE_VECTOR(kRtxSsrcs1); @@ -6823,7 +6837,7 @@ TEST_F(WebRtcVideoChannelTest, RejectsAddingStreamsWithOverlappingSimulcastSsrcs) { static const uint32_t kFirstStreamSsrcs[] = {1, 2, 3}; static const uint32_t kOverlappingStreamSsrcs[] = {4, 3, 5}; - EXPECT_TRUE(channel_->SetSendParameters(send_parameters_)); + EXPECT_TRUE(send_channel_->SetSendParameters(send_parameters_)); StreamParams sp = cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kFirstStreamSsrcs)); @@ -6846,7 +6860,7 @@ TEST_F(WebRtcVideoChannelTest, } TEST_F(WebRtcVideoChannelTest, ReportsSsrcGroupsInStats) { - EXPECT_TRUE(channel_->SetSendParameters(send_parameters_)); + EXPECT_TRUE(send_channel_->SetSendParameters(send_parameters_)); static const uint32_t kSenderSsrcs[] = {4, 7, 10}; static const uint32_t kSenderRtxSsrcs[] = {5, 8, 11}; @@ -6865,8 +6879,8 @@ TEST_F(WebRtcVideoChannelTest, ReportsSsrcGroupsInStats) { cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); ASSERT_EQ(1u, send_info.senders.size()); ASSERT_EQ(1u, receive_info.receivers.size()); @@ -6884,24 +6898,24 @@ TEST_F(WebRtcVideoChannelTest, MapsReceivedPayloadTypeToCodecName) { stream->SetStats(stats); cricket::VideoMediaSendInfo send_info; cricket::VideoMediaReceiveInfo receive_info; - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); EXPECT_STREQ("", receive_info.receivers[0].codec_name.c_str()); // Report VP8 if we're receiving it. stats.current_payload_type = GetEngineCodec("VP8").id; stream->SetStats(stats); - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); EXPECT_STREQ(kVp8CodecName, receive_info.receivers[0].codec_name.c_str()); // Report no codec name for unknown playload types.
stats.current_payload_type = 3; stream->SetStats(stats); - EXPECT_TRUE(channel_->GetSendStats(&send_info)); - EXPECT_TRUE(channel_->GetReceiveStats(&receive_info)); + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); EXPECT_STREQ("", receive_info.receivers[0].codec_name.c_str()); } @@ -7323,7 +7337,7 @@ void WebRtcVideoChannelTest::TestReceiveUnsignaledSsrcPacket( VideoCodec red_rtx_codec = cricket::CreateVideoRtxCodec( kRedRtxPayloadType, GetEngineCodec("red").id); recv_parameters_.codecs.push_back(red_rtx_codec); - EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(recv_parameters_)); ASSERT_EQ(0u, fake_call_->GetVideoReceiveStreams().size()); RtpPacketReceived packet; @@ -7459,7 +7473,7 @@ TEST_F(WebRtcVideoChannelTest, // Any different unsignalled SSRC received will replace the default. TEST_F(WebRtcVideoChannelTest, ReceiveDifferentUnsignaledSsrc) { // Allow receiving VP8, VP9, H264 (if enabled). - cricket::VideoRecvParameters parameters; + cricket::VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("VP9")); @@ -7468,11 +7482,11 @@ TEST_F(WebRtcVideoChannelTest, ReceiveDifferentUnsignaledSsrc) { parameters.codecs.push_back(H264codec); #endif - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); // No receive streams yet. ASSERT_EQ(0u, fake_call_->GetVideoReceiveStreams().size()); cricket::FakeVideoRenderer renderer; - channel_->SetDefaultSink(&renderer); + receive_channel_->SetDefaultSink(&renderer); // Receive VP8 packet on first SSRC. RtpPacketReceived rtp_packet; @@ -7540,9 +7554,9 @@ TEST_F(WebRtcVideoChannelTest, ReceiveDifferentUnsignaledSsrc) { // been the default receive stream before being properly signaled. TEST_F(WebRtcVideoChannelTest, NewUnsignaledStreamDoesNotDestroyPreviouslyUnsignaledStream) { - cricket::VideoRecvParameters parameters; + cricket::VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); - ASSERT_TRUE(channel_->SetRecvParameters(parameters)); + ASSERT_TRUE(receive_channel_->SetRecvParameters(parameters)); // No streams signaled and no packets received, so we should not have any // stream objects created yet. 
@@ -7581,8 +7595,9 @@ TEST_F(WebRtcVideoChannelTest, CanSetMaxBitrateForExistingStream) { AddSendStream(); webrtc::test::FrameForwarder frame_forwarder; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); - EXPECT_TRUE(channel_->SetSend(true)); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); + EXPECT_TRUE(send_channel_->SetSend(true)); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); int default_encoder_bitrate = GetMaxEncoderBitrate(); @@ -7601,7 +7616,7 @@ TEST_F(WebRtcVideoChannelTest, CanSetMaxBitrateForExistingStream) { SetAndExpectMaxBitrate(0, 800, 800); SetAndExpectMaxBitrate(0, 0, default_encoder_bitrate); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } TEST_F(WebRtcVideoChannelTest, CannotSetMaxBitrateForNonexistentStream) { @@ -7673,7 +7688,7 @@ TEST_F(WebRtcVideoChannelTest, send_parameters_.max_bandwidth_bps = 99999; FakeVideoSendStream* stream = AddSendStream(); ExpectSetMaxBitrate(send_parameters_.max_bandwidth_bps); - ASSERT_TRUE(channel_->SetSendParameters(send_parameters_)); + ASSERT_TRUE(send_channel_->SetSendParameters(send_parameters_)); ASSERT_EQ(1UL, stream->GetVideoStreams().size()); EXPECT_EQ(webrtc::kDefaultMinVideoBitrateBps, stream->GetVideoStreams()[0].min_bitrate_bps); @@ -7861,8 +7876,9 @@ TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersPrioritySimulcastStreams) { // reconfiguring, and allows us to test this behavior. webrtc::test::FrameForwarder frame_forwarder; VideoOptions options; - EXPECT_TRUE(channel_->SetVideoSend(primary_ssrc, &options, &frame_forwarder)); - channel_->SetSend(true); + EXPECT_TRUE( + send_channel_->SetVideoSend(primary_ssrc, &options, &frame_forwarder)); + send_channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame( 1920, 1080, webrtc::VideoRotation::kVideoRotation_0, rtc::kNumMicrosecsPerSec / 30)); @@ -7907,22 +7923,23 @@ TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersPrioritySimulcastStreams) { video_send_stream->GetVideoStreams()[1].bitrate_priority); EXPECT_EQ(absl::nullopt, video_send_stream->GetVideoStreams()[2].bitrate_priority); - EXPECT_TRUE(channel_->SetVideoSend(primary_ssrc, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(primary_ssrc, nullptr, nullptr)); } TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersScaleResolutionDownByVP8) { - VideoSendParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(cricket::CreateVideoCodec(kVp8CodecName)); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); FakeVideoSendStream* stream = SetUpSimulcast(true, /*with_rtx=*/false); webrtc::test::FrameForwarder frame_forwarder; FakeFrameSource frame_source(1280, 720, rtc::kNumMicrosecsPerSec / 30); VideoOptions options; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); - channel_->SetSend(true); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + send_channel_->SetSend(true); // Try layers in natural order (smallest to largest). 
{ @@ -8016,7 +8033,7 @@ TEST_F(WebRtcVideoChannelTest, EXPECT_EQ(180u, video_streams[2].height); } - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } TEST_F(WebRtcVideoChannelTest, @@ -8029,14 +8046,14 @@ TEST_F(WebRtcVideoChannelTest, field_trials_, "WebRTC-NormalizeSimulcastResolution/Enabled-3/"); // Set up WebRtcVideoChannel for 3-layer VP8 simulcast. - VideoSendParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(cricket::CreateVideoCodec(kVp8CodecName)); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); FakeVideoSendStream* stream = SetUpSimulcast(true, /*with_rtx=*/false); webrtc::test::FrameForwarder frame_forwarder; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, /*options=*/nullptr, - &frame_forwarder)); - channel_->SetSend(true); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, /*options=*/nullptr, + &frame_forwarder)); + send_channel_->SetSend(true); // Set `scale_resolution_down_by`'s. auto rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); @@ -8066,23 +8083,24 @@ TEST_F(WebRtcVideoChannelTest, EXPECT_EQ(video_streams[2].height, 300u); // Tear down. - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersScaleResolutionDownByH264) { encoder_factory_->AddSupportedVideoCodecType(kH264CodecName); - VideoSendParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(cricket::CreateVideoCodec(kH264CodecName)); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); FakeVideoSendStream* stream = SetUpSimulcast(true, /*with_rtx=*/false); webrtc::test::FrameForwarder frame_forwarder; FakeFrameSource frame_source(1280, 720, rtc::kNumMicrosecsPerSec / 30); VideoOptions options; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); - channel_->SetSend(true); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + send_channel_->SetSend(true); // Try layers in natural order (smallest to largest). { @@ -8175,7 +8193,7 @@ TEST_F(WebRtcVideoChannelTest, EXPECT_EQ(320u, video_streams[2].width); EXPECT_EQ(180u, video_streams[2].height); } - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } TEST_F(WebRtcVideoChannelTest, @@ -8189,14 +8207,14 @@ TEST_F(WebRtcVideoChannelTest, // Set up WebRtcVideoChannel for 3-layer H264 simulcast. encoder_factory_->AddSupportedVideoCodecType(kH264CodecName); - VideoSendParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(cricket::CreateVideoCodec(kH264CodecName)); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); FakeVideoSendStream* stream = SetUpSimulcast(true, /*with_rtx=*/false); webrtc::test::FrameForwarder frame_forwarder; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, /*options=*/nullptr, - &frame_forwarder)); - channel_->SetSend(true); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, /*options=*/nullptr, + &frame_forwarder)); + send_channel_->SetSend(true); // Set `scale_resolution_down_by`'s. 
auto rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); @@ -8226,7 +8244,7 @@ TEST_F(WebRtcVideoChannelTest, EXPECT_EQ(video_streams[2].height, 300u); // Tear down. - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersMaxFramerate) { @@ -8302,8 +8320,9 @@ TEST_F(WebRtcVideoChannelTest, NumTemporalLayersPropagatedToEncoder) { // Send a full size frame so all simulcast layers are used when reconfiguring. webrtc::test::FrameForwarder frame_forwarder; VideoOptions options; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); - channel_->SetSend(true); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + send_channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Get and set the rtp encoding parameters. @@ -8337,7 +8356,7 @@ TEST_F(WebRtcVideoChannelTest, NumTemporalLayersPropagatedToEncoder) { EXPECT_TRUE(send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); EXPECT_EQ(2, stream->num_encoder_reconfigurations()); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } TEST_F(WebRtcVideoChannelTest, @@ -8348,8 +8367,9 @@ TEST_F(WebRtcVideoChannelTest, // Send a full size frame so all simulcast layers are used when reconfiguring. webrtc::test::FrameForwarder frame_forwarder; VideoOptions options; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); - channel_->SetSend(true); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + send_channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Change rtp encoding parameters. @@ -8376,7 +8396,7 @@ TEST_F(WebRtcVideoChannelTest, stream->GetVideoStreams()[1].num_temporal_layers); EXPECT_EQ(1UL, stream->GetVideoStreams()[2].num_temporal_layers); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } TEST_F(WebRtcVideoChannelTest, @@ -8387,8 +8407,9 @@ TEST_F(WebRtcVideoChannelTest, // Send a full size frame so all simulcast layers are used when reconfiguring. webrtc::test::FrameForwarder frame_forwarder; VideoOptions options; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); - channel_->SetSend(true); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + send_channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Get and set the rtp encoding parameters. @@ -8418,7 +8439,7 @@ TEST_F(WebRtcVideoChannelTest, stream->GetVideoStreams()[1].max_framerate); EXPECT_EQ(20, stream->GetVideoStreams()[2].max_framerate); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } TEST_F(WebRtcVideoChannelTest, FallbackForUnsetOrUnsupportedScalabilityMode) { @@ -8434,8 +8455,9 @@ TEST_F(WebRtcVideoChannelTest, FallbackForUnsetOrUnsupportedScalabilityMode) { // Send a full size frame so all simulcast layers are used when reconfiguring. 
webrtc::test::FrameForwarder frame_forwarder; VideoOptions options; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); - channel_->SetSend(true); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + send_channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Set scalability mode. @@ -8487,7 +8509,7 @@ TEST_F(WebRtcVideoChannelTest, FallbackForUnsetOrUnsupportedScalabilityMode) { EXPECT_TRUE(send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); EXPECT_EQ(2, stream->num_encoder_reconfigurations()); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } TEST_F(WebRtcVideoChannelTest, @@ -8500,17 +8522,18 @@ TEST_F(WebRtcVideoChannelTest, encoder_factory_->AddSupportedVideoCodec(webrtc::SdpVideoFormat( "VP9", webrtc::SdpVideoFormat::Parameters(), {ScalabilityMode::kL3T3})); - cricket::VideoSendParameters send_parameters; + cricket::VideoSenderParameters send_parameters; send_parameters.codecs.push_back(GetEngineCodec("VP9")); - EXPECT_TRUE(channel_->SetSendParameters(send_parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(send_parameters)); FakeVideoSendStream* stream = SetUpSimulcast(true, /*with_rtx=*/false); // Send a full size frame so all simulcast layers are used when reconfiguring. webrtc::test::FrameForwarder frame_forwarder; VideoOptions options; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); - channel_->SetSend(true); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + send_channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Set scalability mode. @@ -8553,9 +8576,9 @@ TEST_F(WebRtcVideoChannelTest, kDefaultScalabilityModeStr))); // Change codec to VP8. - cricket::VideoSendParameters vp8_parameters; + cricket::VideoSenderParameters vp8_parameters; vp8_parameters.codecs.push_back(GetEngineCodec("VP8")); - EXPECT_TRUE(channel_->SetSendParameters(vp8_parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(vp8_parameters)); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // The stream should be recreated due to codec change. @@ -8582,7 +8605,7 @@ TEST_F(WebRtcVideoChannelTest, Field(&webrtc::RtpEncodingParameters::scalability_mode, kDefaultScalabilityModeStr))); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersMinAndMaxBitrate) { @@ -8642,8 +8665,9 @@ TEST_F(WebRtcVideoChannelTest, MinAndMaxSimulcastBitratePropagatedToEncoder) { // Send a full size frame so all simulcast layers are used when reconfiguring. webrtc::test::FrameForwarder frame_forwarder; VideoOptions options; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); - channel_->SetSend(true); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + send_channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Get and set the rtp encoding parameters. 
@@ -8692,7 +8716,7 @@ TEST_F(WebRtcVideoChannelTest, MinAndMaxSimulcastBitratePropagatedToEncoder) { EXPECT_TRUE(send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); EXPECT_EQ(2, stream->num_encoder_reconfigurations()); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } // Test to only specify the min or max bitrate value for a layer via @@ -8705,8 +8729,9 @@ TEST_F(WebRtcVideoChannelTest, MinOrMaxSimulcastBitratePropagatedToEncoder) { // Send a full size frame so all simulcast layers are used when reconfiguring. webrtc::test::FrameForwarder frame_forwarder; VideoOptions options; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); - channel_->SetSend(true); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + send_channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Get and set the rtp encoding parameters. @@ -8761,7 +8786,7 @@ TEST_F(WebRtcVideoChannelTest, MinOrMaxSimulcastBitratePropagatedToEncoder) { EXPECT_EQ(kDefault[2].max_bitrate_bps, stream->GetVideoStreams()[2].max_bitrate_bps); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } // Test that specifying the min (or max) bitrate value for a layer via @@ -8774,8 +8799,9 @@ TEST_F(WebRtcVideoChannelTest, SetMinAndMaxSimulcastBitrateAboveBelowDefault) { // Send a full size frame so all simulcast layers are used when reconfiguring. webrtc::test::FrameForwarder frame_forwarder; VideoOptions options; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); - channel_->SetSend(true); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + send_channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Get and set the rtp encoding parameters. @@ -8812,7 +8838,7 @@ TEST_F(WebRtcVideoChannelTest, SetMinAndMaxSimulcastBitrateAboveBelowDefault) { EXPECT_EQ(kDefault[2].max_bitrate_bps, stream->GetVideoStreams()[2].max_bitrate_bps); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } TEST_F(WebRtcVideoChannelTest, BandwidthAboveTotalMaxBitrateGivenToMaxLayer) { @@ -8822,8 +8848,9 @@ TEST_F(WebRtcVideoChannelTest, BandwidthAboveTotalMaxBitrateGivenToMaxLayer) { // Send a full size frame so all simulcast layers are used when reconfiguring. webrtc::test::FrameForwarder frame_forwarder; VideoOptions options; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); - channel_->SetSend(true); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + send_channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Set max bitrate for all but the highest layer. @@ -8838,7 +8865,7 @@ TEST_F(WebRtcVideoChannelTest, BandwidthAboveTotalMaxBitrateGivenToMaxLayer) { send_parameters_.max_bandwidth_bps = GetTotalMaxBitrate(stream->GetVideoStreams()).bps(); ExpectSetMaxBitrate(send_parameters_.max_bandwidth_bps); - ASSERT_TRUE(channel_->SetSendParameters(send_parameters_)); + ASSERT_TRUE(send_channel_->SetSendParameters(send_parameters_)); // No bitrate above the total max to give to the highest layer. 
EXPECT_EQ(kNumSimulcastStreams, stream->GetVideoStreams().size()); @@ -8849,7 +8876,7 @@ TEST_F(WebRtcVideoChannelTest, BandwidthAboveTotalMaxBitrateGivenToMaxLayer) { send_parameters_.max_bandwidth_bps = GetTotalMaxBitrate(stream->GetVideoStreams()).bps() + 1; ExpectSetMaxBitrate(send_parameters_.max_bandwidth_bps); - ASSERT_TRUE(channel_->SetSendParameters(send_parameters_)); + ASSERT_TRUE(send_channel_->SetSendParameters(send_parameters_)); // The highest layer has no max bitrate set -> the bitrate above the total // max should be given to the highest layer. @@ -8859,7 +8886,7 @@ TEST_F(WebRtcVideoChannelTest, BandwidthAboveTotalMaxBitrateGivenToMaxLayer) { EXPECT_EQ(kDefault[2].max_bitrate_bps + 1, stream->GetVideoStreams()[2].max_bitrate_bps); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } TEST_F(WebRtcVideoChannelTest, @@ -8871,8 +8898,9 @@ TEST_F(WebRtcVideoChannelTest, // Send a full size frame so all simulcast layers are used when reconfiguring. webrtc::test::FrameForwarder frame_forwarder; VideoOptions options; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); - channel_->SetSend(true); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + send_channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Set max bitrate for the highest layer. @@ -8886,7 +8914,7 @@ TEST_F(WebRtcVideoChannelTest, send_parameters_.max_bandwidth_bps = GetTotalMaxBitrate(stream->GetVideoStreams()).bps() + 1; ExpectSetMaxBitrate(send_parameters_.max_bandwidth_bps); - ASSERT_TRUE(channel_->SetSendParameters(send_parameters_)); + ASSERT_TRUE(send_channel_->SetSendParameters(send_parameters_)); // The highest layer has the max bitrate set -> the bitrate above the total // max should not be given to the highest layer. @@ -8894,14 +8922,14 @@ TEST_F(WebRtcVideoChannelTest, EXPECT_EQ(*parameters.encodings[2].max_bitrate_bps, stream->GetVideoStreams()[2].max_bitrate_bps); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } // Test that min and max bitrate values set via RtpParameters are correctly // propagated to the underlying encoder for a single stream. TEST_F(WebRtcVideoChannelTest, MinAndMaxBitratePropagatedToEncoder) { FakeVideoSendStream* stream = AddSendStream(); - EXPECT_TRUE(channel_->SetSend(true)); + EXPECT_TRUE(send_channel_->SetSend(true)); EXPECT_TRUE(stream->IsSending()); // Set min and max bitrate. @@ -8932,7 +8960,7 @@ TEST_F(WebRtcVideoChannelTest, MinAndMaxBitratePropagatedToEncoder) { // RtpParameters). TEST_F(WebRtcVideoChannelTest, DefaultMinAndMaxBitratePropagatedToEncoder) { FakeVideoSendStream* stream = AddSendStream(); - EXPECT_TRUE(channel_->SetSend(true)); + EXPECT_TRUE(send_channel_->SetSend(true)); EXPECT_TRUE(stream->IsSending()); // Check that WebRtcVideoSendStream updates VideoEncoderConfig correctly. @@ -8957,7 +8985,7 @@ TEST_F(WebRtcVideoChannelTest, DefaultMinAndMaxBitratePropagatedToEncoder) { // through SetRtpSendParameters. TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersOneEncodingActive) { FakeVideoSendStream* stream = AddSendStream(); - EXPECT_TRUE(channel_->SetSend(true)); + EXPECT_TRUE(send_channel_->SetSend(true)); EXPECT_TRUE(stream->IsSending()); // Get current parameters and change "active" to false. 
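The other substitution repeated across these hunks is the send-side source lifecycle, sketched here under the same fixture assumptions (frame_source_ as in the tests above):

webrtc::test::FrameForwarder frame_forwarder;
VideoOptions options;
// Attach the source and start sending on the send channel directly.
EXPECT_TRUE(
    send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder));
send_channel_->SetSend(true);
frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame());
// Tear down by detaching the source before the forwarder is destroyed.
EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr));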
@@ -8991,8 +9019,9 @@ TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersMultipleEncodingsActive) { // appropriately. webrtc::test::FrameForwarder frame_forwarder; VideoOptions options; - EXPECT_TRUE(channel_->SetVideoSend(primary_ssrc, &options, &frame_forwarder)); - channel_->SetSend(true); + EXPECT_TRUE( + send_channel_->SetVideoSend(primary_ssrc, &options, &frame_forwarder)); + send_channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame( 1920, 1080, webrtc::VideoRotation::kVideoRotation_0, rtc::kNumMicrosecsPerSec / 30)); @@ -9048,7 +9077,7 @@ TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersMultipleEncodingsActive) { EXPECT_FALSE(simulcast_streams[1].active); EXPECT_FALSE(simulcast_streams[2].active); - EXPECT_TRUE(channel_->SetVideoSend(primary_ssrc, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(primary_ssrc, nullptr, nullptr)); } // Tests that when some streams are disactivated then the lowest @@ -9067,8 +9096,9 @@ TEST_F(WebRtcVideoChannelTest, // appropriately. webrtc::test::FrameForwarder frame_forwarder; VideoOptions options; - EXPECT_TRUE(channel_->SetVideoSend(primary_ssrc, &options, &frame_forwarder)); - channel_->SetSend(true); + EXPECT_TRUE( + send_channel_->SetVideoSend(primary_ssrc, &options, &frame_forwarder)); + send_channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame( 1920, 1080, webrtc::VideoRotation::kVideoRotation_0, rtc::kNumMicrosecsPerSec / 30)); @@ -9102,7 +9132,7 @@ TEST_F(WebRtcVideoChannelTest, EXPECT_EQ(simulcast_streams[2].min_bitrate_bps, simulcast_streams[0].min_bitrate_bps); - EXPECT_TRUE(channel_->SetVideoSend(primary_ssrc, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(primary_ssrc, nullptr, nullptr)); } // Test that if a stream is reconfigured (due to a codec change or other @@ -9110,13 +9140,13 @@ TEST_F(WebRtcVideoChannelTest, TEST_F(WebRtcVideoChannelTest, InactiveStreamDoesntStartSendingWhenReconfigured) { // Set an initial codec list, which will be modified later. - cricket::VideoSendParameters parameters1; + cricket::VideoSenderParameters parameters1; parameters1.codecs.push_back(GetEngineCodec("VP8")); parameters1.codecs.push_back(GetEngineCodec("VP9")); - EXPECT_TRUE(channel_->SetSendParameters(parameters1)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters1)); FakeVideoSendStream* stream = AddSendStream(); - EXPECT_TRUE(channel_->SetSend(true)); + EXPECT_TRUE(send_channel_->SetSend(true)); EXPECT_TRUE(stream->IsSending()); // Get current parameters and change "active" to false. @@ -9131,10 +9161,10 @@ TEST_F(WebRtcVideoChannelTest, EXPECT_FALSE(stream->IsSending()); // Reorder the codec list, causing the stream to be reconfigured. - cricket::VideoSendParameters parameters2; + cricket::VideoSenderParameters parameters2; parameters2.codecs.push_back(GetEngineCodec("VP9")); parameters2.codecs.push_back(GetEngineCodec("VP8")); - EXPECT_TRUE(channel_->SetSendParameters(parameters2)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters2)); auto new_streams = GetFakeSendStreams(); // Assert that a new underlying stream was created due to the codec change. // Otherwise, this test isn't testing what it set out to test. @@ -9149,10 +9179,10 @@ TEST_F(WebRtcVideoChannelTest, // Test that GetRtpSendParameters returns the currently configured codecs. 
TEST_F(WebRtcVideoChannelTest, GetRtpSendParametersCodecs) { AddSendStream(); - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("VP9")); - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); webrtc::RtpParameters rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); @@ -9203,7 +9233,8 @@ TEST_F(WebRtcVideoChannelTest, GetRtpSendParametersDegradationPreference) { AddSendStream(); webrtc::test::FrameForwarder frame_forwarder; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); webrtc::RtpParameters rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); @@ -9220,16 +9251,16 @@ TEST_F(WebRtcVideoChannelTest, GetRtpSendParametersDegradationPreference) { webrtc::DegradationPreference::MAINTAIN_FRAMERATE); // Remove the source since it will be destroyed before the channel - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } // Test that if we set/get parameters multiple times, we get the same results. TEST_F(WebRtcVideoChannelTest, SetAndGetRtpSendParameters) { AddSendStream(); - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("VP9")); - EXPECT_TRUE(channel_->SetSendParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); webrtc::RtpParameters initial_params = send_channel_->GetRtpSendParameters(last_ssrc_); @@ -9245,13 +9276,13 @@ TEST_F(WebRtcVideoChannelTest, SetAndGetRtpSendParameters) { // Test that GetRtpReceiveParameters returns the currently configured codecs. 
TEST_F(WebRtcVideoChannelTest, GetRtpReceiveParametersCodecs) { AddRecvStream(); - cricket::VideoRecvParameters parameters; + cricket::VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("VP9")); - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); webrtc::RtpParameters rtp_parameters = - channel_->GetRtpReceiveParameters(last_ssrc_); + receive_channel_->GetRtpReceiveParameters(last_ssrc_); ASSERT_EQ(2u, rtp_parameters.codecs.size()); EXPECT_EQ(GetEngineCodec("VP8").ToCodecParameters(), rtp_parameters.codecs[0]); @@ -9264,20 +9295,20 @@ TEST_F(WebRtcVideoChannelTest, GetRtpReceiveFmtpSprop) { #else TEST_F(WebRtcVideoChannelTest, DISABLED_GetRtpReceiveFmtpSprop) { #endif - cricket::VideoRecvParameters parameters; + cricket::VideoReceiverParameters parameters; cricket::VideoCodec kH264sprop1 = cricket::CreateVideoCodec(101, "H264"); kH264sprop1.SetParam(kH264FmtpSpropParameterSets, "uvw"); parameters.codecs.push_back(kH264sprop1); cricket::VideoCodec kH264sprop2 = cricket::CreateVideoCodec(102, "H264"); kH264sprop2.SetParam(kH264FmtpSpropParameterSets, "xyz"); parameters.codecs.push_back(kH264sprop2); - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); FakeVideoReceiveStream* recv_stream = AddRecvStream(); const webrtc::VideoReceiveStreamInterface::Config& cfg = recv_stream->GetConfig(); webrtc::RtpParameters rtp_parameters = - channel_->GetRtpReceiveParameters(last_ssrc_); + receive_channel_->GetRtpReceiveParameters(last_ssrc_); ASSERT_EQ(2u, rtp_parameters.codecs.size()); EXPECT_EQ(kH264sprop1.ToCodecParameters(), rtp_parameters.codecs[0]); ASSERT_EQ(2u, cfg.decoders.size()); @@ -9302,7 +9333,7 @@ TEST_F(WebRtcVideoChannelTest, GetRtpReceiveParametersSsrc) { AddRecvStream(); webrtc::RtpParameters rtp_parameters = - channel_->GetRtpReceiveParameters(last_ssrc_); + receive_channel_->GetRtpReceiveParameters(last_ssrc_); ASSERT_EQ(1u, rtp_parameters.encodings.size()); EXPECT_EQ(last_ssrc_, rtp_parameters.encodings[0].ssrc); } @@ -9310,17 +9341,18 @@ TEST_F(WebRtcVideoChannelTest, GetRtpReceiveParametersSsrc) { // Test that if we set/get parameters multiple times, we get the same results. TEST_F(WebRtcVideoChannelTest, SetAndGetRtpReceiveParameters) { AddRecvStream(); - cricket::VideoRecvParameters parameters; + cricket::VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("VP9")); - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); webrtc::RtpParameters initial_params = - channel_->GetRtpReceiveParameters(last_ssrc_); + receive_channel_->GetRtpReceiveParameters(last_ssrc_); // ... And this shouldn't change the params returned by // GetRtpReceiveParameters. - EXPECT_EQ(initial_params, channel_->GetRtpReceiveParameters(last_ssrc_)); + EXPECT_EQ(initial_params, + receive_channel_->GetRtpReceiveParameters(last_ssrc_)); } // Test that GetDefaultRtpReceiveParameters returns parameters correctly when @@ -9331,23 +9363,23 @@ TEST_F(WebRtcVideoChannelTest, GetDefaultRtpReceiveParametersWithUnsignaledSsrc) { // Call necessary methods to configure receiving a default stream as // soon as it arrives. 
- cricket::VideoRecvParameters parameters; + cricket::VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("VP9")); - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); // Call GetRtpReceiveParameters before configured to receive an unsignaled // stream. Should return nothing. EXPECT_EQ(webrtc::RtpParameters(), - channel_->GetDefaultRtpReceiveParameters()); + receive_channel_->GetDefaultRtpReceiveParameters()); // Set a sink for an unsignaled stream. cricket::FakeVideoRenderer renderer; - channel_->SetDefaultSink(&renderer); + receive_channel_->SetDefaultSink(&renderer); // Call GetDefaultRtpReceiveParameters before the SSRC is known. webrtc::RtpParameters rtp_parameters = - channel_->GetDefaultRtpReceiveParameters(); + receive_channel_->GetDefaultRtpReceiveParameters(); ASSERT_EQ(1u, rtp_parameters.encodings.size()); EXPECT_FALSE(rtp_parameters.encodings[0].ssrc); @@ -9358,7 +9390,7 @@ TEST_F(WebRtcVideoChannelTest, ReceivePacketAndAdvanceTime(rtp_packet); // The `ssrc` member should still be unset. - rtp_parameters = channel_->GetDefaultRtpReceiveParameters(); + rtp_parameters = receive_channel_->GetDefaultRtpReceiveParameters(); ASSERT_EQ(1u, rtp_parameters.encodings.size()); EXPECT_FALSE(rtp_parameters.encodings[0].ssrc); } @@ -9383,7 +9415,7 @@ TEST_F(WebRtcVideoChannelTest, void WebRtcVideoChannelTest::TestReceiverLocalSsrcConfiguration( bool receiver_first) { - EXPECT_TRUE(channel_->SetSendParameters(send_parameters_)); + EXPECT_TRUE(send_channel_->SetSendParameters(send_parameters_)); const uint32_t kSenderSsrc = 0xC0FFEE; const uint32_t kSecondSenderSsrc = 0xBADCAFE; @@ -9448,13 +9480,14 @@ TEST_F(WebRtcVideoChannelTest, webrtc::test::FrameForwarder frame_forwarder; VideoOptions options; options.is_screencast = true; - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); // Fetch the latest stream since SetVideoSend() may recreate it if the // screen content setting is changed. 
FakeVideoSendStream* stream = fake_call_->GetVideoSendStreams().front(); EXPECT_FALSE(stream->GetEncoderConfig().is_quality_scaling_allowed); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } TEST_F(WebRtcVideoChannelTest, @@ -9492,12 +9525,11 @@ class WebRtcVideoChannelSimulcastTest : public ::testing::Test { void SetUp() override { encoder_factory_->AddSupportedVideoCodecType("VP8"); decoder_factory_->AddSupportedVideoCodecType("VP8"); - channel_.reset(engine_.CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, &fake_call_, GetMediaConfig(), - VideoOptions(), webrtc::CryptoOptions(), - mock_rate_allocator_factory_.get())); - send_channel_ = channel_->AsVideoSendChannel(); - receive_channel_ = channel_->AsVideoReceiveChannel(); + send_channel_ = engine_.CreateSendChannel( + &fake_call_, GetMediaConfig(), VideoOptions(), webrtc::CryptoOptions(), + mock_rate_allocator_factory_.get()); + receive_channel_ = engine_.CreateReceiveChannel( + &fake_call_, GetMediaConfig(), VideoOptions(), webrtc::CryptoOptions()); send_channel_->OnReadyToSend(true); receive_channel_->SetReceive(true); last_ssrc_ = 123; @@ -9511,10 +9543,10 @@ class WebRtcVideoChannelSimulcastTest : public ::testing::Test { size_t expected_num_streams, bool screenshare, bool conference_mode) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(codec); parameters.conference_mode = conference_mode; - ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); std::vector ssrcs = MAKE_VECTOR(kSsrcs3); RTC_DCHECK(num_configured_streams <= ssrcs.size()); @@ -9531,11 +9563,11 @@ class WebRtcVideoChannelSimulcastTest : public ::testing::Test { if (screenshare) options.is_screencast = screenshare; EXPECT_TRUE( - channel_->SetVideoSend(ssrcs.front(), &options, &frame_forwarder)); + send_channel_->SetVideoSend(ssrcs.front(), &options, &frame_forwarder)); // Fetch the latest stream since SetVideoSend() may recreate it if the // screen content setting is changed. 
FakeVideoSendStream* stream = fake_call_.GetVideoSendStreams().front(); - channel_->SetSend(true); + send_channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); auto rtp_parameters = send_channel_->GetRtpSendParameters(kSsrcs3[0]); @@ -9605,7 +9637,7 @@ class WebRtcVideoChannelSimulcastTest : public ::testing::Test { } } - EXPECT_TRUE(channel_->SetVideoSend(ssrcs.front(), nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(ssrcs.front(), nullptr, nullptr)); } FakeVideoSendStream* AddSendStream() { @@ -9646,9 +9678,8 @@ class WebRtcVideoChannelSimulcastTest : public ::testing::Test { std::unique_ptr mock_rate_allocator_factory_; WebRtcVideoEngine engine_; - std::unique_ptr channel_; - VideoMediaSendChannelInterface* send_channel_; - VideoMediaReceiveChannelInterface* receive_channel_; + std::unique_ptr send_channel_; + std::unique_ptr receive_channel_; uint32_t last_ssrc_; }; @@ -9684,9 +9715,9 @@ TEST_F(WebRtcVideoChannelSimulcastTest, SimulcastScreenshareWithoutConference) { } TEST_F(WebRtcVideoChannelBaseTest, GetSources) { - EXPECT_THAT(channel_->GetSources(kSsrc), IsEmpty()); + EXPECT_THAT(receive_channel_->GetSources(kSsrc), IsEmpty()); - channel_->SetDefaultSink(&renderer_); + receive_channel_->SetDefaultSink(&renderer_); EXPECT_TRUE(SetDefaultCodec()); EXPECT_TRUE(SetSend(true)); EXPECT_EQ(renderer_.num_rendered_frames(), 0); @@ -9695,11 +9726,11 @@ TEST_F(WebRtcVideoChannelBaseTest, GetSources) { SendFrame(); EXPECT_FRAME(1, kVideoWidth, kVideoHeight); - EXPECT_THAT(channel_->GetSources(kSsrc - 1), IsEmpty()); - EXPECT_THAT(channel_->GetSources(kSsrc), SizeIs(1)); - EXPECT_THAT(channel_->GetSources(kSsrc + 1), IsEmpty()); + EXPECT_THAT(receive_channel_->GetSources(kSsrc - 1), IsEmpty()); + EXPECT_THAT(receive_channel_->GetSources(kSsrc), SizeIs(1)); + EXPECT_THAT(receive_channel_->GetSources(kSsrc + 1), IsEmpty()); - webrtc::RtpSource source = channel_->GetSources(kSsrc)[0]; + webrtc::RtpSource source = receive_channel_->GetSources(kSsrc)[0]; EXPECT_EQ(source.source_id(), kSsrc); EXPECT_EQ(source.source_type(), webrtc::RtpSourceType::SSRC); int64_t rtp_timestamp_1 = source.rtp_timestamp(); @@ -9709,11 +9740,11 @@ TEST_F(WebRtcVideoChannelBaseTest, GetSources) { SendFrame(); EXPECT_FRAME(2, kVideoWidth, kVideoHeight); - EXPECT_THAT(channel_->GetSources(kSsrc - 1), IsEmpty()); - EXPECT_THAT(channel_->GetSources(kSsrc), SizeIs(1)); - EXPECT_THAT(channel_->GetSources(kSsrc + 1), IsEmpty()); + EXPECT_THAT(receive_channel_->GetSources(kSsrc - 1), IsEmpty()); + EXPECT_THAT(receive_channel_->GetSources(kSsrc), SizeIs(1)); + EXPECT_THAT(receive_channel_->GetSources(kSsrc + 1), IsEmpty()); - source = channel_->GetSources(kSsrc)[0]; + source = receive_channel_->GetSources(kSsrc)[0]; EXPECT_EQ(source.source_id(), kSsrc); EXPECT_EQ(source.source_type(), webrtc::RtpSourceType::SSRC); int64_t rtp_timestamp_2 = source.rtp_timestamp(); @@ -9745,13 +9776,13 @@ TEST_F(WebRtcVideoChannelTest, SetsRidsOnSendStream) { TEST_F(WebRtcVideoChannelBaseTest, EncoderSelectorSwitchCodec) { VideoCodec vp9 = GetEngineCodec("VP9"); - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(vp9); - EXPECT_TRUE(channel_->SetSendParameters(parameters)); - channel_->SetSend(true); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters)); + send_channel_->SetSend(true); - absl::optional codec = channel_->GetSendCodec(); + absl::optional codec = 
send_channel_->GetSendCodec(); ASSERT_TRUE(codec); EXPECT_EQ("VP8", codec->name); @@ -9759,27 +9790,28 @@ TEST_F(WebRtcVideoChannelBaseTest, EncoderSelectorSwitchCodec) { EXPECT_CALL(encoder_selector, OnAvailableBitrate) .WillRepeatedly(Return(webrtc::SdpVideoFormat("VP9"))); - channel_->SetEncoderSelector(kSsrc, &encoder_selector); + send_channel_->SetEncoderSelector(kSsrc, &encoder_selector); time_controller_.AdvanceTime(kFrameDuration); - codec = channel_->GetSendCodec(); + codec = send_channel_->GetSendCodec(); ASSERT_TRUE(codec); EXPECT_EQ("VP9", codec->name); // Deregister the encoder selector in case it's called during test tear-down. - channel_->SetEncoderSelector(kSsrc, nullptr); + send_channel_->SetEncoderSelector(kSsrc, nullptr); } TEST_F(WebRtcVideoChannelTest, RequestedResolutionSinglecast) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); FakeVideoSendStream* stream = AddSendStream(); webrtc::test::FrameForwarder frame_forwarder; cricket::FakeFrameSource frame_source(1280, 720, rtc::kNumMicrosecsPerSec / 30); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); { // TEST requested_resolution < frame size webrtc::RtpParameters rtp_parameters = @@ -9825,19 +9857,20 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSinglecast) { EXPECT_EQ(rtc::checked_cast(720), streams[0].height); } - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } TEST_F(WebRtcVideoChannelTest, RequestedResolutionSinglecastCropping) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); FakeVideoSendStream* stream = AddSendStream(); webrtc::test::FrameForwarder frame_forwarder; cricket::FakeFrameSource frame_source(1280, 720, rtc::kNumMicrosecsPerSec / 30); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); { auto rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); @@ -9882,19 +9915,20 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSinglecastCropping) { EXPECT_EQ(rtc::checked_cast(480), streams[0].height); } - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } TEST_F(WebRtcVideoChannelTest, RequestedResolutionSimulcast) { - cricket::VideoSendParameters parameters; + cricket::VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); - ASSERT_TRUE(channel_->SetSendParameters(parameters)); + ASSERT_TRUE(send_channel_->SetSendParameters(parameters)); FakeVideoSendStream* stream = SetUpSimulcast(true, /*with_rtx=*/false); webrtc::test::FrameForwarder frame_forwarder; cricket::FakeFrameSource frame_source(1280, 720, rtc::kNumMicrosecsPerSec / 30); - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); { webrtc::RtpParameters rtp_parameters = @@ 
-9962,7 +9996,7 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSimulcast) { })); } - EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } } // namespace cricket diff --git a/third_party/libwebrtc/media/engine/webrtc_voice_engine.cc b/third_party/libwebrtc/media/engine/webrtc_voice_engine.cc index 1ee0c219938f..25fdbdc8119f 100644 --- a/third_party/libwebrtc/media/engine/webrtc_voice_engine.cc +++ b/third_party/libwebrtc/media/engine/webrtc_voice_engine.cc @@ -49,6 +49,7 @@ #include "call/rtp_config.h" #include "call/rtp_transport_controller_send_interface.h" #include "media/base/audio_source.h" +#include "media/base/codec.h" #include "media/base/media_constants.h" #include "media/base/stream_params.h" #include "media/engine/adm_helpers.h" @@ -485,29 +486,6 @@ WebRtcVoiceEngine::CreateReceiveChannel( this, config, options, crypto_options, call, codec_pair_id); } -VoiceMediaChannel* WebRtcVoiceEngine::CreateMediaChannel( - MediaChannel::Role role, - webrtc::Call* call, - const MediaConfig& config, - const AudioOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::AudioCodecPairId codec_pair_id) { - RTC_DCHECK_RUN_ON(call->worker_thread()); - std::unique_ptr send_channel; - std::unique_ptr receive_channel; - if (role == MediaChannel::Role::kSend || role == MediaChannel::Role::kBoth) { - send_channel = - CreateSendChannel(call, config, options, crypto_options, codec_pair_id); - } - if (role == MediaChannel::Role::kReceive || - role == MediaChannel::Role::kBoth) { - receive_channel = CreateReceiveChannel(call, config, options, - crypto_options, codec_pair_id); - } - return new VoiceMediaShimChannel(std::move(send_channel), - std::move(receive_channel)); -} - void WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_LOG(LS_INFO) << "WebRtcVoiceEngine::ApplyOptions: " @@ -1285,7 +1263,7 @@ bool WebRtcVoiceSendChannel::SetOptions(const AudioOptions& options) { } bool WebRtcVoiceSendChannel::SetSendParameters( - const AudioSendParameters& params) { + const AudioSenderParameter& params) { TRACE_EVENT0("webrtc", "WebRtcVoiceMediaChannel::SetSendParameters"); RTC_DCHECK_RUN_ON(worker_thread_); RTC_LOG(LS_INFO) << "WebRtcVoiceMediaChannel::SetSendParameters: " @@ -1330,6 +1308,13 @@ bool WebRtcVoiceSendChannel::SetSendParameters( return SetOptions(params.options); } +absl::optional WebRtcVoiceSendChannel::GetSendCodec() const { + if (send_codec_spec_) { + return CreateAudioCodec(send_codec_spec_->format); + } + return absl::nullopt; +} + // Utility function called from SetSendParameters() to extract current send // codec settings from the given list of codecs (originally from SDP). Both send // and receive streams may be reconfigured based on the new settings. 
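With WebRtcVoiceEngine::CreateMediaChannel removed, callers build the send and receive halves separately and query the active send codec from the send half; a minimal sketch of that flow as the updated unit tests further below use it (the engine and call objects here are placeholders for the fixture's members):

// Placeholder engine/call stand in for the test fixture's engine_ and call_.
auto send_channel = engine.CreateSendChannel(
    &call, cricket::MediaConfig(), cricket::AudioOptions(),
    webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create());
auto receive_channel = engine.CreateReceiveChannel(
    &call, cricket::MediaConfig(), cricket::AudioOptions(),
    webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create());
// GetSendCodec() now lives on the send channel and returns the codec built
// from send_codec_spec_, or nullopt if no send codec is configured.
if (auto codec = send_channel->GetSendCodec()) {
  RTC_LOG(LS_INFO) << "Send codec: " << codec->name;
}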
@@ -1537,7 +1522,7 @@ bool WebRtcVoiceSendChannel::AddSendStream(const StreamParams& sp) { ssrc, mid_, sp.cname, sp.id, send_codec_spec_, ExtmapAllowMixed(), send_rtp_extensions_, max_send_bitrate_bps_, audio_config_.rtcp_report_interval_ms, audio_network_adaptor_config, - call_, this, engine()->encoder_factory_, codec_pair_id_, nullptr, + call_, transport(), engine()->encoder_factory_, codec_pair_id_, nullptr, crypto_options_); send_streams_.insert(std::make_pair(ssrc, stream)); if (ssrc_list_changed_callback_) { @@ -1791,18 +1776,6 @@ void WebRtcVoiceSendChannel::SetEncoderToPacketizerFrameTransformer( std::move(frame_transformer)); } -bool WebRtcVoiceSendChannel::SendRtp(const uint8_t* data, - size_t len, - const webrtc::PacketOptions& options) { - MediaChannelUtil::SendRtp(data, len, options); - return true; -} - -bool WebRtcVoiceSendChannel::SendRtcp(const uint8_t* data, size_t len) { - MediaChannelUtil::SendRtcp(data, len); - return true; -} - webrtc::RtpParameters WebRtcVoiceSendChannel::GetRtpSendParameters( uint32_t ssrc) const { RTC_DCHECK_RUN_ON(worker_thread_); @@ -2025,7 +1998,7 @@ WebRtcVoiceReceiveChannel::~WebRtcVoiceReceiveChannel() { } bool WebRtcVoiceReceiveChannel::SetRecvParameters( - const AudioRecvParameters& params) { + const AudioReceiverParameters& params) { TRACE_EVENT0("webrtc", "WebRtcVoiceMediaChannel::SetRecvParameters"); RTC_DCHECK_RUN_ON(worker_thread_); RTC_LOG(LS_INFO) << "WebRtcVoiceMediaChannel::SetRecvParameters: " @@ -2257,8 +2230,9 @@ bool WebRtcVoiceReceiveChannel::AddRecvStream(const StreamParams& sp) { // Create a new channel for receiving audio data. auto config = BuildReceiveStreamConfig( ssrc, receiver_reports_ssrc_, recv_nack_enabled_, enable_non_sender_rtt_, - sp.stream_ids(), recv_rtp_extensions_, this, engine()->decoder_factory_, - decoder_map_, codec_pair_id_, engine()->audio_jitter_buffer_max_packets_, + sp.stream_ids(), recv_rtp_extensions_, transport(), + engine()->decoder_factory_, decoder_map_, codec_pair_id_, + engine()->audio_jitter_buffer_max_packets_, engine()->audio_jitter_buffer_fast_accelerate_, engine()->audio_jitter_buffer_min_delay_ms_, unsignaled_frame_decryptor_, crypto_options_, unsignaled_frame_transformer_); @@ -2684,18 +2658,6 @@ void WebRtcVoiceReceiveChannel::SetDepacketizerToDecoderFrameTransformer( std::move(frame_transformer)); } -bool WebRtcVoiceReceiveChannel::SendRtp(const uint8_t* data, - size_t len, - const webrtc::PacketOptions& options) { - MediaChannelUtil::SendRtp(data, len, options); - return true; -} - -bool WebRtcVoiceReceiveChannel::SendRtcp(const uint8_t* data, size_t len) { - MediaChannelUtil::SendRtcp(data, len); - return true; -} - bool WebRtcVoiceReceiveChannel::MaybeDeregisterUnsignaledRecvStream( uint32_t ssrc) { RTC_DCHECK_RUN_ON(worker_thread_); diff --git a/third_party/libwebrtc/media/engine/webrtc_voice_engine.h b/third_party/libwebrtc/media/engine/webrtc_voice_engine.h index 0f7313ea1039..925d086fdeec 100644 --- a/third_party/libwebrtc/media/engine/webrtc_voice_engine.h +++ b/third_party/libwebrtc/media/engine/webrtc_voice_engine.h @@ -53,7 +53,6 @@ #include "media/base/codec.h" #include "media/base/media_channel.h" #include "media/base/media_channel_impl.h" -#include "media/base/media_channel_shim.h" #include "media/base/media_config.h" #include "media/base/media_engine.h" #include "media/base/rtp_utils.h" @@ -121,14 +120,6 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface { const webrtc::CryptoOptions& crypto_options, webrtc::AudioCodecPairId codec_pair_id) override; - 
VoiceMediaChannel* CreateMediaChannel( - MediaChannel::Role role, - webrtc::Call* call, - const MediaConfig& config, - const AudioOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::AudioCodecPairId codec_pair_id) override; - const std::vector& send_codecs() const override; const std::vector& recv_codecs() const override; std::vector GetRtpHeaderExtensions() @@ -195,8 +186,7 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface { }; class WebRtcVoiceSendChannel final : public MediaChannelUtil, - public VoiceMediaSendChannelInterface, - public webrtc::Transport { + public VoiceMediaSendChannelInterface { public: WebRtcVoiceSendChannel(WebRtcVoiceEngine* engine, const MediaConfig& config, @@ -218,6 +208,8 @@ class WebRtcVoiceSendChannel final : public MediaChannelUtil, } VoiceMediaSendChannelInterface* AsVoiceSendChannel() override { return this; } + absl::optional GetSendCodec() const override; + // Functions imported from MediaChannelUtil void SetInterface(MediaChannelNetworkInterface* iface) override { MediaChannelUtil::SetInterface(iface); @@ -235,7 +227,7 @@ class WebRtcVoiceSendChannel final : public MediaChannelUtil, const AudioOptions& options() const { return options_; } - bool SetSendParameters(const AudioSendParameters& params) override; + bool SetSendParameters(const AudioSenderParameter& params) override; webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const override; webrtc::RTCError SetRtpSendParameters( uint32_t ssrc, @@ -277,13 +269,6 @@ class WebRtcVoiceSendChannel final : public MediaChannelUtil, rtc::scoped_refptr frame_transformer) override; - // implements Transport interface - bool SendRtp(const uint8_t* data, - size_t len, - const webrtc::PacketOptions& options) override; - - bool SendRtcp(const uint8_t* data, size_t len) override; - bool SenderNackEnabled() const override { if (!send_codec_spec_) { return false; @@ -361,8 +346,7 @@ class WebRtcVoiceSendChannel final : public MediaChannelUtil, class WebRtcVoiceReceiveChannel final : public MediaChannelUtil, - public VoiceMediaReceiveChannelInterface, - public webrtc::Transport { + public VoiceMediaReceiveChannelInterface { public: WebRtcVoiceReceiveChannel(WebRtcVoiceEngine* engine, const MediaConfig& config, @@ -393,7 +377,7 @@ class WebRtcVoiceReceiveChannel final void SetInterface(MediaChannelNetworkInterface* iface) override { MediaChannelUtil::SetInterface(iface); } - bool SetRecvParameters(const AudioRecvParameters& params) override; + bool SetRecvParameters(const AudioReceiverParameters& params) override; webrtc::RtpParameters GetRtpReceiveParameters(uint32_t ssrc) const override; webrtc::RtpParameters GetDefaultRtpReceiveParameters() const override; @@ -444,13 +428,6 @@ class WebRtcVoiceReceiveChannel final rtc::scoped_refptr frame_transformer) override; - // implements Transport interface - bool SendRtp(const uint8_t* data, - size_t len, - const webrtc::PacketOptions& options) override; - - bool SendRtcp(const uint8_t* data, size_t len) override; - void SetReceiveNackEnabled(bool enabled) override; void SetReceiveNonSenderRttEnabled(bool enabled) override; diff --git a/third_party/libwebrtc/media/engine/webrtc_voice_engine_unittest.cc b/third_party/libwebrtc/media/engine/webrtc_voice_engine_unittest.cc index bd19636210f4..aa3d4266b4de 100644 --- a/third_party/libwebrtc/media/engine/webrtc_voice_engine_unittest.cc +++ b/third_party/libwebrtc/media/engine/webrtc_voice_engine_unittest.cc @@ -27,7 +27,7 @@ #include "media/base/fake_media_engine.h" #include 
"media/base/fake_network_interface.h" #include "media/base/fake_rtp.h" -#include "media/base/media_channel_shim.h" +#include "media/base/media_channel.h" #include "media/base/media_constants.h" #include "media/engine/fake_webrtc_call.h" #include "modules/audio_device/include/mock_audio_device.h" @@ -233,13 +233,26 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { } bool SetupChannel() { - channel_ = engine_->CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, &call_, cricket::MediaConfig(), - cricket::AudioOptions(), webrtc::CryptoOptions(), - webrtc::AudioCodecPairId::Create()); - send_channel_ = channel_->AsVoiceSendChannel(); - receive_channel_ = channel_->AsVoiceReceiveChannel(); - return (channel_ != nullptr); + send_channel_ = engine_->CreateSendChannel( + &call_, cricket::MediaConfig(), cricket::AudioOptions(), + webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create()); + receive_channel_ = engine_->CreateReceiveChannel( + &call_, cricket::MediaConfig(), cricket::AudioOptions(), + webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create()); + send_channel_->SetSsrcListChangedCallback( + [receive_channel = + receive_channel_.get()](const std::set& choices) { + receive_channel->ChooseReceiverReportSsrc(choices); + }); + send_channel_->SetSendCodecChangedCallback( + [receive_channel = receive_channel_.get(), + send_channel = send_channel_.get()]() { + receive_channel->SetReceiveNackEnabled( + send_channel->SendCodecHasNack()); + receive_channel->SetReceiveNonSenderRttEnabled( + send_channel->SenderNonSenderRttEnabled()); + }); + return true; } bool SetupRecvStream() { @@ -263,11 +276,11 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { if (!use_null_apm_) { EXPECT_CALL(*apm_, set_output_will_be_muted(false)); } - return channel_->SetAudioSend(kSsrcX, true, nullptr, &fake_source_); + return send_channel_->SetAudioSend(kSsrcX, true, nullptr, &fake_source_); } bool AddRecvStream(uint32_t ssrc) { - EXPECT_TRUE(channel_); + EXPECT_TRUE(receive_channel_); return receive_channel_->AddRecvStream( cricket::StreamParams::CreateLegacy(ssrc)); } @@ -288,8 +301,6 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { rtc::Thread::Current()->ProcessMessages(0); } - void TearDown() override { delete channel_; } - const cricket::FakeAudioSendStream& GetSendStream(uint32_t ssrc) { const auto* send_stream = call_.GetAudioSendStream(ssrc); EXPECT_TRUE(send_stream); @@ -312,7 +323,7 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { } void SetSend(bool enable) { - ASSERT_TRUE(channel_); + ASSERT_TRUE(send_channel_); if (enable) { EXPECT_CALL(*adm_, RecordingIsInitialized()) .Times(::testing::AtMost(1)) @@ -324,23 +335,23 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { .Times(::testing::AtMost(1)) .WillOnce(Return(0)); } - channel_->SetSend(enable); + send_channel_->SetSend(enable); } - void SetSendParameters(const cricket::AudioSendParameters& params) { - ASSERT_TRUE(channel_); - EXPECT_TRUE(channel_->SetSendParameters(params)); + void SetSendParameters(const cricket::AudioSenderParameter& params) { + ASSERT_TRUE(send_channel_); + EXPECT_TRUE(send_channel_->SetSendParameters(params)); } void SetAudioSend(uint32_t ssrc, bool enable, cricket::AudioSource* source, const cricket::AudioOptions* options = nullptr) { - ASSERT_TRUE(channel_); + ASSERT_TRUE(send_channel_); if (!use_null_apm_) { EXPECT_CALL(*apm_, set_output_will_be_muted(!enable)); } - EXPECT_TRUE(channel_->SetAudioSend(ssrc, enable, 
options, source)); + EXPECT_TRUE(send_channel_->SetAudioSend(ssrc, enable, options, source)); } void TestInsertDtmf(uint32_t ssrc, @@ -357,27 +368,27 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { // Test we can only InsertDtmf when the other side supports telephone-event. SetSendParameters(send_parameters_); SetSend(true); - EXPECT_FALSE(channel_->CanInsertDtmf()); - EXPECT_FALSE(channel_->InsertDtmf(ssrc, 1, 111)); + EXPECT_FALSE(send_channel_->CanInsertDtmf()); + EXPECT_FALSE(send_channel_->InsertDtmf(ssrc, 1, 111)); send_parameters_.codecs.push_back(codec); SetSendParameters(send_parameters_); - EXPECT_TRUE(channel_->CanInsertDtmf()); + EXPECT_TRUE(send_channel_->CanInsertDtmf()); if (!caller) { // If this is callee, there's no active send channel yet. - EXPECT_FALSE(channel_->InsertDtmf(ssrc, 2, 123)); + EXPECT_FALSE(send_channel_->InsertDtmf(ssrc, 2, 123)); EXPECT_TRUE(send_channel_->AddSendStream( cricket::StreamParams::CreateLegacy(kSsrcX))); } // Check we fail if the ssrc is invalid. - EXPECT_FALSE(channel_->InsertDtmf(-1, 1, 111)); + EXPECT_FALSE(send_channel_->InsertDtmf(-1, 1, 111)); // Test send. cricket::FakeAudioSendStream::TelephoneEvent telephone_event = GetSendStream(kSsrcX).GetLatestTelephoneEvent(); EXPECT_EQ(-1, telephone_event.payload_type); - EXPECT_TRUE(channel_->InsertDtmf(ssrc, 2, 123)); + EXPECT_TRUE(send_channel_->InsertDtmf(ssrc, 2, 123)); telephone_event = GetSendStream(kSsrcX).GetLatestTelephoneEvent(); EXPECT_EQ(codec.id, telephone_event.payload_type); EXPECT_EQ(codec.clockrate, telephone_event.payload_frequency); @@ -401,7 +412,7 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { // For a callee, the answer will be applied in set local description // where SetExtmapAllowMixed() and AddSendStream() are called. EXPECT_TRUE(SetupChannel()); - channel_->SetExtmapAllowMixed(extmap_allow_mixed); + send_channel_->SetExtmapAllowMixed(extmap_allow_mixed); EXPECT_TRUE(send_channel_->AddSendStream( cricket::StreamParams::CreateLegacy(kSsrcX))); @@ -418,13 +429,13 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { int max_bitrate, bool expected_result, int expected_bitrate) { - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(codec); parameters.max_bandwidth_bps = max_bitrate; if (expected_result) { SetSendParameters(parameters); } else { - EXPECT_FALSE(channel_->SetSendParameters(parameters)); + EXPECT_FALSE(send_channel_->SetSendParameters(parameters)); } EXPECT_EQ(expected_bitrate, GetCodecBitrate(kSsrcX)); } @@ -440,7 +451,7 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { } void SetGlobalMaxBitrate(const cricket::AudioCodec& codec, int bitrate) { - cricket::AudioSendParameters send_parameters; + cricket::AudioSenderParameter send_parameters; send_parameters.codecs.push_back(codec); send_parameters.max_bandwidth_bps = bitrate; SetSendParameters(send_parameters); @@ -563,41 +574,48 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { EXPECT_TRUE(SetupRecvStream()); // Ensure extensions are off by default. - EXPECT_THAT(channel_->GetRtpReceiveParameters(kSsrcX).header_extensions, - IsEmpty()); + EXPECT_THAT( + receive_channel_->GetRtpReceiveParameters(kSsrcX).header_extensions, + IsEmpty()); // Ensure unknown extensions won't cause an error. 
recv_parameters_.extensions.push_back( webrtc::RtpExtension("urn:ietf:params:unknownextention", 1)); - EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_)); - EXPECT_THAT(channel_->GetRtpReceiveParameters(kSsrcX).header_extensions, - IsEmpty()); + EXPECT_TRUE(receive_channel_->SetRecvParameters(recv_parameters_)); + EXPECT_THAT( + receive_channel_->GetRtpReceiveParameters(kSsrcX).header_extensions, + IsEmpty()); // Ensure extensions stay off with an empty list of headers. recv_parameters_.extensions.clear(); - EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_)); - EXPECT_THAT(channel_->GetRtpReceiveParameters(kSsrcX).header_extensions, - IsEmpty()); + EXPECT_TRUE(receive_channel_->SetRecvParameters(recv_parameters_)); + EXPECT_THAT( + receive_channel_->GetRtpReceiveParameters(kSsrcX).header_extensions, + IsEmpty()); // Ensure extension is set properly. const int id = 2; recv_parameters_.extensions.push_back(webrtc::RtpExtension(ext, id)); - EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_)); - EXPECT_EQ(channel_->GetRtpReceiveParameters(kSsrcX).header_extensions, - recv_parameters_.extensions); + EXPECT_TRUE(receive_channel_->SetRecvParameters(recv_parameters_)); + EXPECT_EQ( + receive_channel_->GetRtpReceiveParameters(kSsrcX).header_extensions, + recv_parameters_.extensions); // Ensure extension is set properly on new stream. EXPECT_TRUE(AddRecvStream(kSsrcY)); - EXPECT_EQ(channel_->GetRtpReceiveParameters(kSsrcY).header_extensions, - recv_parameters_.extensions); + EXPECT_EQ( + receive_channel_->GetRtpReceiveParameters(kSsrcY).header_extensions, + recv_parameters_.extensions); // Ensure all extensions go back off with an empty list. recv_parameters_.extensions.clear(); - EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_)); - EXPECT_THAT(channel_->GetRtpReceiveParameters(kSsrcX).header_extensions, - IsEmpty()); - EXPECT_THAT(channel_->GetRtpReceiveParameters(kSsrcY).header_extensions, - IsEmpty()); + EXPECT_TRUE(receive_channel_->SetRecvParameters(recv_parameters_)); + EXPECT_THAT( + receive_channel_->GetRtpReceiveParameters(kSsrcX).header_extensions, + IsEmpty()); + EXPECT_THAT( + receive_channel_->GetRtpReceiveParameters(kSsrcY).header_extensions, + IsEmpty()); } webrtc::AudioSendStream::Stats GetAudioSendStreamStats() const { @@ -806,19 +824,16 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { } cricket::WebRtcVoiceSendChannel* SendImplFromPointer( - cricket::VoiceMediaChannel* channel) { - return static_cast( - static_cast(channel) - ->SendImplForTesting()); + cricket::VoiceMediaSendChannelInterface* channel) { + return static_cast(channel); } cricket::WebRtcVoiceSendChannel* SendImpl() { - return SendImplFromPointer(channel_); + return SendImplFromPointer(send_channel_.get()); } cricket::WebRtcVoiceReceiveChannel* ReceiveImpl() { return static_cast( - static_cast(channel_) - ->ReceiveImplForTesting()); + receive_channel_.get()); } protected: @@ -830,11 +845,10 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { rtc::scoped_refptr> apm_; cricket::FakeCall call_; std::unique_ptr engine_; - cricket::VoiceMediaChannel* channel_ = nullptr; - cricket::VoiceMediaSendChannelInterface* send_channel_; - cricket::VoiceMediaReceiveChannelInterface* receive_channel_; - cricket::AudioSendParameters send_parameters_; - cricket::AudioRecvParameters recv_parameters_; + std::unique_ptr send_channel_; + std::unique_ptr receive_channel_; + cricket::AudioSenderParameter send_parameters_; + cricket::AudioReceiverParameters 
recv_parameters_; FakeAudioSource fake_source_; webrtc::AudioProcessing::Config apm_config_; }; @@ -857,7 +871,7 @@ TEST_P(WebRtcVoiceEngineTestFake, CreateSendStream) { EXPECT_EQ(kSsrcX, config.rtp.ssrc); EXPECT_EQ("", config.rtp.c_name); EXPECT_EQ(0u, config.rtp.extensions.size()); - EXPECT_EQ(SendImpl(), config.send_transport); + EXPECT_EQ(SendImpl()->transport(), config.send_transport); } // Test that we can add a receive stream and that it has the correct defaults. @@ -868,7 +882,7 @@ TEST_P(WebRtcVoiceEngineTestFake, CreateRecvStream) { GetRecvStreamConfig(kSsrcX); EXPECT_EQ(kSsrcX, config.rtp.remote_ssrc); EXPECT_EQ(0xFA17FA17, config.rtp.local_ssrc); - EXPECT_EQ(ReceiveImpl(), config.rtcp_send_transport); + EXPECT_EQ(ReceiveImpl()->transport(), config.rtcp_send_transport); EXPECT_EQ("", config.sync_group); } @@ -887,14 +901,14 @@ TEST_P(WebRtcVoiceEngineTestFake, OpusSupportsTransportCc) { // Test that we set our inbound codecs properly, including changing PT. TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecs) { EXPECT_TRUE(SetupChannel()); - cricket::AudioRecvParameters parameters; + cricket::AudioReceiverParameters parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kTelephoneEventCodec1); parameters.codecs.push_back(kTelephoneEventCodec2); parameters.codecs[0].id = 106; // collide with existing CN 32k parameters.codecs[2].id = 126; - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); EXPECT_TRUE(AddRecvStream(kSsrcX)); EXPECT_THAT(GetRecvStreamConfig(kSsrcX).decoder_map, (ContainerEq>( @@ -907,29 +921,29 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecs) { // Test that we fail to set an unknown inbound codec. TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsUnsupportedCodec) { EXPECT_TRUE(SetupChannel()); - cricket::AudioRecvParameters parameters; + cricket::AudioReceiverParameters parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(cricket::CreateAudioCodec(127, "XYZ", 32000, 1)); - EXPECT_FALSE(channel_->SetRecvParameters(parameters)); + EXPECT_FALSE(receive_channel_->SetRecvParameters(parameters)); } // Test that we fail if we have duplicate types in the inbound list. TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsDuplicatePayloadType) { EXPECT_TRUE(SetupChannel()); - cricket::AudioRecvParameters parameters; + cricket::AudioReceiverParameters parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kCn16000Codec); parameters.codecs[1].id = kOpusCodec.id; - EXPECT_FALSE(channel_->SetRecvParameters(parameters)); + EXPECT_FALSE(receive_channel_->SetRecvParameters(parameters)); } // Test that we can decode OPUS without stereo parameters. TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpusNoStereo) { EXPECT_TRUE(SetupChannel()); - cricket::AudioRecvParameters parameters; + cricket::AudioReceiverParameters parameters; parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kOpusCodec); - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); EXPECT_TRUE(AddRecvStream(kSsrcX)); EXPECT_THAT(GetRecvStreamConfig(kSsrcX).decoder_map, (ContainerEq>( @@ -939,11 +953,11 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpusNoStereo) { // Test that we can decode OPUS with stereo = 0. 
TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpus0Stereo) { EXPECT_TRUE(SetupChannel()); - cricket::AudioRecvParameters parameters; + cricket::AudioReceiverParameters parameters; parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kOpusCodec); parameters.codecs[1].params["stereo"] = "0"; - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); EXPECT_TRUE(AddRecvStream(kSsrcX)); EXPECT_THAT(GetRecvStreamConfig(kSsrcX).decoder_map, (ContainerEq>( @@ -954,11 +968,11 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpus0Stereo) { // Test that we can decode OPUS with stereo = 1. TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpus1Stereo) { EXPECT_TRUE(SetupChannel()); - cricket::AudioRecvParameters parameters; + cricket::AudioReceiverParameters parameters; parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kOpusCodec); parameters.codecs[1].params["stereo"] = "1"; - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); EXPECT_TRUE(AddRecvStream(kSsrcX)); EXPECT_THAT(GetRecvStreamConfig(kSsrcX).decoder_map, (ContainerEq>( @@ -969,14 +983,14 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpus1Stereo) { // Test that changes to recv codecs are applied to all streams. TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithMultipleStreams) { EXPECT_TRUE(SetupChannel()); - cricket::AudioRecvParameters parameters; + cricket::AudioReceiverParameters parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kTelephoneEventCodec1); parameters.codecs.push_back(kTelephoneEventCodec2); parameters.codecs[0].id = 106; // collide with existing CN 32k parameters.codecs[2].id = 126; - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); for (const auto& ssrc : {kSsrcX, kSsrcY}) { EXPECT_TRUE(AddRecvStream(ssrc)); EXPECT_THAT(GetRecvStreamConfig(ssrc).decoder_map, @@ -990,10 +1004,10 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithMultipleStreams) { TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsAfterAddingStreams) { EXPECT_TRUE(SetupRecvStream()); - cricket::AudioRecvParameters parameters; + cricket::AudioReceiverParameters parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].id = 106; // collide with existing CN 32k - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); const auto& dm = GetRecvStreamConfig(kSsrcX).decoder_map; ASSERT_EQ(1u, dm.count(106)); @@ -1003,31 +1017,31 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsAfterAddingStreams) { // Test that we can apply the same set of codecs again while playing. TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWhilePlaying) { EXPECT_TRUE(SetupRecvStream()); - cricket::AudioRecvParameters parameters; + cricket::AudioReceiverParameters parameters; parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn16000Codec); - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); - channel_->SetPlayout(true); - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); + receive_channel_->SetPlayout(true); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); // Remapping a payload type to a different codec should fail. 
parameters.codecs[0] = kOpusCodec; parameters.codecs[0].id = kPcmuCodec.id; - EXPECT_FALSE(channel_->SetRecvParameters(parameters)); + EXPECT_FALSE(receive_channel_->SetRecvParameters(parameters)); EXPECT_TRUE(GetRecvStream(kSsrcX).started()); } // Test that we can add a codec while playing. TEST_P(WebRtcVoiceEngineTestFake, AddRecvCodecsWhilePlaying) { EXPECT_TRUE(SetupRecvStream()); - cricket::AudioRecvParameters parameters; + cricket::AudioReceiverParameters parameters; parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn16000Codec); - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); - channel_->SetPlayout(true); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); + receive_channel_->SetPlayout(true); parameters.codecs.push_back(kOpusCodec); - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); EXPECT_TRUE(GetRecvStream(kSsrcX).started()); } @@ -1035,22 +1049,22 @@ TEST_P(WebRtcVoiceEngineTestFake, AddRecvCodecsWhilePlaying) { // See: https://bugs.chromium.org/p/webrtc/issues/detail?id=5847 TEST_P(WebRtcVoiceEngineTestFake, ChangeRecvCodecPayloadType) { EXPECT_TRUE(SetupRecvStream()); - cricket::AudioRecvParameters parameters; + cricket::AudioReceiverParameters parameters; parameters.codecs.push_back(kOpusCodec); - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); ++parameters.codecs[0].id; - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); } // Test that we do allow setting Opus/Red by default. TEST_P(WebRtcVoiceEngineTestFake, RecvRedDefault) { EXPECT_TRUE(SetupRecvStream()); - cricket::AudioRecvParameters parameters; + cricket::AudioReceiverParameters parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kRed48000Codec); parameters.codecs[1].params[""] = "111/111"; - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); EXPECT_THAT(GetRecvStreamConfig(kSsrcX).decoder_map, (ContainerEq>( {{111, {"opus", 48000, 2}}, @@ -1100,7 +1114,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetMaxSendBandwidthFixedRateAsCaller) { TEST_P(WebRtcVoiceEngineTestFake, SetMaxSendBandwidthMultiRateAsCallee) { EXPECT_TRUE(SetupChannel()); const int kDesiredBitrate = 128000; - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs = engine_->send_codecs(); parameters.max_bandwidth_bps = kDesiredBitrate; SetSendParameters(parameters); @@ -1126,7 +1140,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetMaxSendBandwidthCbr) { EXPECT_EQ(64000, GetCodecBitrate(kSsrcX)); send_parameters_.max_bandwidth_bps = 128; - EXPECT_FALSE(channel_->SetSendParameters(send_parameters_)); + EXPECT_FALSE(send_channel_->SetSendParameters(send_parameters_)); EXPECT_EQ(64000, GetCodecBitrate(kSsrcX)); } @@ -1289,7 +1303,7 @@ TEST_P(WebRtcVoiceEngineTestFake, RtpParametersArePerStream) { // Test that GetRtpSendParameters returns the currently configured codecs. 
TEST_P(WebRtcVoiceEngineTestFake, GetRtpSendParametersCodecs) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); SetSendParameters(parameters); @@ -1339,7 +1353,7 @@ TEST_P(WebRtcVoiceEngineTestFake, GetRtpSendParametersSsrc) { // Test that if we set/get parameters multiple times, we get the same results. TEST_P(WebRtcVoiceEngineTestFake, SetAndGetRtpSendParameters) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); SetSendParameters(parameters); @@ -1360,7 +1374,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetAndGetRtpSendParameters) { // SetRtpSendParameters is called. TEST_P(WebRtcVoiceEngineTestFake, SetRtpSendParameterUpdatesMaxBitrate) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters send_parameters; + cricket::AudioSenderParameter send_parameters; send_parameters.codecs.push_back(kOpusCodec); SetSendParameters(send_parameters); @@ -1420,13 +1434,13 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRtpSendParameterUpdatesBitratePriority) { // Test that GetRtpReceiveParameters returns the currently configured codecs. TEST_P(WebRtcVoiceEngineTestFake, GetRtpReceiveParametersCodecs) { EXPECT_TRUE(SetupRecvStream()); - cricket::AudioRecvParameters parameters; + cricket::AudioReceiverParameters parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); webrtc::RtpParameters rtp_parameters = - channel_->GetRtpReceiveParameters(kSsrcX); + receive_channel_->GetRtpReceiveParameters(kSsrcX); ASSERT_EQ(2u, rtp_parameters.codecs.size()); EXPECT_EQ(kOpusCodec.ToCodecParameters(), rtp_parameters.codecs[0]); EXPECT_EQ(kPcmuCodec.ToCodecParameters(), rtp_parameters.codecs[1]); @@ -1436,7 +1450,7 @@ TEST_P(WebRtcVoiceEngineTestFake, GetRtpReceiveParametersCodecs) { TEST_P(WebRtcVoiceEngineTestFake, GetRtpReceiveParametersSsrc) { EXPECT_TRUE(SetupRecvStream()); webrtc::RtpParameters rtp_parameters = - channel_->GetRtpReceiveParameters(kSsrcX); + receive_channel_->GetRtpReceiveParameters(kSsrcX); ASSERT_EQ(1u, rtp_parameters.encodings.size()); EXPECT_EQ(kSsrcX, rtp_parameters.encodings[0].ssrc); } @@ -1444,18 +1458,19 @@ TEST_P(WebRtcVoiceEngineTestFake, GetRtpReceiveParametersSsrc) { // Test that if we set/get parameters multiple times, we get the same results. TEST_P(WebRtcVoiceEngineTestFake, SetAndGetRtpReceiveParameters) { EXPECT_TRUE(SetupRecvStream()); - cricket::AudioRecvParameters parameters; + cricket::AudioReceiverParameters parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); webrtc::RtpParameters initial_params = - channel_->GetRtpReceiveParameters(kSsrcX); + receive_channel_->GetRtpReceiveParameters(kSsrcX); // ... And this shouldn't change the params returned by // GetRtpReceiveParameters. 
- webrtc::RtpParameters new_params = channel_->GetRtpReceiveParameters(kSsrcX); - EXPECT_EQ(initial_params, channel_->GetRtpReceiveParameters(kSsrcX)); + webrtc::RtpParameters new_params = + receive_channel_->GetRtpReceiveParameters(kSsrcX); + EXPECT_EQ(initial_params, receive_channel_->GetRtpReceiveParameters(kSsrcX)); } // Test that GetRtpReceiveParameters returns parameters correctly when SSRCs @@ -1466,23 +1481,23 @@ TEST_P(WebRtcVoiceEngineTestFake, GetRtpReceiveParametersWithUnsignaledSsrc) { ASSERT_TRUE(SetupChannel()); // Call necessary methods to configure receiving a default stream as // soon as it arrives. - cricket::AudioRecvParameters parameters; + cricket::AudioReceiverParameters parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); // Call GetDefaultRtpReceiveParameters before configured to receive an // unsignaled stream. Should return nothing. EXPECT_EQ(webrtc::RtpParameters(), - channel_->GetDefaultRtpReceiveParameters()); + receive_channel_->GetDefaultRtpReceiveParameters()); // Set a sink for an unsignaled stream. std::unique_ptr fake_sink(new FakeAudioSink()); - channel_->SetDefaultRawAudioSink(std::move(fake_sink)); + receive_channel_->SetDefaultRawAudioSink(std::move(fake_sink)); // Call GetDefaultRtpReceiveParameters before the SSRC is known. webrtc::RtpParameters rtp_parameters = - channel_->GetDefaultRtpReceiveParameters(); + receive_channel_->GetDefaultRtpReceiveParameters(); ASSERT_EQ(1u, rtp_parameters.encodings.size()); EXPECT_FALSE(rtp_parameters.encodings[0].ssrc); @@ -1490,17 +1505,17 @@ TEST_P(WebRtcVoiceEngineTestFake, GetRtpReceiveParametersWithUnsignaledSsrc) { DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame)); // The `ssrc` member should still be unset. - rtp_parameters = channel_->GetDefaultRtpReceiveParameters(); + rtp_parameters = receive_channel_->GetDefaultRtpReceiveParameters(); ASSERT_EQ(1u, rtp_parameters.encodings.size()); EXPECT_FALSE(rtp_parameters.encodings[0].ssrc); } TEST_P(WebRtcVoiceEngineTestFake, OnPacketReceivedIdentifiesExtensions) { ASSERT_TRUE(SetupChannel()); - cricket::AudioRecvParameters parameters = recv_parameters_; + cricket::AudioReceiverParameters parameters = recv_parameters_; parameters.extensions.push_back( RtpExtension(RtpExtension::kAudioLevelUri, /*id=*/1)); - ASSERT_TRUE(channel_->SetRecvParameters(parameters)); + ASSERT_TRUE(receive_channel_->SetRecvParameters(parameters)); webrtc::RtpHeaderExtensionMap extension_map(parameters.extensions); webrtc::RtpPacketReceived reference_packet(&extension_map); constexpr uint8_t kAudioLevel = 123; @@ -1523,7 +1538,7 @@ TEST_P(WebRtcVoiceEngineTestFake, OnPacketReceivedIdentifiesExtensions) { // Test that we apply codecs properly. 
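Queries follow the same split: per-SSRC and default RTP receive parameters are now read off the receive channel rather than the combined channel. A short sketch, assuming receive_channel_ and kSsrcX from the fixture:

    // Parameters for a known, signaled receive stream.
    webrtc::RtpParameters params =
        receive_channel_->GetRtpReceiveParameters(kSsrcX);
    ASSERT_EQ(1u, params.encodings.size());
    EXPECT_EQ(kSsrcX, params.encodings[0].ssrc);
    // Parameters that will apply to streams whose SSRC has not been signaled yet.
    webrtc::RtpParameters defaults =
        receive_channel_->GetDefaultRtpReceiveParameters();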
TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecs) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn8000Codec); @@ -1536,14 +1551,14 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecs) { EXPECT_STRCASEEQ("OPUS", send_codec_spec.format.name.c_str()); EXPECT_NE(send_codec_spec.format.clockrate_hz, 8000); EXPECT_EQ(absl::nullopt, send_codec_spec.cng_payload_type); - EXPECT_FALSE(channel_->CanInsertDtmf()); + EXPECT_FALSE(send_channel_->CanInsertDtmf()); } // Test that we use Opus/Red by default when it is // listed as the first codec and there is an fmtp line. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsRed) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kRed48000Codec); parameters.codecs[0].params[""] = "111/111"; parameters.codecs.push_back(kOpusCodec); @@ -1558,7 +1573,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsRed) { // listed as the first codec but there is no fmtp line. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsRedNoFmtp) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kRed48000Codec); parameters.codecs.push_back(kOpusCodec); SetSendParameters(parameters); @@ -1571,7 +1586,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsRedNoFmtp) { // Test that we do not use Opus/Red by default. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsRedDefault) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kRed48000Codec); parameters.codecs[1].params[""] = "111/111"; @@ -1585,7 +1600,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsRedDefault) { // Test that the RED fmtp line must match the payload type. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsRedFmtpMismatch) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kRed48000Codec); parameters.codecs[0].params[""] = "8/8"; parameters.codecs.push_back(kOpusCodec); @@ -1599,7 +1614,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsRedFmtpMismatch) { // Test that the RED fmtp line must show 2..32 payloads. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsRedFmtpAmountOfRedundancy) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kRed48000Codec); parameters.codecs[0].params[""] = "111"; parameters.codecs.push_back(kOpusCodec); @@ -1628,7 +1643,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsRedFmtpAmountOfRedundancy) { // AudioSendStream. TEST_P(WebRtcVoiceEngineTestFake, DontRecreateSendStream) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn8000Codec); @@ -1649,70 +1664,70 @@ TEST_P(WebRtcVoiceEngineTestFake, DontRecreateSendStream) { // Test that if clockrate is not 48000 for opus, we fail. 
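The RED tests above all build the same sender parameters; whether Opus/RED is actually chosen depends on RED being listed first and its fmtp naming the Opus payload type. A compressed sketch of the accepted configuration, using the fixture's kRed48000Codec and kOpusCodec constants:

    cricket::AudioSenderParameter parameters;
    parameters.codecs.push_back(kRed48000Codec);   // RED listed first...
    parameters.codecs[0].params[""] = "111/111";   // ...with an fmtp naming the Opus payload type twice.
    parameters.codecs.push_back(kOpusCodec);
    SetSendParameters(parameters);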
TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusBadClockrate) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].bitrate = 0; parameters.codecs[0].clockrate = 50000; - EXPECT_FALSE(channel_->SetSendParameters(parameters)); + EXPECT_FALSE(send_channel_->SetSendParameters(parameters)); } // Test that if channels=0 for opus, we fail. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusBad0ChannelsNoStereo) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].bitrate = 0; parameters.codecs[0].channels = 0; - EXPECT_FALSE(channel_->SetSendParameters(parameters)); + EXPECT_FALSE(send_channel_->SetSendParameters(parameters)); } // Test that if channels=0 for opus, we fail. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusBad0Channels1Stereo) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].bitrate = 0; parameters.codecs[0].channels = 0; parameters.codecs[0].params["stereo"] = "1"; - EXPECT_FALSE(channel_->SetSendParameters(parameters)); + EXPECT_FALSE(send_channel_->SetSendParameters(parameters)); } // Test that if channel is 1 for opus and there's no stereo, we fail. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpus1ChannelNoStereo) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].bitrate = 0; parameters.codecs[0].channels = 1; - EXPECT_FALSE(channel_->SetSendParameters(parameters)); + EXPECT_FALSE(send_channel_->SetSendParameters(parameters)); } // Test that if channel is 1 for opus and stereo=0, we fail. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusBad1Channel0Stereo) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].bitrate = 0; parameters.codecs[0].channels = 1; parameters.codecs[0].params["stereo"] = "0"; - EXPECT_FALSE(channel_->SetSendParameters(parameters)); + EXPECT_FALSE(send_channel_->SetSendParameters(parameters)); } // Test that if channel is 1 for opus and stereo=1, we fail. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusBad1Channel1Stereo) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].bitrate = 0; parameters.codecs[0].channels = 1; parameters.codecs[0].params["stereo"] = "1"; - EXPECT_FALSE(channel_->SetSendParameters(parameters)); + EXPECT_FALSE(send_channel_->SetSendParameters(parameters)); } // Test that with bitrate=0 and no stereo, bitrate is 32000. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGood0BitrateNoStereo) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].bitrate = 0; SetSendParameters(parameters); @@ -1722,7 +1737,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGood0BitrateNoStereo) { // Test that with bitrate=0 and stereo=0, bitrate is 32000. 
TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGood0Bitrate0Stereo) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].bitrate = 0; parameters.codecs[0].params["stereo"] = "0"; @@ -1733,7 +1748,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGood0Bitrate0Stereo) { // Test that with bitrate=invalid and stereo=0, bitrate is 32000. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodXBitrate0Stereo) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].params["stereo"] = "0"; // bitrate that's out of the range between 6000 and 510000 will be clamped. @@ -1749,7 +1764,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodXBitrate0Stereo) { // Test that with bitrate=0 and stereo=1, bitrate is 64000. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGood0Bitrate1Stereo) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].bitrate = 0; parameters.codecs[0].params["stereo"] = "1"; @@ -1760,7 +1775,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGood0Bitrate1Stereo) { // Test that with bitrate=invalid and stereo=1, bitrate is 64000. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodXBitrate1Stereo) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].params["stereo"] = "1"; // bitrate that's out of the range between 6000 and 510000 will be clamped. @@ -1776,7 +1791,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodXBitrate1Stereo) { // Test that with bitrate=N and stereo unset, bitrate is N. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodNBitrateNoStereo) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].bitrate = 96000; SetSendParameters(parameters); @@ -1791,7 +1806,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodNBitrateNoStereo) { // Test that with bitrate=N and stereo=0, bitrate is N. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodNBitrate0Stereo) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].bitrate = 30000; parameters.codecs[0].params["stereo"] = "0"; @@ -1802,7 +1817,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodNBitrate0Stereo) { // Test that with bitrate=N and without any parameters, bitrate is N. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodNBitrateNoParameters) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].bitrate = 30000; SetSendParameters(parameters); @@ -1812,7 +1827,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodNBitrateNoParameters) { // Test that with bitrate=N and stereo=1, bitrate is N. 
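The Opus bitrate tests above encode a simple rule: an unset bitrate falls back to 32000 bps mono or 64000 bps with stereo=1, an explicit value is kept, and values outside 6000..510000 are clamped into that range. A sketch, assuming the same fixture helpers:

    cricket::AudioSenderParameter parameters;
    parameters.codecs.push_back(kOpusCodec);
    parameters.codecs[0].params["stereo"] = "1";
    parameters.codecs[0].bitrate = 0;       // Unset: defaults to 64000 with stereo (32000 mono).
    SetSendParameters(parameters);
    parameters.codecs[0].bitrate = 600000;  // Out of range: clamped into [6000, 510000].
    SetSendParameters(parameters);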
TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodNBitrate1Stereo) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].bitrate = 30000; parameters.codecs[0].params["stereo"] = "1"; @@ -1855,7 +1870,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetMaxSendBandwidthForAudioDoesntAffectBwe) { // Test that we can enable NACK with opus as callee. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecEnableNackAsCallee) { EXPECT_TRUE(SetupRecvStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].AddFeedbackParam(cricket::FeedbackParam( cricket::kRtcpFbParamNack, cricket::kParamValueEmpty)); @@ -1872,7 +1887,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecEnableNackAsCallee) { TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecEnableNackRecvStreams) { EXPECT_TRUE(SetupSendStream()); EXPECT_TRUE(AddRecvStream(kSsrcY)); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].AddFeedbackParam(cricket::FeedbackParam( cricket::kRtcpFbParamNack, cricket::kParamValueEmpty)); @@ -1885,7 +1900,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecEnableNackRecvStreams) { TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecDisableNackRecvStreams) { EXPECT_TRUE(SetupSendStream()); EXPECT_TRUE(AddRecvStream(kSsrcY)); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].AddFeedbackParam(cricket::FeedbackParam( cricket::kRtcpFbParamNack, cricket::kParamValueEmpty)); @@ -1901,7 +1916,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecDisableNackRecvStreams) { // Test that NACK is enabled on a new receive stream. TEST_P(WebRtcVoiceEngineTestFake, AddRecvStreamEnableNack) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kCn16000Codec); parameters.codecs[0].AddFeedbackParam(cricket::FeedbackParam( @@ -1918,7 +1933,7 @@ TEST_P(WebRtcVoiceEngineTestFake, AddRecvStreamEnableNack) { TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsOpusPcmuSwitching) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters opus_parameters; + cricket::AudioSenderParameter opus_parameters; opus_parameters.codecs.push_back(kOpusCodec); SetSendParameters(opus_parameters); { @@ -1927,7 +1942,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsOpusPcmuSwitching) { EXPECT_STRCASEEQ("opus", spec.format.name.c_str()); } - cricket::AudioSendParameters pcmu_parameters; + cricket::AudioSenderParameter pcmu_parameters; pcmu_parameters.codecs.push_back(kPcmuCodec); pcmu_parameters.codecs.push_back(kCn16000Codec); pcmu_parameters.codecs.push_back(kOpusCodec); @@ -1949,7 +1964,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsOpusPcmuSwitching) { // Test that we handle various ways of specifying bitrate. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsBitrate) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kPcmuCodec); SetSendParameters(parameters); { @@ -1982,15 +1997,15 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsBitrate) { // Test that we fail if no codecs are specified. 
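Enabling NACK in these tests is done per codec through a feedback parameter on the sender parameters; existing and newly added receive streams then pick it up. A minimal sketch with the names used in this patch:

    cricket::AudioSenderParameter parameters;
    parameters.codecs.push_back(kOpusCodec);
    parameters.codecs[0].AddFeedbackParam(cricket::FeedbackParam(
        cricket::kRtcpFbParamNack, cricket::kParamValueEmpty));
    SetSendParameters(parameters);  // Applies NACK to current and future recv streams.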
TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsNoCodecs) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; - EXPECT_FALSE(channel_->SetSendParameters(parameters)); + cricket::AudioSenderParameter parameters; + EXPECT_FALSE(send_channel_->SetSendParameters(parameters)); } // Test that we can set send codecs even with telephone-event codec as the first // one on the list. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsDTMFOnTop) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kTelephoneEventCodec1); parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); @@ -2001,52 +2016,52 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsDTMFOnTop) { EXPECT_EQ(96, spec.payload_type); EXPECT_STRCASEEQ("OPUS", spec.format.name.c_str()); SetSend(true); - EXPECT_TRUE(channel_->CanInsertDtmf()); + EXPECT_TRUE(send_channel_->CanInsertDtmf()); } // Test that CanInsertDtmf() is governed by the send flag TEST_P(WebRtcVoiceEngineTestFake, DTMFControlledBySendFlag) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kTelephoneEventCodec1); parameters.codecs.push_back(kPcmuCodec); parameters.codecs[0].id = 98; // DTMF parameters.codecs[1].id = 96; SetSendParameters(parameters); - EXPECT_FALSE(channel_->CanInsertDtmf()); + EXPECT_FALSE(send_channel_->CanInsertDtmf()); SetSend(true); - EXPECT_TRUE(channel_->CanInsertDtmf()); + EXPECT_TRUE(send_channel_->CanInsertDtmf()); SetSend(false); - EXPECT_FALSE(channel_->CanInsertDtmf()); + EXPECT_FALSE(send_channel_->CanInsertDtmf()); } // Test that payload type range is limited for telephone-event codec. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsDTMFPayloadTypeOutOfRange) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kTelephoneEventCodec2); parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].id = 0; // DTMF parameters.codecs[1].id = 96; SetSendParameters(parameters); SetSend(true); - EXPECT_TRUE(channel_->CanInsertDtmf()); + EXPECT_TRUE(send_channel_->CanInsertDtmf()); parameters.codecs[0].id = 128; // DTMF - EXPECT_FALSE(channel_->SetSendParameters(parameters)); - EXPECT_FALSE(channel_->CanInsertDtmf()); + EXPECT_FALSE(send_channel_->SetSendParameters(parameters)); + EXPECT_FALSE(send_channel_->CanInsertDtmf()); parameters.codecs[0].id = 127; SetSendParameters(parameters); - EXPECT_TRUE(channel_->CanInsertDtmf()); + EXPECT_TRUE(send_channel_->CanInsertDtmf()); parameters.codecs[0].id = -1; // DTMF - EXPECT_FALSE(channel_->SetSendParameters(parameters)); - EXPECT_FALSE(channel_->CanInsertDtmf()); + EXPECT_FALSE(send_channel_->SetSendParameters(parameters)); + EXPECT_FALSE(send_channel_->CanInsertDtmf()); } // Test that we can set send codecs even with CN codec as the first // one on the list. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNOnTop) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kCn8000Codec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs[0].id = 98; // narrowband CN @@ -2060,7 +2075,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNOnTop) { // Test that we set VAD and DTMF types correctly as caller. 
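DTMF capability also moves to the send half and, as DTMFControlledBySendFlag shows, it is gated by the send state in addition to a telephone-event codec being negotiated. Sketch, assuming the fixture's SetSend helper:

    cricket::AudioSenderParameter parameters;
    parameters.codecs.push_back(kTelephoneEventCodec1);
    parameters.codecs.push_back(kPcmuCodec);
    SetSendParameters(parameters);
    EXPECT_FALSE(send_channel_->CanInsertDtmf());  // Not sending yet.
    SetSend(true);
    EXPECT_TRUE(send_channel_->CanInsertDtmf());   // DTMF only while sending.
    SetSend(false);
    EXPECT_FALSE(send_channel_->CanInsertDtmf());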
TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMFAsCaller) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn16000Codec); parameters.codecs.push_back(kCn8000Codec); @@ -2075,13 +2090,13 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMFAsCaller) { EXPECT_EQ(1u, send_codec_spec.format.num_channels); EXPECT_EQ(97, send_codec_spec.cng_payload_type); SetSend(true); - EXPECT_TRUE(channel_->CanInsertDtmf()); + EXPECT_TRUE(send_channel_->CanInsertDtmf()); } // Test that we set VAD and DTMF types correctly as callee. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMFAsCallee) { EXPECT_TRUE(SetupChannel()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn16000Codec); parameters.codecs.push_back(kCn8000Codec); @@ -2099,14 +2114,14 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMFAsCallee) { EXPECT_EQ(1u, send_codec_spec.format.num_channels); EXPECT_EQ(97, send_codec_spec.cng_payload_type); SetSend(true); - EXPECT_TRUE(channel_->CanInsertDtmf()); + EXPECT_TRUE(send_channel_->CanInsertDtmf()); } // Test that we only apply VAD if we have a CN codec that matches the // send codec clockrate. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNNoMatch) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; // Set PCMU(8K) and CN(16K). VAD should not be activated. parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn16000Codec); @@ -2139,7 +2154,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNNoMatch) { // Test that we perform case-insensitive matching of codec names. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCaseInsensitive) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn16000Codec); parameters.codecs.push_back(kCn8000Codec); @@ -2155,7 +2170,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCaseInsensitive) { EXPECT_EQ(1u, send_codec_spec.format.num_channels); EXPECT_EQ(97, send_codec_spec.cng_payload_type); SetSend(true); - EXPECT_TRUE(channel_->CanInsertDtmf()); + EXPECT_TRUE(send_channel_->CanInsertDtmf()); } TEST_P(WebRtcVoiceEngineTestFake, @@ -2249,10 +2264,10 @@ TEST_P(WebRtcVoiceEngineTestFake, SendStateWhenStreamsAreRecreated) { // Test that we can create a channel and start playing out on it. TEST_P(WebRtcVoiceEngineTestFake, Playout) { EXPECT_TRUE(SetupRecvStream()); - EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_)); - channel_->SetPlayout(true); + EXPECT_TRUE(receive_channel_->SetRecvParameters(recv_parameters_)); + receive_channel_->SetPlayout(true); EXPECT_TRUE(GetRecvStream(kSsrcX).started()); - channel_->SetPlayout(false); + receive_channel_->SetPlayout(false); EXPECT_FALSE(GetRecvStream(kSsrcX).started()); } @@ -2291,7 +2306,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsWithMultipleSendStreams) { cricket::StreamParams::CreateLegacy(ssrc))); } - cricket::AudioSendParameters parameters; + cricket::AudioSenderParameter parameters; // Set PCMU and CN(8K). VAD should be activated. 
parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn8000Codec); @@ -2364,7 +2379,7 @@ TEST_P(WebRtcVoiceEngineTestFake, GetStatsWithMultipleSendStreams) { // We need send codec to be set to get all stats. SetSendParameters(send_parameters_); - EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(recv_parameters_)); SetAudioSendStreamStats(); SetAudioReceiveStreamStats(); @@ -2373,8 +2388,8 @@ TEST_P(WebRtcVoiceEngineTestFake, GetStatsWithMultipleSendStreams) { EXPECT_CALL(*adm_, GetPlayoutUnderrunCount()).WillOnce(Return(0)); cricket::VoiceMediaSendInfo send_info; cricket::VoiceMediaReceiveInfo receive_info; - EXPECT_EQ(true, channel_->GetSendStats(&send_info)); - EXPECT_EQ(true, channel_->GetReceiveStats( + EXPECT_EQ(true, send_channel_->GetStats(&send_info)); + EXPECT_EQ(true, receive_channel_->GetStats( &receive_info, /*get_and_clear_legacy_stats=*/true)); // We have added 4 send streams. We should see empty stats for all. @@ -2396,8 +2411,8 @@ TEST_P(WebRtcVoiceEngineTestFake, GetStatsWithMultipleSendStreams) { cricket::VoiceMediaSendInfo send_info; EXPECT_TRUE(receive_channel_->RemoveRecvStream(kSsrcY)); EXPECT_CALL(*adm_, GetPlayoutUnderrunCount()).WillOnce(Return(0)); - EXPECT_EQ(true, channel_->GetSendStats(&send_info)); - EXPECT_EQ(true, channel_->GetReceiveStats( + EXPECT_EQ(true, send_channel_->GetStats(&send_info)); + EXPECT_EQ(true, receive_channel_->GetStats( &receive_info, /*get_and_clear_legacy_stats=*/true)); EXPECT_EQ(static_cast(arraysize(kSsrcs4)), send_info.senders.size()); @@ -2412,8 +2427,8 @@ TEST_P(WebRtcVoiceEngineTestFake, GetStatsWithMultipleSendStreams) { DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame)); SetAudioReceiveStreamStats(); EXPECT_CALL(*adm_, GetPlayoutUnderrunCount()).WillOnce(Return(0)); - EXPECT_EQ(true, channel_->GetSendStats(&send_info)); - EXPECT_EQ(true, channel_->GetReceiveStats( + EXPECT_EQ(true, send_channel_->GetStats(&send_info)); + EXPECT_EQ(true, receive_channel_->GetStats( &receive_info, /*get_and_clear_legacy_stats=*/true)); EXPECT_EQ(static_cast(arraysize(kSsrcs4)), send_info.senders.size()); @@ -2430,7 +2445,7 @@ TEST_P(WebRtcVoiceEngineTestFake, PlayoutWithMultipleStreams) { // Start playout without a receive stream. SetSendParameters(send_parameters_); - channel_->SetPlayout(true); + receive_channel_->SetPlayout(true); // Adding another stream should enable playout on the new stream only. EXPECT_TRUE(AddRecvStream(kSsrcY)); @@ -2450,12 +2465,12 @@ TEST_P(WebRtcVoiceEngineTestFake, PlayoutWithMultipleStreams) { EXPECT_FALSE(GetSendStream(kSsrcX).IsSending()); // Stop playout. - channel_->SetPlayout(false); + receive_channel_->SetPlayout(false); EXPECT_FALSE(GetRecvStream(kSsrcY).started()); EXPECT_FALSE(GetRecvStream(kSsrcZ).started()); // Restart playout and make sure recv streams are played out. - channel_->SetPlayout(true); + receive_channel_->SetPlayout(true); EXPECT_TRUE(GetRecvStream(kSsrcY).started()); EXPECT_TRUE(GetRecvStream(kSsrcZ).started()); @@ -2519,7 +2534,7 @@ TEST_P(WebRtcVoiceEngineTestFake, GetStats) { // stream has to use a different SSRC. EXPECT_TRUE(AddRecvStream(kSsrcY)); SetSendParameters(send_parameters_); - EXPECT_TRUE(channel_->SetRecvParameters(recv_parameters_)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(recv_parameters_)); SetAudioSendStreamStats(); // Check stats for the added streams. 
@@ -2527,8 +2542,8 @@ TEST_P(WebRtcVoiceEngineTestFake, GetStats) { EXPECT_CALL(*adm_, GetPlayoutUnderrunCount()).WillOnce(Return(0)); cricket::VoiceMediaSendInfo send_info; cricket::VoiceMediaReceiveInfo receive_info; - EXPECT_EQ(true, channel_->GetSendStats(&send_info)); - EXPECT_EQ(true, channel_->GetReceiveStats( + EXPECT_EQ(true, send_channel_->GetStats(&send_info)); + EXPECT_EQ(true, receive_channel_->GetStats( &receive_info, /*get_and_clear_legacy_stats=*/true)); // We have added one send stream. We should see the stats we've set. @@ -2546,8 +2561,8 @@ TEST_P(WebRtcVoiceEngineTestFake, GetStats) { cricket::VoiceMediaSendInfo send_info; cricket::VoiceMediaReceiveInfo receive_info; SetAudioReceiveStreamStats(); - EXPECT_EQ(true, channel_->GetSendStats(&send_info)); - EXPECT_EQ(true, channel_->GetReceiveStats( + EXPECT_EQ(true, send_channel_->GetStats(&send_info)); + EXPECT_EQ(true, receive_channel_->GetStats( &receive_info, /*get_and_clear_legacy_stats=*/true)); VerifyVoiceSenderInfo(send_info.senders[0], true); VerifyVoiceSendRecvCodecs(send_info, receive_info); @@ -2559,8 +2574,8 @@ TEST_P(WebRtcVoiceEngineTestFake, GetStats) { EXPECT_CALL(*adm_, GetPlayoutUnderrunCount()).WillOnce(Return(0)); cricket::VoiceMediaSendInfo send_info; cricket::VoiceMediaReceiveInfo receive_info; - EXPECT_EQ(true, channel_->GetSendStats(&send_info)); - EXPECT_EQ(true, channel_->GetReceiveStats( + EXPECT_EQ(true, send_channel_->GetStats(&send_info)); + EXPECT_EQ(true, receive_channel_->GetStats( &receive_info, /*get_and_clear_legacy_stats=*/true)); EXPECT_EQ(1u, send_info.senders.size()); EXPECT_EQ(0u, receive_info.receivers.size()); @@ -2574,8 +2589,8 @@ TEST_P(WebRtcVoiceEngineTestFake, GetStats) { EXPECT_CALL(*adm_, GetPlayoutUnderrunCount()).WillOnce(Return(0)); cricket::VoiceMediaSendInfo send_info; cricket::VoiceMediaReceiveInfo receive_info; - EXPECT_EQ(true, channel_->GetSendStats(&send_info)); - EXPECT_EQ(true, channel_->GetReceiveStats( + EXPECT_EQ(true, send_channel_->GetStats(&send_info)); + EXPECT_EQ(true, receive_channel_->GetStats( &receive_info, /*get_and_clear_legacy_stats=*/true)); EXPECT_EQ(1u, send_info.senders.size()); EXPECT_EQ(1u, receive_info.receivers.size()); @@ -2878,10 +2893,10 @@ TEST_P(WebRtcVoiceEngineTestFake, AddRecvStream) { // those previously passed into SetRecvCodecs. 
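Statistics are likewise split: the old combined GetSendStats/GetReceiveStats pair becomes one GetStats call per channel half, each filling its own info struct. A sketch with the types used in this patch:

    cricket::VoiceMediaSendInfo send_info;
    cricket::VoiceMediaReceiveInfo receive_info;
    EXPECT_TRUE(send_channel_->GetStats(&send_info));
    EXPECT_TRUE(receive_channel_->GetStats(&receive_info,
                                           /*get_and_clear_legacy_stats=*/true));
    // Senders and receivers are now reported independently by the two halves.
    EXPECT_EQ(1u, send_info.senders.size());
    EXPECT_EQ(1u, receive_info.receivers.size());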
TEST_P(WebRtcVoiceEngineTestFake, AddRecvStreamUnsupportedCodec) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioRecvParameters parameters; + cricket::AudioReceiverParameters parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); - EXPECT_TRUE(channel_->SetRecvParameters(parameters)); + EXPECT_TRUE(receive_channel_->SetRecvParameters(parameters)); EXPECT_TRUE(AddRecvStream(kSsrcX)); EXPECT_THAT(GetRecvStreamConfig(kSsrcX).decoder_map, (ContainerEq>( @@ -2898,8 +2913,8 @@ TEST_P(WebRtcVoiceEngineTestFake, StreamCleanup) { EXPECT_EQ(1u, call_.GetAudioSendStreams().size()); EXPECT_EQ(2u, call_.GetAudioReceiveStreams().size()); - delete channel_; - channel_ = NULL; + send_channel_.reset(); + receive_channel_.reset(); EXPECT_EQ(0u, call_.GetAudioSendStreams().size()); EXPECT_EQ(0u, call_.GetAudioReceiveStreams().size()); } @@ -3045,13 +3060,12 @@ TEST_P(WebRtcVoiceEngineTestFake, InitRecordingOnSend) { EXPECT_CALL(*adm_, Recording()).WillOnce(Return(false)); EXPECT_CALL(*adm_, InitRecording()).Times(1); - std::unique_ptr channel( - engine_->CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, &call_, cricket::MediaConfig(), - cricket::AudioOptions(), webrtc::CryptoOptions(), - webrtc::AudioCodecPairId::Create())); + std::unique_ptr send_channel( + engine_->CreateSendChannel( + &call_, cricket::MediaConfig(), cricket::AudioOptions(), + webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create())); - channel->SetSend(true); + send_channel->SetSend(true); } TEST_P(WebRtcVoiceEngineTestFake, SkipInitRecordingOnSend) { @@ -3062,13 +3076,12 @@ TEST_P(WebRtcVoiceEngineTestFake, SkipInitRecordingOnSend) { cricket::AudioOptions options; options.init_recording_on_send = false; - std::unique_ptr channel( - engine_->CreateMediaChannel(cricket::MediaChannel::Role::kBoth, &call_, - cricket::MediaConfig(), options, - webrtc::CryptoOptions(), - webrtc::AudioCodecPairId::Create())); + std::unique_ptr send_channel( + engine_->CreateSendChannel(&call_, cricket::MediaConfig(), options, + webrtc::CryptoOptions(), + webrtc::AudioCodecPairId::Create())); - channel->SetSend(true); + send_channel->SetSend(true); } TEST_P(WebRtcVoiceEngineTestFake, SetOptionOverridesViaChannels) { @@ -3089,49 +3102,47 @@ TEST_P(WebRtcVoiceEngineTestFake, SetOptionOverridesViaChannels) { EXPECT_CALL(*adm_, Recording()).Times(2).WillRepeatedly(Return(false)); EXPECT_CALL(*adm_, InitRecording()).Times(2).WillRepeatedly(Return(0)); - std::unique_ptr channel1( - engine_->CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, &call_, cricket::MediaConfig(), - cricket::AudioOptions(), webrtc::CryptoOptions(), - webrtc::AudioCodecPairId::Create())); - std::unique_ptr channel2( - engine_->CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, &call_, cricket::MediaConfig(), - cricket::AudioOptions(), webrtc::CryptoOptions(), - webrtc::AudioCodecPairId::Create())); + std::unique_ptr send_channel1( + engine_->CreateSendChannel( + &call_, cricket::MediaConfig(), cricket::AudioOptions(), + webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create())); + std::unique_ptr send_channel2( + engine_->CreateSendChannel( + &call_, cricket::MediaConfig(), cricket::AudioOptions(), + webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create())); // Have to add a stream to make SetSend work. 
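Channel lifetime changes with the API split: CreateSendChannel hands back an owning smart pointer, so teardown is a reset() rather than a manual delete, as the StreamCleanup hunk above shows. A sketch using the engine call signature from this patch (auto hides the exact interface type, which is elided in the hunks):

    auto send_channel = engine_->CreateSendChannel(
        &call_, cricket::MediaConfig(), cricket::AudioOptions(),
        webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create());
    send_channel->SetSend(true);
    // ...
    send_channel.reset();  // Owning pointer: destruction tears down the streams.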
cricket::StreamParams stream1; stream1.ssrcs.push_back(1); - channel1->AddSendStream(stream1); + send_channel1->AddSendStream(stream1); cricket::StreamParams stream2; stream2.ssrcs.push_back(2); - channel2->AddSendStream(stream2); + send_channel2->AddSendStream(stream2); // AEC and AGC and NS - cricket::AudioSendParameters parameters_options_all = send_parameters_; + cricket::AudioSenderParameter parameters_options_all = send_parameters_; parameters_options_all.options.echo_cancellation = true; parameters_options_all.options.auto_gain_control = true; parameters_options_all.options.noise_suppression = true; - EXPECT_TRUE(channel1->SetSendParameters(parameters_options_all)); + EXPECT_TRUE(send_channel1->SetSendParameters(parameters_options_all)); if (!use_null_apm_) { VerifyEchoCancellationSettings(/*enabled=*/true); VerifyGainControlEnabledCorrectly(); EXPECT_TRUE(apm_config_.noise_suppression.enabled); EXPECT_EQ(apm_config_.noise_suppression.level, kDefaultNsLevel); EXPECT_EQ(parameters_options_all.options, - SendImplFromPointer(channel1.get())->options()); - EXPECT_TRUE(channel2->SetSendParameters(parameters_options_all)); + SendImplFromPointer(send_channel1.get())->options()); + EXPECT_TRUE(send_channel2->SetSendParameters(parameters_options_all)); VerifyEchoCancellationSettings(/*enabled=*/true); VerifyGainControlEnabledCorrectly(); EXPECT_EQ(parameters_options_all.options, - SendImplFromPointer(channel2.get())->options()); + SendImplFromPointer(send_channel2.get())->options()); } // unset NS - cricket::AudioSendParameters parameters_options_no_ns = send_parameters_; + cricket::AudioSenderParameter parameters_options_no_ns = send_parameters_; parameters_options_no_ns.options.noise_suppression = false; - EXPECT_TRUE(channel1->SetSendParameters(parameters_options_no_ns)); + EXPECT_TRUE(send_channel1->SetSendParameters(parameters_options_no_ns)); cricket::AudioOptions expected_options = parameters_options_all.options; if (!use_null_apm_) { VerifyEchoCancellationSettings(/*enabled=*/true); @@ -3141,13 +3152,14 @@ TEST_P(WebRtcVoiceEngineTestFake, SetOptionOverridesViaChannels) { expected_options.echo_cancellation = true; expected_options.auto_gain_control = true; expected_options.noise_suppression = false; - EXPECT_EQ(expected_options, SendImplFromPointer(channel1.get())->options()); + EXPECT_EQ(expected_options, + SendImplFromPointer(send_channel1.get())->options()); } // unset AGC - cricket::AudioSendParameters parameters_options_no_agc = send_parameters_; + cricket::AudioSenderParameter parameters_options_no_agc = send_parameters_; parameters_options_no_agc.options.auto_gain_control = false; - EXPECT_TRUE(channel2->SetSendParameters(parameters_options_no_agc)); + EXPECT_TRUE(send_channel2->SetSendParameters(parameters_options_no_agc)); if (!use_null_apm_) { VerifyEchoCancellationSettings(/*enabled=*/true); EXPECT_FALSE(apm_config_.gain_controller1.enabled); @@ -3156,10 +3168,11 @@ TEST_P(WebRtcVoiceEngineTestFake, SetOptionOverridesViaChannels) { expected_options.echo_cancellation = true; expected_options.auto_gain_control = false; expected_options.noise_suppression = true; - EXPECT_EQ(expected_options, SendImplFromPointer(channel2.get())->options()); + EXPECT_EQ(expected_options, + SendImplFromPointer(send_channel2.get())->options()); } - EXPECT_TRUE(channel_->SetSendParameters(parameters_options_all)); + EXPECT_TRUE(send_channel_->SetSendParameters(parameters_options_all)); if (!use_null_apm_) { VerifyEchoCancellationSettings(/*enabled=*/true); VerifyGainControlEnabledCorrectly(); @@ 
-3167,7 +3180,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetOptionOverridesViaChannels) { EXPECT_EQ(apm_config_.noise_suppression.level, kDefaultNsLevel); } - channel1->SetSend(true); + send_channel1->SetSend(true); if (!use_null_apm_) { VerifyEchoCancellationSettings(/*enabled=*/true); VerifyGainControlEnabledCorrectly(); @@ -3175,7 +3188,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetOptionOverridesViaChannels) { EXPECT_EQ(apm_config_.noise_suppression.level, kDefaultNsLevel); } - channel2->SetSend(true); + send_channel2->SetSend(true); if (!use_null_apm_) { VerifyEchoCancellationSettings(/*enabled=*/true); EXPECT_FALSE(apm_config_.gain_controller1.enabled); @@ -3184,11 +3197,12 @@ TEST_P(WebRtcVoiceEngineTestFake, SetOptionOverridesViaChannels) { } // Make sure settings take effect while we are sending. - cricket::AudioSendParameters parameters_options_no_agc_nor_ns = + cricket::AudioSenderParameter parameters_options_no_agc_nor_ns = send_parameters_; parameters_options_no_agc_nor_ns.options.auto_gain_control = false; parameters_options_no_agc_nor_ns.options.noise_suppression = false; - EXPECT_TRUE(channel2->SetSendParameters(parameters_options_no_agc_nor_ns)); + EXPECT_TRUE( + send_channel2->SetSendParameters(parameters_options_no_agc_nor_ns)); if (!use_null_apm_) { VerifyEchoCancellationSettings(/*enabled=*/true); EXPECT_FALSE(apm_config_.gain_controller1.enabled); @@ -3197,7 +3211,8 @@ TEST_P(WebRtcVoiceEngineTestFake, SetOptionOverridesViaChannels) { expected_options.echo_cancellation = true; expected_options.auto_gain_control = false; expected_options.noise_suppression = false; - EXPECT_EQ(expected_options, SendImplFromPointer(channel2.get())->options()); + EXPECT_EQ(expected_options, + SendImplFromPointer(send_channel2.get())->options()); } } @@ -3206,23 +3221,21 @@ TEST_P(WebRtcVoiceEngineTestFake, TestSetDscpOptions) { EXPECT_TRUE(SetupSendStream()); cricket::FakeNetworkInterface network_interface; cricket::MediaConfig config; - std::unique_ptr channel; + std::unique_ptr channel; webrtc::RtpParameters parameters; - channel.reset(engine_->CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, &call_, config, - cricket::AudioOptions(), webrtc::CryptoOptions(), - webrtc::AudioCodecPairId::Create())); + channel = engine_->CreateSendChannel(&call_, config, cricket::AudioOptions(), + webrtc::CryptoOptions(), + webrtc::AudioCodecPairId::Create()); channel->SetInterface(&network_interface); // Default value when DSCP is disabled should be DSCP_DEFAULT. EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface.dscp()); channel->SetInterface(nullptr); config.enable_dscp = true; - channel.reset(engine_->CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, &call_, config, - cricket::AudioOptions(), webrtc::CryptoOptions(), - webrtc::AudioCodecPairId::Create())); + channel = engine_->CreateSendChannel(&call_, config, cricket::AudioOptions(), + webrtc::CryptoOptions(), + webrtc::AudioCodecPairId::Create()); channel->SetInterface(&network_interface); EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface.dscp()); @@ -3242,18 +3255,16 @@ TEST_P(WebRtcVoiceEngineTestFake, TestSetDscpOptions) { // Packets should also self-identify their dscp in PacketOptions. 
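The DSCP test above keeps the same flow with the new factory: DSCP marking is opted into through MediaConfig before the channel is created, and the channel is attached to (and detached from) a network interface explicitly. Sketch, assuming the fixture's call_ and the FakeNetworkInterface used in this file:

    cricket::FakeNetworkInterface network_interface;
    cricket::MediaConfig config;
    config.enable_dscp = true;  // Without this the interface stays at rtc::DSCP_DEFAULT.
    auto channel = engine_->CreateSendChannel(
        &call_, config, cricket::AudioOptions(), webrtc::CryptoOptions(),
        webrtc::AudioCodecPairId::Create());
    channel->SetInterface(&network_interface);
    // ... exercise the channel ...
    channel->SetInterface(nullptr);  // Detach before the interface goes away.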
const uint8_t kData[10] = {0}; - EXPECT_TRUE( - SendImplFromPointer(channel.get())->SendRtcp(kData, sizeof(kData))); + EXPECT_TRUE(SendImplFromPointer(channel.get())->transport()->SendRtcp(kData)); EXPECT_EQ(rtc::DSCP_CS1, network_interface.options().dscp); channel->SetInterface(nullptr); // Verify that setting the option to false resets the // DiffServCodePoint. config.enable_dscp = false; - channel.reset(engine_->CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, &call_, config, - cricket::AudioOptions(), webrtc::CryptoOptions(), - webrtc::AudioCodecPairId::Create())); + channel = engine_->CreateSendChannel(&call_, config, cricket::AudioOptions(), + webrtc::CryptoOptions(), + webrtc::AudioCodecPairId::Create()); channel->SetInterface(&network_interface); // Default value when DSCP is disabled should be DSCP_DEFAULT. EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface.dscp()); @@ -3263,12 +3274,12 @@ TEST_P(WebRtcVoiceEngineTestFake, TestSetDscpOptions) { TEST_P(WebRtcVoiceEngineTestFake, SetOutputVolume) { EXPECT_TRUE(SetupChannel()); - EXPECT_FALSE(channel_->SetOutputVolume(kSsrcY, 0.5)); + EXPECT_FALSE(receive_channel_->SetOutputVolume(kSsrcY, 0.5)); cricket::StreamParams stream; stream.ssrcs.push_back(kSsrcY); EXPECT_TRUE(receive_channel_->AddRecvStream(stream)); EXPECT_DOUBLE_EQ(1, GetRecvStream(kSsrcY).gain()); - EXPECT_TRUE(channel_->SetOutputVolume(kSsrcY, 3)); + EXPECT_TRUE(receive_channel_->SetOutputVolume(kSsrcY, 3)); EXPECT_DOUBLE_EQ(3, GetRecvStream(kSsrcY).gain()); } @@ -3281,7 +3292,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetOutputVolumeUnsignaledRecvStream) { // Should remember the volume "2" which will be set on new unsignaled streams, // and also set the gain to 2 on existing unsignaled streams. - EXPECT_TRUE(channel_->SetDefaultOutputVolume(2)); + EXPECT_TRUE(receive_channel_->SetDefaultOutputVolume(2)); EXPECT_DOUBLE_EQ(2, GetRecvStream(kSsrc1).gain()); // Spawn an unsignaled stream by sending a packet - gain should be 2. @@ -3292,14 +3303,14 @@ TEST_P(WebRtcVoiceEngineTestFake, SetOutputVolumeUnsignaledRecvStream) { EXPECT_DOUBLE_EQ(2, GetRecvStream(kSsrcX).gain()); // Setting gain for all unsignaled streams. - EXPECT_TRUE(channel_->SetDefaultOutputVolume(3)); + EXPECT_TRUE(receive_channel_->SetDefaultOutputVolume(3)); if (kMaxUnsignaledRecvStreams > 1) { EXPECT_DOUBLE_EQ(3, GetRecvStream(kSsrc1).gain()); } EXPECT_DOUBLE_EQ(3, GetRecvStream(kSsrcX).gain()); // Setting gain on an individual stream affects only that. - EXPECT_TRUE(channel_->SetOutputVolume(kSsrcX, 4)); + EXPECT_TRUE(receive_channel_->SetOutputVolume(kSsrcX, 4)); if (kMaxUnsignaledRecvStreams > 1) { EXPECT_DOUBLE_EQ(3, GetRecvStream(kSsrc1).gain()); } @@ -3428,9 +3439,9 @@ TEST_P(WebRtcVoiceEngineTestFake, ConfiguresAudioReceiveStreamRtpExtensions) { // Set up receive extensions. const std::vector header_extensions = GetDefaultEnabledRtpHeaderExtensions(*engine_); - cricket::AudioRecvParameters recv_parameters; + cricket::AudioReceiverParameters recv_parameters; recv_parameters.extensions = header_extensions; - channel_->SetRecvParameters(recv_parameters); + receive_channel_->SetRecvParameters(recv_parameters); EXPECT_EQ(2u, call_.GetAudioReceiveStreams().size()); for (uint32_t ssrc : ssrcs) { EXPECT_THAT( @@ -3439,7 +3450,7 @@ TEST_P(WebRtcVoiceEngineTestFake, ConfiguresAudioReceiveStreamRtpExtensions) { } // Disable receive extensions. 
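Output gain control also lands on the receive channel: one setter targets a known SSRC, the other records a default that is applied to streams spawned for unsignaled SSRCs. Sketch with the fixture constants from these tests:

    // Gain for a known, signaled stream.
    EXPECT_TRUE(receive_channel_->SetOutputVolume(kSsrcY, 3));
    // Gain remembered for streams that have not yet been signaled.
    EXPECT_TRUE(receive_channel_->SetDefaultOutputVolume(2));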
- channel_->SetRecvParameters(cricket::AudioRecvParameters()); + receive_channel_->SetRecvParameters(cricket::AudioReceiverParameters()); for (uint32_t ssrc : ssrcs) { EXPECT_THAT( receive_channel_->GetRtpReceiveParameters(ssrc).header_extensions, @@ -3509,16 +3520,16 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRawAudioSink) { std::unique_ptr fake_sink_2(new FakeAudioSink()); // Setting the sink before a recv stream exists should do nothing. - channel_->SetRawAudioSink(kSsrcX, std::move(fake_sink_1)); + receive_channel_->SetRawAudioSink(kSsrcX, std::move(fake_sink_1)); EXPECT_TRUE(AddRecvStream(kSsrcX)); EXPECT_EQ(nullptr, GetRecvStream(kSsrcX).sink()); // Now try actually setting the sink. - channel_->SetRawAudioSink(kSsrcX, std::move(fake_sink_2)); + receive_channel_->SetRawAudioSink(kSsrcX, std::move(fake_sink_2)); EXPECT_NE(nullptr, GetRecvStream(kSsrcX).sink()); // Now try resetting it. - channel_->SetRawAudioSink(kSsrcX, nullptr); + receive_channel_->SetRawAudioSink(kSsrcX, nullptr); EXPECT_EQ(nullptr, GetRecvStream(kSsrcX).sink()); } @@ -3530,7 +3541,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRawAudioSinkUnsignaledRecvStream) { std::unique_ptr fake_sink_4(new FakeAudioSink()); // Should be able to set a default sink even when no stream exists. - channel_->SetDefaultRawAudioSink(std::move(fake_sink_1)); + receive_channel_->SetDefaultRawAudioSink(std::move(fake_sink_1)); // Spawn an unsignaled stream by sending a packet - it should be assigned the // default sink. @@ -3538,11 +3549,11 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRawAudioSinkUnsignaledRecvStream) { EXPECT_NE(nullptr, GetRecvStream(kSsrc1).sink()); // Try resetting the default sink. - channel_->SetDefaultRawAudioSink(nullptr); + receive_channel_->SetDefaultRawAudioSink(nullptr); EXPECT_EQ(nullptr, GetRecvStream(kSsrc1).sink()); // Try setting the default sink while the default stream exists. - channel_->SetDefaultRawAudioSink(std::move(fake_sink_2)); + receive_channel_->SetDefaultRawAudioSink(std::move(fake_sink_2)); EXPECT_NE(nullptr, GetRecvStream(kSsrc1).sink()); // If we remove and add a default stream, it should get the same sink. @@ -3562,21 +3573,21 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRawAudioSinkUnsignaledRecvStream) { EXPECT_NE(nullptr, GetRecvStream(kSsrcX).sink()); // Reset the default sink - the second unsignaled stream should lose it. - channel_->SetDefaultRawAudioSink(nullptr); + receive_channel_->SetDefaultRawAudioSink(nullptr); if (kMaxUnsignaledRecvStreams > 1) { EXPECT_EQ(nullptr, GetRecvStream(kSsrc1).sink()); } EXPECT_EQ(nullptr, GetRecvStream(kSsrcX).sink()); // Try setting the default sink while two streams exists. - channel_->SetDefaultRawAudioSink(std::move(fake_sink_3)); + receive_channel_->SetDefaultRawAudioSink(std::move(fake_sink_3)); if (kMaxUnsignaledRecvStreams > 1) { EXPECT_EQ(nullptr, GetRecvStream(kSsrc1).sink()); } EXPECT_NE(nullptr, GetRecvStream(kSsrcX).sink()); // Try setting the sink for the first unsignaled stream using its known SSRC. 
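Raw audio sinks follow the same ownership-passing pattern on the receive channel: a default sink covers unsignaled streams, a per-SSRC sink covers a known stream, and passing nullptr clears either. Sketch, assuming the FakeAudioSink helper defined in this test file:

    // Applies to streams created for unsignaled SSRCs.
    receive_channel_->SetDefaultRawAudioSink(std::make_unique<FakeAudioSink>());
    // Applies to one known stream; nullptr clears it again.
    receive_channel_->SetRawAudioSink(kSsrcX, std::make_unique<FakeAudioSink>());
    receive_channel_->SetRawAudioSink(kSsrcX, nullptr);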
- channel_->SetRawAudioSink(kSsrc1, std::move(fake_sink_4)); + receive_channel_->SetRawAudioSink(kSsrc1, std::move(fake_sink_4)); if (kMaxUnsignaledRecvStreams > 1) { EXPECT_NE(nullptr, GetRecvStream(kSsrc1).sink()); } @@ -3612,15 +3623,15 @@ TEST_P(WebRtcVoiceEngineTestFake, OnReadyToSendSignalsNetworkState) { // Test that playout is still started after changing parameters TEST_P(WebRtcVoiceEngineTestFake, PreservePlayoutWhenRecreateRecvStream) { SetupRecvStream(); - channel_->SetPlayout(true); + receive_channel_->SetPlayout(true); EXPECT_TRUE(GetRecvStream(kSsrcX).started()); // Changing RTP header extensions will recreate the // AudioReceiveStreamInterface. - cricket::AudioRecvParameters parameters; + cricket::AudioReceiverParameters parameters; parameters.extensions.push_back( webrtc::RtpExtension(webrtc::RtpExtension::kAudioLevelUri, 12)); - channel_->SetRecvParameters(parameters); + receive_channel_->SetRecvParameters(parameters); EXPECT_TRUE(GetRecvStream(kSsrcX).started()); } @@ -3660,12 +3671,16 @@ TEST(WebRtcVoiceEngineTest, StartupShutdown) { call_config.trials = &field_trials; call_config.task_queue_factory = task_queue_factory.get(); auto call = absl::WrapUnique(webrtc::Call::Create(call_config)); - cricket::VoiceMediaChannel* channel = engine.CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, call.get(), cricket::MediaConfig(), - cricket::AudioOptions(), webrtc::CryptoOptions(), - webrtc::AudioCodecPairId::Create()); - EXPECT_TRUE(channel != nullptr); - delete channel; + std::unique_ptr send_channel = + engine.CreateSendChannel( + call.get(), cricket::MediaConfig(), cricket::AudioOptions(), + webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create()); + EXPECT_TRUE(send_channel); + std::unique_ptr + receive_channel = engine.CreateReceiveChannel( + call.get(), cricket::MediaConfig(), cricket::AudioOptions(), + webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create()); + EXPECT_TRUE(receive_channel); } } @@ -3692,12 +3707,16 @@ TEST(WebRtcVoiceEngineTest, StartupShutdownWithExternalADM) { call_config.trials = &field_trials; call_config.task_queue_factory = task_queue_factory.get(); auto call = absl::WrapUnique(webrtc::Call::Create(call_config)); - cricket::VoiceMediaChannel* channel = engine.CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, call.get(), - cricket::MediaConfig(), cricket::AudioOptions(), - webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create()); - EXPECT_TRUE(channel != nullptr); - delete channel; + std::unique_ptr send_channel = + engine.CreateSendChannel( + call.get(), cricket::MediaConfig(), cricket::AudioOptions(), + webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create()); + EXPECT_TRUE(send_channel); + std::unique_ptr + receive_channel = engine.CreateReceiveChannel( + call.get(), cricket::MediaConfig(), cricket::AudioOptions(), + webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create()); + EXPECT_TRUE(receive_channel); } // The engine/channel should have dropped their references. 
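At the engine level, the single CreateMediaChannel(Role::kBoth) call is replaced by a CreateSendChannel/CreateReceiveChannel pair, both returning owning pointers. A sketch of the new startup shape used by these engine tests (auto again stands in for the interface types elided above):

    auto send_channel = engine.CreateSendChannel(
        call.get(), cricket::MediaConfig(), cricket::AudioOptions(),
        webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create());
    auto receive_channel = engine.CreateReceiveChannel(
        call.get(), cricket::MediaConfig(), cricket::AudioOptions(),
        webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create());
    EXPECT_TRUE(send_channel);
    EXPECT_TRUE(receive_channel);
    // Both are owning pointers, so no manual delete is needed at shutdown.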
EXPECT_EQ(adm.release()->Release(), @@ -3779,24 +3798,19 @@ TEST(WebRtcVoiceEngineTest, Has32Channels) { call_config.task_queue_factory = task_queue_factory.get(); auto call = absl::WrapUnique(webrtc::Call::Create(call_config)); - cricket::VoiceMediaChannel* channels[32]; - size_t num_channels = 0; - while (num_channels < arraysize(channels)) { - cricket::VoiceMediaChannel* channel = engine.CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, call.get(), - cricket::MediaConfig(), cricket::AudioOptions(), - webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create()); + std::vector> + channels; + while (channels.size() < 32) { + std::unique_ptr channel = + engine.CreateSendChannel( + call.get(), cricket::MediaConfig(), cricket::AudioOptions(), + webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create()); if (!channel) break; - channels[num_channels++] = channel; + channels.emplace_back(std::move(channel)); } - size_t expected = arraysize(channels); - EXPECT_EQ(expected, num_channels); - - while (num_channels > 0) { - delete channels[--num_channels]; - } + EXPECT_EQ(channels.size(), 32u); } } @@ -3833,7 +3847,7 @@ TEST(WebRtcVoiceEngineTest, SetRecvCodecs) { &engine, cricket::MediaConfig(), cricket::AudioOptions(), webrtc::CryptoOptions(), call.get(), webrtc::AudioCodecPairId::Create()); - cricket::AudioRecvParameters parameters; + cricket::AudioReceiverParameters parameters; parameters.codecs = engine.recv_codecs(); EXPECT_TRUE(channel.SetRecvParameters(parameters)); } @@ -3869,7 +3883,7 @@ TEST(WebRtcVoiceEngineTest, SetRtpSendParametersMaxBitrate) { &engine, cricket::MediaConfig(), cricket::AudioOptions(), webrtc::CryptoOptions(), call.get(), webrtc::AudioCodecPairId::Create()); { - cricket::AudioSendParameters params; + cricket::AudioSenderParameter params; params.codecs.push_back(cricket::CreateAudioCodec(1, "opus", 48000, 2)); params.extensions.push_back(webrtc::RtpExtension( webrtc::RtpExtension::kTransportSequenceNumberUri, 1)); diff --git a/third_party/libwebrtc/modules/async_audio_processing/async_audio_processing_gn/moz.build b/third_party/libwebrtc/modules/async_audio_processing/async_audio_processing_gn/moz.build index 672365609554..307a0943c4a8 100644 --- a/third_party/libwebrtc/modules/async_audio_processing/async_audio_processing_gn/moz.build +++ b/third_party/libwebrtc/modules/async_audio_processing/async_audio_processing_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/modules/audio_coding/audio_coding_opus_common_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/audio_coding_opus_common_gn/moz.build index cfb5a0f7e572..47a31f830319 100644 --- a/third_party/libwebrtc/modules/audio_coding/audio_coding_opus_common_gn/moz.build +++ b/third_party/libwebrtc/modules/audio_coding/audio_coding_opus_common_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc b/third_party/libwebrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc index 724bba52d65a..634f14d3702c 100644 --- a/third_party/libwebrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc +++ b/third_party/libwebrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc @@ -104,7 +104,14 @@ AudioEncoder::EncodedInfo AudioEncoderCopyRed::EncodeImpl( 
RTC_CHECK(info.redundant.empty()) << "Cannot use nested redundant encoders."; RTC_DCHECK_EQ(primary_encoded_.size(), info.encoded_bytes); - if (info.encoded_bytes == 0 || info.encoded_bytes >= kRedMaxPacketSize) { + if (info.encoded_bytes == 0) { + return info; + } + if (info.encoded_bytes >= kRedMaxPacketSize) { + // Fallback to the primary encoding if the encoded size is more than + // what RED can encode as redundancy (1024 bytes). This can happen with + // Opus stereo at the highest bitrate which consumes up to 1276 bytes. + encoded->AppendData(primary_encoded_); return info; } RTC_DCHECK_GT(max_packet_length_, info.encoded_bytes); diff --git a/third_party/libwebrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc b/third_party/libwebrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc index 8161931a7ab7..20d85a1d150d 100644 --- a/third_party/libwebrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc +++ b/third_party/libwebrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc @@ -315,6 +315,23 @@ TEST_F(AudioEncoderCopyRedTest, CheckPayloadSizes3) { } } +// Checks that packets encoded larger than REDs 1024 maximum are returned as-is. +TEST_F(AudioEncoderCopyRedTest, VeryLargePacket) { + AudioEncoder::EncodedInfo info; + info.payload_type = 63; + info.encoded_bytes = + 1111; // Must be > 1024 which is the maximum size encodable by RED. + info.encoded_timestamp = timestamp_; + + EXPECT_CALL(*mock_encoder_, EncodeImpl(_, _, _)) + .WillOnce(Invoke(MockAudioEncoder::FakeEncoding(info))); + + Encode(); + ASSERT_EQ(0u, encoded_info_.redundant.size()); + ASSERT_EQ(info.encoded_bytes, encoded_info_.encoded_bytes); + ASSERT_EQ(info.payload_type, encoded_info_.payload_type); +} + // Checks that the correct timestamps are returned. 
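The encoder change above narrows when redundancy is added: a zero-length primary encoding is returned untouched, and a primary encoding at or above kRedMaxPacketSize (1024 bytes, e.g. high-bitrate stereo Opus) is copied through without a redundant block, which the new VeryLargePacket test pins down. A minimal sketch of that branch using the names from the patch:

    if (info.encoded_bytes == 0) {
      return info;  // Nothing to protect.
    }
    if (info.encoded_bytes >= kRedMaxPacketSize) {
      // Too large to carry as RED redundancy; pass the primary encoding
      // through unchanged instead of dropping the packet.
      encoded->AppendData(primary_encoded_);
      return info;
    }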
TEST_F(AudioEncoderCopyRedTest, CheckTimestamps) { uint32_t primary_timestamp = timestamp_; diff --git a/third_party/libwebrtc/modules/audio_coding/g711_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/g711_gn/moz.build index bd1a9d275900..ae5fb0995ecf 100644 --- a/third_party/libwebrtc/modules/audio_coding/g711_gn/moz.build +++ b/third_party/libwebrtc/modules/audio_coding/g711_gn/moz.build @@ -127,6 +127,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/modules/audio_coding/g722_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/g722_gn/moz.build index d177f4acd05c..b30fe22abae7 100644 --- a/third_party/libwebrtc/modules/audio_coding/g722_gn/moz.build +++ b/third_party/libwebrtc/modules/audio_coding/g722_gn/moz.build @@ -127,6 +127,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/modules/audio_coding/legacy_encoded_audio_frame_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/legacy_encoded_audio_frame_gn/moz.build index 30c17ef7197f..56458ec40f88 100644 --- a/third_party/libwebrtc/modules/audio_coding/legacy_encoded_audio_frame_gn/moz.build +++ b/third_party/libwebrtc/modules/audio_coding/legacy_encoded_audio_frame_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/modules/audio_coding/pcm16b_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/pcm16b_gn/moz.build index f22a2a913cd4..786b516d4142 100644 --- a/third_party/libwebrtc/modules/audio_coding/pcm16b_gn/moz.build +++ b/third_party/libwebrtc/modules/audio_coding/pcm16b_gn/moz.build @@ -128,6 +128,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/modules/audio_coding/webrtc_multiopus_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/webrtc_multiopus_gn/moz.build index 20fde0e64ac9..0eca3cd37952 100644 --- a/third_party/libwebrtc/modules/audio_coding/webrtc_multiopus_gn/moz.build +++ b/third_party/libwebrtc/modules/audio_coding/webrtc_multiopus_gn/moz.build @@ -131,6 +131,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/modules/audio_coding/webrtc_opus_wrapper_gn/moz.build b/third_party/libwebrtc/modules/audio_coding/webrtc_opus_wrapper_gn/moz.build index b24ff8634633..6d9e32b68d83 100644 --- a/third_party/libwebrtc/modules/audio_coding/webrtc_opus_wrapper_gn/moz.build +++ b/third_party/libwebrtc/modules/audio_coding/webrtc_opus_wrapper_gn/moz.build @@ -130,6 +130,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/modules/audio_device/BUILD.gn b/third_party/libwebrtc/modules/audio_device/BUILD.gn index 670432810889..0bfad8420654 100644 --- a/third_party/libwebrtc/modules/audio_device/BUILD.gn +++ b/third_party/libwebrtc/modules/audio_device/BUILD.gn @@ -59,12 +59,15 @@ rtc_source_set("audio_device_api") { absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } +rtc_library("audio_device_config") { + sources = [ "audio_device_config.h" ] +} + rtc_library("audio_device_buffer") { if 
(!build_with_mozilla) { # See Bug 1820869. sources = [ "audio_device_buffer.cc", "audio_device_buffer.h", - "audio_device_config.h", "fine_audio_buffer.cc", "fine_audio_buffer.h", ] @@ -187,14 +190,20 @@ if (!build_with_chromium) { sources = [ "include/test_audio_device.cc", "include/test_audio_device.h", + "test_audio_device_impl.cc", + "test_audio_device_impl.h", ] deps = [ ":audio_device_api", + ":audio_device_buffer", ":audio_device_default", + ":audio_device_generic", + ":audio_device_impl", "../../api:array_view", "../../api:make_ref_counted", "../../api:scoped_refptr", "../../api/task_queue", + "../../api/units:time_delta", "../../common_audio", "../../rtc_base:buffer", "../../rtc_base:checks", @@ -209,6 +218,50 @@ if (!build_with_chromium) { "../../rtc_base/synchronization:mutex", "../../rtc_base/task_utils:repeating_task", ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] + } +} + +rtc_library("audio_device_dummy") { + visibility = [ "*" ] + sources = [ + "dummy/audio_device_dummy.cc", + "dummy/audio_device_dummy.h", + ] + deps = [ + ":audio_device_api", + ":audio_device_buffer", + ":audio_device_generic", + ] +} + +if (!build_with_chromium) { + rtc_library("file_audio_device") { + visibility = [ "*" ] + sources = [ + "dummy/file_audio_device.cc", + "dummy/file_audio_device.h", + "dummy/file_audio_device_factory.cc", + "dummy/file_audio_device_factory.h", + ] + defines = [] + if (rtc_use_dummy_audio_file_devices) { + defines += [ "WEBRTC_DUMMY_FILE_DEVICES" ] + } + deps = [ + ":audio_device_generic", + "../../rtc_base:checks", + "../../rtc_base:logging", + "../../rtc_base:platform_thread", + "../../rtc_base:stringutils", + "../../rtc_base:timeutils", + "../../rtc_base/synchronization:mutex", + "../../rtc_base/system:file_wrapper", + "../../system_wrappers", + ] absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } } @@ -221,7 +274,9 @@ if (!build_with_mozilla) { # See Bug 1820869. deps = [ ":audio_device_api", ":audio_device_buffer", + ":audio_device_config", ":audio_device_default", + ":audio_device_dummy", ":audio_device_generic", "../../api:array_view", "../../api:make_ref_counted", @@ -229,6 +284,7 @@ if (!build_with_mozilla) { # See Bug 1820869. "../../api:scoped_refptr", "../../api:sequence_checker", "../../api/task_queue", + "../../api/units:time_delta", "../../common_audio", "../../common_audio:common_audio_c", "../../rtc_base:buffer", @@ -254,18 +310,13 @@ if (!build_with_mozilla) { # See Bug 1820869. absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/strings:strings", + "//third_party/abseil-cpp/absl/types:optional", ] if (rtc_include_internal_audio_device && is_ios) { deps += [ "../../sdk:audio_device" ] } - sources = [ - "dummy/audio_device_dummy.cc", - "dummy/audio_device_dummy.h", - "dummy/file_audio_device.cc", - "dummy/file_audio_device.h", - "include/fake_audio_device.h", - ] + sources = [ "include/fake_audio_device.h" ] if (build_with_mozilla) { sources -= [ @@ -290,39 +341,7 @@ if (!build_with_mozilla) { # See Bug 1820869. 
"include/audio_device_data_observer.h", ] if (is_android) { - sources += [ - "android/audio_common.h", - "android/audio_device_template.h", - "android/audio_manager.cc", - "android/audio_manager.h", - "android/audio_record_jni.cc", - "android/audio_record_jni.h", - "android/audio_track_jni.cc", - "android/audio_track_jni.h", - "android/build_info.cc", - "android/build_info.h", - "android/opensles_common.cc", - "android/opensles_common.h", - "android/opensles_player.cc", - "android/opensles_player.h", - "android/opensles_recorder.cc", - "android/opensles_recorder.h", - ] - libs = [ - "log", - "OpenSLES", - ] - if (rtc_enable_android_aaudio) { - sources += [ - "android/aaudio_player.cc", - "android/aaudio_player.h", - "android/aaudio_recorder.cc", - "android/aaudio_recorder.h", - "android/aaudio_wrapper.cc", - "android/aaudio_wrapper.h", - ] - libs += [ "aaudio" ] - } + deps += [ "../../sdk/android:native_api_audio_device_module" ] if (build_with_mozilla) { include_dirs += [ @@ -401,11 +420,10 @@ if (!build_with_mozilla) { # See Bug 1820869. } if (!build_with_chromium) { - sources += [ - # Do not link these into Chrome since they contain static data. - "dummy/file_audio_device_factory.cc", - "dummy/file_audio_device_factory.h", - ] + deps += [ ":file_audio_device" ] + + # TODO(titovartem): remove after downstream is fixed + sources += [ "dummy/file_audio_device_factory.h" ] } } } @@ -453,10 +471,12 @@ if (rtc_include_tests && !build_with_chromium && !build_with_mozilla) { sources = [ "fine_audio_buffer_unittest.cc", "include/test_audio_device_unittest.cc", + "test_audio_device_impl_test.cc", ] deps = [ ":audio_device", ":audio_device_buffer", + ":audio_device_generic", ":audio_device_impl", ":mock_audio_device", ":test_audio_device_module", @@ -465,6 +485,8 @@ if (rtc_include_tests && !build_with_chromium && !build_with_mozilla) { "../../api:sequence_checker", "../../api/task_queue", "../../api/task_queue:default_task_queue_factory", + "../../api/units:time_delta", + "../../api/units:timestamp", "../../common_audio", "../../rtc_base:buffer", "../../rtc_base:checks", @@ -479,6 +501,7 @@ if (rtc_include_tests && !build_with_chromium && !build_with_mozilla) { "../../system_wrappers", "../../test:fileutils", "../../test:test_support", + "../../test/time_controller", ] absl_deps = [ "//third_party/abseil-cpp/absl/strings", @@ -497,12 +520,6 @@ if (rtc_include_tests && !build_with_chromium && !build_with_mozilla) { ] } if (is_android) { - sources += [ - "android/audio_device_unittest.cc", - "android/audio_manager_unittest.cc", - "android/ensure_initialized.cc", - "android/ensure_initialized.h", - ] deps += [ "../../sdk/android:internal_jni", "../../sdk/android:libjingle_peerconnection_java", @@ -516,20 +533,3 @@ if (rtc_include_tests && !build_with_chromium && !build_with_mozilla) { } } } - -if ((!build_with_chromium && !build_with_mozilla) && is_android) { - rtc_android_library("audio_device_java") { - sources = [ - "android/java/src/org/webrtc/voiceengine/BuildInfo.java", - "android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java", - "android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java", - "android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java", - "android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java", - "android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java", - ] - deps = [ - "../../rtc_base:base_java", - "//third_party/androidx:androidx_annotation_annotation_java", - ] - } -} diff --git a/third_party/libwebrtc/modules/audio_device/DEPS 
b/third_party/libwebrtc/modules/audio_device/DEPS index 9cc627d33049..b0571deb0eaa 100644 --- a/third_party/libwebrtc/modules/audio_device/DEPS +++ b/third_party/libwebrtc/modules/audio_device/DEPS @@ -9,5 +9,6 @@ specific_include_rules = { ], "audio_device_impl\.cc": [ "+sdk/objc", + "+sdk/android", ], } diff --git a/third_party/libwebrtc/modules/audio_device/android/aaudio_player.cc b/third_party/libwebrtc/modules/audio_device/android/aaudio_player.cc deleted file mode 100644 index 81e5bf54278f..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/aaudio_player.cc +++ /dev/null @@ -1,216 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_device/android/aaudio_player.h" - -#include - -#include "api/array_view.h" -#include "api/task_queue/task_queue_base.h" -#include "modules/audio_device/android/audio_manager.h" -#include "modules/audio_device/fine_audio_buffer.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" - -namespace webrtc { - -AAudioPlayer::AAudioPlayer(AudioManager* audio_manager) - : main_thread_(TaskQueueBase::Current()), - aaudio_(audio_manager, AAUDIO_DIRECTION_OUTPUT, this) { - RTC_LOG(LS_INFO) << "ctor"; - thread_checker_aaudio_.Detach(); -} - -AAudioPlayer::~AAudioPlayer() { - RTC_LOG(LS_INFO) << "dtor"; - RTC_DCHECK_RUN_ON(&main_thread_checker_); - Terminate(); - RTC_LOG(LS_INFO) << "#detected underruns: " << underrun_count_; -} - -int AAudioPlayer::Init() { - RTC_LOG(LS_INFO) << "Init"; - RTC_DCHECK_RUN_ON(&main_thread_checker_); - if (aaudio_.audio_parameters().channels() == 2) { - RTC_DLOG(LS_WARNING) << "Stereo mode is enabled"; - } - return 0; -} - -int AAudioPlayer::Terminate() { - RTC_LOG(LS_INFO) << "Terminate"; - RTC_DCHECK_RUN_ON(&main_thread_checker_); - StopPlayout(); - return 0; -} - -int AAudioPlayer::InitPlayout() { - RTC_LOG(LS_INFO) << "InitPlayout"; - RTC_DCHECK_RUN_ON(&main_thread_checker_); - RTC_DCHECK(!initialized_); - RTC_DCHECK(!playing_); - if (!aaudio_.Init()) { - return -1; - } - initialized_ = true; - return 0; -} - -bool AAudioPlayer::PlayoutIsInitialized() const { - RTC_DCHECK_RUN_ON(&main_thread_checker_); - return initialized_; -} - -int AAudioPlayer::StartPlayout() { - RTC_LOG(LS_INFO) << "StartPlayout"; - RTC_DCHECK_RUN_ON(&main_thread_checker_); - RTC_DCHECK(!playing_); - if (!initialized_) { - RTC_DLOG(LS_WARNING) - << "Playout can not start since InitPlayout must succeed first"; - return 0; - } - if (fine_audio_buffer_) { - fine_audio_buffer_->ResetPlayout(); - } - if (!aaudio_.Start()) { - return -1; - } - underrun_count_ = aaudio_.xrun_count(); - first_data_callback_ = true; - playing_ = true; - return 0; -} - -int AAudioPlayer::StopPlayout() { - RTC_LOG(LS_INFO) << "StopPlayout"; - RTC_DCHECK_RUN_ON(&main_thread_checker_); - if (!initialized_ || !playing_) { - return 0; - } - if (!aaudio_.Stop()) { - RTC_LOG(LS_ERROR) << "StopPlayout failed"; - return -1; - } - thread_checker_aaudio_.Detach(); - initialized_ = false; - playing_ = false; - return 0; -} - -bool AAudioPlayer::Playing() const { - RTC_DCHECK_RUN_ON(&main_thread_checker_); - return playing_; -} - -void 
AAudioPlayer::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) { - RTC_DLOG(LS_INFO) << "AttachAudioBuffer"; - RTC_DCHECK_RUN_ON(&main_thread_checker_); - audio_device_buffer_ = audioBuffer; - const AudioParameters audio_parameters = aaudio_.audio_parameters(); - audio_device_buffer_->SetPlayoutSampleRate(audio_parameters.sample_rate()); - audio_device_buffer_->SetPlayoutChannels(audio_parameters.channels()); - RTC_CHECK(audio_device_buffer_); - // Create a modified audio buffer class which allows us to ask for any number - // of samples (and not only multiple of 10ms) to match the optimal buffer - // size per callback used by AAudio. - fine_audio_buffer_ = std::make_unique(audio_device_buffer_); -} - -int AAudioPlayer::SpeakerVolumeIsAvailable(bool& available) { - available = false; - return 0; -} - -void AAudioPlayer::OnErrorCallback(aaudio_result_t error) { - RTC_LOG(LS_ERROR) << "OnErrorCallback: " << AAudio_convertResultToText(error); - // TODO(henrika): investigate if we can use a thread checker here. Initial - // tests shows that this callback can sometimes be called on a unique thread - // but according to the documentation it should be on the same thread as the - // data callback. - // RTC_DCHECK_RUN_ON(&thread_checker_aaudio_); - if (aaudio_.stream_state() == AAUDIO_STREAM_STATE_DISCONNECTED) { - // The stream is disconnected and any attempt to use it will return - // AAUDIO_ERROR_DISCONNECTED. - RTC_LOG(LS_WARNING) << "Output stream disconnected"; - // AAudio documentation states: "You should not close or reopen the stream - // from the callback, use another thread instead". A message is therefore - // sent to the main thread to do the restart operation. - RTC_DCHECK(main_thread_); - main_thread_->PostTask([this] { HandleStreamDisconnected(); }); - } -} - -aaudio_data_callback_result_t AAudioPlayer::OnDataCallback(void* audio_data, - int32_t num_frames) { - RTC_DCHECK_RUN_ON(&thread_checker_aaudio_); - // Log device id in first data callback to ensure that a valid device is - // utilized. - if (first_data_callback_) { - RTC_LOG(LS_INFO) << "--- First output data callback: " - "device id=" - << aaudio_.device_id(); - first_data_callback_ = false; - } - - // Check if the underrun count has increased. If it has, increase the buffer - // size by adding the size of a burst. It will reduce the risk of underruns - // at the expense of an increased latency. - // TODO(henrika): enable possibility to disable and/or tune the algorithm. - const int32_t underrun_count = aaudio_.xrun_count(); - if (underrun_count > underrun_count_) { - RTC_LOG(LS_ERROR) << "Underrun detected: " << underrun_count; - underrun_count_ = underrun_count; - aaudio_.IncreaseOutputBufferSize(); - } - - // Estimate latency between writing an audio frame to the output stream and - // the time that same frame is played out on the output audio device. - latency_millis_ = aaudio_.EstimateLatencyMillis(); - // TODO(henrika): use for development only. - if (aaudio_.frames_written() % (1000 * aaudio_.frames_per_burst()) == 0) { - RTC_DLOG(LS_INFO) << "output latency: " << latency_millis_ - << ", num_frames: " << num_frames; - } - - // Read audio data from the WebRTC source using the FineAudioBuffer object - // and write that data into `audio_data` to be played out by AAudio. - // Prime output with zeros during a short initial phase to avoid distortion. - // TODO(henrika): do more work to figure out of if the initial forced silence - // period is really needed. 
- if (aaudio_.frames_written() < 50 * aaudio_.frames_per_burst()) { - const size_t num_bytes = - sizeof(int16_t) * aaudio_.samples_per_frame() * num_frames; - memset(audio_data, 0, num_bytes); - } else { - fine_audio_buffer_->GetPlayoutData( - rtc::MakeArrayView(static_cast(audio_data), - aaudio_.samples_per_frame() * num_frames), - static_cast(latency_millis_ + 0.5)); - } - - // TODO(henrika): possibly add trace here to be included in systrace. - // See https://developer.android.com/studio/profile/systrace-commandline.html. - return AAUDIO_CALLBACK_RESULT_CONTINUE; -} - -void AAudioPlayer::HandleStreamDisconnected() { - RTC_DCHECK_RUN_ON(&main_thread_checker_); - RTC_DLOG(LS_INFO) << "HandleStreamDisconnected"; - if (!initialized_ || !playing_) { - return; - } - // Perform a restart by first closing the disconnected stream and then start - // a new stream; this time using the new (preferred) audio output device. - StopPlayout(); - InitPlayout(); - StartPlayout(); -} -} // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_device/android/aaudio_player.h b/third_party/libwebrtc/modules/audio_device/android/aaudio_player.h deleted file mode 100644 index ea5d57809260..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/aaudio_player.h +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_DEVICE_ANDROID_AAUDIO_PLAYER_H_ -#define MODULES_AUDIO_DEVICE_ANDROID_AAUDIO_PLAYER_H_ - -#include - -#include - -#include "api/sequence_checker.h" -#include "api/task_queue/task_queue_base.h" -#include "modules/audio_device/android/aaudio_wrapper.h" -#include "modules/audio_device/include/audio_device_defines.h" -#include "rtc_base/thread_annotations.h" - -namespace webrtc { - -class AudioDeviceBuffer; -class FineAudioBuffer; -class AudioManager; - -// Implements low-latency 16-bit mono PCM audio output support for Android -// using the C based AAudio API. -// -// An instance must be created and destroyed on one and the same thread. -// All public methods must also be called on the same thread. A thread checker -// will DCHECK if any method is called on an invalid thread. Audio buffers -// are requested on a dedicated high-priority thread owned by AAudio. -// -// The existing design forces the user to call InitPlayout() after StopPlayout() -// to be able to call StartPlayout() again. This is in line with how the Java- -// based implementation works. -// -// An audio stream can be disconnected, e.g. when an audio device is removed. -// This implementation will restart the audio stream using the new preferred -// device if such an event happens. -// -// Also supports automatic buffer-size adjustment based on underrun detections -// where the internal AAudio buffer can be increased when needed. It will -// reduce the risk of underruns (~glitches) at the expense of an increased -// latency. 
-class AAudioPlayer final : public AAudioObserverInterface { - public: - explicit AAudioPlayer(AudioManager* audio_manager); - ~AAudioPlayer(); - - int Init(); - int Terminate(); - - int InitPlayout(); - bool PlayoutIsInitialized() const; - - int StartPlayout(); - int StopPlayout(); - bool Playing() const; - - void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer); - - // Not implemented in AAudio. - int SpeakerVolumeIsAvailable(bool& available); // NOLINT - int SetSpeakerVolume(uint32_t volume) { return -1; } - int SpeakerVolume(uint32_t& volume) const { return -1; } // NOLINT - int MaxSpeakerVolume(uint32_t& maxVolume) const { return -1; } // NOLINT - int MinSpeakerVolume(uint32_t& minVolume) const { return -1; } // NOLINT - - protected: - // AAudioObserverInterface implementation. - - // For an output stream, this function should render and write `num_frames` - // of data in the streams current data format to the `audio_data` buffer. - // Called on a real-time thread owned by AAudio. - aaudio_data_callback_result_t OnDataCallback(void* audio_data, - int32_t num_frames) override; - // AAudio calls this functions if any error occurs on a callback thread. - // Called on a real-time thread owned by AAudio. - void OnErrorCallback(aaudio_result_t error) override; - - private: - // Closes the existing stream and starts a new stream. - void HandleStreamDisconnected(); - - // Ensures that methods are called from the same thread as this object is - // created on. - SequenceChecker main_thread_checker_; - - // Stores thread ID in first call to AAudioPlayer::OnDataCallback from a - // real-time thread owned by AAudio. Detached during construction of this - // object. - SequenceChecker thread_checker_aaudio_; - - // The task queue on which this object is created on. - TaskQueueBase* main_thread_; - - // Wraps all AAudio resources. Contains an output stream using the default - // output audio device. Can be accessed on both the main thread and the - // real-time thread owned by AAudio. See separate AAudio documentation about - // thread safety. - AAudioWrapper aaudio_; - - // FineAudioBuffer takes an AudioDeviceBuffer which delivers audio data - // in chunks of 10ms. It then allows for this data to be pulled in - // a finer or coarser granularity. I.e. interacting with this class instead - // of directly with the AudioDeviceBuffer one can ask for any number of - // audio data samples. - // Example: native buffer size can be 192 audio frames at 48kHz sample rate. - // WebRTC will provide 480 audio frames per 10ms but AAudio asks for 192 - // in each callback (once every 4th ms). This class can then ask for 192 and - // the FineAudioBuffer will ask WebRTC for new data approximately only every - // second callback and also cache non-utilized audio. - std::unique_ptr fine_audio_buffer_; - - // Counts number of detected underrun events reported by AAudio. - int32_t underrun_count_ = 0; - - // True only for the first data callback in each audio session. - bool first_data_callback_ = true; - - // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the - // AudioDeviceModuleImpl class and set by AudioDeviceModule::Create(). - AudioDeviceBuffer* audio_device_buffer_ RTC_GUARDED_BY(main_thread_checker_) = - nullptr; - - bool initialized_ RTC_GUARDED_BY(main_thread_checker_) = false; - bool playing_ RTC_GUARDED_BY(main_thread_checker_) = false; - - // Estimated latency between writing an audio frame to the output stream and - // the time that same frame is played out on the output audio device. 
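The FineAudioBuffer comment in the removed player code above describes adapting WebRTC's fixed 10 ms blocks (480 frames at 48 kHz mono) to AAudio's arbitrary burst size (for example 192 frames), pulling a new block only when the cache runs dry. A simplified, self-contained sketch of that idea under those assumptions (the class name and callback shape here are hypothetical, not the real FineAudioBuffer API):

```cpp
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <deque>
#include <functional>
#include <iostream>
#include <vector>

// The source always produces fixed 10 ms blocks; the sink may request any
// number of frames; leftover samples are cached between requests.
class TenMsToBurstAdapter {
 public:
  explicit TenMsToBurstAdapter(std::function<std::vector<int16_t>()> pull_10ms)
      : pull_10ms_(std::move(pull_10ms)) {}

  // Fill `dest` with exactly `num_frames` mono samples, pulling new 10 ms
  // blocks from the source only when the cache cannot cover the request.
  void GetPlayoutData(int16_t* dest, size_t num_frames) {
    while (cache_.size() < num_frames) {
      std::vector<int16_t> block = pull_10ms_();
      cache_.insert(cache_.end(), block.begin(), block.end());
    }
    std::copy(cache_.begin(), cache_.begin() + num_frames, dest);
    cache_.erase(cache_.begin(), cache_.begin() + num_frames);
  }

 private:
  std::function<std::vector<int16_t>()> pull_10ms_;
  std::deque<int16_t> cache_;
};

int main() {
  // 48 kHz mono: 480 frames per 10 ms, AAudio burst of 192 frames (figures
  // taken from the comment above).
  int pulls = 0;
  TenMsToBurstAdapter adapter([&pulls] {
    ++pulls;
    return std::vector<int16_t>(480, 0);
  });

  int16_t burst[192];
  for (int callback = 0; callback < 5; ++callback) {
    adapter.GetPlayoutData(burst, 192);
  }
  // 5 bursts * 192 = 960 frames, so only two 10 ms blocks were pulled.
  std::cout << "10 ms blocks pulled: " << pulls << "\n";
}
```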
- double latency_millis_ RTC_GUARDED_BY(thread_checker_aaudio_) = 0; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_DEVICE_ANDROID_AAUDIO_PLAYER_H_ diff --git a/third_party/libwebrtc/modules/audio_device/android/aaudio_recorder.cc b/third_party/libwebrtc/modules/audio_device/android/aaudio_recorder.cc deleted file mode 100644 index 21e5dd8a749c..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/aaudio_recorder.cc +++ /dev/null @@ -1,205 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_device/android/aaudio_recorder.h" - -#include - -#include "api/array_view.h" -#include "api/task_queue/task_queue_base.h" -#include "modules/audio_device/android/audio_manager.h" -#include "modules/audio_device/fine_audio_buffer.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" -#include "rtc_base/time_utils.h" - -namespace webrtc { - -AAudioRecorder::AAudioRecorder(AudioManager* audio_manager) - : main_thread_(TaskQueueBase::Current()), - aaudio_(audio_manager, AAUDIO_DIRECTION_INPUT, this) { - RTC_LOG(LS_INFO) << "ctor"; - thread_checker_aaudio_.Detach(); -} - -AAudioRecorder::~AAudioRecorder() { - RTC_LOG(LS_INFO) << "dtor"; - RTC_DCHECK(thread_checker_.IsCurrent()); - Terminate(); - RTC_LOG(LS_INFO) << "detected owerflows: " << overflow_count_; -} - -int AAudioRecorder::Init() { - RTC_LOG(LS_INFO) << "Init"; - RTC_DCHECK(thread_checker_.IsCurrent()); - if (aaudio_.audio_parameters().channels() == 2) { - RTC_DLOG(LS_WARNING) << "Stereo mode is enabled"; - } - return 0; -} - -int AAudioRecorder::Terminate() { - RTC_LOG(LS_INFO) << "Terminate"; - RTC_DCHECK(thread_checker_.IsCurrent()); - StopRecording(); - return 0; -} - -int AAudioRecorder::InitRecording() { - RTC_LOG(LS_INFO) << "InitRecording"; - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(!initialized_); - RTC_DCHECK(!recording_); - if (!aaudio_.Init()) { - return -1; - } - initialized_ = true; - return 0; -} - -int AAudioRecorder::StartRecording() { - RTC_LOG(LS_INFO) << "StartRecording"; - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(initialized_); - RTC_DCHECK(!recording_); - if (fine_audio_buffer_) { - fine_audio_buffer_->ResetPlayout(); - } - if (!aaudio_.Start()) { - return -1; - } - overflow_count_ = aaudio_.xrun_count(); - first_data_callback_ = true; - recording_ = true; - return 0; -} - -int AAudioRecorder::StopRecording() { - RTC_LOG(LS_INFO) << "StopRecording"; - RTC_DCHECK(thread_checker_.IsCurrent()); - if (!initialized_ || !recording_) { - return 0; - } - if (!aaudio_.Stop()) { - return -1; - } - thread_checker_aaudio_.Detach(); - initialized_ = false; - recording_ = false; - return 0; -} - -void AAudioRecorder::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) { - RTC_LOG(LS_INFO) << "AttachAudioBuffer"; - RTC_DCHECK(thread_checker_.IsCurrent()); - audio_device_buffer_ = audioBuffer; - const AudioParameters audio_parameters = aaudio_.audio_parameters(); - audio_device_buffer_->SetRecordingSampleRate(audio_parameters.sample_rate()); - audio_device_buffer_->SetRecordingChannels(audio_parameters.channels()); - RTC_CHECK(audio_device_buffer_); - // Create a modified audio buffer class 
which allows us to deliver any number - // of samples (and not only multiples of 10ms which WebRTC uses) to match the - // native AAudio buffer size. - fine_audio_buffer_ = std::make_unique(audio_device_buffer_); -} - -int AAudioRecorder::EnableBuiltInAEC(bool enable) { - RTC_LOG(LS_INFO) << "EnableBuiltInAEC: " << enable; - RTC_LOG(LS_ERROR) << "Not implemented"; - return -1; -} - -int AAudioRecorder::EnableBuiltInAGC(bool enable) { - RTC_LOG(LS_INFO) << "EnableBuiltInAGC: " << enable; - RTC_LOG(LS_ERROR) << "Not implemented"; - return -1; -} - -int AAudioRecorder::EnableBuiltInNS(bool enable) { - RTC_LOG(LS_INFO) << "EnableBuiltInNS: " << enable; - RTC_LOG(LS_ERROR) << "Not implemented"; - return -1; -} - -void AAudioRecorder::OnErrorCallback(aaudio_result_t error) { - RTC_LOG(LS_ERROR) << "OnErrorCallback: " << AAudio_convertResultToText(error); - // RTC_DCHECK(thread_checker_aaudio_.IsCurrent()); - if (aaudio_.stream_state() == AAUDIO_STREAM_STATE_DISCONNECTED) { - // The stream is disconnected and any attempt to use it will return - // AAUDIO_ERROR_DISCONNECTED.. - RTC_LOG(LS_WARNING) << "Input stream disconnected => restart is required"; - // AAudio documentation states: "You should not close or reopen the stream - // from the callback, use another thread instead". A message is therefore - // sent to the main thread to do the restart operation. - RTC_DCHECK(main_thread_); - main_thread_->PostTask([this] { HandleStreamDisconnected(); }); - } -} - -// Read and process `num_frames` of data from the `audio_data` buffer. -// TODO(henrika): possibly add trace here to be included in systrace. -// See https://developer.android.com/studio/profile/systrace-commandline.html. -aaudio_data_callback_result_t AAudioRecorder::OnDataCallback( - void* audio_data, - int32_t num_frames) { - // TODO(henrika): figure out why we sometimes hit this one. - // RTC_DCHECK(thread_checker_aaudio_.IsCurrent()); - // RTC_LOG(LS_INFO) << "OnDataCallback: " << num_frames; - // Drain the input buffer at first callback to ensure that it does not - // contain any old data. Will also ensure that the lowest possible latency - // is obtained. - if (first_data_callback_) { - RTC_LOG(LS_INFO) << "--- First input data callback: " - "device id=" - << aaudio_.device_id(); - aaudio_.ClearInputStream(audio_data, num_frames); - first_data_callback_ = false; - } - // Check if the overflow counter has increased and if so log a warning. - // TODO(henrika): possible add UMA stat or capacity extension. - const int32_t overflow_count = aaudio_.xrun_count(); - if (overflow_count > overflow_count_) { - RTC_LOG(LS_ERROR) << "Overflow detected: " << overflow_count; - overflow_count_ = overflow_count; - } - // Estimated time between an audio frame was recorded by the input device and - // it can read on the input stream. - latency_millis_ = aaudio_.EstimateLatencyMillis(); - // TODO(henrika): use for development only. - if (aaudio_.frames_read() % (1000 * aaudio_.frames_per_burst()) == 0) { - RTC_DLOG(LS_INFO) << "input latency: " << latency_millis_ - << ", num_frames: " << num_frames; - } - // Copy recorded audio in `audio_data` to the WebRTC sink using the - // FineAudioBuffer object. 
- fine_audio_buffer_->DeliverRecordedData( - rtc::MakeArrayView(static_cast(audio_data), - aaudio_.samples_per_frame() * num_frames), - static_cast(latency_millis_ + 0.5)); - - return AAUDIO_CALLBACK_RESULT_CONTINUE; -} - -void AAudioRecorder::HandleStreamDisconnected() { - RTC_DCHECK_RUN_ON(&thread_checker_); - RTC_LOG(LS_INFO) << "HandleStreamDisconnected"; - if (!initialized_ || !recording_) { - return; - } - // Perform a restart by first closing the disconnected stream and then start - // a new stream; this time using the new (preferred) audio input device. - // TODO(henrika): resolve issue where a one restart attempt leads to a long - // sequence of new calls to OnErrorCallback(). - // See b/73148976 for details. - StopRecording(); - InitRecording(); - StartRecording(); -} -} // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_device/android/aaudio_recorder.h b/third_party/libwebrtc/modules/audio_device/android/aaudio_recorder.h deleted file mode 100644 index 6df7eed07639..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/aaudio_recorder.h +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_DEVICE_ANDROID_AAUDIO_RECORDER_H_ -#define MODULES_AUDIO_DEVICE_ANDROID_AAUDIO_RECORDER_H_ - -#include - -#include - -#include "api/sequence_checker.h" -#include "api/task_queue/task_queue_base.h" -#include "modules/audio_device/android/aaudio_wrapper.h" -#include "modules/audio_device/include/audio_device_defines.h" - -namespace webrtc { - -class AudioDeviceBuffer; -class FineAudioBuffer; -class AudioManager; - -// Implements low-latency 16-bit mono PCM audio input support for Android -// using the C based AAudio API. -// -// An instance must be created and destroyed on one and the same thread. -// All public methods must also be called on the same thread. A thread checker -// will RTC_DCHECK if any method is called on an invalid thread. Audio buffers -// are delivered on a dedicated high-priority thread owned by AAudio. -// -// The existing design forces the user to call InitRecording() after -// StopRecording() to be able to call StartRecording() again. This is in line -// with how the Java- based implementation works. -// -// TODO(henrika): add comments about device changes and adaptive buffer -// management. -class AAudioRecorder : public AAudioObserverInterface { - public: - explicit AAudioRecorder(AudioManager* audio_manager); - ~AAudioRecorder(); - - int Init(); - int Terminate(); - - int InitRecording(); - bool RecordingIsInitialized() const { return initialized_; } - - int StartRecording(); - int StopRecording(); - bool Recording() const { return recording_; } - - void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer); - - double latency_millis() const { return latency_millis_; } - - // TODO(henrika): add support using AAudio APIs when available. - int EnableBuiltInAEC(bool enable); - int EnableBuiltInAGC(bool enable); - int EnableBuiltInNS(bool enable); - - protected: - // AAudioObserverInterface implementation. 
- - // For an input stream, this function should read `num_frames` of recorded - // data, in the stream's current data format, from the `audio_data` buffer. - // Called on a real-time thread owned by AAudio. - aaudio_data_callback_result_t OnDataCallback(void* audio_data, - int32_t num_frames) override; - - // AAudio calls this function if any error occurs on a callback thread. - // Called on a real-time thread owned by AAudio. - void OnErrorCallback(aaudio_result_t error) override; - - private: - // Closes the existing stream and starts a new stream. - void HandleStreamDisconnected(); - - // Ensures that methods are called from the same thread as this object is - // created on. - SequenceChecker thread_checker_; - - // Stores thread ID in first call to AAudioPlayer::OnDataCallback from a - // real-time thread owned by AAudio. Detached during construction of this - // object. - SequenceChecker thread_checker_aaudio_; - - // The thread on which this object is created on. - TaskQueueBase* main_thread_; - - // Wraps all AAudio resources. Contains an input stream using the default - // input audio device. - AAudioWrapper aaudio_; - - // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the - // AudioDeviceModuleImpl class and called by AudioDeviceModule::Create(). - AudioDeviceBuffer* audio_device_buffer_ = nullptr; - - bool initialized_ = false; - bool recording_ = false; - - // Consumes audio of native buffer size and feeds the WebRTC layer with 10ms - // chunks of audio. - std::unique_ptr fine_audio_buffer_; - - // Counts number of detected overflow events reported by AAudio. - int32_t overflow_count_ = 0; - - // Estimated time between an audio frame was recorded by the input device and - // it can read on the input stream. - double latency_millis_ = 0; - - // True only for the first data callback in each audio session. - bool first_data_callback_ = true; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_DEVICE_ANDROID_AAUDIO_RECORDER_H_ diff --git a/third_party/libwebrtc/modules/audio_device/android/aaudio_wrapper.cc b/third_party/libwebrtc/modules/audio_device/android/aaudio_wrapper.cc deleted file mode 100644 index 3d824b5c57c0..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/aaudio_wrapper.cc +++ /dev/null @@ -1,499 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_device/android/aaudio_wrapper.h" - -#include "modules/audio_device/android/audio_manager.h" -#include "rtc_base/logging.h" -#include "rtc_base/strings/string_builder.h" -#include "rtc_base/time_utils.h" - -#define LOG_ON_ERROR(op) \ - do { \ - aaudio_result_t result = (op); \ - if (result != AAUDIO_OK) { \ - RTC_LOG(LS_ERROR) << #op << ": " << AAudio_convertResultToText(result); \ - } \ - } while (0) - -#define RETURN_ON_ERROR(op, ...) 
\ - do { \ - aaudio_result_t result = (op); \ - if (result != AAUDIO_OK) { \ - RTC_LOG(LS_ERROR) << #op << ": " << AAudio_convertResultToText(result); \ - return __VA_ARGS__; \ - } \ - } while (0) - -namespace webrtc { - -namespace { - -const char* DirectionToString(aaudio_direction_t direction) { - switch (direction) { - case AAUDIO_DIRECTION_OUTPUT: - return "OUTPUT"; - case AAUDIO_DIRECTION_INPUT: - return "INPUT"; - default: - return "UNKNOWN"; - } -} - -const char* SharingModeToString(aaudio_sharing_mode_t mode) { - switch (mode) { - case AAUDIO_SHARING_MODE_EXCLUSIVE: - return "EXCLUSIVE"; - case AAUDIO_SHARING_MODE_SHARED: - return "SHARED"; - default: - return "UNKNOWN"; - } -} - -const char* PerformanceModeToString(aaudio_performance_mode_t mode) { - switch (mode) { - case AAUDIO_PERFORMANCE_MODE_NONE: - return "NONE"; - case AAUDIO_PERFORMANCE_MODE_POWER_SAVING: - return "POWER_SAVING"; - case AAUDIO_PERFORMANCE_MODE_LOW_LATENCY: - return "LOW_LATENCY"; - default: - return "UNKNOWN"; - } -} - -const char* FormatToString(int32_t id) { - switch (id) { - case AAUDIO_FORMAT_INVALID: - return "INVALID"; - case AAUDIO_FORMAT_UNSPECIFIED: - return "UNSPECIFIED"; - case AAUDIO_FORMAT_PCM_I16: - return "PCM_I16"; - case AAUDIO_FORMAT_PCM_FLOAT: - return "FLOAT"; - default: - return "UNKNOWN"; - } -} - -void ErrorCallback(AAudioStream* stream, - void* user_data, - aaudio_result_t error) { - RTC_DCHECK(user_data); - AAudioWrapper* aaudio_wrapper = reinterpret_cast(user_data); - RTC_LOG(LS_WARNING) << "ErrorCallback: " - << DirectionToString(aaudio_wrapper->direction()); - RTC_DCHECK(aaudio_wrapper->observer()); - aaudio_wrapper->observer()->OnErrorCallback(error); -} - -aaudio_data_callback_result_t DataCallback(AAudioStream* stream, - void* user_data, - void* audio_data, - int32_t num_frames) { - RTC_DCHECK(user_data); - RTC_DCHECK(audio_data); - AAudioWrapper* aaudio_wrapper = reinterpret_cast(user_data); - RTC_DCHECK(aaudio_wrapper->observer()); - return aaudio_wrapper->observer()->OnDataCallback(audio_data, num_frames); -} - -// Wraps the stream builder object to ensure that it is released properly when -// the stream builder goes out of scope. -class ScopedStreamBuilder { - public: - ScopedStreamBuilder() { - LOG_ON_ERROR(AAudio_createStreamBuilder(&builder_)); - RTC_DCHECK(builder_); - } - ~ScopedStreamBuilder() { - if (builder_) { - LOG_ON_ERROR(AAudioStreamBuilder_delete(builder_)); - } - } - - AAudioStreamBuilder* get() const { return builder_; } - - private: - AAudioStreamBuilder* builder_ = nullptr; -}; - -} // namespace - -AAudioWrapper::AAudioWrapper(AudioManager* audio_manager, - aaudio_direction_t direction, - AAudioObserverInterface* observer) - : direction_(direction), observer_(observer) { - RTC_LOG(LS_INFO) << "ctor"; - RTC_DCHECK(observer_); - direction_ == AAUDIO_DIRECTION_OUTPUT - ? audio_parameters_ = audio_manager->GetPlayoutAudioParameters() - : audio_parameters_ = audio_manager->GetRecordAudioParameters(); - aaudio_thread_checker_.Detach(); - RTC_LOG(LS_INFO) << audio_parameters_.ToString(); -} - -AAudioWrapper::~AAudioWrapper() { - RTC_LOG(LS_INFO) << "dtor"; - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(!stream_); -} - -bool AAudioWrapper::Init() { - RTC_LOG(LS_INFO) << "Init"; - RTC_DCHECK(thread_checker_.IsCurrent()); - // Creates a stream builder which can be used to open an audio stream. - ScopedStreamBuilder builder; - // Configures the stream builder using audio parameters given at construction. 
- SetStreamConfiguration(builder.get()); - // Opens a stream based on options in the stream builder. - if (!OpenStream(builder.get())) { - return false; - } - // Ensures that the opened stream could activate the requested settings. - if (!VerifyStreamConfiguration()) { - return false; - } - // Optimizes the buffer scheme for lowest possible latency and creates - // additional buffer logic to match the 10ms buffer size used in WebRTC. - if (!OptimizeBuffers()) { - return false; - } - LogStreamState(); - return true; -} - -bool AAudioWrapper::Start() { - RTC_LOG(LS_INFO) << "Start"; - RTC_DCHECK(thread_checker_.IsCurrent()); - // TODO(henrika): this state check might not be needed. - aaudio_stream_state_t current_state = AAudioStream_getState(stream_); - if (current_state != AAUDIO_STREAM_STATE_OPEN) { - RTC_LOG(LS_ERROR) << "Invalid state: " - << AAudio_convertStreamStateToText(current_state); - return false; - } - // Asynchronous request for the stream to start. - RETURN_ON_ERROR(AAudioStream_requestStart(stream_), false); - LogStreamState(); - return true; -} - -bool AAudioWrapper::Stop() { - RTC_LOG(LS_INFO) << "Stop: " << DirectionToString(direction()); - RTC_DCHECK(thread_checker_.IsCurrent()); - // Asynchronous request for the stream to stop. - RETURN_ON_ERROR(AAudioStream_requestStop(stream_), false); - CloseStream(); - aaudio_thread_checker_.Detach(); - return true; -} - -double AAudioWrapper::EstimateLatencyMillis() const { - RTC_DCHECK(stream_); - double latency_millis = 0.0; - if (direction() == AAUDIO_DIRECTION_INPUT) { - // For input streams. Best guess we can do is to use the current burst size - // as delay estimate. - latency_millis = static_cast(frames_per_burst()) / sample_rate() * - rtc::kNumMillisecsPerSec; - } else { - int64_t existing_frame_index; - int64_t existing_frame_presentation_time; - // Get the time at which a particular frame was presented to audio hardware. - aaudio_result_t result = AAudioStream_getTimestamp( - stream_, CLOCK_MONOTONIC, &existing_frame_index, - &existing_frame_presentation_time); - // Results are only valid when the stream is in AAUDIO_STREAM_STATE_STARTED. - if (result == AAUDIO_OK) { - // Get write index for next audio frame. - int64_t next_frame_index = frames_written(); - // Number of frames between next frame and the existing frame. - int64_t frame_index_delta = next_frame_index - existing_frame_index; - // Assume the next frame will be written now. - int64_t next_frame_write_time = rtc::TimeNanos(); - // Calculate time when next frame will be presented to the hardware taking - // sample rate into account. - int64_t frame_time_delta = - (frame_index_delta * rtc::kNumNanosecsPerSec) / sample_rate(); - int64_t next_frame_presentation_time = - existing_frame_presentation_time + frame_time_delta; - // Derive a latency estimate given results above. - latency_millis = static_cast(next_frame_presentation_time - - next_frame_write_time) / - rtc::kNumNanosecsPerMillisec; - } - } - return latency_millis; -} - -// Returns new buffer size or a negative error value if buffer size could not -// be increased. -bool AAudioWrapper::IncreaseOutputBufferSize() { - RTC_LOG(LS_INFO) << "IncreaseBufferSize"; - RTC_DCHECK(stream_); - RTC_DCHECK(aaudio_thread_checker_.IsCurrent()); - RTC_DCHECK_EQ(direction(), AAUDIO_DIRECTION_OUTPUT); - aaudio_result_t buffer_size = AAudioStream_getBufferSizeInFrames(stream_); - // Try to increase size of buffer with one burst to reduce risk of underrun. 
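The output branch of the removed EstimateLatencyMillis() above projects when the next written frame will actually be played: it takes a (frame index, presentation time) pair reported by the stream, extrapolates forward by the number of frames already written, and subtracts the current time. A small standalone sketch of that arithmetic with made-up numbers (the AAudio queries are replaced by plain parameters):

```cpp
#include <cstdint>
#include <iostream>

// Constants mirroring rtc::kNumNanosecsPerSec / rtc::kNumNanosecsPerMillisec.
constexpr int64_t kNanosPerSec = 1'000'000'000;
constexpr int64_t kNanosPerMs = 1'000'000;

// Same arithmetic as the output branch above, with all inputs hypothetical.
double EstimateOutputLatencyMillis(int64_t existing_frame_index,
                                   int64_t existing_frame_presentation_ns,
                                   int64_t next_frame_index,
                                   int64_t now_ns,
                                   int32_t sample_rate) {
  const int64_t frame_index_delta = next_frame_index - existing_frame_index;
  // Time it takes to play frame_index_delta frames at the given sample rate.
  const int64_t frame_time_delta_ns =
      frame_index_delta * kNanosPerSec / sample_rate;
  // Projected presentation time of the frame we are about to write.
  const int64_t next_frame_presentation_ns =
      existing_frame_presentation_ns + frame_time_delta_ns;
  // Latency is how far in the future that presentation time lies.
  return static_cast<double>(next_frame_presentation_ns - now_ns) / kNanosPerMs;
}

int main() {
  // Example: the stream reports frame 96'000 was presented at t = 2.000 s, we
  // have written 96'960 frames so far, it is now t = 2.005 s, 48 kHz stream.
  double latency = EstimateOutputLatencyMillis(
      /*existing_frame_index=*/96'000,
      /*existing_frame_presentation_ns=*/2'000'000'000,
      /*next_frame_index=*/96'960,
      /*now_ns=*/2'005'000'000,
      /*sample_rate=*/48'000);
  // 960 frames at 48 kHz is 20 ms; 5 ms have already elapsed, so 15 ms remain.
  std::cout << "estimated output latency: " << latency << " ms\n";
}
```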
- buffer_size += frames_per_burst(); - // Verify that the new buffer size is not larger than max capacity. - // TODO(henrika): keep track of case when we reach the capacity limit. - const int32_t max_buffer_size = buffer_capacity_in_frames(); - if (buffer_size > max_buffer_size) { - RTC_LOG(LS_ERROR) << "Required buffer size (" << buffer_size - << ") is higher than max: " << max_buffer_size; - return false; - } - RTC_LOG(LS_INFO) << "Updating buffer size to: " << buffer_size - << " (max=" << max_buffer_size << ")"; - buffer_size = AAudioStream_setBufferSizeInFrames(stream_, buffer_size); - if (buffer_size < 0) { - RTC_LOG(LS_ERROR) << "Failed to change buffer size: " - << AAudio_convertResultToText(buffer_size); - return false; - } - RTC_LOG(LS_INFO) << "Buffer size changed to: " << buffer_size; - return true; -} - -void AAudioWrapper::ClearInputStream(void* audio_data, int32_t num_frames) { - RTC_LOG(LS_INFO) << "ClearInputStream"; - RTC_DCHECK(stream_); - RTC_DCHECK(aaudio_thread_checker_.IsCurrent()); - RTC_DCHECK_EQ(direction(), AAUDIO_DIRECTION_INPUT); - aaudio_result_t cleared_frames = 0; - do { - cleared_frames = AAudioStream_read(stream_, audio_data, num_frames, 0); - } while (cleared_frames > 0); -} - -AAudioObserverInterface* AAudioWrapper::observer() const { - return observer_; -} - -AudioParameters AAudioWrapper::audio_parameters() const { - return audio_parameters_; -} - -int32_t AAudioWrapper::samples_per_frame() const { - RTC_DCHECK(stream_); - return AAudioStream_getSamplesPerFrame(stream_); -} - -int32_t AAudioWrapper::buffer_size_in_frames() const { - RTC_DCHECK(stream_); - return AAudioStream_getBufferSizeInFrames(stream_); -} - -int32_t AAudioWrapper::buffer_capacity_in_frames() const { - RTC_DCHECK(stream_); - return AAudioStream_getBufferCapacityInFrames(stream_); -} - -int32_t AAudioWrapper::device_id() const { - RTC_DCHECK(stream_); - return AAudioStream_getDeviceId(stream_); -} - -int32_t AAudioWrapper::xrun_count() const { - RTC_DCHECK(stream_); - return AAudioStream_getXRunCount(stream_); -} - -int32_t AAudioWrapper::format() const { - RTC_DCHECK(stream_); - return AAudioStream_getFormat(stream_); -} - -int32_t AAudioWrapper::sample_rate() const { - RTC_DCHECK(stream_); - return AAudioStream_getSampleRate(stream_); -} - -int32_t AAudioWrapper::channel_count() const { - RTC_DCHECK(stream_); - return AAudioStream_getChannelCount(stream_); -} - -int32_t AAudioWrapper::frames_per_callback() const { - RTC_DCHECK(stream_); - return AAudioStream_getFramesPerDataCallback(stream_); -} - -aaudio_sharing_mode_t AAudioWrapper::sharing_mode() const { - RTC_DCHECK(stream_); - return AAudioStream_getSharingMode(stream_); -} - -aaudio_performance_mode_t AAudioWrapper::performance_mode() const { - RTC_DCHECK(stream_); - return AAudioStream_getPerformanceMode(stream_); -} - -aaudio_stream_state_t AAudioWrapper::stream_state() const { - RTC_DCHECK(stream_); - return AAudioStream_getState(stream_); -} - -int64_t AAudioWrapper::frames_written() const { - RTC_DCHECK(stream_); - return AAudioStream_getFramesWritten(stream_); -} - -int64_t AAudioWrapper::frames_read() const { - RTC_DCHECK(stream_); - return AAudioStream_getFramesRead(stream_); -} - -void AAudioWrapper::SetStreamConfiguration(AAudioStreamBuilder* builder) { - RTC_LOG(LS_INFO) << "SetStreamConfiguration"; - RTC_DCHECK(builder); - RTC_DCHECK(thread_checker_.IsCurrent()); - // Request usage of default primary output/input device. - // TODO(henrika): verify that default device follows Java APIs. 
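The removed IncreaseOutputBufferSize() above grows the stream buffer by one burst each time a new underrun is observed, capped at the stream's reported capacity, trading a burst of extra latency for fewer glitches. A compact sketch of that policy with the AAudio stream replaced by plain integers (all sizes here are hypothetical):

```cpp
#include <cstdint>
#include <iostream>

// Stand-in for the AAudio buffer state: one burst is the unit of growth and
// the capacity is the hard upper limit reported by the stream.
struct BufferState {
  int32_t size_in_frames;
  int32_t frames_per_burst;
  int32_t capacity_in_frames;
};

// Mirrors the policy above: grow by one burst unless that exceeds capacity.
bool IncreaseOnUnderrun(BufferState& buf) {
  const int32_t wanted = buf.size_in_frames + buf.frames_per_burst;
  if (wanted > buf.capacity_in_frames) {
    return false;  // Already at capacity; keep the current size.
  }
  buf.size_in_frames = wanted;
  return true;
}

int main() {
  // Start at one burst, the low-latency setting chosen by OptimizeBuffers().
  BufferState buf{/*size_in_frames=*/192, /*frames_per_burst=*/192,
                  /*capacity_in_frames=*/768};
  int32_t seen_underruns = 0;

  // Pretend the stream reports a growing underrun count over successive
  // data callbacks; only an increase triggers a resize.
  for (int reported : {0, 1, 1, 2, 3, 4}) {
    if (reported > seen_underruns) {
      seen_underruns = reported;
      IncreaseOnUnderrun(buf);
    }
  }
  // Three growth steps succeed (384, 576, 768); the fourth is capped.
  std::cout << "final buffer size: " << buf.size_in_frames << " frames\n";
}
```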
- // https://developer.android.com/reference/android/media/AudioDeviceInfo.html. - AAudioStreamBuilder_setDeviceId(builder, AAUDIO_UNSPECIFIED); - // Use preferred sample rate given by the audio parameters. - AAudioStreamBuilder_setSampleRate(builder, audio_parameters().sample_rate()); - // Use preferred channel configuration given by the audio parameters. - AAudioStreamBuilder_setChannelCount(builder, audio_parameters().channels()); - // Always use 16-bit PCM audio sample format. - AAudioStreamBuilder_setFormat(builder, AAUDIO_FORMAT_PCM_I16); - // TODO(henrika): investigate effect of using AAUDIO_SHARING_MODE_EXCLUSIVE. - // Ask for exclusive mode since this will give us the lowest possible latency. - // If exclusive mode isn't available, shared mode will be used instead. - AAudioStreamBuilder_setSharingMode(builder, AAUDIO_SHARING_MODE_SHARED); - // Use the direction that was given at construction. - AAudioStreamBuilder_setDirection(builder, direction_); - // TODO(henrika): investigate performance using different performance modes. - AAudioStreamBuilder_setPerformanceMode(builder, - AAUDIO_PERFORMANCE_MODE_LOW_LATENCY); - // Given that WebRTC applications require low latency, our audio stream uses - // an asynchronous callback function to transfer data to and from the - // application. AAudio executes the callback in a higher-priority thread that - // has better performance. - AAudioStreamBuilder_setDataCallback(builder, DataCallback, this); - // Request that AAudio calls this functions if any error occurs on a callback - // thread. - AAudioStreamBuilder_setErrorCallback(builder, ErrorCallback, this); -} - -bool AAudioWrapper::OpenStream(AAudioStreamBuilder* builder) { - RTC_LOG(LS_INFO) << "OpenStream"; - RTC_DCHECK(builder); - AAudioStream* stream = nullptr; - RETURN_ON_ERROR(AAudioStreamBuilder_openStream(builder, &stream), false); - stream_ = stream; - LogStreamConfiguration(); - return true; -} - -void AAudioWrapper::CloseStream() { - RTC_LOG(LS_INFO) << "CloseStream"; - RTC_DCHECK(stream_); - LOG_ON_ERROR(AAudioStream_close(stream_)); - stream_ = nullptr; -} - -void AAudioWrapper::LogStreamConfiguration() { - RTC_DCHECK(stream_); - char ss_buf[1024]; - rtc::SimpleStringBuilder ss(ss_buf); - ss << "Stream Configuration: "; - ss << "sample rate=" << sample_rate() << ", channels=" << channel_count(); - ss << ", samples per frame=" << samples_per_frame(); - ss << ", format=" << FormatToString(format()); - ss << ", sharing mode=" << SharingModeToString(sharing_mode()); - ss << ", performance mode=" << PerformanceModeToString(performance_mode()); - ss << ", direction=" << DirectionToString(direction()); - ss << ", device id=" << AAudioStream_getDeviceId(stream_); - ss << ", frames per callback=" << frames_per_callback(); - RTC_LOG(LS_INFO) << ss.str(); -} - -void AAudioWrapper::LogStreamState() { - RTC_LOG(LS_INFO) << "AAudio stream state: " - << AAudio_convertStreamStateToText(stream_state()); -} - -bool AAudioWrapper::VerifyStreamConfiguration() { - RTC_LOG(LS_INFO) << "VerifyStreamConfiguration"; - RTC_DCHECK(stream_); - // TODO(henrika): should we verify device ID as well? 
- if (AAudioStream_getSampleRate(stream_) != audio_parameters().sample_rate()) { - RTC_LOG(LS_ERROR) << "Stream unable to use requested sample rate"; - return false; - } - if (AAudioStream_getChannelCount(stream_) != - static_cast(audio_parameters().channels())) { - RTC_LOG(LS_ERROR) << "Stream unable to use requested channel count"; - return false; - } - if (AAudioStream_getFormat(stream_) != AAUDIO_FORMAT_PCM_I16) { - RTC_LOG(LS_ERROR) << "Stream unable to use requested format"; - return false; - } - if (AAudioStream_getSharingMode(stream_) != AAUDIO_SHARING_MODE_SHARED) { - RTC_LOG(LS_ERROR) << "Stream unable to use requested sharing mode"; - return false; - } - if (AAudioStream_getPerformanceMode(stream_) != - AAUDIO_PERFORMANCE_MODE_LOW_LATENCY) { - RTC_LOG(LS_ERROR) << "Stream unable to use requested performance mode"; - return false; - } - if (AAudioStream_getDirection(stream_) != direction()) { - RTC_LOG(LS_ERROR) << "Stream direction could not be set"; - return false; - } - if (AAudioStream_getSamplesPerFrame(stream_) != - static_cast(audio_parameters().channels())) { - RTC_LOG(LS_ERROR) << "Invalid number of samples per frame"; - return false; - } - return true; -} - -bool AAudioWrapper::OptimizeBuffers() { - RTC_LOG(LS_INFO) << "OptimizeBuffers"; - RTC_DCHECK(stream_); - // Maximum number of frames that can be filled without blocking. - RTC_LOG(LS_INFO) << "max buffer capacity in frames: " - << buffer_capacity_in_frames(); - // Query the number of frames that the application should read or write at - // one time for optimal performance. - int32_t frames_per_burst = AAudioStream_getFramesPerBurst(stream_); - RTC_LOG(LS_INFO) << "frames per burst for optimal performance: " - << frames_per_burst; - frames_per_burst_ = frames_per_burst; - if (direction() == AAUDIO_DIRECTION_INPUT) { - // There is no point in calling setBufferSizeInFrames() for input streams - // since it has no effect on the performance (latency in this case). - return true; - } - // Set buffer size to same as burst size to guarantee lowest possible latency. - // This size might change for output streams if underruns are detected and - // automatic buffer adjustment is enabled. - AAudioStream_setBufferSizeInFrames(stream_, frames_per_burst); - int32_t buffer_size = AAudioStream_getBufferSizeInFrames(stream_); - if (buffer_size != frames_per_burst) { - RTC_LOG(LS_ERROR) << "Failed to use optimal buffer burst size"; - return false; - } - // Maximum number of frames that can be filled without blocking. - RTC_LOG(LS_INFO) << "buffer burst size in frames: " << buffer_size; - return true; -} - -} // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_device/android/aaudio_wrapper.h b/third_party/libwebrtc/modules/audio_device/android/aaudio_wrapper.h deleted file mode 100644 index 1f925b96d315..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/aaudio_wrapper.h +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_AUDIO_DEVICE_ANDROID_AAUDIO_WRAPPER_H_ -#define MODULES_AUDIO_DEVICE_ANDROID_AAUDIO_WRAPPER_H_ - -#include - -#include "api/sequence_checker.h" -#include "modules/audio_device/include/audio_device_defines.h" - -namespace webrtc { - -class AudioManager; - -// AAudio callback interface for audio transport to/from the AAudio stream. -// The interface also contains an error callback method for notifications of -// e.g. device changes. -class AAudioObserverInterface { - public: - // Audio data will be passed in our out of this function dependning on the - // direction of the audio stream. This callback function will be called on a - // real-time thread owned by AAudio. - virtual aaudio_data_callback_result_t OnDataCallback(void* audio_data, - int32_t num_frames) = 0; - // AAudio will call this functions if any error occurs on a callback thread. - // In response, this function could signal or launch another thread to reopen - // a stream on another device. Do not reopen the stream in this callback. - virtual void OnErrorCallback(aaudio_result_t error) = 0; - - protected: - virtual ~AAudioObserverInterface() {} -}; - -// Utility class which wraps the C-based AAudio API into a more handy C++ class -// where the underlying resources (AAudioStreamBuilder and AAudioStream) are -// encapsulated. User must set the direction (in or out) at construction since -// it defines the stream type and the direction of the data flow in the -// AAudioObserverInterface. -// -// AAudio is a new Android C API introduced in the Android O (26) release. -// It is designed for high-performance audio applications that require low -// latency. Applications communicate with AAudio by reading and writing data -// to streams. -// -// Each stream is attached to a single audio device, where each audio device -// has a unique ID. The ID can be used to bind an audio stream to a specific -// audio device but this implementation lets AAudio choose the default primary -// device instead (device selection takes place in Java). A stream can only -// move data in one direction. When a stream is opened, Android checks to -// ensure that the audio device and stream direction agree. -class AAudioWrapper { - public: - AAudioWrapper(AudioManager* audio_manager, - aaudio_direction_t direction, - AAudioObserverInterface* observer); - ~AAudioWrapper(); - - bool Init(); - bool Start(); - bool Stop(); - - // For output streams: estimates latency between writing an audio frame to - // the output stream and the time that same frame is played out on the output - // audio device. - // For input streams: estimates latency between reading an audio frame from - // the input stream and the time that same frame was recorded on the input - // audio device. - double EstimateLatencyMillis() const; - - // Increases the internal buffer size for output streams by one burst size to - // reduce the risk of underruns. Can be used while a stream is active. - bool IncreaseOutputBufferSize(); - - // Drains the recording stream of any existing data by reading from it until - // it's empty. Can be used to clear out old data before starting a new audio - // session. 
- void ClearInputStream(void* audio_data, int32_t num_frames); - - AAudioObserverInterface* observer() const; - AudioParameters audio_parameters() const; - int32_t samples_per_frame() const; - int32_t buffer_size_in_frames() const; - int32_t buffer_capacity_in_frames() const; - int32_t device_id() const; - int32_t xrun_count() const; - int32_t format() const; - int32_t sample_rate() const; - int32_t channel_count() const; - int32_t frames_per_callback() const; - aaudio_sharing_mode_t sharing_mode() const; - aaudio_performance_mode_t performance_mode() const; - aaudio_stream_state_t stream_state() const; - int64_t frames_written() const; - int64_t frames_read() const; - aaudio_direction_t direction() const { return direction_; } - AAudioStream* stream() const { return stream_; } - int32_t frames_per_burst() const { return frames_per_burst_; } - - private: - void SetStreamConfiguration(AAudioStreamBuilder* builder); - bool OpenStream(AAudioStreamBuilder* builder); - void CloseStream(); - void LogStreamConfiguration(); - void LogStreamState(); - bool VerifyStreamConfiguration(); - bool OptimizeBuffers(); - - SequenceChecker thread_checker_; - SequenceChecker aaudio_thread_checker_; - AudioParameters audio_parameters_; - const aaudio_direction_t direction_; - AAudioObserverInterface* observer_ = nullptr; - AAudioStream* stream_ = nullptr; - int32_t frames_per_burst_ = 0; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_DEVICE_ANDROID_AAUDIO_WRAPPER_H_ diff --git a/third_party/libwebrtc/modules/audio_device/android/audio_common.h b/third_party/libwebrtc/modules/audio_device/android/audio_common.h deleted file mode 100644 index 81ea733aa49c..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/audio_common.h +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_DEVICE_ANDROID_AUDIO_COMMON_H_ -#define MODULES_AUDIO_DEVICE_ANDROID_AUDIO_COMMON_H_ - -namespace webrtc { - -const int kDefaultSampleRate = 44100; -// Delay estimates for the two different supported modes. These values are based -// on real-time round-trip delay estimates on a large set of devices and they -// are lower bounds since the filter length is 128 ms, so the AEC works for -// delays in the range [50, ~170] ms and [150, ~270] ms. Note that, in most -// cases, the lowest delay estimate will not be utilized since devices that -// support low-latency output audio often supports HW AEC as well. -const int kLowLatencyModeDelayEstimateInMilliseconds = 50; -const int kHighLatencyModeDelayEstimateInMilliseconds = 150; - -} // namespace webrtc - -#endif // MODULES_AUDIO_DEVICE_ANDROID_AUDIO_COMMON_H_ diff --git a/third_party/libwebrtc/modules/audio_device/android/audio_device_template.h b/third_party/libwebrtc/modules/audio_device/android/audio_device_template.h deleted file mode 100644 index 999c5878c6d0..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/audio_device_template.h +++ /dev/null @@ -1,435 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_DEVICE_ANDROID_AUDIO_DEVICE_TEMPLATE_H_ -#define MODULES_AUDIO_DEVICE_ANDROID_AUDIO_DEVICE_TEMPLATE_H_ - -#include "api/sequence_checker.h" -#include "modules/audio_device/android/audio_manager.h" -#include "modules/audio_device/audio_device_generic.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" - -namespace webrtc { - -// InputType/OutputType can be any class that implements the capturing/rendering -// part of the AudioDeviceGeneric API. -// Construction and destruction must be done on one and the same thread. Each -// internal implementation of InputType and OutputType will RTC_DCHECK if that -// is not the case. All implemented methods must also be called on the same -// thread. See comments in each InputType/OutputType class for more info. -// It is possible to call the two static methods (SetAndroidAudioDeviceObjects -// and ClearAndroidAudioDeviceObjects) from a different thread but both will -// RTC_CHECK that the calling thread is attached to a Java VM. - -template -class AudioDeviceTemplate : public AudioDeviceGeneric { - public: - AudioDeviceTemplate(AudioDeviceModule::AudioLayer audio_layer, - AudioManager* audio_manager) - : audio_layer_(audio_layer), - audio_manager_(audio_manager), - output_(audio_manager_), - input_(audio_manager_), - initialized_(false) { - RTC_DLOG(LS_INFO) << __FUNCTION__; - RTC_CHECK(audio_manager); - audio_manager_->SetActiveAudioLayer(audio_layer); - } - - virtual ~AudioDeviceTemplate() { RTC_LOG(LS_INFO) << __FUNCTION__; } - - int32_t ActiveAudioLayer( - AudioDeviceModule::AudioLayer& audioLayer) const override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - audioLayer = audio_layer_; - return 0; - } - - InitStatus Init() override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(!initialized_); - if (!audio_manager_->Init()) { - return InitStatus::OTHER_ERROR; - } - if (output_.Init() != 0) { - audio_manager_->Close(); - return InitStatus::PLAYOUT_ERROR; - } - if (input_.Init() != 0) { - output_.Terminate(); - audio_manager_->Close(); - return InitStatus::RECORDING_ERROR; - } - initialized_ = true; - return InitStatus::OK; - } - - int32_t Terminate() override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - RTC_DCHECK(thread_checker_.IsCurrent()); - int32_t err = input_.Terminate(); - err |= output_.Terminate(); - err |= !audio_manager_->Close(); - initialized_ = false; - RTC_DCHECK_EQ(err, 0); - return err; - } - - bool Initialized() const override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - RTC_DCHECK(thread_checker_.IsCurrent()); - return initialized_; - } - - int16_t PlayoutDevices() override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - return 1; - } - - int16_t RecordingDevices() override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - return 1; - } - - int32_t PlayoutDeviceName(uint16_t index, - char name[kAdmMaxDeviceNameSize], - char guid[kAdmMaxGuidSize]) override { - RTC_CHECK_NOTREACHED(); - } - - int32_t RecordingDeviceName(uint16_t index, - char name[kAdmMaxDeviceNameSize], - char guid[kAdmMaxGuidSize]) override { - RTC_CHECK_NOTREACHED(); - } - - int32_t SetPlayoutDevice(uint16_t index) override { - // OK to use but it has no effect currently 
since device selection is - // done using Andoid APIs instead. - RTC_DLOG(LS_INFO) << __FUNCTION__; - return 0; - } - - int32_t SetPlayoutDevice( - AudioDeviceModule::WindowsDeviceType device) override { - RTC_CHECK_NOTREACHED(); - } - - int32_t SetRecordingDevice(uint16_t index) override { - // OK to use but it has no effect currently since device selection is - // done using Andoid APIs instead. - RTC_DLOG(LS_INFO) << __FUNCTION__; - return 0; - } - - int32_t SetRecordingDevice( - AudioDeviceModule::WindowsDeviceType device) override { - RTC_CHECK_NOTREACHED(); - } - - int32_t PlayoutIsAvailable(bool& available) override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - available = true; - return 0; - } - - int32_t InitPlayout() override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - return output_.InitPlayout(); - } - - bool PlayoutIsInitialized() const override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - return output_.PlayoutIsInitialized(); - } - - int32_t RecordingIsAvailable(bool& available) override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - available = true; - return 0; - } - - int32_t InitRecording() override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - return input_.InitRecording(); - } - - bool RecordingIsInitialized() const override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - return input_.RecordingIsInitialized(); - } - - int32_t StartPlayout() override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - if (!audio_manager_->IsCommunicationModeEnabled()) { - RTC_LOG(LS_WARNING) - << "The application should use MODE_IN_COMMUNICATION audio mode!"; - } - return output_.StartPlayout(); - } - - int32_t StopPlayout() override { - // Avoid using audio manger (JNI/Java cost) if playout was inactive. - if (!Playing()) - return 0; - RTC_DLOG(LS_INFO) << __FUNCTION__; - int32_t err = output_.StopPlayout(); - return err; - } - - bool Playing() const override { - RTC_LOG(LS_INFO) << __FUNCTION__; - return output_.Playing(); - } - - int32_t StartRecording() override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - if (!audio_manager_->IsCommunicationModeEnabled()) { - RTC_LOG(LS_WARNING) - << "The application should use MODE_IN_COMMUNICATION audio mode!"; - } - return input_.StartRecording(); - } - - int32_t StopRecording() override { - // Avoid using audio manger (JNI/Java cost) if recording was inactive. 
- RTC_DLOG(LS_INFO) << __FUNCTION__; - if (!Recording()) - return 0; - int32_t err = input_.StopRecording(); - return err; - } - - bool Recording() const override { return input_.Recording(); } - - int32_t InitSpeaker() override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - return 0; - } - - bool SpeakerIsInitialized() const override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - return true; - } - - int32_t InitMicrophone() override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - return 0; - } - - bool MicrophoneIsInitialized() const override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - return true; - } - - int32_t SpeakerVolumeIsAvailable(bool& available) override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - return output_.SpeakerVolumeIsAvailable(available); - } - - int32_t SetSpeakerVolume(uint32_t volume) override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - return output_.SetSpeakerVolume(volume); - } - - int32_t SpeakerVolume(uint32_t& volume) const override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - return output_.SpeakerVolume(volume); - } - - int32_t MaxSpeakerVolume(uint32_t& maxVolume) const override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - return output_.MaxSpeakerVolume(maxVolume); - } - - int32_t MinSpeakerVolume(uint32_t& minVolume) const override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - return output_.MinSpeakerVolume(minVolume); - } - - int32_t MicrophoneVolumeIsAvailable(bool& available) override { - available = false; - return -1; - } - - int32_t SetMicrophoneVolume(uint32_t volume) override { - RTC_CHECK_NOTREACHED(); - } - - int32_t MicrophoneVolume(uint32_t& volume) const override { - RTC_CHECK_NOTREACHED(); - return -1; - } - - int32_t MaxMicrophoneVolume(uint32_t& maxVolume) const override { - RTC_CHECK_NOTREACHED(); - } - - int32_t MinMicrophoneVolume(uint32_t& minVolume) const override { - RTC_CHECK_NOTREACHED(); - } - - int32_t SpeakerMuteIsAvailable(bool& available) override { - RTC_CHECK_NOTREACHED(); - } - - int32_t SetSpeakerMute(bool enable) override { RTC_CHECK_NOTREACHED(); } - - int32_t SpeakerMute(bool& enabled) const override { RTC_CHECK_NOTREACHED(); } - - int32_t MicrophoneMuteIsAvailable(bool& available) override { - RTC_CHECK_NOTREACHED(); - } - - int32_t SetMicrophoneMute(bool enable) override { RTC_CHECK_NOTREACHED(); } - - int32_t MicrophoneMute(bool& enabled) const override { - RTC_CHECK_NOTREACHED(); - } - - // Returns true if the audio manager has been configured to support stereo - // and false otherwised. Default is mono. - int32_t StereoPlayoutIsAvailable(bool& available) override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - available = audio_manager_->IsStereoPlayoutSupported(); - return 0; - } - - int32_t SetStereoPlayout(bool enable) override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - bool available = audio_manager_->IsStereoPlayoutSupported(); - // Android does not support changes between mono and stero on the fly. - // Instead, the native audio layer is configured via the audio manager - // to either support mono or stereo. It is allowed to call this method - // if that same state is not modified. - return (enable == available) ? 
0 : -1; - } - - int32_t StereoPlayout(bool& enabled) const override { - enabled = audio_manager_->IsStereoPlayoutSupported(); - return 0; - } - - int32_t StereoRecordingIsAvailable(bool& available) override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - available = audio_manager_->IsStereoRecordSupported(); - return 0; - } - - int32_t SetStereoRecording(bool enable) override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - bool available = audio_manager_->IsStereoRecordSupported(); - // Android does not support changes between mono and stero on the fly. - // Instead, the native audio layer is configured via the audio manager - // to either support mono or stereo. It is allowed to call this method - // if that same state is not modified. - return (enable == available) ? 0 : -1; - } - - int32_t StereoRecording(bool& enabled) const override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - enabled = audio_manager_->IsStereoRecordSupported(); - return 0; - } - - int32_t PlayoutDelay(uint16_t& delay_ms) const override { - // Best guess we can do is to use half of the estimated total delay. - delay_ms = audio_manager_->GetDelayEstimateInMilliseconds() / 2; - RTC_DCHECK_GT(delay_ms, 0); - return 0; - } - - void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - output_.AttachAudioBuffer(audioBuffer); - input_.AttachAudioBuffer(audioBuffer); - } - - // Returns true if the device both supports built in AEC and the device - // is not blacklisted. - // Currently, if OpenSL ES is used in both directions, this method will still - // report the correct value and it has the correct effect. As an example: - // a device supports built in AEC and this method returns true. Libjingle - // will then disable the WebRTC based AEC and that will work for all devices - // (mainly Nexus) even when OpenSL ES is used for input since our current - // implementation will enable built-in AEC by default also for OpenSL ES. - // The only "bad" thing that happens today is that when Libjingle calls - // OpenSLESRecorder::EnableBuiltInAEC() it will not have any real effect and - // a "Not Implemented" log will be filed. This non-perfect state will remain - // until I have added full support for audio effects based on OpenSL ES APIs. - bool BuiltInAECIsAvailable() const override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - return audio_manager_->IsAcousticEchoCancelerSupported(); - } - - // TODO(henrika): add implementation for OpenSL ES based audio as well. - int32_t EnableBuiltInAEC(bool enable) override { - RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")"; - RTC_CHECK(BuiltInAECIsAvailable()) << "HW AEC is not available"; - return input_.EnableBuiltInAEC(enable); - } - - // Returns true if the device both supports built in AGC and the device - // is not blacklisted. - // TODO(henrika): add implementation for OpenSL ES based audio as well. - // In addition, see comments for BuiltInAECIsAvailable(). - bool BuiltInAGCIsAvailable() const override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - return audio_manager_->IsAutomaticGainControlSupported(); - } - - // TODO(henrika): add implementation for OpenSL ES based audio as well. - int32_t EnableBuiltInAGC(bool enable) override { - RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")"; - RTC_CHECK(BuiltInAGCIsAvailable()) << "HW AGC is not available"; - return input_.EnableBuiltInAGC(enable); - } - - // Returns true if the device both supports built in NS and the device - // is not blacklisted. 
- // TODO(henrika): add implementation for OpenSL ES based audio as well. - // In addition, see comments for BuiltInAECIsAvailable(). - bool BuiltInNSIsAvailable() const override { - RTC_DLOG(LS_INFO) << __FUNCTION__; - return audio_manager_->IsNoiseSuppressorSupported(); - } - - // TODO(henrika): add implementation for OpenSL ES based audio as well. - int32_t EnableBuiltInNS(bool enable) override { - RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")"; - RTC_CHECK(BuiltInNSIsAvailable()) << "HW NS is not available"; - return input_.EnableBuiltInNS(enable); - } - - private: - SequenceChecker thread_checker_; - - // Local copy of the audio layer set during construction of the - // AudioDeviceModuleImpl instance. Read only value. - const AudioDeviceModule::AudioLayer audio_layer_; - - // Non-owning raw pointer to AudioManager instance given to use at - // construction. The real object is owned by AudioDeviceModuleImpl and the - // life time is the same as that of the AudioDeviceModuleImpl, hence there - // is no risk of reading a NULL pointer at any time in this class. - AudioManager* const audio_manager_; - - OutputType output_; - - InputType input_; - - bool initialized_; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_DEVICE_ANDROID_AUDIO_DEVICE_TEMPLATE_H_ diff --git a/third_party/libwebrtc/modules/audio_device/android/audio_device_unittest.cc b/third_party/libwebrtc/modules/audio_device/android/audio_device_unittest.cc deleted file mode 100644 index d9d52cdcdce4..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/audio_device_unittest.cc +++ /dev/null @@ -1,1018 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_device/include/audio_device.h" - -#include -#include -#include -#include -#include -#include -#include - -#include "absl/strings/string_view.h" -#include "api/scoped_refptr.h" -#include "api/task_queue/default_task_queue_factory.h" -#include "api/task_queue/task_queue_factory.h" -#include "modules/audio_device/android/audio_common.h" -#include "modules/audio_device/android/audio_manager.h" -#include "modules/audio_device/android/build_info.h" -#include "modules/audio_device/android/ensure_initialized.h" -#include "modules/audio_device/audio_device_impl.h" -#include "modules/audio_device/include/mock_audio_transport.h" -#include "rtc_base/arraysize.h" -#include "rtc_base/event.h" -#include "rtc_base/synchronization/mutex.h" -#include "rtc_base/time_utils.h" -#include "test/gmock.h" -#include "test/gtest.h" -#include "test/testsupport/file_utils.h" - -using std::cout; -using std::endl; -using ::testing::_; -using ::testing::AtLeast; -using ::testing::Gt; -using ::testing::Invoke; -using ::testing::NiceMock; -using ::testing::NotNull; -using ::testing::Return; - -// #define ENABLE_DEBUG_PRINTF -#ifdef ENABLE_DEBUG_PRINTF -#define PRINTD(...) fprintf(stderr, __VA_ARGS__); -#else -#define PRINTD(...) ((void)0) -#endif -#define PRINT(...) fprintf(stderr, __VA_ARGS__); - -namespace webrtc { - -// Number of callbacks (input or output) the tests waits for before we set -// an event indicating that the test was OK. 
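// (Editorial note, not part of the original test file: with the 10 ms packet
// size assumed throughout this file, kNumCallbacksPerSecond is 100, so the
// 10 callbacks below correspond to roughly 0.1 s of audio per direction
// before the test event is signalled.)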
-static const size_t kNumCallbacks = 10; -// Max amount of time we wait for an event to be set while counting callbacks. -static constexpr TimeDelta kTestTimeOut = TimeDelta::Seconds(10); -// Average number of audio callbacks per second assuming 10ms packet size. -static const size_t kNumCallbacksPerSecond = 100; -// Play out a test file during this time (unit is in seconds). -static const int kFilePlayTimeInSec = 5; -static const size_t kBitsPerSample = 16; -static const size_t kBytesPerSample = kBitsPerSample / 8; -// Run the full-duplex test during this time (unit is in seconds). -// Note that first `kNumIgnoreFirstCallbacks` are ignored. -static constexpr TimeDelta kFullDuplexTime = TimeDelta::Seconds(5); -// Wait for the callback sequence to stabilize by ignoring this amount of the -// initial callbacks (avoids initial FIFO access). -// Only used in the RunPlayoutAndRecordingInFullDuplex test. -static const size_t kNumIgnoreFirstCallbacks = 50; -// Sets the number of impulses per second in the latency test. -static const int kImpulseFrequencyInHz = 1; -// Length of round-trip latency measurements. Number of transmitted impulses -// is kImpulseFrequencyInHz * kMeasureLatencyTime - 1. -static constexpr TimeDelta kMeasureLatencyTime = TimeDelta::Seconds(11); -// Utilized in round-trip latency measurements to avoid capturing noise samples. -static const int kImpulseThreshold = 1000; -static const char kTag[] = "[..........] "; - -enum TransportType { - kPlayout = 0x1, - kRecording = 0x2, -}; - -// Interface for processing the audio stream. Real implementations can e.g. -// run audio in loopback, read audio from a file or perform latency -// measurements. -class AudioStreamInterface { - public: - virtual void Write(const void* source, size_t num_frames) = 0; - virtual void Read(void* destination, size_t num_frames) = 0; - - protected: - virtual ~AudioStreamInterface() {} -}; - -// Reads audio samples from a PCM file where the file is stored in memory at -// construction. -class FileAudioStream : public AudioStreamInterface { - public: - FileAudioStream(size_t num_callbacks, - absl::string_view file_name, - int sample_rate) - : file_size_in_bytes_(0), sample_rate_(sample_rate), file_pos_(0) { - file_size_in_bytes_ = test::GetFileSize(file_name); - sample_rate_ = sample_rate; - EXPECT_GE(file_size_in_callbacks(), num_callbacks) - << "Size of test file is not large enough to last during the test."; - const size_t num_16bit_samples = - test::GetFileSize(file_name) / kBytesPerSample; - file_.reset(new int16_t[num_16bit_samples]); - FILE* audio_file = fopen(std::string(file_name).c_str(), "rb"); - EXPECT_NE(audio_file, nullptr); - size_t num_samples_read = - fread(file_.get(), sizeof(int16_t), num_16bit_samples, audio_file); - EXPECT_EQ(num_samples_read, num_16bit_samples); - fclose(audio_file); - } - - // AudioStreamInterface::Write() is not implemented. - void Write(const void* source, size_t num_frames) override {} - - // Read samples from file stored in memory (at construction) and copy - // `num_frames` (<=> 10ms) to the `destination` byte buffer. 
- void Read(void* destination, size_t num_frames) override { - memcpy(destination, static_cast(&file_[file_pos_]), - num_frames * sizeof(int16_t)); - file_pos_ += num_frames; - } - - int file_size_in_seconds() const { - return static_cast(file_size_in_bytes_ / - (kBytesPerSample * sample_rate_)); - } - size_t file_size_in_callbacks() const { - return file_size_in_seconds() * kNumCallbacksPerSecond; - } - - private: - size_t file_size_in_bytes_; - int sample_rate_; - std::unique_ptr file_; - size_t file_pos_; -}; - -// Simple first in first out (FIFO) class that wraps a list of 16-bit audio -// buffers of fixed size and allows Write and Read operations. The idea is to -// store recorded audio buffers (using Write) and then read (using Read) these -// stored buffers with as short delay as possible when the audio layer needs -// data to play out. The number of buffers in the FIFO will stabilize under -// normal conditions since there will be a balance between Write and Read calls. -// The container is a std::list container and access is protected with a lock -// since both sides (playout and recording) are driven by its own thread. -class FifoAudioStream : public AudioStreamInterface { - public: - explicit FifoAudioStream(size_t frames_per_buffer) - : frames_per_buffer_(frames_per_buffer), - bytes_per_buffer_(frames_per_buffer_ * sizeof(int16_t)), - fifo_(new AudioBufferList), - largest_size_(0), - total_written_elements_(0), - write_count_(0) { - EXPECT_NE(fifo_.get(), nullptr); - } - - ~FifoAudioStream() { Flush(); } - - // Allocate new memory, copy `num_frames` samples from `source` into memory - // and add pointer to the memory location to end of the list. - // Increases the size of the FIFO by one element. - void Write(const void* source, size_t num_frames) override { - ASSERT_EQ(num_frames, frames_per_buffer_); - PRINTD("+"); - if (write_count_++ < kNumIgnoreFirstCallbacks) { - return; - } - int16_t* memory = new int16_t[frames_per_buffer_]; - memcpy(static_cast(&memory[0]), source, bytes_per_buffer_); - MutexLock lock(&lock_); - fifo_->push_back(memory); - const size_t size = fifo_->size(); - if (size > largest_size_) { - largest_size_ = size; - PRINTD("(%zu)", largest_size_); - } - total_written_elements_ += size; - } - - // Read pointer to data buffer from front of list, copy `num_frames` of stored - // data into `destination` and delete the utilized memory allocation. - // Decreases the size of the FIFO by one element. - void Read(void* destination, size_t num_frames) override { - ASSERT_EQ(num_frames, frames_per_buffer_); - PRINTD("-"); - MutexLock lock(&lock_); - if (fifo_->empty()) { - memset(destination, 0, bytes_per_buffer_); - } else { - int16_t* memory = fifo_->front(); - fifo_->pop_front(); - memcpy(destination, static_cast(&memory[0]), bytes_per_buffer_); - delete memory; - } - } - - size_t size() const { return fifo_->size(); } - - size_t largest_size() const { return largest_size_; } - - size_t average_size() const { - return (total_written_elements_ == 0) - ? 
0.0 - : 0.5 + static_cast(total_written_elements_) / - (write_count_ - kNumIgnoreFirstCallbacks); - } - - private: - void Flush() { - for (auto it = fifo_->begin(); it != fifo_->end(); ++it) { - delete *it; - } - fifo_->clear(); - } - - using AudioBufferList = std::list; - Mutex lock_; - const size_t frames_per_buffer_; - const size_t bytes_per_buffer_; - std::unique_ptr fifo_; - size_t largest_size_; - size_t total_written_elements_; - size_t write_count_; -}; - -// Inserts periodic impulses and measures the latency between the time of -// transmission and time of receiving the same impulse. -// Usage requires a special hardware called Audio Loopback Dongle. -// See http://source.android.com/devices/audio/loopback.html for details. -class LatencyMeasuringAudioStream : public AudioStreamInterface { - public: - explicit LatencyMeasuringAudioStream(size_t frames_per_buffer) - : frames_per_buffer_(frames_per_buffer), - bytes_per_buffer_(frames_per_buffer_ * sizeof(int16_t)), - play_count_(0), - rec_count_(0), - pulse_time_(0) {} - - // Insert periodic impulses in first two samples of `destination`. - void Read(void* destination, size_t num_frames) override { - ASSERT_EQ(num_frames, frames_per_buffer_); - if (play_count_ == 0) { - PRINT("["); - } - play_count_++; - memset(destination, 0, bytes_per_buffer_); - if (play_count_ % (kNumCallbacksPerSecond / kImpulseFrequencyInHz) == 0) { - if (pulse_time_ == 0) { - pulse_time_ = rtc::TimeMillis(); - } - PRINT("."); - const int16_t impulse = std::numeric_limits::max(); - int16_t* ptr16 = static_cast(destination); - for (size_t i = 0; i < 2; ++i) { - ptr16[i] = impulse; - } - } - } - - // Detect received impulses in `source`, derive time between transmission and - // detection and add the calculated delay to list of latencies. - void Write(const void* source, size_t num_frames) override { - ASSERT_EQ(num_frames, frames_per_buffer_); - rec_count_++; - if (pulse_time_ == 0) { - // Avoid detection of new impulse response until a new impulse has - // been transmitted (sets `pulse_time_` to value larger than zero). - return; - } - const int16_t* ptr16 = static_cast(source); - std::vector vec(ptr16, ptr16 + num_frames); - // Find max value in the audio buffer. - int max = *std::max_element(vec.begin(), vec.end()); - // Find index (element position in vector) of the max element. - int index_of_max = - std::distance(vec.begin(), std::find(vec.begin(), vec.end(), max)); - if (max > kImpulseThreshold) { - PRINTD("(%d,%d)", max, index_of_max); - int64_t now_time = rtc::TimeMillis(); - int extra_delay = IndexToMilliseconds(static_cast(index_of_max)); - PRINTD("[%d]", static_cast(now_time - pulse_time_)); - PRINTD("[%d]", extra_delay); - // Total latency is the difference between transmit time and detection - // tome plus the extra delay within the buffer in which we detected the - // received impulse. It is transmitted at sample 0 but can be received - // at sample N where N > 0. The term `extra_delay` accounts for N and it - // is a value between 0 and 10ms. 
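// (Editorial worked example with hypothetical numbers, not part of the
// original file: with 10 ms buffers of 480 frames at 48 kHz, an impulse
// detected at index 240 gives IndexToMilliseconds(240) =
// int(10.0 * 240 / 480 + 0.5) = 5 ms of extra delay; if the impulse was
// transmitted 53 ms earlier, the recorded round-trip latency is
// 53 + 5 = 58 ms.)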
- latencies_.push_back(now_time - pulse_time_ + extra_delay); - pulse_time_ = 0; - } else { - PRINTD("-"); - } - } - - size_t num_latency_values() const { return latencies_.size(); } - - int min_latency() const { - if (latencies_.empty()) - return 0; - return *std::min_element(latencies_.begin(), latencies_.end()); - } - - int max_latency() const { - if (latencies_.empty()) - return 0; - return *std::max_element(latencies_.begin(), latencies_.end()); - } - - int average_latency() const { - if (latencies_.empty()) - return 0; - return 0.5 + static_cast( - std::accumulate(latencies_.begin(), latencies_.end(), 0)) / - latencies_.size(); - } - - void PrintResults() const { - PRINT("] "); - for (auto it = latencies_.begin(); it != latencies_.end(); ++it) { - PRINT("%d ", *it); - } - PRINT("\n"); - PRINT("%s[min, max, avg]=[%d, %d, %d] ms\n", kTag, min_latency(), - max_latency(), average_latency()); - } - - int IndexToMilliseconds(double index) const { - return static_cast(10.0 * (index / frames_per_buffer_) + 0.5); - } - - private: - const size_t frames_per_buffer_; - const size_t bytes_per_buffer_; - size_t play_count_; - size_t rec_count_; - int64_t pulse_time_; - std::vector latencies_; -}; - -// Mocks the AudioTransport object and proxies actions for the two callbacks -// (RecordedDataIsAvailable and NeedMorePlayData) to different implementations -// of AudioStreamInterface. -class MockAudioTransportAndroid : public test::MockAudioTransport { - public: - explicit MockAudioTransportAndroid(int type) - : num_callbacks_(0), - type_(type), - play_count_(0), - rec_count_(0), - audio_stream_(nullptr) {} - - virtual ~MockAudioTransportAndroid() {} - - // Set default actions of the mock object. We are delegating to fake - // implementations (of AudioStreamInterface) here. - void HandleCallbacks(rtc::Event* test_is_done, - AudioStreamInterface* audio_stream, - int num_callbacks) { - test_is_done_ = test_is_done; - audio_stream_ = audio_stream; - num_callbacks_ = num_callbacks; - if (play_mode()) { - ON_CALL(*this, NeedMorePlayData(_, _, _, _, _, _, _, _)) - .WillByDefault( - Invoke(this, &MockAudioTransportAndroid::RealNeedMorePlayData)); - } - if (rec_mode()) { - ON_CALL(*this, RecordedDataIsAvailable(_, _, _, _, _, _, _, _, _, _)) - .WillByDefault(Invoke( - this, &MockAudioTransportAndroid::RealRecordedDataIsAvailable)); - } - } - - int32_t RealRecordedDataIsAvailable(const void* audioSamples, - const size_t nSamples, - const size_t nBytesPerSample, - const size_t nChannels, - const uint32_t samplesPerSec, - const uint32_t totalDelayMS, - const int32_t clockDrift, - const uint32_t currentMicLevel, - const bool keyPressed, - uint32_t& newMicLevel) { // NOLINT - EXPECT_TRUE(rec_mode()) << "No test is expecting these callbacks."; - rec_count_++; - // Process the recorded audio stream if an AudioStreamInterface - // implementation exists. - if (audio_stream_) { - audio_stream_->Write(audioSamples, nSamples); - } - if (ReceivedEnoughCallbacks()) { - test_is_done_->Set(); - } - return 0; - } - - int32_t RealNeedMorePlayData(const size_t nSamples, - const size_t nBytesPerSample, - const size_t nChannels, - const uint32_t samplesPerSec, - void* audioSamples, - size_t& nSamplesOut, // NOLINT - int64_t* elapsed_time_ms, - int64_t* ntp_time_ms) { - EXPECT_TRUE(play_mode()) << "No test is expecting these callbacks."; - play_count_++; - nSamplesOut = nSamples; - // Read (possibly processed) audio stream samples to be played out if an - // AudioStreamInterface implementation exists. 
- if (audio_stream_) { - audio_stream_->Read(audioSamples, nSamples); - } - if (ReceivedEnoughCallbacks()) { - test_is_done_->Set(); - } - return 0; - } - - bool ReceivedEnoughCallbacks() { - bool recording_done = false; - if (rec_mode()) - recording_done = rec_count_ >= num_callbacks_; - else - recording_done = true; - - bool playout_done = false; - if (play_mode()) - playout_done = play_count_ >= num_callbacks_; - else - playout_done = true; - - return recording_done && playout_done; - } - - bool play_mode() const { return type_ & kPlayout; } - bool rec_mode() const { return type_ & kRecording; } - - private: - rtc::Event* test_is_done_; - size_t num_callbacks_; - int type_; - size_t play_count_; - size_t rec_count_; - AudioStreamInterface* audio_stream_; - std::unique_ptr latency_audio_stream_; -}; - -// AudioDeviceTest test fixture. -class AudioDeviceTest : public ::testing::Test { - protected: - AudioDeviceTest() : task_queue_factory_(CreateDefaultTaskQueueFactory()) { - // One-time initialization of JVM and application context. Ensures that we - // can do calls between C++ and Java. Initializes both Java and OpenSL ES - // implementations. - webrtc::audiodevicemodule::EnsureInitialized(); - // Creates an audio device using a default audio layer. - audio_device_ = CreateAudioDevice(AudioDeviceModule::kPlatformDefaultAudio); - EXPECT_NE(audio_device_.get(), nullptr); - EXPECT_EQ(0, audio_device_->Init()); - playout_parameters_ = audio_manager()->GetPlayoutAudioParameters(); - record_parameters_ = audio_manager()->GetRecordAudioParameters(); - build_info_.reset(new BuildInfo()); - } - virtual ~AudioDeviceTest() { EXPECT_EQ(0, audio_device_->Terminate()); } - - int playout_sample_rate() const { return playout_parameters_.sample_rate(); } - int record_sample_rate() const { return record_parameters_.sample_rate(); } - size_t playout_channels() const { return playout_parameters_.channels(); } - size_t record_channels() const { return record_parameters_.channels(); } - size_t playout_frames_per_10ms_buffer() const { - return playout_parameters_.frames_per_10ms_buffer(); - } - size_t record_frames_per_10ms_buffer() const { - return record_parameters_.frames_per_10ms_buffer(); - } - - int total_delay_ms() const { - return audio_manager()->GetDelayEstimateInMilliseconds(); - } - - rtc::scoped_refptr audio_device() const { - return audio_device_; - } - - AudioDeviceModuleImpl* audio_device_impl() const { - return static_cast(audio_device_.get()); - } - - AudioManager* audio_manager() const { - return audio_device_impl()->GetAndroidAudioManagerForTest(); - } - - AudioManager* GetAudioManager(AudioDeviceModule* adm) const { - return static_cast(adm) - ->GetAndroidAudioManagerForTest(); - } - - AudioDeviceBuffer* audio_device_buffer() const { - return audio_device_impl()->GetAudioDeviceBuffer(); - } - - rtc::scoped_refptr CreateAudioDevice( - AudioDeviceModule::AudioLayer audio_layer) { - rtc::scoped_refptr module( - AudioDeviceModule::Create(audio_layer, task_queue_factory_.get())); - return module; - } - - // Returns file name relative to the resource root given a sample rate. 
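// (Editorial example, not part of the original file: for sample_rate 48000
// the name passed to ResourcePath() is "audio_device/audio_short48", so the
// resolved file is expected to be
// <resource root>/audio_device/audio_short48.pcm.)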
- std::string GetFileName(int sample_rate) { - EXPECT_TRUE(sample_rate == 48000 || sample_rate == 44100); - char fname[64]; - snprintf(fname, sizeof(fname), "audio_device/audio_short%d", - sample_rate / 1000); - std::string file_name(webrtc::test::ResourcePath(fname, "pcm")); - EXPECT_TRUE(test::FileExists(file_name)); -#ifdef ENABLE_PRINTF - PRINT("file name: %s\n", file_name.c_str()); - const size_t bytes = test::GetFileSize(file_name); - PRINT("file size: %zu [bytes]\n", bytes); - PRINT("file size: %zu [samples]\n", bytes / kBytesPerSample); - const int seconds = - static_cast(bytes / (sample_rate * kBytesPerSample)); - PRINT("file size: %d [secs]\n", seconds); - PRINT("file size: %zu [callbacks]\n", seconds * kNumCallbacksPerSecond); -#endif - return file_name; - } - - AudioDeviceModule::AudioLayer GetActiveAudioLayer() const { - AudioDeviceModule::AudioLayer audio_layer; - EXPECT_EQ(0, audio_device()->ActiveAudioLayer(&audio_layer)); - return audio_layer; - } - - int TestDelayOnAudioLayer( - const AudioDeviceModule::AudioLayer& layer_to_test) { - rtc::scoped_refptr audio_device; - audio_device = CreateAudioDevice(layer_to_test); - EXPECT_NE(audio_device.get(), nullptr); - AudioManager* audio_manager = GetAudioManager(audio_device.get()); - EXPECT_NE(audio_manager, nullptr); - return audio_manager->GetDelayEstimateInMilliseconds(); - } - - AudioDeviceModule::AudioLayer TestActiveAudioLayer( - const AudioDeviceModule::AudioLayer& layer_to_test) { - rtc::scoped_refptr audio_device; - audio_device = CreateAudioDevice(layer_to_test); - EXPECT_NE(audio_device.get(), nullptr); - AudioDeviceModule::AudioLayer active; - EXPECT_EQ(0, audio_device->ActiveAudioLayer(&active)); - return active; - } - - bool DisableTestForThisDevice(absl::string_view model) { - return (build_info_->GetDeviceModel() == model); - } - - // Volume control is currently only supported for the Java output audio layer. - // For OpenSL ES, the internal stream volume is always on max level and there - // is no need for this test to set it to max. 
- bool AudioLayerSupportsVolumeControl() const { - return GetActiveAudioLayer() == AudioDeviceModule::kAndroidJavaAudio; - } - - void SetMaxPlayoutVolume() { - if (!AudioLayerSupportsVolumeControl()) - return; - uint32_t max_volume; - EXPECT_EQ(0, audio_device()->MaxSpeakerVolume(&max_volume)); - EXPECT_EQ(0, audio_device()->SetSpeakerVolume(max_volume)); - } - - void DisableBuiltInAECIfAvailable() { - if (audio_device()->BuiltInAECIsAvailable()) { - EXPECT_EQ(0, audio_device()->EnableBuiltInAEC(false)); - } - } - - void StartPlayout() { - EXPECT_FALSE(audio_device()->PlayoutIsInitialized()); - EXPECT_FALSE(audio_device()->Playing()); - EXPECT_EQ(0, audio_device()->InitPlayout()); - EXPECT_TRUE(audio_device()->PlayoutIsInitialized()); - EXPECT_EQ(0, audio_device()->StartPlayout()); - EXPECT_TRUE(audio_device()->Playing()); - } - - void StopPlayout() { - EXPECT_EQ(0, audio_device()->StopPlayout()); - EXPECT_FALSE(audio_device()->Playing()); - EXPECT_FALSE(audio_device()->PlayoutIsInitialized()); - } - - void StartRecording() { - EXPECT_FALSE(audio_device()->RecordingIsInitialized()); - EXPECT_FALSE(audio_device()->Recording()); - EXPECT_EQ(0, audio_device()->InitRecording()); - EXPECT_TRUE(audio_device()->RecordingIsInitialized()); - EXPECT_EQ(0, audio_device()->StartRecording()); - EXPECT_TRUE(audio_device()->Recording()); - } - - void StopRecording() { - EXPECT_EQ(0, audio_device()->StopRecording()); - EXPECT_FALSE(audio_device()->Recording()); - } - - int GetMaxSpeakerVolume() const { - uint32_t max_volume(0); - EXPECT_EQ(0, audio_device()->MaxSpeakerVolume(&max_volume)); - return max_volume; - } - - int GetMinSpeakerVolume() const { - uint32_t min_volume(0); - EXPECT_EQ(0, audio_device()->MinSpeakerVolume(&min_volume)); - return min_volume; - } - - int GetSpeakerVolume() const { - uint32_t volume(0); - EXPECT_EQ(0, audio_device()->SpeakerVolume(&volume)); - return volume; - } - - rtc::Event test_is_done_; - std::unique_ptr task_queue_factory_; - rtc::scoped_refptr audio_device_; - AudioParameters playout_parameters_; - AudioParameters record_parameters_; - std::unique_ptr build_info_; -}; - -TEST_F(AudioDeviceTest, ConstructDestruct) { - // Using the test fixture to create and destruct the audio device module. -} - -// We always ask for a default audio layer when the ADM is constructed. But the -// ADM will then internally set the best suitable combination of audio layers, -// for input and output based on if low-latency output and/or input audio in -// combination with OpenSL ES is supported or not. This test ensures that the -// correct selection is done. 
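// (Editorial summary, mirroring the checks in the test below: AAudio is
// preferred when supported; otherwise OpenSL ES in both directions when both
// low-latency output and input are available; otherwise Java input combined
// with OpenSL ES output when only low-latency output is available; otherwise
// Java in both directions.)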
-TEST_F(AudioDeviceTest, VerifyDefaultAudioLayer) { - const AudioDeviceModule::AudioLayer audio_layer = GetActiveAudioLayer(); - bool low_latency_output = audio_manager()->IsLowLatencyPlayoutSupported(); - bool low_latency_input = audio_manager()->IsLowLatencyRecordSupported(); - bool aaudio = audio_manager()->IsAAudioSupported(); - AudioDeviceModule::AudioLayer expected_audio_layer; - if (aaudio) { - expected_audio_layer = AudioDeviceModule::kAndroidAAudioAudio; - } else if (low_latency_output && low_latency_input) { - expected_audio_layer = AudioDeviceModule::kAndroidOpenSLESAudio; - } else if (low_latency_output && !low_latency_input) { - expected_audio_layer = - AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio; - } else { - expected_audio_layer = AudioDeviceModule::kAndroidJavaAudio; - } - EXPECT_EQ(expected_audio_layer, audio_layer); -} - -// Verify that it is possible to explicitly create the two types of supported -// ADMs. These two tests overrides the default selection of native audio layer -// by ignoring if the device supports low-latency output or not. -TEST_F(AudioDeviceTest, CorrectAudioLayerIsUsedForCombinedJavaOpenSLCombo) { - AudioDeviceModule::AudioLayer expected_layer = - AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio; - AudioDeviceModule::AudioLayer active_layer = - TestActiveAudioLayer(expected_layer); - EXPECT_EQ(expected_layer, active_layer); -} - -TEST_F(AudioDeviceTest, CorrectAudioLayerIsUsedForJavaInBothDirections) { - AudioDeviceModule::AudioLayer expected_layer = - AudioDeviceModule::kAndroidJavaAudio; - AudioDeviceModule::AudioLayer active_layer = - TestActiveAudioLayer(expected_layer); - EXPECT_EQ(expected_layer, active_layer); -} - -TEST_F(AudioDeviceTest, CorrectAudioLayerIsUsedForOpenSLInBothDirections) { - AudioDeviceModule::AudioLayer expected_layer = - AudioDeviceModule::kAndroidOpenSLESAudio; - AudioDeviceModule::AudioLayer active_layer = - TestActiveAudioLayer(expected_layer); - EXPECT_EQ(expected_layer, active_layer); -} - -// TODO(bugs.webrtc.org/8914) -#if !defined(WEBRTC_AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO) -#define MAYBE_CorrectAudioLayerIsUsedForAAudioInBothDirections \ - DISABLED_CorrectAudioLayerIsUsedForAAudioInBothDirections -#else -#define MAYBE_CorrectAudioLayerIsUsedForAAudioInBothDirections \ - CorrectAudioLayerIsUsedForAAudioInBothDirections -#endif -TEST_F(AudioDeviceTest, - MAYBE_CorrectAudioLayerIsUsedForAAudioInBothDirections) { - AudioDeviceModule::AudioLayer expected_layer = - AudioDeviceModule::kAndroidAAudioAudio; - AudioDeviceModule::AudioLayer active_layer = - TestActiveAudioLayer(expected_layer); - EXPECT_EQ(expected_layer, active_layer); -} - -// TODO(bugs.webrtc.org/8914) -#if !defined(WEBRTC_AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO) -#define MAYBE_CorrectAudioLayerIsUsedForCombinedJavaAAudioCombo \ - DISABLED_CorrectAudioLayerIsUsedForCombinedJavaAAudioCombo -#else -#define MAYBE_CorrectAudioLayerIsUsedForCombinedJavaAAudioCombo \ - CorrectAudioLayerIsUsedForCombinedJavaAAudioCombo -#endif -TEST_F(AudioDeviceTest, - MAYBE_CorrectAudioLayerIsUsedForCombinedJavaAAudioCombo) { - AudioDeviceModule::AudioLayer expected_layer = - AudioDeviceModule::kAndroidJavaInputAndAAudioOutputAudio; - AudioDeviceModule::AudioLayer active_layer = - TestActiveAudioLayer(expected_layer); - EXPECT_EQ(expected_layer, active_layer); -} - -// The Android ADM supports two different delay reporting modes. 
One for the -// low-latency output path (in combination with OpenSL ES), and one for the -// high-latency output path (Java backends in both directions). These two tests -// verifies that the audio manager reports correct delay estimate given the -// selected audio layer. Note that, this delay estimate will only be utilized -// if the HW AEC is disabled. -TEST_F(AudioDeviceTest, UsesCorrectDelayEstimateForHighLatencyOutputPath) { - EXPECT_EQ(kHighLatencyModeDelayEstimateInMilliseconds, - TestDelayOnAudioLayer(AudioDeviceModule::kAndroidJavaAudio)); -} - -TEST_F(AudioDeviceTest, UsesCorrectDelayEstimateForLowLatencyOutputPath) { - EXPECT_EQ(kLowLatencyModeDelayEstimateInMilliseconds, - TestDelayOnAudioLayer( - AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio)); -} - -// Ensure that the ADM internal audio device buffer is configured to use the -// correct set of parameters. -TEST_F(AudioDeviceTest, VerifyAudioDeviceBufferParameters) { - EXPECT_EQ(playout_parameters_.sample_rate(), - static_cast(audio_device_buffer()->PlayoutSampleRate())); - EXPECT_EQ(record_parameters_.sample_rate(), - static_cast(audio_device_buffer()->RecordingSampleRate())); - EXPECT_EQ(playout_parameters_.channels(), - audio_device_buffer()->PlayoutChannels()); - EXPECT_EQ(record_parameters_.channels(), - audio_device_buffer()->RecordingChannels()); -} - -TEST_F(AudioDeviceTest, InitTerminate) { - // Initialization is part of the test fixture. - EXPECT_TRUE(audio_device()->Initialized()); - EXPECT_EQ(0, audio_device()->Terminate()); - EXPECT_FALSE(audio_device()->Initialized()); -} - -TEST_F(AudioDeviceTest, Devices) { - // Device enumeration is not supported. Verify fixed values only. - EXPECT_EQ(1, audio_device()->PlayoutDevices()); - EXPECT_EQ(1, audio_device()->RecordingDevices()); -} - -TEST_F(AudioDeviceTest, SpeakerVolumeShouldBeAvailable) { - // The OpenSL ES output audio path does not support volume control. - if (!AudioLayerSupportsVolumeControl()) - return; - bool available; - EXPECT_EQ(0, audio_device()->SpeakerVolumeIsAvailable(&available)); - EXPECT_TRUE(available); -} - -TEST_F(AudioDeviceTest, MaxSpeakerVolumeIsPositive) { - // The OpenSL ES output audio path does not support volume control. - if (!AudioLayerSupportsVolumeControl()) - return; - StartPlayout(); - EXPECT_GT(GetMaxSpeakerVolume(), 0); - StopPlayout(); -} - -TEST_F(AudioDeviceTest, MinSpeakerVolumeIsZero) { - // The OpenSL ES output audio path does not support volume control. - if (!AudioLayerSupportsVolumeControl()) - return; - EXPECT_EQ(GetMinSpeakerVolume(), 0); -} - -TEST_F(AudioDeviceTest, DefaultSpeakerVolumeIsWithinMinMax) { - // The OpenSL ES output audio path does not support volume control. - if (!AudioLayerSupportsVolumeControl()) - return; - const int default_volume = GetSpeakerVolume(); - EXPECT_GE(default_volume, GetMinSpeakerVolume()); - EXPECT_LE(default_volume, GetMaxSpeakerVolume()); -} - -TEST_F(AudioDeviceTest, SetSpeakerVolumeActuallySetsVolume) { - // The OpenSL ES output audio path does not support volume control. - if (!AudioLayerSupportsVolumeControl()) - return; - const int default_volume = GetSpeakerVolume(); - const int max_volume = GetMaxSpeakerVolume(); - EXPECT_EQ(0, audio_device()->SetSpeakerVolume(max_volume)); - int new_volume = GetSpeakerVolume(); - EXPECT_EQ(new_volume, max_volume); - EXPECT_EQ(0, audio_device()->SetSpeakerVolume(default_volume)); -} - -// Tests that playout can be initiated, started and stopped. No audio callback -// is registered in this test. 
-TEST_F(AudioDeviceTest, StartStopPlayout) { - StartPlayout(); - StopPlayout(); - StartPlayout(); - StopPlayout(); -} - -// Tests that recording can be initiated, started and stopped. No audio callback -// is registered in this test. -TEST_F(AudioDeviceTest, StartStopRecording) { - StartRecording(); - StopRecording(); - StartRecording(); - StopRecording(); -} - -// Verify that calling StopPlayout() will leave us in an uninitialized state -// which will require a new call to InitPlayout(). This test does not call -// StartPlayout() while being uninitialized since doing so will hit a -// RTC_DCHECK and death tests are not supported on Android. -TEST_F(AudioDeviceTest, StopPlayoutRequiresInitToRestart) { - EXPECT_EQ(0, audio_device()->InitPlayout()); - EXPECT_EQ(0, audio_device()->StartPlayout()); - EXPECT_EQ(0, audio_device()->StopPlayout()); - EXPECT_FALSE(audio_device()->PlayoutIsInitialized()); -} - -// Verify that calling StopRecording() will leave us in an uninitialized state -// which will require a new call to InitRecording(). This test does not call -// StartRecording() while being uninitialized since doing so will hit a -// RTC_DCHECK and death tests are not supported on Android. -TEST_F(AudioDeviceTest, StopRecordingRequiresInitToRestart) { - EXPECT_EQ(0, audio_device()->InitRecording()); - EXPECT_EQ(0, audio_device()->StartRecording()); - EXPECT_EQ(0, audio_device()->StopRecording()); - EXPECT_FALSE(audio_device()->RecordingIsInitialized()); -} - -// Start playout and verify that the native audio layer starts asking for real -// audio samples to play out using the NeedMorePlayData callback. -TEST_F(AudioDeviceTest, StartPlayoutVerifyCallbacks) { - MockAudioTransportAndroid mock(kPlayout); - mock.HandleCallbacks(&test_is_done_, nullptr, kNumCallbacks); - EXPECT_CALL(mock, NeedMorePlayData(playout_frames_per_10ms_buffer(), - kBytesPerSample, playout_channels(), - playout_sample_rate(), NotNull(), _, _, _)) - .Times(AtLeast(kNumCallbacks)); - EXPECT_EQ(0, audio_device()->RegisterAudioCallback(&mock)); - StartPlayout(); - test_is_done_.Wait(kTestTimeOut); - StopPlayout(); -} - -// Start recording and verify that the native audio layer starts feeding real -// audio samples via the RecordedDataIsAvailable callback. -// TODO(henrika): investigate if it is possible to perform a sanity check of -// delay estimates as well (argument #6). -TEST_F(AudioDeviceTest, StartRecordingVerifyCallbacks) { - MockAudioTransportAndroid mock(kRecording); - mock.HandleCallbacks(&test_is_done_, nullptr, kNumCallbacks); - EXPECT_CALL( - mock, RecordedDataIsAvailable(NotNull(), record_frames_per_10ms_buffer(), - kBytesPerSample, record_channels(), - record_sample_rate(), _, 0, 0, false, _, _)) - .Times(AtLeast(kNumCallbacks)); - - EXPECT_EQ(0, audio_device()->RegisterAudioCallback(&mock)); - StartRecording(); - test_is_done_.Wait(kTestTimeOut); - StopRecording(); -} - -// Start playout and recording (full-duplex audio) and verify that audio is -// active in both directions. 
-TEST_F(AudioDeviceTest, StartPlayoutAndRecordingVerifyCallbacks) { - MockAudioTransportAndroid mock(kPlayout | kRecording); - mock.HandleCallbacks(&test_is_done_, nullptr, kNumCallbacks); - EXPECT_CALL(mock, NeedMorePlayData(playout_frames_per_10ms_buffer(), - kBytesPerSample, playout_channels(), - playout_sample_rate(), NotNull(), _, _, _)) - .Times(AtLeast(kNumCallbacks)); - EXPECT_CALL( - mock, RecordedDataIsAvailable(NotNull(), record_frames_per_10ms_buffer(), - kBytesPerSample, record_channels(), - record_sample_rate(), _, 0, 0, false, _, _)) - .Times(AtLeast(kNumCallbacks)); - EXPECT_EQ(0, audio_device()->RegisterAudioCallback(&mock)); - StartPlayout(); - StartRecording(); - test_is_done_.Wait(kTestTimeOut); - StopRecording(); - StopPlayout(); -} - -// Start playout and read audio from an external PCM file when the audio layer -// asks for data to play out. Real audio is played out in this test but it does -// not contain any explicit verification that the audio quality is perfect. -TEST_F(AudioDeviceTest, RunPlayoutWithFileAsSource) { - // TODO(henrika): extend test when mono output is supported. - EXPECT_EQ(1u, playout_channels()); - NiceMock mock(kPlayout); - const int num_callbacks = kFilePlayTimeInSec * kNumCallbacksPerSecond; - std::string file_name = GetFileName(playout_sample_rate()); - std::unique_ptr file_audio_stream( - new FileAudioStream(num_callbacks, file_name, playout_sample_rate())); - mock.HandleCallbacks(&test_is_done_, file_audio_stream.get(), num_callbacks); - // SetMaxPlayoutVolume(); - EXPECT_EQ(0, audio_device()->RegisterAudioCallback(&mock)); - StartPlayout(); - test_is_done_.Wait(kTestTimeOut); - StopPlayout(); -} - -// Start playout and recording and store recorded data in an intermediate FIFO -// buffer from which the playout side then reads its samples in the same order -// as they were stored. Under ideal circumstances, a callback sequence would -// look like: ...+-+-+-+-+-+-+-..., where '+' means 'packet recorded' and '-' -// means 'packet played'. Under such conditions, the FIFO would only contain -// one packet on average. However, under more realistic conditions, the size -// of the FIFO will vary more due to an unbalance between the two sides. -// This test tries to verify that the device maintains a balanced callback- -// sequence by running in loopback for ten seconds while measuring the size -// (max and average) of the FIFO. The size of the FIFO is increased by the -// recording side and decreased by the playout side. -// TODO(henrika): tune the final test parameters after running tests on several -// different devices. -// Disabling this test on bots since it is difficult to come up with a robust -// test condition that all worked as intended. The main issue is that, when -// swarming is used, an initial latency can be built up when the both sides -// starts at different times. Hence, the test can fail even if audio works -// as intended. Keeping the test so it can be enabled manually. 
-// http://bugs.webrtc.org/7744 -TEST_F(AudioDeviceTest, DISABLED_RunPlayoutAndRecordingInFullDuplex) { - EXPECT_EQ(record_channels(), playout_channels()); - EXPECT_EQ(record_sample_rate(), playout_sample_rate()); - NiceMock mock(kPlayout | kRecording); - std::unique_ptr fifo_audio_stream( - new FifoAudioStream(playout_frames_per_10ms_buffer())); - mock.HandleCallbacks(&test_is_done_, fifo_audio_stream.get(), - kFullDuplexTime.seconds() * kNumCallbacksPerSecond); - SetMaxPlayoutVolume(); - EXPECT_EQ(0, audio_device()->RegisterAudioCallback(&mock)); - StartRecording(); - StartPlayout(); - test_is_done_.Wait(std::max(kTestTimeOut, kFullDuplexTime)); - StopPlayout(); - StopRecording(); - - // These thresholds are set rather high to accomodate differences in hardware - // in several devices, so this test can be used in swarming. - // See http://bugs.webrtc.org/6464 - EXPECT_LE(fifo_audio_stream->average_size(), 60u); - EXPECT_LE(fifo_audio_stream->largest_size(), 70u); -} - -// Measures loopback latency and reports the min, max and average values for -// a full duplex audio session. -// The latency is measured like so: -// - Insert impulses periodically on the output side. -// - Detect the impulses on the input side. -// - Measure the time difference between the transmit time and receive time. -// - Store time differences in a vector and calculate min, max and average. -// This test requires a special hardware called Audio Loopback Dongle. -// See http://source.android.com/devices/audio/loopback.html for details. -TEST_F(AudioDeviceTest, DISABLED_MeasureLoopbackLatency) { - EXPECT_EQ(record_channels(), playout_channels()); - EXPECT_EQ(record_sample_rate(), playout_sample_rate()); - NiceMock mock(kPlayout | kRecording); - std::unique_ptr latency_audio_stream( - new LatencyMeasuringAudioStream(playout_frames_per_10ms_buffer())); - mock.HandleCallbacks(&test_is_done_, latency_audio_stream.get(), - kMeasureLatencyTime.seconds() * kNumCallbacksPerSecond); - EXPECT_EQ(0, audio_device()->RegisterAudioCallback(&mock)); - SetMaxPlayoutVolume(); - DisableBuiltInAECIfAvailable(); - StartRecording(); - StartPlayout(); - test_is_done_.Wait(std::max(kTestTimeOut, kMeasureLatencyTime)); - StopPlayout(); - StopRecording(); - // Verify that the correct number of transmitted impulses are detected. - EXPECT_EQ(latency_audio_stream->num_latency_values(), - static_cast( - kImpulseFrequencyInHz * kMeasureLatencyTime.seconds() - 1)); - latency_audio_stream->PrintResults(); -} - -} // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_device/android/audio_manager.cc b/third_party/libwebrtc/modules/audio_device/android/audio_manager.cc deleted file mode 100644 index 0b5549661916..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/audio_manager.cc +++ /dev/null @@ -1,318 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/audio_device/android/audio_manager.h" - -#include - -#include "modules/audio_device/android/audio_common.h" -#include "modules/utility/include/helpers_android.h" -#include "rtc_base/arraysize.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" -#include "rtc_base/platform_thread.h" - -namespace webrtc { - -// AudioManager::JavaAudioManager implementation -AudioManager::JavaAudioManager::JavaAudioManager( - NativeRegistration* native_reg, - std::unique_ptr audio_manager) - : audio_manager_(std::move(audio_manager)), - init_(native_reg->GetMethodId("init", "()Z")), - dispose_(native_reg->GetMethodId("dispose", "()V")), - is_communication_mode_enabled_( - native_reg->GetMethodId("isCommunicationModeEnabled", "()Z")), - is_device_blacklisted_for_open_sles_usage_( - native_reg->GetMethodId("isDeviceBlacklistedForOpenSLESUsage", - "()Z")) { - RTC_LOG(LS_INFO) << "JavaAudioManager::ctor"; -} - -AudioManager::JavaAudioManager::~JavaAudioManager() { - RTC_LOG(LS_INFO) << "JavaAudioManager::~dtor"; -} - -bool AudioManager::JavaAudioManager::Init() { - return audio_manager_->CallBooleanMethod(init_); -} - -void AudioManager::JavaAudioManager::Close() { - audio_manager_->CallVoidMethod(dispose_); -} - -bool AudioManager::JavaAudioManager::IsCommunicationModeEnabled() { - return audio_manager_->CallBooleanMethod(is_communication_mode_enabled_); -} - -bool AudioManager::JavaAudioManager::IsDeviceBlacklistedForOpenSLESUsage() { - return audio_manager_->CallBooleanMethod( - is_device_blacklisted_for_open_sles_usage_); -} - -// AudioManager implementation -AudioManager::AudioManager() - : j_environment_(JVM::GetInstance()->environment()), - audio_layer_(AudioDeviceModule::kPlatformDefaultAudio), - initialized_(false), - hardware_aec_(false), - hardware_agc_(false), - hardware_ns_(false), - low_latency_playout_(false), - low_latency_record_(false), - delay_estimate_in_milliseconds_(0) { - RTC_LOG(LS_INFO) << "ctor"; - RTC_CHECK(j_environment_); - JNINativeMethod native_methods[] = { - {"nativeCacheAudioParameters", "(IIIZZZZZZZIIJ)V", - reinterpret_cast(&webrtc::AudioManager::CacheAudioParameters)}}; - j_native_registration_ = j_environment_->RegisterNatives( - "org/webrtc/voiceengine/WebRtcAudioManager", native_methods, - arraysize(native_methods)); - j_audio_manager_.reset( - new JavaAudioManager(j_native_registration_.get(), - j_native_registration_->NewObject( - "", "(J)V", PointerTojlong(this)))); -} - -AudioManager::~AudioManager() { - RTC_LOG(LS_INFO) << "dtor"; - RTC_DCHECK(thread_checker_.IsCurrent()); - Close(); -} - -void AudioManager::SetActiveAudioLayer( - AudioDeviceModule::AudioLayer audio_layer) { - RTC_LOG(LS_INFO) << "SetActiveAudioLayer: " << audio_layer; - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(!initialized_); - // Store the currently utilized audio layer. - audio_layer_ = audio_layer; - // The delay estimate can take one of two fixed values depending on if the - // device supports low-latency output or not. However, it is also possible - // that the user explicitly selects the high-latency audio path, hence we use - // the selected `audio_layer` here to set the delay estimate. - delay_estimate_in_milliseconds_ = - (audio_layer == AudioDeviceModule::kAndroidJavaAudio) - ? 
kHighLatencyModeDelayEstimateInMilliseconds - : kLowLatencyModeDelayEstimateInMilliseconds; - RTC_LOG(LS_INFO) << "delay_estimate_in_milliseconds: " - << delay_estimate_in_milliseconds_; -} - -SLObjectItf AudioManager::GetOpenSLEngine() { - RTC_LOG(LS_INFO) << "GetOpenSLEngine"; - RTC_DCHECK(thread_checker_.IsCurrent()); - // Only allow usage of OpenSL ES if such an audio layer has been specified. - if (audio_layer_ != AudioDeviceModule::kAndroidOpenSLESAudio && - audio_layer_ != - AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio) { - RTC_LOG(LS_INFO) - << "Unable to create OpenSL engine for the current audio layer: " - << audio_layer_; - return nullptr; - } - // OpenSL ES for Android only supports a single engine per application. - // If one already has been created, return existing object instead of - // creating a new. - if (engine_object_.Get() != nullptr) { - RTC_LOG(LS_WARNING) - << "The OpenSL ES engine object has already been created"; - return engine_object_.Get(); - } - // Create the engine object in thread safe mode. - const SLEngineOption option[] = { - {SL_ENGINEOPTION_THREADSAFE, static_cast(SL_BOOLEAN_TRUE)}}; - SLresult result = - slCreateEngine(engine_object_.Receive(), 1, option, 0, NULL, NULL); - if (result != SL_RESULT_SUCCESS) { - RTC_LOG(LS_ERROR) << "slCreateEngine() failed: " - << GetSLErrorString(result); - engine_object_.Reset(); - return nullptr; - } - // Realize the SL Engine in synchronous mode. - result = engine_object_->Realize(engine_object_.Get(), SL_BOOLEAN_FALSE); - if (result != SL_RESULT_SUCCESS) { - RTC_LOG(LS_ERROR) << "Realize() failed: " << GetSLErrorString(result); - engine_object_.Reset(); - return nullptr; - } - // Finally return the SLObjectItf interface of the engine object. - return engine_object_.Get(); -} - -bool AudioManager::Init() { - RTC_LOG(LS_INFO) << "Init"; - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(!initialized_); - RTC_DCHECK_NE(audio_layer_, AudioDeviceModule::kPlatformDefaultAudio); - if (!j_audio_manager_->Init()) { - RTC_LOG(LS_ERROR) << "Init() failed"; - return false; - } - initialized_ = true; - return true; -} - -bool AudioManager::Close() { - RTC_LOG(LS_INFO) << "Close"; - RTC_DCHECK(thread_checker_.IsCurrent()); - if (!initialized_) - return true; - j_audio_manager_->Close(); - initialized_ = false; - return true; -} - -bool AudioManager::IsCommunicationModeEnabled() const { - RTC_DCHECK(thread_checker_.IsCurrent()); - return j_audio_manager_->IsCommunicationModeEnabled(); -} - -bool AudioManager::IsAcousticEchoCancelerSupported() const { - RTC_DCHECK(thread_checker_.IsCurrent()); - return hardware_aec_; -} - -bool AudioManager::IsAutomaticGainControlSupported() const { - RTC_DCHECK(thread_checker_.IsCurrent()); - return hardware_agc_; -} - -bool AudioManager::IsNoiseSuppressorSupported() const { - RTC_DCHECK(thread_checker_.IsCurrent()); - return hardware_ns_; -} - -bool AudioManager::IsLowLatencyPlayoutSupported() const { - RTC_DCHECK(thread_checker_.IsCurrent()); - // Some devices are blacklisted for usage of OpenSL ES even if they report - // that low-latency playout is supported. See b/21485703 for details. - return j_audio_manager_->IsDeviceBlacklistedForOpenSLESUsage() - ? 
false - : low_latency_playout_; -} - -bool AudioManager::IsLowLatencyRecordSupported() const { - RTC_DCHECK(thread_checker_.IsCurrent()); - return low_latency_record_; -} - -bool AudioManager::IsProAudioSupported() const { - RTC_DCHECK(thread_checker_.IsCurrent()); - // TODO(henrika): return the state independently of if OpenSL ES is - // blacklisted or not for now. We could use the same approach as in - // IsLowLatencyPlayoutSupported() but I can't see the need for it yet. - return pro_audio_; -} - -// TODO(henrika): improve comments... -bool AudioManager::IsAAudioSupported() const { -#if defined(WEBRTC_AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO) - return a_audio_; -#else - return false; -#endif -} - -bool AudioManager::IsStereoPlayoutSupported() const { - RTC_DCHECK(thread_checker_.IsCurrent()); - return (playout_parameters_.channels() == 2); -} - -bool AudioManager::IsStereoRecordSupported() const { - RTC_DCHECK(thread_checker_.IsCurrent()); - return (record_parameters_.channels() == 2); -} - -int AudioManager::GetDelayEstimateInMilliseconds() const { - return delay_estimate_in_milliseconds_; -} - -JNI_FUNCTION_ALIGN -void JNICALL AudioManager::CacheAudioParameters(JNIEnv* env, - jobject obj, - jint sample_rate, - jint output_channels, - jint input_channels, - jboolean hardware_aec, - jboolean hardware_agc, - jboolean hardware_ns, - jboolean low_latency_output, - jboolean low_latency_input, - jboolean pro_audio, - jboolean a_audio, - jint output_buffer_size, - jint input_buffer_size, - jlong native_audio_manager) { - webrtc::AudioManager* this_object = - reinterpret_cast(native_audio_manager); - this_object->OnCacheAudioParameters( - env, sample_rate, output_channels, input_channels, hardware_aec, - hardware_agc, hardware_ns, low_latency_output, low_latency_input, - pro_audio, a_audio, output_buffer_size, input_buffer_size); -} - -void AudioManager::OnCacheAudioParameters(JNIEnv* env, - jint sample_rate, - jint output_channels, - jint input_channels, - jboolean hardware_aec, - jboolean hardware_agc, - jboolean hardware_ns, - jboolean low_latency_output, - jboolean low_latency_input, - jboolean pro_audio, - jboolean a_audio, - jint output_buffer_size, - jint input_buffer_size) { - RTC_LOG(LS_INFO) - << "OnCacheAudioParameters: " - "hardware_aec: " - << static_cast(hardware_aec) - << ", hardware_agc: " << static_cast(hardware_agc) - << ", hardware_ns: " << static_cast(hardware_ns) - << ", low_latency_output: " << static_cast(low_latency_output) - << ", low_latency_input: " << static_cast(low_latency_input) - << ", pro_audio: " << static_cast(pro_audio) - << ", a_audio: " << static_cast(a_audio) - << ", sample_rate: " << static_cast(sample_rate) - << ", output_channels: " << static_cast(output_channels) - << ", input_channels: " << static_cast(input_channels) - << ", output_buffer_size: " << static_cast(output_buffer_size) - << ", input_buffer_size: " << static_cast(input_buffer_size); - RTC_DCHECK(thread_checker_.IsCurrent()); - hardware_aec_ = hardware_aec; - hardware_agc_ = hardware_agc; - hardware_ns_ = hardware_ns; - low_latency_playout_ = low_latency_output; - low_latency_record_ = low_latency_input; - pro_audio_ = pro_audio; - a_audio_ = a_audio; - playout_parameters_.reset(sample_rate, static_cast(output_channels), - static_cast(output_buffer_size)); - record_parameters_.reset(sample_rate, static_cast(input_channels), - static_cast(input_buffer_size)); -} - -const AudioParameters& AudioManager::GetPlayoutAudioParameters() { - RTC_CHECK(playout_parameters_.is_valid()); - 
RTC_DCHECK(thread_checker_.IsCurrent()); - return playout_parameters_; -} - -const AudioParameters& AudioManager::GetRecordAudioParameters() { - RTC_CHECK(record_parameters_.is_valid()); - RTC_DCHECK(thread_checker_.IsCurrent()); - return record_parameters_; -} - -} // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_device/android/audio_manager.h b/third_party/libwebrtc/modules/audio_device/android/audio_manager.h deleted file mode 100644 index 900fc78a68ad..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/audio_manager.h +++ /dev/null @@ -1,225 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_DEVICE_ANDROID_AUDIO_MANAGER_H_ -#define MODULES_AUDIO_DEVICE_ANDROID_AUDIO_MANAGER_H_ - -#include -#include - -#include - -#include "api/sequence_checker.h" -#include "modules/audio_device/android/audio_common.h" -#include "modules/audio_device/android/opensles_common.h" -#include "modules/audio_device/audio_device_config.h" -#include "modules/audio_device/audio_device_generic.h" -#include "modules/audio_device/include/audio_device_defines.h" -#include "modules/utility/include/helpers_android.h" -#include "modules/utility/include/jvm_android.h" - -namespace webrtc { - -// Implements support for functions in the WebRTC audio stack for Android that -// relies on the AudioManager in android.media. It also populates an -// AudioParameter structure with native audio parameters detected at -// construction. This class does not make any audio-related modifications -// unless Init() is called. Caching audio parameters makes no changes but only -// reads data from the Java side. -class AudioManager { - public: - // Wraps the Java specific parts of the AudioManager into one helper class. - // Stores method IDs for all supported methods at construction and then - // allows calls like JavaAudioManager::Close() while hiding the Java/JNI - // parts that are associated with this call. - class JavaAudioManager { - public: - JavaAudioManager(NativeRegistration* native_registration, - std::unique_ptr audio_manager); - ~JavaAudioManager(); - - bool Init(); - void Close(); - bool IsCommunicationModeEnabled(); - bool IsDeviceBlacklistedForOpenSLESUsage(); - - private: - std::unique_ptr audio_manager_; - jmethodID init_; - jmethodID dispose_; - jmethodID is_communication_mode_enabled_; - jmethodID is_device_blacklisted_for_open_sles_usage_; - }; - - AudioManager(); - ~AudioManager(); - - // Sets the currently active audio layer combination. Must be called before - // Init(). - void SetActiveAudioLayer(AudioDeviceModule::AudioLayer audio_layer); - - // Creates and realizes the main (global) Open SL engine object and returns - // a reference to it. The engine object is only created at the first call - // since OpenSL ES for Android only supports a single engine per application. - // Subsequent calls returns the already created engine. The SL engine object - // is destroyed when the AudioManager object is deleted. It means that the - // engine object will be the first OpenSL ES object to be created and last - // object to be destroyed. 
- // Note that NULL will be returned unless the audio layer is specified as - // AudioDeviceModule::kAndroidOpenSLESAudio or - // AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio. - SLObjectItf GetOpenSLEngine(); - - // Initializes the audio manager and stores the current audio mode. - bool Init(); - // Revert any setting done by Init(). - bool Close(); - - // Returns true if current audio mode is AudioManager.MODE_IN_COMMUNICATION. - bool IsCommunicationModeEnabled() const; - - // Native audio parameters stored during construction. - const AudioParameters& GetPlayoutAudioParameters(); - const AudioParameters& GetRecordAudioParameters(); - - // Returns true if the device supports built-in audio effects for AEC, AGC - // and NS. Some devices can also be blacklisted for use in combination with - // platform effects and these devices will return false. - // Can currently only be used in combination with a Java based audio backend - // for the recoring side (i.e. using the android.media.AudioRecord API). - bool IsAcousticEchoCancelerSupported() const; - bool IsAutomaticGainControlSupported() const; - bool IsNoiseSuppressorSupported() const; - - // Returns true if the device supports the low-latency audio paths in - // combination with OpenSL ES. - bool IsLowLatencyPlayoutSupported() const; - bool IsLowLatencyRecordSupported() const; - - // Returns true if the device supports (and has been configured for) stereo. - // Call the Java API WebRtcAudioManager.setStereoOutput/Input() with true as - // paramter to enable stereo. Default is mono in both directions and the - // setting is set once and for all when the audio manager object is created. - // TODO(henrika): stereo is not supported in combination with OpenSL ES. - bool IsStereoPlayoutSupported() const; - bool IsStereoRecordSupported() const; - - // Returns true if the device supports pro-audio features in combination with - // OpenSL ES. - bool IsProAudioSupported() const; - - // Returns true if the device supports AAudio. - bool IsAAudioSupported() const; - - // Returns the estimated total delay of this device. Unit is in milliseconds. - // The vaule is set once at construction and never changes after that. - // Possible values are webrtc::kLowLatencyModeDelayEstimateInMilliseconds and - // webrtc::kHighLatencyModeDelayEstimateInMilliseconds. - int GetDelayEstimateInMilliseconds() const; - - private: - // Called from Java side so we can cache the native audio parameters. - // This method will be called by the WebRtcAudioManager constructor, i.e. - // on the same thread that this object is created on. - static void JNICALL CacheAudioParameters(JNIEnv* env, - jobject obj, - jint sample_rate, - jint output_channels, - jint input_channels, - jboolean hardware_aec, - jboolean hardware_agc, - jboolean hardware_ns, - jboolean low_latency_output, - jboolean low_latency_input, - jboolean pro_audio, - jboolean a_audio, - jint output_buffer_size, - jint input_buffer_size, - jlong native_audio_manager); - void OnCacheAudioParameters(JNIEnv* env, - jint sample_rate, - jint output_channels, - jint input_channels, - jboolean hardware_aec, - jboolean hardware_agc, - jboolean hardware_ns, - jboolean low_latency_output, - jboolean low_latency_input, - jboolean pro_audio, - jboolean a_audio, - jint output_buffer_size, - jint input_buffer_size); - - // Stores thread ID in the constructor. - // We can then use RTC_DCHECK_RUN_ON(&thread_checker_) to ensure that - // other methods are called from the same thread. 
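The thread_checker_ member documented above enforces single-thread affinity: the checker latches the thread it was constructed on, and later calls assert that they run on that same thread. A minimal illustration of the idea using only the standard library (this is a sketch, not the actual SequenceChecker implementation):

#include <cassert>
#include <thread>

// Remembers the thread it was created on and reports whether the calling
// thread is that same thread.
class ConstructionThreadChecker {
 public:
  ConstructionThreadChecker() : owning_thread_(std::this_thread::get_id()) {}
  bool IsCurrent() const {
    return std::this_thread::get_id() == owning_thread_;
  }

 private:
  const std::thread::id owning_thread_;
};

// Usage mirroring the RTC_DCHECK(thread_checker_.IsCurrent()) calls above:
//   ConstructionThreadChecker checker;   // created on thread A
//   assert(checker.IsCurrent());         // must run on thread A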
- SequenceChecker thread_checker_; - - // Calls JavaVM::AttachCurrentThread() if this thread is not attached at - // construction. - // Also ensures that DetachCurrentThread() is called at destruction. - JvmThreadConnector attach_thread_if_needed_; - - // Wraps the JNI interface pointer and methods associated with it. - std::unique_ptr j_environment_; - - // Contains factory method for creating the Java object. - std::unique_ptr j_native_registration_; - - // Wraps the Java specific parts of the AudioManager. - std::unique_ptr j_audio_manager_; - - // Contains the selected audio layer specified by the AudioLayer enumerator - // in the AudioDeviceModule class. - AudioDeviceModule::AudioLayer audio_layer_; - - // This object is the global entry point of the OpenSL ES API. - // After creating the engine object, the application can obtain this object‘s - // SLEngineItf interface. This interface contains creation methods for all - // the other object types in the API. None of these interface are realized - // by this class. It only provides access to the global engine object. - webrtc::ScopedSLObjectItf engine_object_; - - // Set to true by Init() and false by Close(). - bool initialized_; - - // True if device supports hardware (or built-in) AEC. - bool hardware_aec_; - // True if device supports hardware (or built-in) AGC. - bool hardware_agc_; - // True if device supports hardware (or built-in) NS. - bool hardware_ns_; - - // True if device supports the low-latency OpenSL ES audio path for output. - bool low_latency_playout_; - - // True if device supports the low-latency OpenSL ES audio path for input. - bool low_latency_record_; - - // True if device supports the low-latency OpenSL ES pro-audio path. - bool pro_audio_; - - // True if device supports the low-latency AAudio audio path. - bool a_audio_; - - // The delay estimate can take one of two fixed values depending on if the - // device supports low-latency output or not. - int delay_estimate_in_milliseconds_; - - // Contains native parameters (e.g. sample rate, channel configuration). - // Set at construction in OnCacheAudioParameters() which is called from - // Java on the same thread as this object is created on. - AudioParameters playout_parameters_; - AudioParameters record_parameters_; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_DEVICE_ANDROID_AUDIO_MANAGER_H_ diff --git a/third_party/libwebrtc/modules/audio_device/android/audio_manager_unittest.cc b/third_party/libwebrtc/modules/audio_device/android/audio_manager_unittest.cc deleted file mode 100644 index 093eddd2e83a..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/audio_manager_unittest.cc +++ /dev/null @@ -1,239 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_device/android/audio_manager.h" - -#include - -#include "modules/audio_device/android/build_info.h" -#include "modules/audio_device/android/ensure_initialized.h" -#include "rtc_base/arraysize.h" -#include "test/gtest.h" - -#define PRINT(...) 
fprintf(stderr, __VA_ARGS__); - -namespace webrtc { - -static const char kTag[] = " "; - -class AudioManagerTest : public ::testing::Test { - protected: - AudioManagerTest() { - // One-time initialization of JVM and application context. Ensures that we - // can do calls between C++ and Java. - webrtc::audiodevicemodule::EnsureInitialized(); - audio_manager_.reset(new AudioManager()); - SetActiveAudioLayer(); - playout_parameters_ = audio_manager()->GetPlayoutAudioParameters(); - record_parameters_ = audio_manager()->GetRecordAudioParameters(); - } - - AudioManager* audio_manager() const { return audio_manager_.get(); } - - // A valid audio layer must always be set before calling Init(), hence we - // might as well make it a part of the test fixture. - void SetActiveAudioLayer() { - EXPECT_EQ(0, audio_manager()->GetDelayEstimateInMilliseconds()); - audio_manager()->SetActiveAudioLayer(AudioDeviceModule::kAndroidJavaAudio); - EXPECT_NE(0, audio_manager()->GetDelayEstimateInMilliseconds()); - } - - // One way to ensure that the engine object is valid is to create an - // SL Engine interface since it exposes creation methods of all the OpenSL ES - // object types and it is only supported on the engine object. This method - // also verifies that the engine interface supports at least one interface. - // Note that, the test below is not a full test of the SLEngineItf object - // but only a simple sanity test to check that the global engine object is OK. - void ValidateSLEngine(SLObjectItf engine_object) { - EXPECT_NE(nullptr, engine_object); - // Get the SL Engine interface which is exposed by the engine object. - SLEngineItf engine; - SLresult result = - (*engine_object)->GetInterface(engine_object, SL_IID_ENGINE, &engine); - EXPECT_EQ(result, SL_RESULT_SUCCESS) << "GetInterface() on engine failed"; - // Ensure that the SL Engine interface exposes at least one interface. - SLuint32 object_id = SL_OBJECTID_ENGINE; - SLuint32 num_supported_interfaces = 0; - result = (*engine)->QueryNumSupportedInterfaces(engine, object_id, - &num_supported_interfaces); - EXPECT_EQ(result, SL_RESULT_SUCCESS) - << "QueryNumSupportedInterfaces() failed"; - EXPECT_GE(num_supported_interfaces, 1u); - } - - std::unique_ptr audio_manager_; - AudioParameters playout_parameters_; - AudioParameters record_parameters_; -}; - -TEST_F(AudioManagerTest, ConstructDestruct) {} - -// It should not be possible to create an OpenSL engine object if Java based -// audio is requested in both directions. -TEST_F(AudioManagerTest, GetOpenSLEngineShouldFailForJavaAudioLayer) { - audio_manager()->SetActiveAudioLayer(AudioDeviceModule::kAndroidJavaAudio); - SLObjectItf engine_object = audio_manager()->GetOpenSLEngine(); - EXPECT_EQ(nullptr, engine_object); -} - -// It should be possible to create an OpenSL engine object if OpenSL ES based -// audio is requested in any direction. -TEST_F(AudioManagerTest, GetOpenSLEngineShouldSucceedForOpenSLESAudioLayer) { - // List of supported audio layers that uses OpenSL ES audio. - const AudioDeviceModule::AudioLayer opensles_audio[] = { - AudioDeviceModule::kAndroidOpenSLESAudio, - AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio}; - // Verify that the global (singleton) OpenSL Engine can be acquired for all - // audio layes that uses OpenSL ES. Note that the engine is only created once. 
- for (const AudioDeviceModule::AudioLayer audio_layer : opensles_audio) { - audio_manager()->SetActiveAudioLayer(audio_layer); - SLObjectItf engine_object = audio_manager()->GetOpenSLEngine(); - EXPECT_NE(nullptr, engine_object); - // Perform a simple sanity check of the created engine object. - ValidateSLEngine(engine_object); - } -} - -TEST_F(AudioManagerTest, InitClose) { - EXPECT_TRUE(audio_manager()->Init()); - EXPECT_TRUE(audio_manager()->Close()); -} - -TEST_F(AudioManagerTest, IsAcousticEchoCancelerSupported) { - PRINT("%sAcoustic Echo Canceler support: %s\n", kTag, - audio_manager()->IsAcousticEchoCancelerSupported() ? "Yes" : "No"); -} - -TEST_F(AudioManagerTest, IsAutomaticGainControlSupported) { - EXPECT_FALSE(audio_manager()->IsAutomaticGainControlSupported()); -} - -TEST_F(AudioManagerTest, IsNoiseSuppressorSupported) { - PRINT("%sNoise Suppressor support: %s\n", kTag, - audio_manager()->IsNoiseSuppressorSupported() ? "Yes" : "No"); -} - -TEST_F(AudioManagerTest, IsLowLatencyPlayoutSupported) { - PRINT("%sLow latency output support: %s\n", kTag, - audio_manager()->IsLowLatencyPlayoutSupported() ? "Yes" : "No"); -} - -TEST_F(AudioManagerTest, IsLowLatencyRecordSupported) { - PRINT("%sLow latency input support: %s\n", kTag, - audio_manager()->IsLowLatencyRecordSupported() ? "Yes" : "No"); -} - -TEST_F(AudioManagerTest, IsProAudioSupported) { - PRINT("%sPro audio support: %s\n", kTag, - audio_manager()->IsProAudioSupported() ? "Yes" : "No"); -} - -// Verify that playout side is configured for mono by default. -TEST_F(AudioManagerTest, IsStereoPlayoutSupported) { - EXPECT_FALSE(audio_manager()->IsStereoPlayoutSupported()); -} - -// Verify that recording side is configured for mono by default. -TEST_F(AudioManagerTest, IsStereoRecordSupported) { - EXPECT_FALSE(audio_manager()->IsStereoRecordSupported()); -} - -TEST_F(AudioManagerTest, ShowAudioParameterInfo) { - const bool low_latency_out = audio_manager()->IsLowLatencyPlayoutSupported(); - const bool low_latency_in = audio_manager()->IsLowLatencyRecordSupported(); - PRINT("PLAYOUT:\n"); - PRINT("%saudio layer: %s\n", kTag, - low_latency_out ? "Low latency OpenSL" : "Java/JNI based AudioTrack"); - PRINT("%ssample rate: %d Hz\n", kTag, playout_parameters_.sample_rate()); - PRINT("%schannels: %zu\n", kTag, playout_parameters_.channels()); - PRINT("%sframes per buffer: %zu <=> %.2f ms\n", kTag, - playout_parameters_.frames_per_buffer(), - playout_parameters_.GetBufferSizeInMilliseconds()); - PRINT("RECORD: \n"); - PRINT("%saudio layer: %s\n", kTag, - low_latency_in ? "Low latency OpenSL" : "Java/JNI based AudioRecord"); - PRINT("%ssample rate: %d Hz\n", kTag, record_parameters_.sample_rate()); - PRINT("%schannels: %zu\n", kTag, record_parameters_.channels()); - PRINT("%sframes per buffer: %zu <=> %.2f ms\n", kTag, - record_parameters_.frames_per_buffer(), - record_parameters_.GetBufferSizeInMilliseconds()); -} - -// The audio device module only suppors the same sample rate in both directions. -// In addition, in full-duplex low-latency mode (OpenSL ES), both input and -// output must use the same native buffer size to allow for usage of the fast -// audio track in Android. 
-TEST_F(AudioManagerTest, VerifyAudioParameters) { - const bool low_latency_out = audio_manager()->IsLowLatencyPlayoutSupported(); - const bool low_latency_in = audio_manager()->IsLowLatencyRecordSupported(); - EXPECT_EQ(playout_parameters_.sample_rate(), - record_parameters_.sample_rate()); - if (low_latency_out && low_latency_in) { - EXPECT_EQ(playout_parameters_.frames_per_buffer(), - record_parameters_.frames_per_buffer()); - } -} - -// Add device-specific information to the test for logging purposes. -TEST_F(AudioManagerTest, ShowDeviceInfo) { - BuildInfo build_info; - PRINT("%smodel: %s\n", kTag, build_info.GetDeviceModel().c_str()); - PRINT("%sbrand: %s\n", kTag, build_info.GetBrand().c_str()); - PRINT("%smanufacturer: %s\n", kTag, - build_info.GetDeviceManufacturer().c_str()); -} - -// Add Android build information to the test for logging purposes. -TEST_F(AudioManagerTest, ShowBuildInfo) { - BuildInfo build_info; - PRINT("%sbuild release: %s\n", kTag, build_info.GetBuildRelease().c_str()); - PRINT("%sbuild id: %s\n", kTag, build_info.GetAndroidBuildId().c_str()); - PRINT("%sbuild type: %s\n", kTag, build_info.GetBuildType().c_str()); - PRINT("%sSDK version: %d\n", kTag, build_info.GetSdkVersion()); -} - -// Basic test of the AudioParameters class using default construction where -// all members are set to zero. -TEST_F(AudioManagerTest, AudioParametersWithDefaultConstruction) { - AudioParameters params; - EXPECT_FALSE(params.is_valid()); - EXPECT_EQ(0, params.sample_rate()); - EXPECT_EQ(0U, params.channels()); - EXPECT_EQ(0U, params.frames_per_buffer()); - EXPECT_EQ(0U, params.frames_per_10ms_buffer()); - EXPECT_EQ(0U, params.GetBytesPerFrame()); - EXPECT_EQ(0U, params.GetBytesPerBuffer()); - EXPECT_EQ(0U, params.GetBytesPer10msBuffer()); - EXPECT_EQ(0.0f, params.GetBufferSizeInMilliseconds()); -} - -// Basic test of the AudioParameters class using non default construction. -TEST_F(AudioManagerTest, AudioParametersWithNonDefaultConstruction) { - const int kSampleRate = 48000; - const size_t kChannels = 1; - const size_t kFramesPerBuffer = 480; - const size_t kFramesPer10msBuffer = 480; - const size_t kBytesPerFrame = 2; - const float kBufferSizeInMs = 10.0f; - AudioParameters params(kSampleRate, kChannels, kFramesPerBuffer); - EXPECT_TRUE(params.is_valid()); - EXPECT_EQ(kSampleRate, params.sample_rate()); - EXPECT_EQ(kChannels, params.channels()); - EXPECT_EQ(kFramesPerBuffer, params.frames_per_buffer()); - EXPECT_EQ(static_cast(kSampleRate / 100), - params.frames_per_10ms_buffer()); - EXPECT_EQ(kBytesPerFrame, params.GetBytesPerFrame()); - EXPECT_EQ(kBytesPerFrame * kFramesPerBuffer, params.GetBytesPerBuffer()); - EXPECT_EQ(kBytesPerFrame * kFramesPer10msBuffer, - params.GetBytesPer10msBuffer()); - EXPECT_EQ(kBufferSizeInMs, params.GetBufferSizeInMilliseconds()); -} - -} // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_device/android/audio_record_jni.cc b/third_party/libwebrtc/modules/audio_device/android/audio_record_jni.cc deleted file mode 100644 index 919eabb983ce..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/audio_record_jni.cc +++ /dev/null @@ -1,280 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_device/android/audio_record_jni.h" - -#include -#include - -#include "modules/audio_device/android/audio_common.h" -#include "rtc_base/arraysize.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" -#include "rtc_base/platform_thread.h" -#include "rtc_base/time_utils.h" -#include "system_wrappers/include/metrics.h" - -namespace webrtc { - -namespace { -// Scoped class which logs its time of life as a UMA statistic. It generates -// a histogram which measures the time it takes for a method/scope to execute. -class ScopedHistogramTimer { - public: - explicit ScopedHistogramTimer(const std::string& name) - : histogram_name_(name), start_time_ms_(rtc::TimeMillis()) {} - ~ScopedHistogramTimer() { - const int64_t life_time_ms = rtc::TimeSince(start_time_ms_); - RTC_HISTOGRAM_COUNTS_1000(histogram_name_, life_time_ms); - RTC_LOG(LS_INFO) << histogram_name_ << ": " << life_time_ms; - } - - private: - const std::string histogram_name_; - int64_t start_time_ms_; -}; -} // namespace - -// AudioRecordJni::JavaAudioRecord implementation. -AudioRecordJni::JavaAudioRecord::JavaAudioRecord( - NativeRegistration* native_reg, - std::unique_ptr audio_record) - : audio_record_(std::move(audio_record)), - init_recording_(native_reg->GetMethodId("initRecording", "(II)I")), - start_recording_(native_reg->GetMethodId("startRecording", "()Z")), - stop_recording_(native_reg->GetMethodId("stopRecording", "()Z")), - enable_built_in_aec_(native_reg->GetMethodId("enableBuiltInAEC", "(Z)Z")), - enable_built_in_ns_(native_reg->GetMethodId("enableBuiltInNS", "(Z)Z")) {} - -AudioRecordJni::JavaAudioRecord::~JavaAudioRecord() {} - -int AudioRecordJni::JavaAudioRecord::InitRecording(int sample_rate, - size_t channels) { - return audio_record_->CallIntMethod(init_recording_, - static_cast(sample_rate), - static_cast(channels)); -} - -bool AudioRecordJni::JavaAudioRecord::StartRecording() { - return audio_record_->CallBooleanMethod(start_recording_); -} - -bool AudioRecordJni::JavaAudioRecord::StopRecording() { - return audio_record_->CallBooleanMethod(stop_recording_); -} - -bool AudioRecordJni::JavaAudioRecord::EnableBuiltInAEC(bool enable) { - return audio_record_->CallBooleanMethod(enable_built_in_aec_, - static_cast(enable)); -} - -bool AudioRecordJni::JavaAudioRecord::EnableBuiltInNS(bool enable) { - return audio_record_->CallBooleanMethod(enable_built_in_ns_, - static_cast(enable)); -} - -// AudioRecordJni implementation. 
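ScopedHistogramTimer above is an RAII helper: it captures a start time at construction and reports the elapsed time of the enclosing scope when it is destroyed. The same pattern can be sketched with the standard library instead of the rtc::TimeMillis()/RTC_HISTOGRAM machinery; the names below are illustrative only:

#include <chrono>
#include <cstdio>
#include <string>
#include <utility>

// Measures how long the enclosing scope takes and prints the result when the
// object goes out of scope.
class ScopedScopeTimer {
 public:
  explicit ScopedScopeTimer(std::string label)
      : label_(std::move(label)), start_(std::chrono::steady_clock::now()) {}
  ~ScopedScopeTimer() {
    const auto elapsed = std::chrono::steady_clock::now() - start_;
    const long long ms =
        std::chrono::duration_cast<std::chrono::milliseconds>(elapsed).count();
    std::printf("%s: %lld ms\n", label_.c_str(), ms);
  }

 private:
  std::string label_;
  std::chrono::steady_clock::time_point start_;
};

// Usage, mirroring InitRecording()/StartRecording() below:
//   { ScopedScopeTimer timer("InitRecordingDurationMs"); /* timed work */ }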
-AudioRecordJni::AudioRecordJni(AudioManager* audio_manager) - : j_environment_(JVM::GetInstance()->environment()), - audio_manager_(audio_manager), - audio_parameters_(audio_manager->GetRecordAudioParameters()), - total_delay_in_milliseconds_(0), - direct_buffer_address_(nullptr), - direct_buffer_capacity_in_bytes_(0), - frames_per_buffer_(0), - initialized_(false), - recording_(false), - audio_device_buffer_(nullptr) { - RTC_LOG(LS_INFO) << "ctor"; - RTC_DCHECK(audio_parameters_.is_valid()); - RTC_CHECK(j_environment_); - JNINativeMethod native_methods[] = { - {"nativeCacheDirectBufferAddress", "(Ljava/nio/ByteBuffer;J)V", - reinterpret_cast( - &webrtc::AudioRecordJni::CacheDirectBufferAddress)}, - {"nativeDataIsRecorded", "(IJ)V", - reinterpret_cast(&webrtc::AudioRecordJni::DataIsRecorded)}}; - j_native_registration_ = j_environment_->RegisterNatives( - "org/webrtc/voiceengine/WebRtcAudioRecord", native_methods, - arraysize(native_methods)); - j_audio_record_.reset( - new JavaAudioRecord(j_native_registration_.get(), - j_native_registration_->NewObject( - "", "(J)V", PointerTojlong(this)))); - // Detach from this thread since we want to use the checker to verify calls - // from the Java based audio thread. - thread_checker_java_.Detach(); -} - -AudioRecordJni::~AudioRecordJni() { - RTC_LOG(LS_INFO) << "dtor"; - RTC_DCHECK(thread_checker_.IsCurrent()); - Terminate(); -} - -int32_t AudioRecordJni::Init() { - RTC_LOG(LS_INFO) << "Init"; - RTC_DCHECK(thread_checker_.IsCurrent()); - return 0; -} - -int32_t AudioRecordJni::Terminate() { - RTC_LOG(LS_INFO) << "Terminate"; - RTC_DCHECK(thread_checker_.IsCurrent()); - StopRecording(); - return 0; -} - -int32_t AudioRecordJni::InitRecording() { - RTC_LOG(LS_INFO) << "InitRecording"; - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(!initialized_); - RTC_DCHECK(!recording_); - ScopedHistogramTimer timer("WebRTC.Audio.InitRecordingDurationMs"); - int frames_per_buffer = j_audio_record_->InitRecording( - audio_parameters_.sample_rate(), audio_parameters_.channels()); - if (frames_per_buffer < 0) { - direct_buffer_address_ = nullptr; - RTC_LOG(LS_ERROR) << "InitRecording failed"; - return -1; - } - frames_per_buffer_ = static_cast(frames_per_buffer); - RTC_LOG(LS_INFO) << "frames_per_buffer: " << frames_per_buffer_; - const size_t bytes_per_frame = audio_parameters_.channels() * sizeof(int16_t); - RTC_CHECK_EQ(direct_buffer_capacity_in_bytes_, - frames_per_buffer_ * bytes_per_frame); - RTC_CHECK_EQ(frames_per_buffer_, audio_parameters_.frames_per_10ms_buffer()); - initialized_ = true; - return 0; -} - -int32_t AudioRecordJni::StartRecording() { - RTC_LOG(LS_INFO) << "StartRecording"; - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(!recording_); - if (!initialized_) { - RTC_DLOG(LS_WARNING) - << "Recording can not start since InitRecording must succeed first"; - return 0; - } - ScopedHistogramTimer timer("WebRTC.Audio.StartRecordingDurationMs"); - if (!j_audio_record_->StartRecording()) { - RTC_LOG(LS_ERROR) << "StartRecording failed"; - return -1; - } - recording_ = true; - return 0; -} - -int32_t AudioRecordJni::StopRecording() { - RTC_LOG(LS_INFO) << "StopRecording"; - RTC_DCHECK(thread_checker_.IsCurrent()); - if (!initialized_ || !recording_) { - return 0; - } - if (!j_audio_record_->StopRecording()) { - RTC_LOG(LS_ERROR) << "StopRecording failed"; - return -1; - } - // If we don't detach here, we will hit a RTC_DCHECK in OnDataIsRecorded() - // next time StartRecording() is called since it will create a new Java - // thread. 
- thread_checker_java_.Detach(); - initialized_ = false; - recording_ = false; - direct_buffer_address_ = nullptr; - return 0; -} - -void AudioRecordJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) { - RTC_LOG(LS_INFO) << "AttachAudioBuffer"; - RTC_DCHECK(thread_checker_.IsCurrent()); - audio_device_buffer_ = audioBuffer; - const int sample_rate_hz = audio_parameters_.sample_rate(); - RTC_LOG(LS_INFO) << "SetRecordingSampleRate(" << sample_rate_hz << ")"; - audio_device_buffer_->SetRecordingSampleRate(sample_rate_hz); - const size_t channels = audio_parameters_.channels(); - RTC_LOG(LS_INFO) << "SetRecordingChannels(" << channels << ")"; - audio_device_buffer_->SetRecordingChannels(channels); - total_delay_in_milliseconds_ = - audio_manager_->GetDelayEstimateInMilliseconds(); - RTC_DCHECK_GT(total_delay_in_milliseconds_, 0); - RTC_LOG(LS_INFO) << "total_delay_in_milliseconds: " - << total_delay_in_milliseconds_; -} - -int32_t AudioRecordJni::EnableBuiltInAEC(bool enable) { - RTC_LOG(LS_INFO) << "EnableBuiltInAEC(" << enable << ")"; - RTC_DCHECK(thread_checker_.IsCurrent()); - return j_audio_record_->EnableBuiltInAEC(enable) ? 0 : -1; -} - -int32_t AudioRecordJni::EnableBuiltInAGC(bool enable) { - // TODO(henrika): possibly remove when no longer used by any client. - RTC_CHECK_NOTREACHED(); -} - -int32_t AudioRecordJni::EnableBuiltInNS(bool enable) { - RTC_LOG(LS_INFO) << "EnableBuiltInNS(" << enable << ")"; - RTC_DCHECK(thread_checker_.IsCurrent()); - return j_audio_record_->EnableBuiltInNS(enable) ? 0 : -1; -} - -JNI_FUNCTION_ALIGN -void JNICALL AudioRecordJni::CacheDirectBufferAddress(JNIEnv* env, - jobject obj, - jobject byte_buffer, - jlong nativeAudioRecord) { - webrtc::AudioRecordJni* this_object = - reinterpret_cast(nativeAudioRecord); - this_object->OnCacheDirectBufferAddress(env, byte_buffer); -} - -void AudioRecordJni::OnCacheDirectBufferAddress(JNIEnv* env, - jobject byte_buffer) { - RTC_LOG(LS_INFO) << "OnCacheDirectBufferAddress"; - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(!direct_buffer_address_); - direct_buffer_address_ = env->GetDirectBufferAddress(byte_buffer); - jlong capacity = env->GetDirectBufferCapacity(byte_buffer); - RTC_LOG(LS_INFO) << "direct buffer capacity: " << capacity; - direct_buffer_capacity_in_bytes_ = static_cast(capacity); -} - -JNI_FUNCTION_ALIGN -void JNICALL AudioRecordJni::DataIsRecorded(JNIEnv* env, - jobject obj, - jint length, - jlong nativeAudioRecord) { - webrtc::AudioRecordJni* this_object = - reinterpret_cast(nativeAudioRecord); - this_object->OnDataIsRecorded(length); -} - -// This method is called on a high-priority thread from Java. The name of -// the thread is 'AudioRecordThread'. -void AudioRecordJni::OnDataIsRecorded(int length) { - RTC_DCHECK(thread_checker_java_.IsCurrent()); - if (!audio_device_buffer_) { - RTC_LOG(LS_ERROR) << "AttachAudioBuffer has not been called"; - return; - } - audio_device_buffer_->SetRecordedBuffer(direct_buffer_address_, - frames_per_buffer_); - // We provide one (combined) fixed delay estimate for the APM and use the - // `playDelayMs` parameter only. Components like the AEC only sees the sum - // of `playDelayMs` and `recDelayMs`, hence the distributions does not matter. 
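// Illustration of the point above: since the echo canceller only consumes the
// sum of the two delay values, reporting a fixed total of, say, 150 ms as
// SetVQEData(150, 0) is, from the AEC's point of view, equivalent to
// SetVQEData(100, 50); only the total round-trip estimate matters.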
- audio_device_buffer_->SetVQEData(total_delay_in_milliseconds_, 0); - if (audio_device_buffer_->DeliverRecordedData() == -1) { - RTC_LOG(LS_INFO) << "AudioDeviceBuffer::DeliverRecordedData failed"; - } -} - -} // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_device/android/audio_record_jni.h b/third_party/libwebrtc/modules/audio_device/android/audio_record_jni.h deleted file mode 100644 index 66a6a89f41e6..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/audio_record_jni.h +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_DEVICE_ANDROID_AUDIO_RECORD_JNI_H_ -#define MODULES_AUDIO_DEVICE_ANDROID_AUDIO_RECORD_JNI_H_ - -#include - -#include - -#include "api/sequence_checker.h" -#include "modules/audio_device/android/audio_manager.h" -#include "modules/audio_device/audio_device_generic.h" -#include "modules/audio_device/include/audio_device_defines.h" -#include "modules/utility/include/helpers_android.h" -#include "modules/utility/include/jvm_android.h" - -namespace webrtc { - -// Implements 16-bit mono PCM audio input support for Android using the Java -// AudioRecord interface. Most of the work is done by its Java counterpart in -// WebRtcAudioRecord.java. This class is created and lives on a thread in -// C++-land, but recorded audio buffers are delivered on a high-priority -// thread managed by the Java class. -// -// The Java class makes use of AudioEffect features (mainly AEC) which are -// first available in Jelly Bean. If it is instantiated running against earlier -// SDKs, the AEC provided by the APM in WebRTC must be used and enabled -// separately instead. -// -// An instance must be created and destroyed on one and the same thread. -// All public methods must also be called on the same thread. A thread checker -// will RTC_DCHECK if any method is called on an invalid thread. -// -// This class uses JvmThreadConnector to attach to a Java VM if needed -// and detach when the object goes out of scope. Additional thread checking -// guarantees that no other (possibly non attached) thread is used. -class AudioRecordJni { - public: - // Wraps the Java specific parts of the AudioRecordJni into one helper class. 
- class JavaAudioRecord { - public: - JavaAudioRecord(NativeRegistration* native_registration, - std::unique_ptr audio_track); - ~JavaAudioRecord(); - - int InitRecording(int sample_rate, size_t channels); - bool StartRecording(); - bool StopRecording(); - bool EnableBuiltInAEC(bool enable); - bool EnableBuiltInNS(bool enable); - - private: - std::unique_ptr audio_record_; - jmethodID init_recording_; - jmethodID start_recording_; - jmethodID stop_recording_; - jmethodID enable_built_in_aec_; - jmethodID enable_built_in_ns_; - }; - - explicit AudioRecordJni(AudioManager* audio_manager); - ~AudioRecordJni(); - - int32_t Init(); - int32_t Terminate(); - - int32_t InitRecording(); - bool RecordingIsInitialized() const { return initialized_; } - - int32_t StartRecording(); - int32_t StopRecording(); - bool Recording() const { return recording_; } - - void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer); - - int32_t EnableBuiltInAEC(bool enable); - int32_t EnableBuiltInAGC(bool enable); - int32_t EnableBuiltInNS(bool enable); - - private: - // Called from Java side so we can cache the address of the Java-manged - // `byte_buffer` in `direct_buffer_address_`. The size of the buffer - // is also stored in `direct_buffer_capacity_in_bytes_`. - // This method will be called by the WebRtcAudioRecord constructor, i.e., - // on the same thread that this object is created on. - static void JNICALL CacheDirectBufferAddress(JNIEnv* env, - jobject obj, - jobject byte_buffer, - jlong nativeAudioRecord); - void OnCacheDirectBufferAddress(JNIEnv* env, jobject byte_buffer); - - // Called periodically by the Java based WebRtcAudioRecord object when - // recording has started. Each call indicates that there are `length` new - // bytes recorded in the memory area `direct_buffer_address_` and it is - // now time to send these to the consumer. - // This method is called on a high-priority thread from Java. The name of - // the thread is 'AudioRecordThread'. - static void JNICALL DataIsRecorded(JNIEnv* env, - jobject obj, - jint length, - jlong nativeAudioRecord); - void OnDataIsRecorded(int length); - - // Stores thread ID in constructor. - SequenceChecker thread_checker_; - - // Stores thread ID in first call to OnDataIsRecorded() from high-priority - // thread in Java. Detached during construction of this object. - SequenceChecker thread_checker_java_; - - // Calls JavaVM::AttachCurrentThread() if this thread is not attached at - // construction. - // Also ensures that DetachCurrentThread() is called at destruction. - JvmThreadConnector attach_thread_if_needed_; - - // Wraps the JNI interface pointer and methods associated with it. - std::unique_ptr j_environment_; - - // Contains factory method for creating the Java object. - std::unique_ptr j_native_registration_; - - // Wraps the Java specific parts of the AudioRecordJni class. - std::unique_ptr j_audio_record_; - - // Raw pointer to the audio manger. - const AudioManager* audio_manager_; - - // Contains audio parameters provided to this class at construction by the - // AudioManager. - const AudioParameters audio_parameters_; - - // Delay estimate of the total round-trip delay (input + output). - // Fixed value set once in AttachAudioBuffer() and it can take one out of two - // possible values. See audio_common.h for details. - int total_delay_in_milliseconds_; - - // Cached copy of address to direct audio buffer owned by `j_audio_record_`. - void* direct_buffer_address_; - - // Number of bytes in the direct audio buffer owned by `j_audio_record_`. 
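The static JNICALL callbacks declared above follow the standard JNI trampoline pattern: the C++ object hands its own address to the Java peer as a jlong, and the static callback converts that value back into an object pointer before forwarding to a member function. A minimal sketch of the pattern follows; the class and method names here are hypothetical, not the WebRTC ones:

#include <jni.h>

#include <cstdint>

// Hypothetical native peer of a Java audio class, shown only to illustrate
// the jlong-handle round trip used by the callbacks above.
class NativeRecorder {
 public:
  // Handed to the Java peer at construction as a jlong.
  jlong NativeHandle() {
    return static_cast<jlong>(reinterpret_cast<intptr_t>(this));
  }

  void OnDataRecorded(int length) {
    // Consume `length` newly recorded bytes from the shared direct buffer.
  }

  // Static trampoline registered via RegisterNatives(); `native_handle` is
  // the value produced by NativeHandle() above.
  static void JNICALL DataIsRecorded(JNIEnv* /*env*/,
                                     jobject /*obj*/,
                                     jint length,
                                     jlong native_handle) {
    NativeRecorder* self = reinterpret_cast<NativeRecorder*>(
        static_cast<intptr_t>(native_handle));
    self->OnDataRecorded(static_cast<int>(length));
  }
};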
- size_t direct_buffer_capacity_in_bytes_; - - // Number audio frames per audio buffer. Each audio frame corresponds to - // one sample of PCM mono data at 16 bits per sample. Hence, each audio - // frame contains 2 bytes (given that the Java layer only supports mono). - // Example: 480 for 48000 Hz or 441 for 44100 Hz. - size_t frames_per_buffer_; - - bool initialized_; - - bool recording_; - - // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the - // AudioDeviceModuleImpl class and called by AudioDeviceModule::Create(). - AudioDeviceBuffer* audio_device_buffer_; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_DEVICE_ANDROID_AUDIO_RECORD_JNI_H_ diff --git a/third_party/libwebrtc/modules/audio_device/android/audio_track_jni.cc b/third_party/libwebrtc/modules/audio_device/android/audio_track_jni.cc deleted file mode 100644 index 5afa1ec25231..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/audio_track_jni.cc +++ /dev/null @@ -1,296 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_device/android/audio_track_jni.h" - -#include - -#include "modules/audio_device/android/audio_manager.h" -#include "rtc_base/arraysize.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" -#include "rtc_base/platform_thread.h" -#include "system_wrappers/include/field_trial.h" -#include "system_wrappers/include/metrics.h" - -namespace webrtc { - -// AudioTrackJni::JavaAudioTrack implementation. -AudioTrackJni::JavaAudioTrack::JavaAudioTrack( - NativeRegistration* native_reg, - std::unique_ptr audio_track) - : audio_track_(std::move(audio_track)), - init_playout_(native_reg->GetMethodId("initPlayout", "(IID)I")), - start_playout_(native_reg->GetMethodId("startPlayout", "()Z")), - stop_playout_(native_reg->GetMethodId("stopPlayout", "()Z")), - set_stream_volume_(native_reg->GetMethodId("setStreamVolume", "(I)Z")), - get_stream_max_volume_( - native_reg->GetMethodId("getStreamMaxVolume", "()I")), - get_stream_volume_(native_reg->GetMethodId("getStreamVolume", "()I")), - get_buffer_size_in_frames_( - native_reg->GetMethodId("getBufferSizeInFrames", "()I")) {} - -AudioTrackJni::JavaAudioTrack::~JavaAudioTrack() {} - -bool AudioTrackJni::JavaAudioTrack::InitPlayout(int sample_rate, int channels) { - double buffer_size_factor = - strtod(webrtc::field_trial::FindFullName( - "WebRTC-AudioDevicePlayoutBufferSizeFactor") - .c_str(), - nullptr); - if (buffer_size_factor == 0) - buffer_size_factor = 1.0; - int requested_buffer_size_bytes = audio_track_->CallIntMethod( - init_playout_, sample_rate, channels, buffer_size_factor); - // Update UMA histograms for both the requested and actual buffer size. - if (requested_buffer_size_bytes >= 0) { - // To avoid division by zero, we assume the sample rate is 48k if an invalid - // value is found. - sample_rate = sample_rate <= 0 ? 48000 : sample_rate; - // This calculation assumes that audio is mono. 
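// Worked example of the conversion below (illustrative numbers): with mono
// 16-bit PCM there are 2 bytes per frame, so a requested buffer of 3840 bytes
// at 48000 Hz corresponds to (3840 * 1000) / (2 * 48000) = 40 ms, which is
// the value recorded in the histogram.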
- const int requested_buffer_size_ms = - (requested_buffer_size_bytes * 1000) / (2 * sample_rate); - RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeRequestedAudioBufferSizeMs", - requested_buffer_size_ms, 0, 1000, 100); - int actual_buffer_size_frames = - audio_track_->CallIntMethod(get_buffer_size_in_frames_); - if (actual_buffer_size_frames >= 0) { - const int actual_buffer_size_ms = - actual_buffer_size_frames * 1000 / sample_rate; - RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeAudioBufferSizeMs", - actual_buffer_size_ms, 0, 1000, 100); - } - return true; - } - return false; -} - -bool AudioTrackJni::JavaAudioTrack::StartPlayout() { - return audio_track_->CallBooleanMethod(start_playout_); -} - -bool AudioTrackJni::JavaAudioTrack::StopPlayout() { - return audio_track_->CallBooleanMethod(stop_playout_); -} - -bool AudioTrackJni::JavaAudioTrack::SetStreamVolume(int volume) { - return audio_track_->CallBooleanMethod(set_stream_volume_, volume); -} - -int AudioTrackJni::JavaAudioTrack::GetStreamMaxVolume() { - return audio_track_->CallIntMethod(get_stream_max_volume_); -} - -int AudioTrackJni::JavaAudioTrack::GetStreamVolume() { - return audio_track_->CallIntMethod(get_stream_volume_); -} - -// TODO(henrika): possible extend usage of AudioManager and add it as member. -AudioTrackJni::AudioTrackJni(AudioManager* audio_manager) - : j_environment_(JVM::GetInstance()->environment()), - audio_parameters_(audio_manager->GetPlayoutAudioParameters()), - direct_buffer_address_(nullptr), - direct_buffer_capacity_in_bytes_(0), - frames_per_buffer_(0), - initialized_(false), - playing_(false), - audio_device_buffer_(nullptr) { - RTC_LOG(LS_INFO) << "ctor"; - RTC_DCHECK(audio_parameters_.is_valid()); - RTC_CHECK(j_environment_); - JNINativeMethod native_methods[] = { - {"nativeCacheDirectBufferAddress", "(Ljava/nio/ByteBuffer;J)V", - reinterpret_cast( - &webrtc::AudioTrackJni::CacheDirectBufferAddress)}, - {"nativeGetPlayoutData", "(IJ)V", - reinterpret_cast(&webrtc::AudioTrackJni::GetPlayoutData)}}; - j_native_registration_ = j_environment_->RegisterNatives( - "org/webrtc/voiceengine/WebRtcAudioTrack", native_methods, - arraysize(native_methods)); - j_audio_track_.reset( - new JavaAudioTrack(j_native_registration_.get(), - j_native_registration_->NewObject( - "", "(J)V", PointerTojlong(this)))); - // Detach from this thread since we want to use the checker to verify calls - // from the Java based audio thread. 
- thread_checker_java_.Detach(); -} - -AudioTrackJni::~AudioTrackJni() { - RTC_LOG(LS_INFO) << "dtor"; - RTC_DCHECK(thread_checker_.IsCurrent()); - Terminate(); -} - -int32_t AudioTrackJni::Init() { - RTC_LOG(LS_INFO) << "Init"; - RTC_DCHECK(thread_checker_.IsCurrent()); - return 0; -} - -int32_t AudioTrackJni::Terminate() { - RTC_LOG(LS_INFO) << "Terminate"; - RTC_DCHECK(thread_checker_.IsCurrent()); - StopPlayout(); - return 0; -} - -int32_t AudioTrackJni::InitPlayout() { - RTC_LOG(LS_INFO) << "InitPlayout"; - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(!initialized_); - RTC_DCHECK(!playing_); - if (!j_audio_track_->InitPlayout(audio_parameters_.sample_rate(), - audio_parameters_.channels())) { - RTC_LOG(LS_ERROR) << "InitPlayout failed"; - return -1; - } - initialized_ = true; - return 0; -} - -int32_t AudioTrackJni::StartPlayout() { - RTC_LOG(LS_INFO) << "StartPlayout"; - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(!playing_); - if (!initialized_) { - RTC_DLOG(LS_WARNING) - << "Playout can not start since InitPlayout must succeed first"; - return 0; - } - if (!j_audio_track_->StartPlayout()) { - RTC_LOG(LS_ERROR) << "StartPlayout failed"; - return -1; - } - playing_ = true; - return 0; -} - -int32_t AudioTrackJni::StopPlayout() { - RTC_LOG(LS_INFO) << "StopPlayout"; - RTC_DCHECK(thread_checker_.IsCurrent()); - if (!initialized_ || !playing_) { - return 0; - } - if (!j_audio_track_->StopPlayout()) { - RTC_LOG(LS_ERROR) << "StopPlayout failed"; - return -1; - } - // If we don't detach here, we will hit a RTC_DCHECK in OnDataIsRecorded() - // next time StartRecording() is called since it will create a new Java - // thread. - thread_checker_java_.Detach(); - initialized_ = false; - playing_ = false; - direct_buffer_address_ = nullptr; - return 0; -} - -int AudioTrackJni::SpeakerVolumeIsAvailable(bool& available) { - available = true; - return 0; -} - -int AudioTrackJni::SetSpeakerVolume(uint32_t volume) { - RTC_LOG(LS_INFO) << "SetSpeakerVolume(" << volume << ")"; - RTC_DCHECK(thread_checker_.IsCurrent()); - return j_audio_track_->SetStreamVolume(volume) ? 0 : -1; -} - -int AudioTrackJni::MaxSpeakerVolume(uint32_t& max_volume) const { - RTC_DCHECK(thread_checker_.IsCurrent()); - max_volume = j_audio_track_->GetStreamMaxVolume(); - return 0; -} - -int AudioTrackJni::MinSpeakerVolume(uint32_t& min_volume) const { - RTC_DCHECK(thread_checker_.IsCurrent()); - min_volume = 0; - return 0; -} - -int AudioTrackJni::SpeakerVolume(uint32_t& volume) const { - RTC_DCHECK(thread_checker_.IsCurrent()); - volume = j_audio_track_->GetStreamVolume(); - RTC_LOG(LS_INFO) << "SpeakerVolume: " << volume; - return 0; -} - -// TODO(henrika): possibly add stereo support. 
-void AudioTrackJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) { - RTC_LOG(LS_INFO) << "AttachAudioBuffer"; - RTC_DCHECK(thread_checker_.IsCurrent()); - audio_device_buffer_ = audioBuffer; - const int sample_rate_hz = audio_parameters_.sample_rate(); - RTC_LOG(LS_INFO) << "SetPlayoutSampleRate(" << sample_rate_hz << ")"; - audio_device_buffer_->SetPlayoutSampleRate(sample_rate_hz); - const size_t channels = audio_parameters_.channels(); - RTC_LOG(LS_INFO) << "SetPlayoutChannels(" << channels << ")"; - audio_device_buffer_->SetPlayoutChannels(channels); -} - -JNI_FUNCTION_ALIGN -void JNICALL AudioTrackJni::CacheDirectBufferAddress(JNIEnv* env, - jobject obj, - jobject byte_buffer, - jlong nativeAudioTrack) { - webrtc::AudioTrackJni* this_object = - reinterpret_cast(nativeAudioTrack); - this_object->OnCacheDirectBufferAddress(env, byte_buffer); -} - -void AudioTrackJni::OnCacheDirectBufferAddress(JNIEnv* env, - jobject byte_buffer) { - RTC_LOG(LS_INFO) << "OnCacheDirectBufferAddress"; - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(!direct_buffer_address_); - direct_buffer_address_ = env->GetDirectBufferAddress(byte_buffer); - jlong capacity = env->GetDirectBufferCapacity(byte_buffer); - RTC_LOG(LS_INFO) << "direct buffer capacity: " << capacity; - direct_buffer_capacity_in_bytes_ = static_cast(capacity); - const size_t bytes_per_frame = audio_parameters_.channels() * sizeof(int16_t); - frames_per_buffer_ = direct_buffer_capacity_in_bytes_ / bytes_per_frame; - RTC_LOG(LS_INFO) << "frames_per_buffer: " << frames_per_buffer_; -} - -JNI_FUNCTION_ALIGN -void JNICALL AudioTrackJni::GetPlayoutData(JNIEnv* env, - jobject obj, - jint length, - jlong nativeAudioTrack) { - webrtc::AudioTrackJni* this_object = - reinterpret_cast(nativeAudioTrack); - this_object->OnGetPlayoutData(static_cast(length)); -} - -// This method is called on a high-priority thread from Java. The name of -// the thread is 'AudioRecordTrack'. -void AudioTrackJni::OnGetPlayoutData(size_t length) { - RTC_DCHECK(thread_checker_java_.IsCurrent()); - const size_t bytes_per_frame = audio_parameters_.channels() * sizeof(int16_t); - RTC_DCHECK_EQ(frames_per_buffer_, length / bytes_per_frame); - if (!audio_device_buffer_) { - RTC_LOG(LS_ERROR) << "AttachAudioBuffer has not been called"; - return; - } - // Pull decoded data (in 16-bit PCM format) from jitter buffer. - int samples = audio_device_buffer_->RequestPlayoutData(frames_per_buffer_); - if (samples <= 0) { - RTC_LOG(LS_ERROR) << "AudioDeviceBuffer::RequestPlayoutData failed"; - return; - } - RTC_DCHECK_EQ(samples, frames_per_buffer_); - // Copy decoded data into common byte buffer to ensure that it can be - // written to the Java based audio track. - samples = audio_device_buffer_->GetPlayoutData(direct_buffer_address_); - RTC_DCHECK_EQ(length, bytes_per_frame * samples); -} - -} // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_device/android/audio_track_jni.h b/third_party/libwebrtc/modules/audio_device/android/audio_track_jni.h deleted file mode 100644 index 7eb69082b400..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/audio_track_jni.h +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_DEVICE_ANDROID_AUDIO_TRACK_JNI_H_ -#define MODULES_AUDIO_DEVICE_ANDROID_AUDIO_TRACK_JNI_H_ - -#include - -#include - -#include "api/sequence_checker.h" -#include "modules/audio_device/android/audio_common.h" -#include "modules/audio_device/android/audio_manager.h" -#include "modules/audio_device/audio_device_generic.h" -#include "modules/audio_device/include/audio_device_defines.h" -#include "modules/utility/include/helpers_android.h" -#include "modules/utility/include/jvm_android.h" - -namespace webrtc { - -// Implements 16-bit mono PCM audio output support for Android using the Java -// AudioTrack interface. Most of the work is done by its Java counterpart in -// WebRtcAudioTrack.java. This class is created and lives on a thread in -// C++-land, but decoded audio buffers are requested on a high-priority -// thread managed by the Java class. -// -// An instance must be created and destroyed on one and the same thread. -// All public methods must also be called on the same thread. A thread checker -// will RTC_DCHECK if any method is called on an invalid thread. -// -// This class uses JvmThreadConnector to attach to a Java VM if needed -// and detach when the object goes out of scope. Additional thread checking -// guarantees that no other (possibly non attached) thread is used. -class AudioTrackJni { - public: - // Wraps the Java specific parts of the AudioTrackJni into one helper class. - class JavaAudioTrack { - public: - JavaAudioTrack(NativeRegistration* native_registration, - std::unique_ptr audio_track); - ~JavaAudioTrack(); - - bool InitPlayout(int sample_rate, int channels); - bool StartPlayout(); - bool StopPlayout(); - bool SetStreamVolume(int volume); - int GetStreamMaxVolume(); - int GetStreamVolume(); - - private: - std::unique_ptr audio_track_; - jmethodID init_playout_; - jmethodID start_playout_; - jmethodID stop_playout_; - jmethodID set_stream_volume_; - jmethodID get_stream_max_volume_; - jmethodID get_stream_volume_; - jmethodID get_buffer_size_in_frames_; - }; - - explicit AudioTrackJni(AudioManager* audio_manager); - ~AudioTrackJni(); - - int32_t Init(); - int32_t Terminate(); - - int32_t InitPlayout(); - bool PlayoutIsInitialized() const { return initialized_; } - - int32_t StartPlayout(); - int32_t StopPlayout(); - bool Playing() const { return playing_; } - - int SpeakerVolumeIsAvailable(bool& available); - int SetSpeakerVolume(uint32_t volume); - int SpeakerVolume(uint32_t& volume) const; - int MaxSpeakerVolume(uint32_t& max_volume) const; - int MinSpeakerVolume(uint32_t& min_volume) const; - - void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer); - - private: - // Called from Java side so we can cache the address of the Java-manged - // `byte_buffer` in `direct_buffer_address_`. The size of the buffer - // is also stored in `direct_buffer_capacity_in_bytes_`. - // Called on the same thread as the creating thread. - static void JNICALL CacheDirectBufferAddress(JNIEnv* env, - jobject obj, - jobject byte_buffer, - jlong nativeAudioTrack); - void OnCacheDirectBufferAddress(JNIEnv* env, jobject byte_buffer); - - // Called periodically by the Java based WebRtcAudioTrack object when - // playout has started. Each call indicates that `length` new bytes should - // be written to the memory area `direct_buffer_address_` for playout. - // This method is called on a high-priority thread from Java. 
The name of - // the thread is 'AudioTrackThread'. - static void JNICALL GetPlayoutData(JNIEnv* env, - jobject obj, - jint length, - jlong nativeAudioTrack); - void OnGetPlayoutData(size_t length); - - // Stores thread ID in constructor. - SequenceChecker thread_checker_; - - // Stores thread ID in first call to OnGetPlayoutData() from high-priority - // thread in Java. Detached during construction of this object. - SequenceChecker thread_checker_java_; - - // Calls JavaVM::AttachCurrentThread() if this thread is not attached at - // construction. - // Also ensures that DetachCurrentThread() is called at destruction. - JvmThreadConnector attach_thread_if_needed_; - - // Wraps the JNI interface pointer and methods associated with it. - std::unique_ptr j_environment_; - - // Contains factory method for creating the Java object. - std::unique_ptr j_native_registration_; - - // Wraps the Java specific parts of the AudioTrackJni class. - std::unique_ptr j_audio_track_; - - // Contains audio parameters provided to this class at construction by the - // AudioManager. - const AudioParameters audio_parameters_; - - // Cached copy of address to direct audio buffer owned by `j_audio_track_`. - void* direct_buffer_address_; - - // Number of bytes in the direct audio buffer owned by `j_audio_track_`. - size_t direct_buffer_capacity_in_bytes_; - - // Number of audio frames per audio buffer. Each audio frame corresponds to - // one sample of PCM mono data at 16 bits per sample. Hence, each audio - // frame contains 2 bytes (given that the Java layer only supports mono). - // Example: 480 for 48000 Hz or 441 for 44100 Hz. - size_t frames_per_buffer_; - - bool initialized_; - - bool playing_; - - // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the - // AudioDeviceModuleImpl class and called by AudioDeviceModule::Create(). - // The AudioDeviceBuffer is a member of the AudioDeviceModuleImpl instance - // and therefore outlives this object. - AudioDeviceBuffer* audio_device_buffer_; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_DEVICE_ANDROID_AUDIO_TRACK_JNI_H_ diff --git a/third_party/libwebrtc/modules/audio_device/android/build_info.cc b/third_party/libwebrtc/modules/audio_device/android/build_info.cc deleted file mode 100644 index 916be8244ebc..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/build_info.cc +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/audio_device/android/build_info.h" - -#include "modules/utility/include/helpers_android.h" - -namespace webrtc { - -BuildInfo::BuildInfo() - : j_environment_(JVM::GetInstance()->environment()), - j_build_info_( - JVM::GetInstance()->GetClass("org/webrtc/voiceengine/BuildInfo")) {} - -std::string BuildInfo::GetStringFromJava(const char* name) { - jmethodID id = j_build_info_.GetStaticMethodId(name, "()Ljava/lang/String;"); - jstring j_string = - static_cast(j_build_info_.CallStaticObjectMethod(id)); - return j_environment_->JavaToStdString(j_string); -} - -std::string BuildInfo::GetDeviceModel() { - return GetStringFromJava("getDeviceModel"); -} - -std::string BuildInfo::GetBrand() { - return GetStringFromJava("getBrand"); -} - -std::string BuildInfo::GetDeviceManufacturer() { - return GetStringFromJava("getDeviceManufacturer"); -} - -std::string BuildInfo::GetAndroidBuildId() { - return GetStringFromJava("getAndroidBuildId"); -} - -std::string BuildInfo::GetBuildType() { - return GetStringFromJava("getBuildType"); -} - -std::string BuildInfo::GetBuildRelease() { - return GetStringFromJava("getBuildRelease"); -} - -SdkCode BuildInfo::GetSdkVersion() { - jmethodID id = j_build_info_.GetStaticMethodId("getSdkVersion", "()I"); - jint j_version = j_build_info_.CallStaticIntMethod(id); - return static_cast(j_version); -} - -} // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_device/android/build_info.h b/third_party/libwebrtc/modules/audio_device/android/build_info.h deleted file mode 100644 index 3647e5664934..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/build_info.h +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_DEVICE_ANDROID_BUILD_INFO_H_ -#define MODULES_AUDIO_DEVICE_ANDROID_BUILD_INFO_H_ - -#include - -#include -#include - -#include "modules/utility/include/jvm_android.h" - -namespace webrtc { - -// This enumeration maps to the values returned by BuildInfo::GetSdkVersion(), -// indicating the Android release associated with a given SDK version. -// See https://developer.android.com/guide/topics/manifest/uses-sdk-element.html -// for details. -enum SdkCode { - SDK_CODE_JELLY_BEAN = 16, // Android 4.1 - SDK_CODE_JELLY_BEAN_MR1 = 17, // Android 4.2 - SDK_CODE_JELLY_BEAN_MR2 = 18, // Android 4.3 - SDK_CODE_KITKAT = 19, // Android 4.4 - SDK_CODE_WATCH = 20, // Android 4.4W - SDK_CODE_LOLLIPOP = 21, // Android 5.0 - SDK_CODE_LOLLIPOP_MR1 = 22, // Android 5.1 - SDK_CODE_MARSHMALLOW = 23, // Android 6.0 - SDK_CODE_N = 24, -}; - -// Utility class used to query the Java class (org/webrtc/voiceengine/BuildInfo) -// for device and Android build information. -// The calling thread is attached to the JVM at construction if needed and a -// valid Java environment object is also created. -// All Get methods must be called on the creating thread. If not, the code will -// hit RTC_DCHECKs when calling JNIEnvironment::JavaToStdString(). -class BuildInfo { - public: - BuildInfo(); - ~BuildInfo() {} - - // End-user-visible name for the end product (e.g. "Nexus 6"). 
- std::string GetDeviceModel(); - // Consumer-visible brand (e.g. "google"). - std::string GetBrand(); - // Manufacturer of the product/hardware (e.g. "motorola"). - std::string GetDeviceManufacturer(); - // Android build ID (e.g. LMY47D). - std::string GetAndroidBuildId(); - // The type of build (e.g. "user" or "eng"). - std::string GetBuildType(); - // The user-visible version string (e.g. "5.1"). - std::string GetBuildRelease(); - // The user-visible SDK version of the framework (e.g. 21). See SdkCode enum - // for translation. - SdkCode GetSdkVersion(); - - private: - // Helper method which calls a static getter method with `name` and returns - // a string from Java. - std::string GetStringFromJava(const char* name); - - // Ensures that this class can access a valid JNI interface pointer even - // if the creating thread was not attached to the JVM. - JvmThreadConnector attach_thread_if_needed_; - - // Provides access to the JNIEnv interface pointer and the JavaToStdString() - // method which is used to translate Java strings to std strings. - std::unique_ptr j_environment_; - - // Holds the jclass object and provides access to CallStaticObjectMethod(). - // Used by GetStringFromJava() during construction only. - JavaClass j_build_info_; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_DEVICE_ANDROID_BUILD_INFO_H_ diff --git a/third_party/libwebrtc/modules/audio_device/android/ensure_initialized.cc b/third_party/libwebrtc/modules/audio_device/android/ensure_initialized.cc deleted file mode 100644 index 59e9c8f7a6db..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/ensure_initialized.cc +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_device/android/ensure_initialized.h" - -#include -#include -#include - -#include "modules/utility/include/jvm_android.h" -#include "rtc_base/checks.h" -#include "sdk/android/src/jni/jvm.h" - -namespace webrtc { -namespace audiodevicemodule { - -static pthread_once_t g_initialize_once = PTHREAD_ONCE_INIT; - -void EnsureInitializedOnce() { - RTC_CHECK(::webrtc::jni::GetJVM() != nullptr); - - JNIEnv* jni = ::webrtc::jni::AttachCurrentThreadIfNeeded(); - JavaVM* jvm = NULL; - RTC_CHECK_EQ(0, jni->GetJavaVM(&jvm)); - - // Initialize the Java environment (currently only used by the audio manager). - webrtc::JVM::Initialize(jvm); -} - -void EnsureInitialized() { - RTC_CHECK_EQ(0, pthread_once(&g_initialize_once, &EnsureInitializedOnce)); -} - -} // namespace audiodevicemodule -} // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/BuildInfo.java b/third_party/libwebrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/BuildInfo.java deleted file mode 100644 index aed8a06454b9..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/BuildInfo.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -package org.webrtc.voiceengine; - -import android.os.Build; - -public final class BuildInfo { - public static String getDevice() { - return Build.DEVICE; - } - - public static String getDeviceModel() { - return Build.MODEL; - } - - public static String getProduct() { - return Build.PRODUCT; - } - - public static String getBrand() { - return Build.BRAND; - } - - public static String getDeviceManufacturer() { - return Build.MANUFACTURER; - } - - public static String getAndroidBuildId() { - return Build.ID; - } - - public static String getBuildType() { - return Build.TYPE; - } - - public static String getBuildRelease() { - return Build.VERSION.RELEASE; - } - - public static int getSdkVersion() { - return Build.VERSION.SDK_INT; - } -} diff --git a/third_party/libwebrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java b/third_party/libwebrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java deleted file mode 100644 index 92f1c93524b8..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioEffects.java +++ /dev/null @@ -1,312 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -package org.webrtc.voiceengine; - -import android.media.audiofx.AcousticEchoCanceler; -import android.media.audiofx.AudioEffect; -import android.media.audiofx.AudioEffect.Descriptor; -import android.media.audiofx.NoiseSuppressor; -import android.os.Build; -import androidx.annotation.Nullable; -import java.util.List; -import java.util.UUID; -import org.webrtc.Logging; - -// This class wraps control of three different platform effects. Supported -// effects are: AcousticEchoCanceler (AEC) and NoiseSuppressor (NS). -// Calling enable() will active all effects that are -// supported by the device if the corresponding `shouldEnableXXX` member is set. -public class WebRtcAudioEffects { - private static final boolean DEBUG = false; - - private static final String TAG = "WebRtcAudioEffects"; - - // UUIDs for Software Audio Effects that we want to avoid using. - // The implementor field will be set to "The Android Open Source Project". - private static final UUID AOSP_ACOUSTIC_ECHO_CANCELER = - UUID.fromString("bb392ec0-8d4d-11e0-a896-0002a5d5c51b"); - private static final UUID AOSP_NOISE_SUPPRESSOR = - UUID.fromString("c06c8400-8e06-11e0-9cb6-0002a5d5c51b"); - - // Contains the available effect descriptors returned from the - // AudioEffect.getEffects() call. This result is cached to avoid doing the - // slow OS call multiple times. - private static @Nullable Descriptor[] cachedEffects; - - // Contains the audio effect objects. Created in enable() and destroyed - // in release(). - private @Nullable AcousticEchoCanceler aec; - private @Nullable NoiseSuppressor ns; - - // Affects the final state given to the setEnabled() method on each effect. 
- // The default state is set to "disabled" but each effect can also be enabled - // by calling setAEC() and setNS(). - // To enable an effect, both the shouldEnableXXX member and the static - // canUseXXX() must be true. - private boolean shouldEnableAec; - private boolean shouldEnableNs; - - // Checks if the device implements Acoustic Echo Cancellation (AEC). - // Returns true if the device implements AEC, false otherwise. - public static boolean isAcousticEchoCancelerSupported() { - // Note: we're using isAcousticEchoCancelerEffectAvailable() instead of - // AcousticEchoCanceler.isAvailable() to avoid the expensive getEffects() - // OS API call. - return isAcousticEchoCancelerEffectAvailable(); - } - - // Checks if the device implements Noise Suppression (NS). - // Returns true if the device implements NS, false otherwise. - public static boolean isNoiseSuppressorSupported() { - // Note: we're using isNoiseSuppressorEffectAvailable() instead of - // NoiseSuppressor.isAvailable() to avoid the expensive getEffects() - // OS API call. - return isNoiseSuppressorEffectAvailable(); - } - - // Returns true if the device is blacklisted for HW AEC usage. - public static boolean isAcousticEchoCancelerBlacklisted() { - List blackListedModels = WebRtcAudioUtils.getBlackListedModelsForAecUsage(); - boolean isBlacklisted = blackListedModels.contains(Build.MODEL); - if (isBlacklisted) { - Logging.w(TAG, Build.MODEL + " is blacklisted for HW AEC usage!"); - } - return isBlacklisted; - } - - // Returns true if the device is blacklisted for HW NS usage. - public static boolean isNoiseSuppressorBlacklisted() { - List blackListedModels = WebRtcAudioUtils.getBlackListedModelsForNsUsage(); - boolean isBlacklisted = blackListedModels.contains(Build.MODEL); - if (isBlacklisted) { - Logging.w(TAG, Build.MODEL + " is blacklisted for HW NS usage!"); - } - return isBlacklisted; - } - - // Returns true if the platform AEC should be excluded based on its UUID. - // AudioEffect.queryEffects() can throw IllegalStateException. - private static boolean isAcousticEchoCancelerExcludedByUUID() { - for (Descriptor d : getAvailableEffects()) { - if (d.type.equals(AudioEffect.EFFECT_TYPE_AEC) - && d.uuid.equals(AOSP_ACOUSTIC_ECHO_CANCELER)) { - return true; - } - } - return false; - } - - // Returns true if the platform NS should be excluded based on its UUID. - // AudioEffect.queryEffects() can throw IllegalStateException. - private static boolean isNoiseSuppressorExcludedByUUID() { - for (Descriptor d : getAvailableEffects()) { - if (d.type.equals(AudioEffect.EFFECT_TYPE_NS) && d.uuid.equals(AOSP_NOISE_SUPPRESSOR)) { - return true; - } - } - return false; - } - - // Returns true if the device supports Acoustic Echo Cancellation (AEC). - private static boolean isAcousticEchoCancelerEffectAvailable() { - return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_AEC); - } - - // Returns true if the device supports Noise Suppression (NS). - private static boolean isNoiseSuppressorEffectAvailable() { - return isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_NS); - } - - // Returns true if all conditions for supporting the HW AEC are fulfilled. - // It will not be possible to enable the HW AEC if this method returns false. 
- public static boolean canUseAcousticEchoCanceler() { - boolean canUseAcousticEchoCanceler = isAcousticEchoCancelerSupported() - && !WebRtcAudioUtils.useWebRtcBasedAcousticEchoCanceler() - && !isAcousticEchoCancelerBlacklisted() && !isAcousticEchoCancelerExcludedByUUID(); - Logging.d(TAG, "canUseAcousticEchoCanceler: " + canUseAcousticEchoCanceler); - return canUseAcousticEchoCanceler; - } - - // Returns true if all conditions for supporting the HW NS are fulfilled. - // It will not be possible to enable the HW NS if this method returns false. - public static boolean canUseNoiseSuppressor() { - boolean canUseNoiseSuppressor = isNoiseSuppressorSupported() - && !WebRtcAudioUtils.useWebRtcBasedNoiseSuppressor() && !isNoiseSuppressorBlacklisted() - && !isNoiseSuppressorExcludedByUUID(); - Logging.d(TAG, "canUseNoiseSuppressor: " + canUseNoiseSuppressor); - return canUseNoiseSuppressor; - } - - public static WebRtcAudioEffects create() { - return new WebRtcAudioEffects(); - } - - private WebRtcAudioEffects() { - Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo()); - } - - // Call this method to enable or disable the platform AEC. It modifies - // `shouldEnableAec` which is used in enable() where the actual state - // of the AEC effect is modified. Returns true if HW AEC is supported and - // false otherwise. - public boolean setAEC(boolean enable) { - Logging.d(TAG, "setAEC(" + enable + ")"); - if (!canUseAcousticEchoCanceler()) { - Logging.w(TAG, "Platform AEC is not supported"); - shouldEnableAec = false; - return false; - } - if (aec != null && (enable != shouldEnableAec)) { - Logging.e(TAG, "Platform AEC state can't be modified while recording"); - return false; - } - shouldEnableAec = enable; - return true; - } - - // Call this method to enable or disable the platform NS. It modifies - // `shouldEnableNs` which is used in enable() where the actual state - // of the NS effect is modified. Returns true if HW NS is supported and - // false otherwise. - public boolean setNS(boolean enable) { - Logging.d(TAG, "setNS(" + enable + ")"); - if (!canUseNoiseSuppressor()) { - Logging.w(TAG, "Platform NS is not supported"); - shouldEnableNs = false; - return false; - } - if (ns != null && (enable != shouldEnableNs)) { - Logging.e(TAG, "Platform NS state can't be modified while recording"); - return false; - } - shouldEnableNs = enable; - return true; - } - - public void enable(int audioSession) { - Logging.d(TAG, "enable(audioSession=" + audioSession + ")"); - assertTrue(aec == null); - assertTrue(ns == null); - - if (DEBUG) { - // Add logging of supported effects but filter out "VoIP effects", i.e., - // AEC, AEC and NS. Avoid calling AudioEffect.queryEffects() unless the - // DEBUG flag is set since we have seen crashes in this API. - for (Descriptor d : AudioEffect.queryEffects()) { - if (effectTypeIsVoIP(d.type)) { - Logging.d(TAG, "name: " + d.name + ", " - + "mode: " + d.connectMode + ", " - + "implementor: " + d.implementor + ", " - + "UUID: " + d.uuid); - } - } - } - - if (isAcousticEchoCancelerSupported()) { - // Create an AcousticEchoCanceler and attach it to the AudioRecord on - // the specified audio session. - aec = AcousticEchoCanceler.create(audioSession); - if (aec != null) { - boolean enabled = aec.getEnabled(); - boolean enable = shouldEnableAec && canUseAcousticEchoCanceler(); - if (aec.setEnabled(enable) != AudioEffect.SUCCESS) { - Logging.e(TAG, "Failed to set the AcousticEchoCanceler state"); - } - Logging.d(TAG, "AcousticEchoCanceler: was " + (enabled ? 
"enabled" : "disabled") - + ", enable: " + enable + ", is now: " - + (aec.getEnabled() ? "enabled" : "disabled")); - } else { - Logging.e(TAG, "Failed to create the AcousticEchoCanceler instance"); - } - } - - if (isNoiseSuppressorSupported()) { - // Create an NoiseSuppressor and attach it to the AudioRecord on the - // specified audio session. - ns = NoiseSuppressor.create(audioSession); - if (ns != null) { - boolean enabled = ns.getEnabled(); - boolean enable = shouldEnableNs && canUseNoiseSuppressor(); - if (ns.setEnabled(enable) != AudioEffect.SUCCESS) { - Logging.e(TAG, "Failed to set the NoiseSuppressor state"); - } - Logging.d(TAG, "NoiseSuppressor: was " + (enabled ? "enabled" : "disabled") + ", enable: " - + enable + ", is now: " + (ns.getEnabled() ? "enabled" : "disabled")); - } else { - Logging.e(TAG, "Failed to create the NoiseSuppressor instance"); - } - } - } - - // Releases all native audio effect resources. It is a good practice to - // release the effect engine when not in use as control can be returned - // to other applications or the native resources released. - public void release() { - Logging.d(TAG, "release"); - if (aec != null) { - aec.release(); - aec = null; - } - if (ns != null) { - ns.release(); - ns = null; - } - } - - // Returns true for effect types in `type` that are of "VoIP" types: - // Acoustic Echo Canceler (AEC) or Automatic Gain Control (AGC) or - // Noise Suppressor (NS). Note that, an extra check for support is needed - // in each comparison since some devices includes effects in the - // AudioEffect.Descriptor array that are actually not available on the device. - // As an example: Samsung Galaxy S6 includes an AGC in the descriptor but - // AutomaticGainControl.isAvailable() returns false. - private boolean effectTypeIsVoIP(UUID type) { - return (AudioEffect.EFFECT_TYPE_AEC.equals(type) && isAcousticEchoCancelerSupported()) - || (AudioEffect.EFFECT_TYPE_NS.equals(type) && isNoiseSuppressorSupported()); - } - - // Helper method which throws an exception when an assertion has failed. - private static void assertTrue(boolean condition) { - if (!condition) { - throw new AssertionError("Expected condition to be true"); - } - } - - // Returns the cached copy of the audio effects array, if available, or - // queries the operating system for the list of effects. - private static @Nullable Descriptor[] getAvailableEffects() { - if (cachedEffects != null) { - return cachedEffects; - } - // The caching is best effort only - if this method is called from several - // threads in parallel, they may end up doing the underlying OS call - // multiple times. It's normally only called on one thread so there's no - // real need to optimize for the multiple threads case. - cachedEffects = AudioEffect.queryEffects(); - return cachedEffects; - } - - // Returns true if an effect of the specified type is available. Functionally - // equivalent to (NoiseSuppressor`AutomaticGainControl`...).isAvailable(), but - // faster as it avoids the expensive OS call to enumerate effects. 
- private static boolean isEffectTypeAvailable(UUID effectType) { - Descriptor[] effects = getAvailableEffects(); - if (effects == null) { - return false; - } - for (Descriptor d : effects) { - if (d.type.equals(effectType)) { - return true; - } - } - return false; - } -} diff --git a/third_party/libwebrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java b/third_party/libwebrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java deleted file mode 100644 index 43c416f5b166..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java +++ /dev/null @@ -1,371 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -package org.webrtc.voiceengine; - -import android.content.Context; -import android.content.pm.PackageManager; -import android.media.AudioFormat; -import android.media.AudioManager; -import android.media.AudioRecord; -import android.media.AudioTrack; -import android.os.Build; -import androidx.annotation.Nullable; -import java.util.Timer; -import java.util.TimerTask; -import org.webrtc.ContextUtils; -import org.webrtc.Logging; - -// WebRtcAudioManager handles tasks that uses android.media.AudioManager. -// At construction, storeAudioParameters() is called and it retrieves -// fundamental audio parameters like native sample rate and number of channels. -// The result is then provided to the caller by nativeCacheAudioParameters(). -// It is also possible to call init() to set up the audio environment for best -// possible "VoIP performance". All settings done in init() are reverted by -// dispose(). This class can also be used without calling init() if the user -// prefers to set up the audio environment separately. However, it is -// recommended to always use AudioManager.MODE_IN_COMMUNICATION. -public class WebRtcAudioManager { - private static final boolean DEBUG = false; - - private static final String TAG = "WebRtcAudioManager"; - - // TODO(bugs.webrtc.org/8914): disabled by default until AAudio support has - // been completed. Goal is to always return false on Android O MR1 and higher. - private static final boolean blacklistDeviceForAAudioUsage = true; - - // Use mono as default for both audio directions. - private static boolean useStereoOutput; - private static boolean useStereoInput; - - private static boolean blacklistDeviceForOpenSLESUsage; - private static boolean blacklistDeviceForOpenSLESUsageIsOverridden; - - // Call this method to override the default list of blacklisted devices - // specified in WebRtcAudioUtils.BLACKLISTED_OPEN_SL_ES_MODELS. - // Allows an app to take control over which devices to exclude from using - // the OpenSL ES audio output path - // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. 
- @SuppressWarnings("NoSynchronizedMethodCheck") - public static synchronized void setBlacklistDeviceForOpenSLESUsage(boolean enable) { - blacklistDeviceForOpenSLESUsageIsOverridden = true; - blacklistDeviceForOpenSLESUsage = enable; - } - - // Call these methods to override the default mono audio modes for the specified direction(s) - // (input and/or output). - // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. - @SuppressWarnings("NoSynchronizedMethodCheck") - public static synchronized void setStereoOutput(boolean enable) { - Logging.w(TAG, "Overriding default output behavior: setStereoOutput(" + enable + ')'); - useStereoOutput = enable; - } - - // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. - @SuppressWarnings("NoSynchronizedMethodCheck") - public static synchronized void setStereoInput(boolean enable) { - Logging.w(TAG, "Overriding default input behavior: setStereoInput(" + enable + ')'); - useStereoInput = enable; - } - - // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. - @SuppressWarnings("NoSynchronizedMethodCheck") - public static synchronized boolean getStereoOutput() { - return useStereoOutput; - } - - // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. - @SuppressWarnings("NoSynchronizedMethodCheck") - public static synchronized boolean getStereoInput() { - return useStereoInput; - } - - // Default audio data format is PCM 16 bit per sample. - // Guaranteed to be supported by all devices. - private static final int BITS_PER_SAMPLE = 16; - - private static final int DEFAULT_FRAME_PER_BUFFER = 256; - - // Private utility class that periodically checks and logs the volume level - // of the audio stream that is currently controlled by the volume control. - // A timer triggers logs once every 30 seconds and the timer's associated - // thread is named "WebRtcVolumeLevelLoggerThread". 
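(Editorial sketch, not part of the patch: the static overrides above, OpenSL ES blacklist and stereo input/output, were meant to be applied by the embedding app before the audio device module was created. A minimal sketch of that call order using the public static setters from this removed file; the surrounding class name AudioConfigExample is hypothetical.)

    import org.webrtc.voiceengine.WebRtcAudioManager;

    final class AudioConfigExample {
      // Call once, before the native audio device module is created, so the
      // overrides are picked up when WebRtcAudioManager stores its parameters.
      static void applyOverrides() {
        // Skip the OpenSL ES output path regardless of the built-in model list.
        WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(true);
        // Switch both directions from the default mono to stereo.
        WebRtcAudioManager.setStereoOutput(true);
        WebRtcAudioManager.setStereoInput(true);
      }
    }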
- private static class VolumeLogger { - private static final String THREAD_NAME = "WebRtcVolumeLevelLoggerThread"; - private static final int TIMER_PERIOD_IN_SECONDS = 30; - - private final AudioManager audioManager; - private @Nullable Timer timer; - - public VolumeLogger(AudioManager audioManager) { - this.audioManager = audioManager; - } - - public void start() { - timer = new Timer(THREAD_NAME); - timer.schedule(new LogVolumeTask(audioManager.getStreamMaxVolume(AudioManager.STREAM_RING), - audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL)), - 0, TIMER_PERIOD_IN_SECONDS * 1000); - } - - private class LogVolumeTask extends TimerTask { - private final int maxRingVolume; - private final int maxVoiceCallVolume; - - LogVolumeTask(int maxRingVolume, int maxVoiceCallVolume) { - this.maxRingVolume = maxRingVolume; - this.maxVoiceCallVolume = maxVoiceCallVolume; - } - - @Override - public void run() { - final int mode = audioManager.getMode(); - if (mode == AudioManager.MODE_RINGTONE) { - Logging.d(TAG, "STREAM_RING stream volume: " - + audioManager.getStreamVolume(AudioManager.STREAM_RING) + " (max=" - + maxRingVolume + ")"); - } else if (mode == AudioManager.MODE_IN_COMMUNICATION) { - Logging.d(TAG, "VOICE_CALL stream volume: " - + audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL) + " (max=" - + maxVoiceCallVolume + ")"); - } - } - } - - private void stop() { - if (timer != null) { - timer.cancel(); - timer = null; - } - } - } - - private final long nativeAudioManager; - private final AudioManager audioManager; - - private boolean initialized; - private int nativeSampleRate; - private int nativeChannels; - - private boolean hardwareAEC; - private boolean hardwareAGC; - private boolean hardwareNS; - private boolean lowLatencyOutput; - private boolean lowLatencyInput; - private boolean proAudio; - private boolean aAudio; - private int sampleRate; - private int outputChannels; - private int inputChannels; - private int outputBufferSize; - private int inputBufferSize; - - private final VolumeLogger volumeLogger; - - WebRtcAudioManager(long nativeAudioManager) { - Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo()); - this.nativeAudioManager = nativeAudioManager; - audioManager = - (AudioManager) ContextUtils.getApplicationContext().getSystemService(Context.AUDIO_SERVICE); - if (DEBUG) { - WebRtcAudioUtils.logDeviceInfo(TAG); - } - volumeLogger = new VolumeLogger(audioManager); - storeAudioParameters(); - nativeCacheAudioParameters(sampleRate, outputChannels, inputChannels, hardwareAEC, hardwareAGC, - hardwareNS, lowLatencyOutput, lowLatencyInput, proAudio, aAudio, outputBufferSize, - inputBufferSize, nativeAudioManager); - WebRtcAudioUtils.logAudioState(TAG); - } - - private boolean init() { - Logging.d(TAG, "init" + WebRtcAudioUtils.getThreadInfo()); - if (initialized) { - return true; - } - Logging.d(TAG, "audio mode is: " - + WebRtcAudioUtils.modeToString(audioManager.getMode())); - initialized = true; - volumeLogger.start(); - return true; - } - - private void dispose() { - Logging.d(TAG, "dispose" + WebRtcAudioUtils.getThreadInfo()); - if (!initialized) { - return; - } - volumeLogger.stop(); - } - - private boolean isCommunicationModeEnabled() { - return (audioManager.getMode() == AudioManager.MODE_IN_COMMUNICATION); - } - - private boolean isDeviceBlacklistedForOpenSLESUsage() { - boolean blacklisted = blacklistDeviceForOpenSLESUsageIsOverridden - ? 
blacklistDeviceForOpenSLESUsage - : WebRtcAudioUtils.deviceIsBlacklistedForOpenSLESUsage(); - if (blacklisted) { - Logging.d(TAG, Build.MODEL + " is blacklisted for OpenSL ES usage!"); - } - return blacklisted; - } - - private void storeAudioParameters() { - outputChannels = getStereoOutput() ? 2 : 1; - inputChannels = getStereoInput() ? 2 : 1; - sampleRate = getNativeOutputSampleRate(); - hardwareAEC = isAcousticEchoCancelerSupported(); - // TODO(henrika): use of hardware AGC is no longer supported. Currently - // hardcoded to false. To be removed. - hardwareAGC = false; - hardwareNS = isNoiseSuppressorSupported(); - lowLatencyOutput = isLowLatencyOutputSupported(); - lowLatencyInput = isLowLatencyInputSupported(); - proAudio = isProAudioSupported(); - aAudio = isAAudioSupported(); - outputBufferSize = lowLatencyOutput ? getLowLatencyOutputFramesPerBuffer() - : getMinOutputFrameSize(sampleRate, outputChannels); - inputBufferSize = lowLatencyInput ? getLowLatencyInputFramesPerBuffer() - : getMinInputFrameSize(sampleRate, inputChannels); - } - - // Gets the current earpiece state. - private boolean hasEarpiece() { - return ContextUtils.getApplicationContext().getPackageManager().hasSystemFeature( - PackageManager.FEATURE_TELEPHONY); - } - - // Returns true if low-latency audio output is supported. - private boolean isLowLatencyOutputSupported() { - return ContextUtils.getApplicationContext().getPackageManager().hasSystemFeature( - PackageManager.FEATURE_AUDIO_LOW_LATENCY); - } - - // Returns true if low-latency audio input is supported. - // TODO(henrika): remove the hardcoded false return value when OpenSL ES - // input performance has been evaluated and tested more. - public boolean isLowLatencyInputSupported() { - // TODO(henrika): investigate if some sort of device list is needed here - // as well. The NDK doc states that: "As of API level 21, lower latency - // audio input is supported on select devices. To take advantage of this - // feature, first confirm that lower latency output is available". - return isLowLatencyOutputSupported(); - } - - // Returns true if the device has professional audio level of functionality - // and therefore supports the lowest possible round-trip latency. - private boolean isProAudioSupported() { - return Build.VERSION.SDK_INT >= 23 - && ContextUtils.getApplicationContext().getPackageManager().hasSystemFeature( - PackageManager.FEATURE_AUDIO_PRO); - } - - // AAudio is supported on Androio Oreo MR1 (API 27) and higher. - // TODO(bugs.webrtc.org/8914): currently disabled by default. - private boolean isAAudioSupported() { - if (blacklistDeviceForAAudioUsage) { - Logging.w(TAG, "AAudio support is currently disabled on all devices!"); - } - return !blacklistDeviceForAAudioUsage && Build.VERSION.SDK_INT >= 27; - } - - // Returns the native output sample rate for this device's output stream. - private int getNativeOutputSampleRate() { - // Override this if we're running on an old emulator image which only - // supports 8 kHz and doesn't support PROPERTY_OUTPUT_SAMPLE_RATE. - if (WebRtcAudioUtils.runningOnEmulator()) { - Logging.d(TAG, "Running emulator, overriding sample rate to 8 kHz."); - return 8000; - } - // Default can be overriden by WebRtcAudioUtils.setDefaultSampleRateHz(). - // If so, use that value and return here. 
- if (WebRtcAudioUtils.isDefaultSampleRateOverridden()) { - Logging.d(TAG, "Default sample rate is overriden to " - + WebRtcAudioUtils.getDefaultSampleRateHz() + " Hz"); - return WebRtcAudioUtils.getDefaultSampleRateHz(); - } - // No overrides available. Deliver best possible estimate based on default - // Android AudioManager APIs. - final int sampleRateHz = getSampleRateForApiLevel(); - Logging.d(TAG, "Sample rate is set to " + sampleRateHz + " Hz"); - return sampleRateHz; - } - - private int getSampleRateForApiLevel() { - String sampleRateString = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE); - return (sampleRateString == null) ? WebRtcAudioUtils.getDefaultSampleRateHz() - : Integer.parseInt(sampleRateString); - } - - // Returns the native output buffer size for low-latency output streams. - private int getLowLatencyOutputFramesPerBuffer() { - assertTrue(isLowLatencyOutputSupported()); - String framesPerBuffer = - audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER); - return framesPerBuffer == null ? DEFAULT_FRAME_PER_BUFFER : Integer.parseInt(framesPerBuffer); - } - - // Returns true if the device supports an audio effect (AEC or NS). - // Four conditions must be fulfilled if functions are to return true: - // 1) the platform must support the built-in (HW) effect, - // 2) explicit use (override) of a WebRTC based version must not be set, - // 3) the device must not be blacklisted for use of the effect, and - // 4) the UUID of the effect must be approved (some UUIDs can be excluded). - private static boolean isAcousticEchoCancelerSupported() { - return WebRtcAudioEffects.canUseAcousticEchoCanceler(); - } - private static boolean isNoiseSuppressorSupported() { - return WebRtcAudioEffects.canUseNoiseSuppressor(); - } - - // Returns the minimum output buffer size for Java based audio (AudioTrack). - // This size can also be used for OpenSL ES implementations on devices that - // lacks support of low-latency output. - private static int getMinOutputFrameSize(int sampleRateInHz, int numChannels) { - final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8); - final int channelConfig = - (numChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO); - return AudioTrack.getMinBufferSize( - sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT) - / bytesPerFrame; - } - - // Returns the native input buffer size for input streams. - private int getLowLatencyInputFramesPerBuffer() { - assertTrue(isLowLatencyInputSupported()); - return getLowLatencyOutputFramesPerBuffer(); - } - - // Returns the minimum input buffer size for Java based audio (AudioRecord). - // This size can calso be used for OpenSL ES implementations on devices that - // lacks support of low-latency input. - private static int getMinInputFrameSize(int sampleRateInHz, int numChannels) { - final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8); - final int channelConfig = - (numChannels == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO); - return AudioRecord.getMinBufferSize( - sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT) - / bytesPerFrame; - } - - // Helper method which throws an exception when an assertion has failed. 
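(Editorial sketch, not part of the patch: the helpers above derive the native output sample rate from AudioManager.getProperty() and convert the platform's minimum buffer size from bytes into frames. The same two computations in isolation, assuming 16-bit PCM; the class and method names are illustrative.)

    import android.content.Context;
    import android.media.AudioFormat;
    import android.media.AudioManager;
    import android.media.AudioTrack;

    final class AudioParamsExample {
      // Preferred output sample rate in Hz, falling back to a default when the
      // device does not report PROPERTY_OUTPUT_SAMPLE_RATE.
      static int nativeOutputSampleRate(Context context, int defaultHz) {
        AudioManager am =
            (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
        String rate = am.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
        return (rate == null) ? defaultHz : Integer.parseInt(rate);
      }

      // Minimum AudioTrack buffer expressed in frames rather than bytes.
      static int minOutputFrames(int sampleRateHz, int channels) {
        int bytesPerFrame = channels * 2; // 16-bit PCM: 2 bytes per sample
        int channelConfig = (channels == 1)
            ? AudioFormat.CHANNEL_OUT_MONO
            : AudioFormat.CHANNEL_OUT_STEREO;
        int minBytes = AudioTrack.getMinBufferSize(
            sampleRateHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
        return minBytes / bytesPerFrame;
      }
    }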
- private static void assertTrue(boolean condition) { - if (!condition) { - throw new AssertionError("Expected condition to be true"); - } - } - - private native void nativeCacheAudioParameters(int sampleRate, int outputChannels, - int inputChannels, boolean hardwareAEC, boolean hardwareAGC, boolean hardwareNS, - boolean lowLatencyOutput, boolean lowLatencyInput, boolean proAudio, boolean aAudio, - int outputBufferSize, int inputBufferSize, long nativeAudioManager); -} diff --git a/third_party/libwebrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java b/third_party/libwebrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java deleted file mode 100644 index 8eab01cd69b6..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java +++ /dev/null @@ -1,409 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -package org.webrtc.voiceengine; - -import android.media.AudioFormat; -import android.media.AudioRecord; -import android.media.MediaRecorder.AudioSource; -import android.os.Build; -import android.os.Process; -import androidx.annotation.Nullable; -import java.lang.System; -import java.nio.ByteBuffer; -import java.util.Arrays; -import java.util.concurrent.TimeUnit; -import org.webrtc.Logging; -import org.webrtc.ThreadUtils; - -public class WebRtcAudioRecord { - private static final boolean DEBUG = false; - - private static final String TAG = "WebRtcAudioRecord"; - - // Default audio data format is PCM 16 bit per sample. - // Guaranteed to be supported by all devices. - private static final int BITS_PER_SAMPLE = 16; - - // Requested size of each recorded buffer provided to the client. - private static final int CALLBACK_BUFFER_SIZE_MS = 10; - - // Average number of callbacks per second. - private static final int BUFFERS_PER_SECOND = 1000 / CALLBACK_BUFFER_SIZE_MS; - - // We ask for a native buffer size of BUFFER_SIZE_FACTOR * (minimum required - // buffer size). The extra space is allocated to guard against glitches under - // high load. - private static final int BUFFER_SIZE_FACTOR = 2; - - // The AudioRecordJavaThread is allowed to wait for successful call to join() - // but the wait times out afther this amount of time. - private static final long AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS = 2000; - - private static final int DEFAULT_AUDIO_SOURCE = getDefaultAudioSource(); - private static int audioSource = DEFAULT_AUDIO_SOURCE; - - private final long nativeAudioRecord; - - private @Nullable WebRtcAudioEffects effects; - - private ByteBuffer byteBuffer; - - private @Nullable AudioRecord audioRecord; - private @Nullable AudioRecordThread audioThread; - - private static volatile boolean microphoneMute; - private byte[] emptyBytes; - - // Audio recording error handler functions. 
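(Editorial sketch, not part of the patch: the CALLBACK_BUFFER_SIZE_MS and BUFFERS_PER_SECOND constants above fix the record callback cadence at 10 ms, and initRecording() later sizes its direct ByteBuffer from them. The arithmetic in isolation, with illustrative numbers in the comments; the class and method names are hypothetical.)

    final class CallbackBufferExample {
      private static final int CALLBACK_BUFFER_SIZE_MS = 10;
      private static final int BUFFERS_PER_SECOND = 1000 / CALLBACK_BUFFER_SIZE_MS;

      // Bytes needed for one 10 ms block of 16-bit PCM audio; this is the
      // capacity the direct ByteBuffer is allocated with.
      static int bytesPer10ms(int sampleRateHz, int channels) {
        int bytesPerFrame = channels * 2;                        // 16-bit samples
        int framesPerBuffer = sampleRateHz / BUFFERS_PER_SECOND; // 480 at 48 kHz
        return bytesPerFrame * framesPerBuffer;                  // 960 bytes, mono 48 kHz
      }
    }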
- public enum AudioRecordStartErrorCode { - AUDIO_RECORD_START_EXCEPTION, - AUDIO_RECORD_START_STATE_MISMATCH, - } - - public static interface WebRtcAudioRecordErrorCallback { - void onWebRtcAudioRecordInitError(String errorMessage); - void onWebRtcAudioRecordStartError(AudioRecordStartErrorCode errorCode, String errorMessage); - void onWebRtcAudioRecordError(String errorMessage); - } - - private static @Nullable WebRtcAudioRecordErrorCallback errorCallback; - - public static void setErrorCallback(WebRtcAudioRecordErrorCallback errorCallback) { - Logging.d(TAG, "Set error callback"); - WebRtcAudioRecord.errorCallback = errorCallback; - } - - /** - * Contains audio sample information. Object is passed using {@link - * WebRtcAudioRecord.WebRtcAudioRecordSamplesReadyCallback} - */ - public static class AudioSamples { - /** See {@link AudioRecord#getAudioFormat()} */ - private final int audioFormat; - /** See {@link AudioRecord#getChannelCount()} */ - private final int channelCount; - /** See {@link AudioRecord#getSampleRate()} */ - private final int sampleRate; - - private final byte[] data; - - private AudioSamples(AudioRecord audioRecord, byte[] data) { - this.audioFormat = audioRecord.getAudioFormat(); - this.channelCount = audioRecord.getChannelCount(); - this.sampleRate = audioRecord.getSampleRate(); - this.data = data; - } - - public int getAudioFormat() { - return audioFormat; - } - - public int getChannelCount() { - return channelCount; - } - - public int getSampleRate() { - return sampleRate; - } - - public byte[] getData() { - return data; - } - } - - /** Called when new audio samples are ready. This should only be set for debug purposes */ - public static interface WebRtcAudioRecordSamplesReadyCallback { - void onWebRtcAudioRecordSamplesReady(AudioSamples samples); - } - - private static @Nullable WebRtcAudioRecordSamplesReadyCallback audioSamplesReadyCallback; - - public static void setOnAudioSamplesReady(WebRtcAudioRecordSamplesReadyCallback callback) { - audioSamplesReadyCallback = callback; - } - - /** - * Audio thread which keeps calling ByteBuffer.read() waiting for audio - * to be recorded. Feeds recorded data to the native counterpart as a - * periodic sequence of callbacks using DataIsRecorded(). - * This thread uses a Process.THREAD_PRIORITY_URGENT_AUDIO priority. - */ - private class AudioRecordThread extends Thread { - private volatile boolean keepAlive = true; - - public AudioRecordThread(String name) { - super(name); - } - - // TODO(titovartem) make correct fix during webrtc:9175 - @SuppressWarnings("ByteBufferBackingArray") - @Override - public void run() { - Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO); - Logging.d(TAG, "AudioRecordThread" + WebRtcAudioUtils.getThreadInfo()); - assertTrue(audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING); - - long lastTime = System.nanoTime(); - while (keepAlive) { - int bytesRead = audioRecord.read(byteBuffer, byteBuffer.capacity()); - if (bytesRead == byteBuffer.capacity()) { - if (microphoneMute) { - byteBuffer.clear(); - byteBuffer.put(emptyBytes); - } - // It's possible we've been shut down during the read, and stopRecording() tried and - // failed to join this thread. To be a bit safer, try to avoid calling any native methods - // in case they've been unregistered after stopRecording() returned. - if (keepAlive) { - nativeDataIsRecorded(bytesRead, nativeAudioRecord); - } - if (audioSamplesReadyCallback != null) { - // Copy the entire byte buffer array. 
Assume that the start of the byteBuffer is - // at index 0. - byte[] data = Arrays.copyOf(byteBuffer.array(), byteBuffer.capacity()); - audioSamplesReadyCallback.onWebRtcAudioRecordSamplesReady( - new AudioSamples(audioRecord, data)); - } - } else { - String errorMessage = "AudioRecord.read failed: " + bytesRead; - Logging.e(TAG, errorMessage); - if (bytesRead == AudioRecord.ERROR_INVALID_OPERATION) { - keepAlive = false; - reportWebRtcAudioRecordError(errorMessage); - } - } - if (DEBUG) { - long nowTime = System.nanoTime(); - long durationInMs = TimeUnit.NANOSECONDS.toMillis((nowTime - lastTime)); - lastTime = nowTime; - Logging.d(TAG, "bytesRead[" + durationInMs + "] " + bytesRead); - } - } - - try { - if (audioRecord != null) { - audioRecord.stop(); - } - } catch (IllegalStateException e) { - Logging.e(TAG, "AudioRecord.stop failed: " + e.getMessage()); - } - } - - // Stops the inner thread loop and also calls AudioRecord.stop(). - // Does not block the calling thread. - public void stopThread() { - Logging.d(TAG, "stopThread"); - keepAlive = false; - } - } - - WebRtcAudioRecord(long nativeAudioRecord) { - Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo()); - this.nativeAudioRecord = nativeAudioRecord; - if (DEBUG) { - WebRtcAudioUtils.logDeviceInfo(TAG); - } - effects = WebRtcAudioEffects.create(); - } - - private boolean enableBuiltInAEC(boolean enable) { - Logging.d(TAG, "enableBuiltInAEC(" + enable + ')'); - if (effects == null) { - Logging.e(TAG, "Built-in AEC is not supported on this platform"); - return false; - } - return effects.setAEC(enable); - } - - private boolean enableBuiltInNS(boolean enable) { - Logging.d(TAG, "enableBuiltInNS(" + enable + ')'); - if (effects == null) { - Logging.e(TAG, "Built-in NS is not supported on this platform"); - return false; - } - return effects.setNS(enable); - } - - private int initRecording(int sampleRate, int channels) { - Logging.d(TAG, "initRecording(sampleRate=" + sampleRate + ", channels=" + channels + ")"); - if (audioRecord != null) { - reportWebRtcAudioRecordInitError("InitRecording called twice without StopRecording."); - return -1; - } - final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8); - final int framesPerBuffer = sampleRate / BUFFERS_PER_SECOND; - byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * framesPerBuffer); - Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity()); - emptyBytes = new byte[byteBuffer.capacity()]; - // Rather than passing the ByteBuffer with every callback (requiring - // the potentially expensive GetDirectBufferAddress) we simply have the - // the native class cache the address to the memory once. - nativeCacheDirectBufferAddress(byteBuffer, nativeAudioRecord); - - // Get the minimum buffer size required for the successful creation of - // an AudioRecord object, in byte units. - // Note that this size doesn't guarantee a smooth recording under load. - final int channelConfig = channelCountToConfiguration(channels); - int minBufferSize = - AudioRecord.getMinBufferSize(sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT); - if (minBufferSize == AudioRecord.ERROR || minBufferSize == AudioRecord.ERROR_BAD_VALUE) { - reportWebRtcAudioRecordInitError("AudioRecord.getMinBufferSize failed: " + minBufferSize); - return -1; - } - Logging.d(TAG, "AudioRecord.getMinBufferSize: " + minBufferSize); - - // Use a larger buffer size than the minimum required when creating the - // AudioRecord instance to ensure smooth recording under load. 
It has been - // verified that it does not increase the actual recording latency. - int bufferSizeInBytes = Math.max(BUFFER_SIZE_FACTOR * minBufferSize, byteBuffer.capacity()); - Logging.d(TAG, "bufferSizeInBytes: " + bufferSizeInBytes); - try { - audioRecord = new AudioRecord(audioSource, sampleRate, channelConfig, - AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes); - } catch (IllegalArgumentException e) { - reportWebRtcAudioRecordInitError("AudioRecord ctor error: " + e.getMessage()); - releaseAudioResources(); - return -1; - } - if (audioRecord == null || audioRecord.getState() != AudioRecord.STATE_INITIALIZED) { - reportWebRtcAudioRecordInitError("Failed to create a new AudioRecord instance"); - releaseAudioResources(); - return -1; - } - if (effects != null) { - effects.enable(audioRecord.getAudioSessionId()); - } - logMainParameters(); - logMainParametersExtended(); - return framesPerBuffer; - } - - private boolean startRecording() { - Logging.d(TAG, "startRecording"); - assertTrue(audioRecord != null); - assertTrue(audioThread == null); - try { - audioRecord.startRecording(); - } catch (IllegalStateException e) { - reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_EXCEPTION, - "AudioRecord.startRecording failed: " + e.getMessage()); - return false; - } - if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) { - reportWebRtcAudioRecordStartError( - AudioRecordStartErrorCode.AUDIO_RECORD_START_STATE_MISMATCH, - "AudioRecord.startRecording failed - incorrect state :" - + audioRecord.getRecordingState()); - return false; - } - audioThread = new AudioRecordThread("AudioRecordJavaThread"); - audioThread.start(); - return true; - } - - private boolean stopRecording() { - Logging.d(TAG, "stopRecording"); - assertTrue(audioThread != null); - audioThread.stopThread(); - if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) { - Logging.e(TAG, "Join of AudioRecordJavaThread timed out"); - WebRtcAudioUtils.logAudioState(TAG); - } - audioThread = null; - if (effects != null) { - effects.release(); - } - releaseAudioResources(); - return true; - } - - private void logMainParameters() { - Logging.d(TAG, "AudioRecord: " - + "session ID: " + audioRecord.getAudioSessionId() + ", " - + "channels: " + audioRecord.getChannelCount() + ", " - + "sample rate: " + audioRecord.getSampleRate()); - } - - private void logMainParametersExtended() { - if (Build.VERSION.SDK_INT >= 23) { - Logging.d(TAG, "AudioRecord: " - // The frame count of the native AudioRecord buffer. - + "buffer size in frames: " + audioRecord.getBufferSizeInFrames()); - } - } - - // Helper method which throws an exception when an assertion has failed. - private static void assertTrue(boolean condition) { - if (!condition) { - throw new AssertionError("Expected condition to be true"); - } - } - - private int channelCountToConfiguration(int channels) { - return (channels == 1 ? 
AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO); - } - - private native void nativeCacheDirectBufferAddress(ByteBuffer byteBuffer, long nativeAudioRecord); - - private native void nativeDataIsRecorded(int bytes, long nativeAudioRecord); - - @SuppressWarnings("NoSynchronizedMethodCheck") - public static synchronized void setAudioSource(int source) { - Logging.w(TAG, "Audio source is changed from: " + audioSource - + " to " + source); - audioSource = source; - } - - private static int getDefaultAudioSource() { - return AudioSource.VOICE_COMMUNICATION; - } - - // Sets all recorded samples to zero if `mute` is true, i.e., ensures that - // the microphone is muted. - public static void setMicrophoneMute(boolean mute) { - Logging.w(TAG, "setMicrophoneMute(" + mute + ")"); - microphoneMute = mute; - } - - // Releases the native AudioRecord resources. - private void releaseAudioResources() { - Logging.d(TAG, "releaseAudioResources"); - if (audioRecord != null) { - audioRecord.release(); - audioRecord = null; - } - } - - private void reportWebRtcAudioRecordInitError(String errorMessage) { - Logging.e(TAG, "Init recording error: " + errorMessage); - WebRtcAudioUtils.logAudioState(TAG); - if (errorCallback != null) { - errorCallback.onWebRtcAudioRecordInitError(errorMessage); - } - } - - private void reportWebRtcAudioRecordStartError( - AudioRecordStartErrorCode errorCode, String errorMessage) { - Logging.e(TAG, "Start recording error: " + errorCode + ". " + errorMessage); - WebRtcAudioUtils.logAudioState(TAG); - if (errorCallback != null) { - errorCallback.onWebRtcAudioRecordStartError(errorCode, errorMessage); - } - } - - private void reportWebRtcAudioRecordError(String errorMessage) { - Logging.e(TAG, "Run-time recording error: " + errorMessage); - WebRtcAudioUtils.logAudioState(TAG); - if (errorCallback != null) { - errorCallback.onWebRtcAudioRecordError(errorMessage); - } - } -} diff --git a/third_party/libwebrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java b/third_party/libwebrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java deleted file mode 100644 index 3e1875c3d679..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java +++ /dev/null @@ -1,494 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -package org.webrtc.voiceengine; - -import android.content.Context; -import android.media.AudioAttributes; -import android.media.AudioFormat; -import android.media.AudioManager; -import android.media.AudioTrack; -import android.os.Build; -import android.os.Process; -import androidx.annotation.Nullable; -import java.lang.Thread; -import java.nio.ByteBuffer; -import org.webrtc.ContextUtils; -import org.webrtc.Logging; -import org.webrtc.ThreadUtils; - -public class WebRtcAudioTrack { - private static final boolean DEBUG = false; - - private static final String TAG = "WebRtcAudioTrack"; - - // Default audio data format is PCM 16 bit per sample. - // Guaranteed to be supported by all devices. 
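(Editorial sketch, not part of the patch: the WebRtcAudioRecord class removed above exposed a handful of static hooks, audio source, microphone mute, and error callbacks, that the embedding app could set before recording started. A minimal sketch using only those public static methods; the class name RecordSetupExample and the empty callback bodies are illustrative.)

    import android.media.MediaRecorder.AudioSource;
    import org.webrtc.voiceengine.WebRtcAudioRecord;

    final class RecordSetupExample {
      static void configureRecording() {
        // Record from the plain microphone source instead of the default
        // VOICE_COMMUNICATION source.
        WebRtcAudioRecord.setAudioSource(AudioSource.MIC);
        // Feed silence to the native layer while the app-level mute is active.
        WebRtcAudioRecord.setMicrophoneMute(true);
        // Surface init/start/run-time recording errors to the app.
        WebRtcAudioRecord.setErrorCallback(
            new WebRtcAudioRecord.WebRtcAudioRecordErrorCallback() {
              @Override public void onWebRtcAudioRecordInitError(String msg) {}
              @Override public void onWebRtcAudioRecordStartError(
                  WebRtcAudioRecord.AudioRecordStartErrorCode code, String msg) {}
              @Override public void onWebRtcAudioRecordError(String msg) {}
            });
      }
    }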
- private static final int BITS_PER_SAMPLE = 16; - - // Requested size of each recorded buffer provided to the client. - private static final int CALLBACK_BUFFER_SIZE_MS = 10; - - // Average number of callbacks per second. - private static final int BUFFERS_PER_SECOND = 1000 / CALLBACK_BUFFER_SIZE_MS; - - // The AudioTrackThread is allowed to wait for successful call to join() - // but the wait times out afther this amount of time. - private static final long AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS = 2000; - - // By default, WebRTC creates audio tracks with a usage attribute - // corresponding to voice communications, such as telephony or VoIP. - private static final int DEFAULT_USAGE = AudioAttributes.USAGE_VOICE_COMMUNICATION; - private static int usageAttribute = DEFAULT_USAGE; - - // This method overrides the default usage attribute and allows the user - // to set it to something else than AudioAttributes.USAGE_VOICE_COMMUNICATION. - // NOTE: calling this method will most likely break existing VoIP tuning. - // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. - @SuppressWarnings("NoSynchronizedMethodCheck") - public static synchronized void setAudioTrackUsageAttribute(int usage) { - Logging.w(TAG, "Default usage attribute is changed from: " - + DEFAULT_USAGE + " to " + usage); - usageAttribute = usage; - } - - private final long nativeAudioTrack; - private final AudioManager audioManager; - private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker(); - - private ByteBuffer byteBuffer; - - private @Nullable AudioTrack audioTrack; - private @Nullable AudioTrackThread audioThread; - - // Samples to be played are replaced by zeros if `speakerMute` is set to true. - // Can be used to ensure that the speaker is fully muted. - private static volatile boolean speakerMute; - private byte[] emptyBytes; - - // Audio playout/track error handler functions. - public enum AudioTrackStartErrorCode { - AUDIO_TRACK_START_EXCEPTION, - AUDIO_TRACK_START_STATE_MISMATCH, - } - - @Deprecated - public static interface WebRtcAudioTrackErrorCallback { - void onWebRtcAudioTrackInitError(String errorMessage); - void onWebRtcAudioTrackStartError(String errorMessage); - void onWebRtcAudioTrackError(String errorMessage); - } - - // TODO(henrika): upgrade all clients to use this new interface instead. - public static interface ErrorCallback { - void onWebRtcAudioTrackInitError(String errorMessage); - void onWebRtcAudioTrackStartError(AudioTrackStartErrorCode errorCode, String errorMessage); - void onWebRtcAudioTrackError(String errorMessage); - } - - private static @Nullable WebRtcAudioTrackErrorCallback errorCallbackOld; - private static @Nullable ErrorCallback errorCallback; - - @Deprecated - public static void setErrorCallback(WebRtcAudioTrackErrorCallback errorCallback) { - Logging.d(TAG, "Set error callback (deprecated"); - WebRtcAudioTrack.errorCallbackOld = errorCallback; - } - - public static void setErrorCallback(ErrorCallback errorCallback) { - Logging.d(TAG, "Set extended error callback"); - WebRtcAudioTrack.errorCallback = errorCallback; - } - - /** - * Audio thread which keeps calling AudioTrack.write() to stream audio. - * Data is periodically acquired from the native WebRTC layer using the - * nativeGetPlayoutData callback function. - * This thread uses a Process.THREAD_PRIORITY_URGENT_AUDIO priority. 
- */ - private class AudioTrackThread extends Thread { - private volatile boolean keepAlive = true; - - public AudioTrackThread(String name) { - super(name); - } - - @Override - public void run() { - Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO); - Logging.d(TAG, "AudioTrackThread" + WebRtcAudioUtils.getThreadInfo()); - assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING); - - // Fixed size in bytes of each 10ms block of audio data that we ask for - // using callbacks to the native WebRTC client. - final int sizeInBytes = byteBuffer.capacity(); - - while (keepAlive) { - // Get 10ms of PCM data from the native WebRTC client. Audio data is - // written into the common ByteBuffer using the address that was - // cached at construction. - nativeGetPlayoutData(sizeInBytes, nativeAudioTrack); - // Write data until all data has been written to the audio sink. - // Upon return, the buffer position will have been advanced to reflect - // the amount of data that was successfully written to the AudioTrack. - assertTrue(sizeInBytes <= byteBuffer.remaining()); - if (speakerMute) { - byteBuffer.clear(); - byteBuffer.put(emptyBytes); - byteBuffer.position(0); - } - int bytesWritten = audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING); - if (bytesWritten != sizeInBytes) { - Logging.e(TAG, "AudioTrack.write played invalid number of bytes: " + bytesWritten); - // If a write() returns a negative value, an error has occurred. - // Stop playing and report an error in this case. - if (bytesWritten < 0) { - keepAlive = false; - reportWebRtcAudioTrackError("AudioTrack.write failed: " + bytesWritten); - } - } - // The byte buffer must be rewinded since byteBuffer.position() is - // increased at each call to AudioTrack.write(). If we don't do this, - // next call to AudioTrack.write() will fail. - byteBuffer.rewind(); - - // TODO(henrika): it is possible to create a delay estimate here by - // counting number of written frames and subtracting the result from - // audioTrack.getPlaybackHeadPosition(). - } - - // Stops playing the audio data. Since the instance was created in - // MODE_STREAM mode, audio will stop playing after the last buffer that - // was written has been played. - if (audioTrack != null) { - Logging.d(TAG, "Calling AudioTrack.stop..."); - try { - audioTrack.stop(); - Logging.d(TAG, "AudioTrack.stop is done."); - } catch (IllegalStateException e) { - Logging.e(TAG, "AudioTrack.stop failed: " + e.getMessage()); - } - } - } - - // Stops the inner thread loop which results in calling AudioTrack.stop(). - // Does not block the calling thread. 
- public void stopThread() { - Logging.d(TAG, "stopThread"); - keepAlive = false; - } - } - - WebRtcAudioTrack(long nativeAudioTrack) { - threadChecker.checkIsOnValidThread(); - Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo()); - this.nativeAudioTrack = nativeAudioTrack; - audioManager = - (AudioManager) ContextUtils.getApplicationContext().getSystemService(Context.AUDIO_SERVICE); - if (DEBUG) { - WebRtcAudioUtils.logDeviceInfo(TAG); - } - } - - private int initPlayout(int sampleRate, int channels, double bufferSizeFactor) { - threadChecker.checkIsOnValidThread(); - Logging.d(TAG, - "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels - + ", bufferSizeFactor=" + bufferSizeFactor + ")"); - final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8); - byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND)); - Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity()); - emptyBytes = new byte[byteBuffer.capacity()]; - // Rather than passing the ByteBuffer with every callback (requiring - // the potentially expensive GetDirectBufferAddress) we simply have the - // the native class cache the address to the memory once. - nativeCacheDirectBufferAddress(byteBuffer, nativeAudioTrack); - - // Get the minimum buffer size required for the successful creation of an - // AudioTrack object to be created in the MODE_STREAM mode. - // Note that this size doesn't guarantee a smooth playback under load. - final int channelConfig = channelCountToConfiguration(channels); - final int minBufferSizeInBytes = (int) (AudioTrack.getMinBufferSize(sampleRate, channelConfig, - AudioFormat.ENCODING_PCM_16BIT) - * bufferSizeFactor); - Logging.d(TAG, "minBufferSizeInBytes: " + minBufferSizeInBytes); - // For the streaming mode, data must be written to the audio sink in - // chunks of size (given by byteBuffer.capacity()) less than or equal - // to the total buffer size `minBufferSizeInBytes`. But, we have seen - // reports of "getMinBufferSize(): error querying hardware". Hence, it - // can happen that `minBufferSizeInBytes` contains an invalid value. - if (minBufferSizeInBytes < byteBuffer.capacity()) { - reportWebRtcAudioTrackInitError("AudioTrack.getMinBufferSize returns an invalid value."); - return -1; - } - - // Ensure that prevision audio session was stopped correctly before trying - // to create a new AudioTrack. - if (audioTrack != null) { - reportWebRtcAudioTrackInitError("Conflict with existing AudioTrack."); - return -1; - } - try { - // Create an AudioTrack object and initialize its associated audio buffer. - // The size of this buffer determines how long an AudioTrack can play - // before running out of data. - // As we are on API level 21 or higher, it is possible to use a special AudioTrack - // constructor that uses AudioAttributes and AudioFormat as input. It allows us to - // supersede the notion of stream types for defining the behavior of audio playback, - // and to allow certain platforms or routing policies to use this information for more - // refined volume or routing decisions. - audioTrack = createAudioTrack(sampleRate, channelConfig, minBufferSizeInBytes); - } catch (IllegalArgumentException e) { - reportWebRtcAudioTrackInitError(e.getMessage()); - releaseAudioResources(); - return -1; - } - - // It can happen that an AudioTrack is created but it was not successfully - // initialized upon creation. Seems to be the case e.g. when the maximum - // number of globally available audio tracks is exceeded. 
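The 10 ms buffer sizing used by initPlayout() above is plain arithmetic: bytes per frame = channels * (BITS_PER_SAMPLE / 8), and each callback buffer holds sampleRate / BUFFERS_PER_SECOND frames. A minimal C++ sketch of the same computation follows; the helper name and the example rates are illustrative and not part of the patch.

#include <cstddef>
#include <cstdio>

// Mirrors the Java constants above: 16-bit PCM, 10 ms callbacks.
constexpr int kBitsPerSample = 16;
constexpr int kCallbackBufferSizeMs = 10;
constexpr int kBuffersPerSecond = 1000 / kCallbackBufferSizeMs;  // 100

// Hypothetical helper: bytes needed for one 10 ms callback buffer.
constexpr std::size_t PlayoutBufferBytes(int sample_rate_hz, int channels) {
  return static_cast<std::size_t>(channels * (kBitsPerSample / 8)) *
         static_cast<std::size_t>(sample_rate_hz / kBuffersPerSecond);
}

int main() {
  // 48 kHz stereo: 480 frames * 4 bytes/frame = 1920 bytes per 10 ms buffer.
  std::printf("%zu\n", PlayoutBufferBytes(48000, 2));  // 1920
  // 16 kHz mono: 160 frames * 2 bytes/frame = 320 bytes per 10 ms buffer.
  std::printf("%zu\n", PlayoutBufferBytes(16000, 1));  // 320
  return 0;
}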
- if (audioTrack == null || audioTrack.getState() != AudioTrack.STATE_INITIALIZED) { - reportWebRtcAudioTrackInitError("Initialization of audio track failed."); - releaseAudioResources(); - return -1; - } - logMainParameters(); - logMainParametersExtended(); - return minBufferSizeInBytes; - } - - private boolean startPlayout() { - threadChecker.checkIsOnValidThread(); - Logging.d(TAG, "startPlayout"); - assertTrue(audioTrack != null); - assertTrue(audioThread == null); - - // Starts playing an audio track. - try { - audioTrack.play(); - } catch (IllegalStateException e) { - reportWebRtcAudioTrackStartError(AudioTrackStartErrorCode.AUDIO_TRACK_START_EXCEPTION, - "AudioTrack.play failed: " + e.getMessage()); - releaseAudioResources(); - return false; - } - if (audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) { - reportWebRtcAudioTrackStartError( - AudioTrackStartErrorCode.AUDIO_TRACK_START_STATE_MISMATCH, - "AudioTrack.play failed - incorrect state :" - + audioTrack.getPlayState()); - releaseAudioResources(); - return false; - } - - // Create and start new high-priority thread which calls AudioTrack.write() - // and where we also call the native nativeGetPlayoutData() callback to - // request decoded audio from WebRTC. - audioThread = new AudioTrackThread("AudioTrackJavaThread"); - audioThread.start(); - return true; - } - - private boolean stopPlayout() { - threadChecker.checkIsOnValidThread(); - Logging.d(TAG, "stopPlayout"); - assertTrue(audioThread != null); - logUnderrunCount(); - audioThread.stopThread(); - - Logging.d(TAG, "Stopping the AudioTrackThread..."); - audioThread.interrupt(); - if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS)) { - Logging.e(TAG, "Join of AudioTrackThread timed out."); - WebRtcAudioUtils.logAudioState(TAG); - } - Logging.d(TAG, "AudioTrackThread has now been stopped."); - audioThread = null; - releaseAudioResources(); - return true; - } - - // Get max possible volume index for a phone call audio stream. - private int getStreamMaxVolume() { - threadChecker.checkIsOnValidThread(); - Logging.d(TAG, "getStreamMaxVolume"); - assertTrue(audioManager != null); - return audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL); - } - - // Set current volume level for a phone call audio stream. - private boolean setStreamVolume(int volume) { - threadChecker.checkIsOnValidThread(); - Logging.d(TAG, "setStreamVolume(" + volume + ")"); - assertTrue(audioManager != null); - if (audioManager.isVolumeFixed()) { - Logging.e(TAG, "The device implements a fixed volume policy."); - return false; - } - audioManager.setStreamVolume(AudioManager.STREAM_VOICE_CALL, volume, 0); - return true; - } - - /** Get current volume level for a phone call audio stream. */ - private int getStreamVolume() { - threadChecker.checkIsOnValidThread(); - Logging.d(TAG, "getStreamVolume"); - assertTrue(audioManager != null); - return audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL); - } - - private void logMainParameters() { - Logging.d(TAG, "AudioTrack: " - + "session ID: " + audioTrack.getAudioSessionId() + ", " - + "channels: " + audioTrack.getChannelCount() + ", " - + "sample rate: " + audioTrack.getSampleRate() + ", " - // Gain (>=1.0) expressed as linear multiplier on sample values. - + "max gain: " + AudioTrack.getMaxVolume()); - } - - // Creates and AudioTrack instance using AudioAttributes and AudioFormat as input. 
- // It allows certain platforms or routing policies to use this information for more - // refined volume or routing decisions. - private static AudioTrack createAudioTrack( - int sampleRateInHz, int channelConfig, int bufferSizeInBytes) { - Logging.d(TAG, "createAudioTrack"); - // TODO(henrika): use setPerformanceMode(int) with PERFORMANCE_MODE_LOW_LATENCY to control - // performance when Android O is supported. Add some logging in the mean time. - final int nativeOutputSampleRate = - AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_VOICE_CALL); - Logging.d(TAG, "nativeOutputSampleRate: " + nativeOutputSampleRate); - if (sampleRateInHz != nativeOutputSampleRate) { - Logging.w(TAG, "Unable to use fast mode since requested sample rate is not native"); - } - if (usageAttribute != DEFAULT_USAGE) { - Logging.w(TAG, "A non default usage attribute is used: " + usageAttribute); - } - // Create an audio track where the audio usage is for VoIP and the content type is speech. - return new AudioTrack( - new AudioAttributes.Builder() - .setUsage(usageAttribute) - .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH) - .build(), - new AudioFormat.Builder() - .setEncoding(AudioFormat.ENCODING_PCM_16BIT) - .setSampleRate(sampleRateInHz) - .setChannelMask(channelConfig) - .build(), - bufferSizeInBytes, - AudioTrack.MODE_STREAM, - AudioManager.AUDIO_SESSION_ID_GENERATE); - } - - private void logBufferSizeInFrames() { - if (Build.VERSION.SDK_INT >= 23) { - Logging.d(TAG, "AudioTrack: " - // The effective size of the AudioTrack buffer that the app writes to. - + "buffer size in frames: " + audioTrack.getBufferSizeInFrames()); - } - } - - private int getBufferSizeInFrames() { - if (Build.VERSION.SDK_INT >= 23) { - return audioTrack.getBufferSizeInFrames(); - } - return -1; - } - - private void logBufferCapacityInFrames() { - if (Build.VERSION.SDK_INT >= 24) { - Logging.d(TAG, - "AudioTrack: " - // Maximum size of the AudioTrack buffer in frames. - + "buffer capacity in frames: " + audioTrack.getBufferCapacityInFrames()); - } - } - - private void logMainParametersExtended() { - logBufferSizeInFrames(); - logBufferCapacityInFrames(); - } - - // Prints the number of underrun occurrences in the application-level write - // buffer since the AudioTrack was created. An underrun occurs if the app does - // not write audio data quickly enough, causing the buffer to underflow and a - // potential audio glitch. - // TODO(henrika): keep track of this value in the field and possibly add new - // UMA stat if needed. - private void logUnderrunCount() { - if (Build.VERSION.SDK_INT >= 24) { - Logging.d(TAG, "underrun count: " + audioTrack.getUnderrunCount()); - } - } - - // Helper method which throws an exception when an assertion has failed. - private static void assertTrue(boolean condition) { - if (!condition) { - throw new AssertionError("Expected condition to be true"); - } - } - - private int channelCountToConfiguration(int channels) { - return (channels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO); - } - - private native void nativeCacheDirectBufferAddress(ByteBuffer byteBuffer, long nativeAudioRecord); - - private native void nativeGetPlayoutData(int bytes, long nativeAudioRecord); - - // Sets all samples to be played out to zero if `mute` is true, i.e., - // ensures that the speaker is muted. - public static void setSpeakerMute(boolean mute) { - Logging.w(TAG, "setSpeakerMute(" + mute + ")"); - speakerMute = mute; - } - - // Releases the native AudioTrack resources. 
- private void releaseAudioResources() { - Logging.d(TAG, "releaseAudioResources"); - if (audioTrack != null) { - audioTrack.release(); - audioTrack = null; - } - } - - private void reportWebRtcAudioTrackInitError(String errorMessage) { - Logging.e(TAG, "Init playout error: " + errorMessage); - WebRtcAudioUtils.logAudioState(TAG); - if (errorCallbackOld != null) { - errorCallbackOld.onWebRtcAudioTrackInitError(errorMessage); - } - if (errorCallback != null) { - errorCallback.onWebRtcAudioTrackInitError(errorMessage); - } - } - - private void reportWebRtcAudioTrackStartError( - AudioTrackStartErrorCode errorCode, String errorMessage) { - Logging.e(TAG, "Start playout error: " + errorCode + ". " + errorMessage); - WebRtcAudioUtils.logAudioState(TAG); - if (errorCallbackOld != null) { - errorCallbackOld.onWebRtcAudioTrackStartError(errorMessage); - } - if (errorCallback != null) { - errorCallback.onWebRtcAudioTrackStartError(errorCode, errorMessage); - } - } - - private void reportWebRtcAudioTrackError(String errorMessage) { - Logging.e(TAG, "Run-time playback error: " + errorMessage); - WebRtcAudioUtils.logAudioState(TAG); - if (errorCallbackOld != null) { - errorCallbackOld.onWebRtcAudioTrackError(errorMessage); - } - if (errorCallback != null) { - errorCallback.onWebRtcAudioTrackError(errorMessage); - } - } -} diff --git a/third_party/libwebrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java b/third_party/libwebrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java deleted file mode 100644 index afd3d429af19..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java +++ /dev/null @@ -1,382 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -package org.webrtc.voiceengine; - -import static android.media.AudioManager.MODE_IN_CALL; -import static android.media.AudioManager.MODE_IN_COMMUNICATION; -import static android.media.AudioManager.MODE_NORMAL; -import static android.media.AudioManager.MODE_RINGTONE; - -import android.annotation.SuppressLint; -import android.content.Context; -import android.content.pm.PackageManager; -import android.media.AudioDeviceInfo; -import android.media.AudioManager; -import android.os.Build; -import java.lang.Thread; -import java.util.Arrays; -import java.util.List; -import org.webrtc.ContextUtils; -import org.webrtc.Logging; - -public final class WebRtcAudioUtils { - private static final String TAG = "WebRtcAudioUtils"; - - // List of devices where we have seen issues (e.g. bad audio quality) using - // the low latency output mode in combination with OpenSL ES. - // The device name is given by Build.MODEL. - private static final String[] BLACKLISTED_OPEN_SL_ES_MODELS = new String[] { - // It is recommended to maintain a list of blacklisted models outside - // this package and instead call - // WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(true) - // from the client for devices where OpenSL ES shall be disabled. 
- }; - - // List of devices where it has been verified that the built-in effect - // bad and where it makes sense to avoid using it and instead rely on the - // native WebRTC version instead. The device name is given by Build.MODEL. - private static final String[] BLACKLISTED_AEC_MODELS = new String[] { - // It is recommended to maintain a list of blacklisted models outside - // this package and instead call setWebRtcBasedAcousticEchoCanceler(true) - // from the client for devices where the built-in AEC shall be disabled. - }; - private static final String[] BLACKLISTED_NS_MODELS = new String[] { - // It is recommended to maintain a list of blacklisted models outside - // this package and instead call setWebRtcBasedNoiseSuppressor(true) - // from the client for devices where the built-in NS shall be disabled. - }; - - // Use 16kHz as the default sample rate. A higher sample rate might prevent - // us from supporting communication mode on some older (e.g. ICS) devices. - private static final int DEFAULT_SAMPLE_RATE_HZ = 16000; - private static int defaultSampleRateHz = DEFAULT_SAMPLE_RATE_HZ; - // Set to true if setDefaultSampleRateHz() has been called. - private static boolean isDefaultSampleRateOverridden; - - // By default, utilize hardware based audio effects for AEC and NS when - // available. - private static boolean useWebRtcBasedAcousticEchoCanceler; - private static boolean useWebRtcBasedNoiseSuppressor; - - // Call these methods if any hardware based effect shall be replaced by a - // software based version provided by the WebRTC stack instead. - // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. - @SuppressWarnings("NoSynchronizedMethodCheck") - public static synchronized void setWebRtcBasedAcousticEchoCanceler(boolean enable) { - useWebRtcBasedAcousticEchoCanceler = enable; - } - - // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. - @SuppressWarnings("NoSynchronizedMethodCheck") - public static synchronized void setWebRtcBasedNoiseSuppressor(boolean enable) { - useWebRtcBasedNoiseSuppressor = enable; - } - - // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. - @SuppressWarnings("NoSynchronizedMethodCheck") - public static synchronized void setWebRtcBasedAutomaticGainControl(boolean enable) { - // TODO(henrika): deprecated; remove when no longer used by any client. - Logging.w(TAG, "setWebRtcBasedAutomaticGainControl() is deprecated"); - } - - // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. - @SuppressWarnings("NoSynchronizedMethodCheck") - public static synchronized boolean useWebRtcBasedAcousticEchoCanceler() { - if (useWebRtcBasedAcousticEchoCanceler) { - Logging.w(TAG, "Overriding default behavior; now using WebRTC AEC!"); - } - return useWebRtcBasedAcousticEchoCanceler; - } - - // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. - @SuppressWarnings("NoSynchronizedMethodCheck") - public static synchronized boolean useWebRtcBasedNoiseSuppressor() { - if (useWebRtcBasedNoiseSuppressor) { - Logging.w(TAG, "Overriding default behavior; now using WebRTC NS!"); - } - return useWebRtcBasedNoiseSuppressor; - } - - // TODO(henrika): deprecated; remove when no longer used by any client. - // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. - @SuppressWarnings("NoSynchronizedMethodCheck") - public static synchronized boolean useWebRtcBasedAutomaticGainControl() { - // Always return true here to avoid trying to use any built-in AGC. 
- return true; - } - - // Returns true if the device supports an audio effect (AEC or NS). - // Four conditions must be fulfilled if functions are to return true: - // 1) the platform must support the built-in (HW) effect, - // 2) explicit use (override) of a WebRTC based version must not be set, - // 3) the device must not be blacklisted for use of the effect, and - // 4) the UUID of the effect must be approved (some UUIDs can be excluded). - public static boolean isAcousticEchoCancelerSupported() { - return WebRtcAudioEffects.canUseAcousticEchoCanceler(); - } - public static boolean isNoiseSuppressorSupported() { - return WebRtcAudioEffects.canUseNoiseSuppressor(); - } - // TODO(henrika): deprecated; remove when no longer used by any client. - public static boolean isAutomaticGainControlSupported() { - // Always return false here to avoid trying to use any built-in AGC. - return false; - } - - // Call this method if the default handling of querying the native sample - // rate shall be overridden. Can be useful on some devices where the - // available Android APIs are known to return invalid results. - // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. - @SuppressWarnings("NoSynchronizedMethodCheck") - public static synchronized void setDefaultSampleRateHz(int sampleRateHz) { - isDefaultSampleRateOverridden = true; - defaultSampleRateHz = sampleRateHz; - } - - // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. - @SuppressWarnings("NoSynchronizedMethodCheck") - public static synchronized boolean isDefaultSampleRateOverridden() { - return isDefaultSampleRateOverridden; - } - - // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. - @SuppressWarnings("NoSynchronizedMethodCheck") - public static synchronized int getDefaultSampleRateHz() { - return defaultSampleRateHz; - } - - public static List getBlackListedModelsForAecUsage() { - return Arrays.asList(WebRtcAudioUtils.BLACKLISTED_AEC_MODELS); - } - - public static List getBlackListedModelsForNsUsage() { - return Arrays.asList(WebRtcAudioUtils.BLACKLISTED_NS_MODELS); - } - - // Helper method for building a string of thread information. - public static String getThreadInfo() { - return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId() - + "]"; - } - - // Returns true if we're running on emulator. - public static boolean runningOnEmulator() { - return Build.HARDWARE.equals("goldfish") && Build.BRAND.startsWith("generic_"); - } - - // Returns true if the device is blacklisted for OpenSL ES usage. - public static boolean deviceIsBlacklistedForOpenSLESUsage() { - List blackListedModels = Arrays.asList(BLACKLISTED_OPEN_SL_ES_MODELS); - return blackListedModels.contains(Build.MODEL); - } - - // Information about the current build, taken from system properties. - static void logDeviceInfo(String tag) { - Logging.d(tag, "Android SDK: " + Build.VERSION.SDK_INT + ", " - + "Release: " + Build.VERSION.RELEASE + ", " - + "Brand: " + Build.BRAND + ", " - + "Device: " + Build.DEVICE + ", " - + "Id: " + Build.ID + ", " - + "Hardware: " + Build.HARDWARE + ", " - + "Manufacturer: " + Build.MANUFACTURER + ", " - + "Model: " + Build.MODEL + ", " - + "Product: " + Build.PRODUCT); - } - - // Logs information about the current audio state. The idea is to call this - // method when errors are detected to log under what conditions the error - // occurred. Hopefully it will provide clues to what might be the root cause. 
- static void logAudioState(String tag) { - logDeviceInfo(tag); - final Context context = ContextUtils.getApplicationContext(); - final AudioManager audioManager = - (AudioManager) context.getSystemService(Context.AUDIO_SERVICE); - logAudioStateBasic(tag, audioManager); - logAudioStateVolume(tag, audioManager); - logAudioDeviceInfo(tag, audioManager); - } - - // Reports basic audio statistics. - private static void logAudioStateBasic(String tag, AudioManager audioManager) { - Logging.d(tag, "Audio State: " - + "audio mode: " + modeToString(audioManager.getMode()) + ", " - + "has mic: " + hasMicrophone() + ", " - + "mic muted: " + audioManager.isMicrophoneMute() + ", " - + "music active: " + audioManager.isMusicActive() + ", " - + "speakerphone: " + audioManager.isSpeakerphoneOn() + ", " - + "BT SCO: " + audioManager.isBluetoothScoOn()); - } - - // Adds volume information for all possible stream types. - private static void logAudioStateVolume(String tag, AudioManager audioManager) { - final int[] streams = { - AudioManager.STREAM_VOICE_CALL, - AudioManager.STREAM_MUSIC, - AudioManager.STREAM_RING, - AudioManager.STREAM_ALARM, - AudioManager.STREAM_NOTIFICATION, - AudioManager.STREAM_SYSTEM - }; - Logging.d(tag, "Audio State: "); - // Some devices may not have volume controls and might use a fixed volume. - boolean fixedVolume = audioManager.isVolumeFixed(); - Logging.d(tag, " fixed volume=" + fixedVolume); - if (!fixedVolume) { - for (int stream : streams) { - StringBuilder info = new StringBuilder(); - info.append(" " + streamTypeToString(stream) + ": "); - info.append("volume=").append(audioManager.getStreamVolume(stream)); - info.append(", max=").append(audioManager.getStreamMaxVolume(stream)); - logIsStreamMute(tag, audioManager, stream, info); - Logging.d(tag, info.toString()); - } - } - } - - private static void logIsStreamMute( - String tag, AudioManager audioManager, int stream, StringBuilder info) { - if (Build.VERSION.SDK_INT >= 23) { - info.append(", muted=").append(audioManager.isStreamMute(stream)); - } - } - - // Moz linting complains even though AudioManager.GET_DEVICES_ALL is - // listed in the docs here: - // https://developer.android.com/reference/android/media/AudioManager#GET_DEVICES_ALL - @SuppressLint("WrongConstant") - private static void logAudioDeviceInfo(String tag, AudioManager audioManager) { - if (Build.VERSION.SDK_INT < 23) { - return; - } - final AudioDeviceInfo[] devices = - audioManager.getDevices(AudioManager.GET_DEVICES_ALL); - if (devices.length == 0) { - return; - } - Logging.d(tag, "Audio Devices: "); - for (AudioDeviceInfo device : devices) { - StringBuilder info = new StringBuilder(); - info.append(" ").append(deviceTypeToString(device.getType())); - info.append(device.isSource() ? "(in): " : "(out): "); - // An empty array indicates that the device supports arbitrary channel counts. - if (device.getChannelCounts().length > 0) { - info.append("channels=").append(Arrays.toString(device.getChannelCounts())); - info.append(", "); - } - if (device.getEncodings().length > 0) { - // Examples: ENCODING_PCM_16BIT = 2, ENCODING_PCM_FLOAT = 4. - info.append("encodings=").append(Arrays.toString(device.getEncodings())); - info.append(", "); - } - if (device.getSampleRates().length > 0) { - info.append("sample rates=").append(Arrays.toString(device.getSampleRates())); - info.append(", "); - } - info.append("id=").append(device.getId()); - Logging.d(tag, info.toString()); - } - } - - // Converts media.AudioManager modes into local string representation. 
- static String modeToString(int mode) { - switch (mode) { - case MODE_IN_CALL: - return "MODE_IN_CALL"; - case MODE_IN_COMMUNICATION: - return "MODE_IN_COMMUNICATION"; - case MODE_NORMAL: - return "MODE_NORMAL"; - case MODE_RINGTONE: - return "MODE_RINGTONE"; - default: - return "MODE_INVALID"; - } - } - - private static String streamTypeToString(int stream) { - switch(stream) { - case AudioManager.STREAM_VOICE_CALL: - return "STREAM_VOICE_CALL"; - case AudioManager.STREAM_MUSIC: - return "STREAM_MUSIC"; - case AudioManager.STREAM_RING: - return "STREAM_RING"; - case AudioManager.STREAM_ALARM: - return "STREAM_ALARM"; - case AudioManager.STREAM_NOTIFICATION: - return "STREAM_NOTIFICATION"; - case AudioManager.STREAM_SYSTEM: - return "STREAM_SYSTEM"; - default: - return "STREAM_INVALID"; - } - } - - // Converts AudioDeviceInfo types to local string representation. - private static String deviceTypeToString(int type) { - switch (type) { - case AudioDeviceInfo.TYPE_UNKNOWN: - return "TYPE_UNKNOWN"; - case AudioDeviceInfo.TYPE_BUILTIN_EARPIECE: - return "TYPE_BUILTIN_EARPIECE"; - case AudioDeviceInfo.TYPE_BUILTIN_SPEAKER: - return "TYPE_BUILTIN_SPEAKER"; - case AudioDeviceInfo.TYPE_WIRED_HEADSET: - return "TYPE_WIRED_HEADSET"; - case AudioDeviceInfo.TYPE_WIRED_HEADPHONES: - return "TYPE_WIRED_HEADPHONES"; - case AudioDeviceInfo.TYPE_LINE_ANALOG: - return "TYPE_LINE_ANALOG"; - case AudioDeviceInfo.TYPE_LINE_DIGITAL: - return "TYPE_LINE_DIGITAL"; - case AudioDeviceInfo.TYPE_BLUETOOTH_SCO: - return "TYPE_BLUETOOTH_SCO"; - case AudioDeviceInfo.TYPE_BLUETOOTH_A2DP: - return "TYPE_BLUETOOTH_A2DP"; - case AudioDeviceInfo.TYPE_HDMI: - return "TYPE_HDMI"; - case AudioDeviceInfo.TYPE_HDMI_ARC: - return "TYPE_HDMI_ARC"; - case AudioDeviceInfo.TYPE_USB_DEVICE: - return "TYPE_USB_DEVICE"; - case AudioDeviceInfo.TYPE_USB_ACCESSORY: - return "TYPE_USB_ACCESSORY"; - case AudioDeviceInfo.TYPE_DOCK: - return "TYPE_DOCK"; - case AudioDeviceInfo.TYPE_FM: - return "TYPE_FM"; - case AudioDeviceInfo.TYPE_BUILTIN_MIC: - return "TYPE_BUILTIN_MIC"; - case AudioDeviceInfo.TYPE_FM_TUNER: - return "TYPE_FM_TUNER"; - case AudioDeviceInfo.TYPE_TV_TUNER: - return "TYPE_TV_TUNER"; - case AudioDeviceInfo.TYPE_TELEPHONY: - return "TYPE_TELEPHONY"; - case AudioDeviceInfo.TYPE_AUX_LINE: - return "TYPE_AUX_LINE"; - case AudioDeviceInfo.TYPE_IP: - return "TYPE_IP"; - case AudioDeviceInfo.TYPE_BUS: - return "TYPE_BUS"; - case AudioDeviceInfo.TYPE_USB_HEADSET: - return "TYPE_USB_HEADSET"; - default: - return "TYPE_UNKNOWN"; - } - } - - // Returns true if the device can record audio via a microphone. - private static boolean hasMicrophone() { - return ContextUtils.getApplicationContext().getPackageManager().hasSystemFeature( - PackageManager.FEATURE_MICROPHONE); - } -} diff --git a/third_party/libwebrtc/modules/audio_device/android/opensles_common.cc b/third_party/libwebrtc/modules/audio_device/android/opensles_common.cc deleted file mode 100644 index 019714dae46f..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/opensles_common.cc +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/audio_device/android/opensles_common.h" - -#include - -#include "rtc_base/arraysize.h" -#include "rtc_base/checks.h" - -namespace webrtc { - -// Returns a string representation given an integer SL_RESULT_XXX code. -// The mapping can be found in . -const char* GetSLErrorString(size_t code) { - static const char* sl_error_strings[] = { - "SL_RESULT_SUCCESS", // 0 - "SL_RESULT_PRECONDITIONS_VIOLATED", // 1 - "SL_RESULT_PARAMETER_INVALID", // 2 - "SL_RESULT_MEMORY_FAILURE", // 3 - "SL_RESULT_RESOURCE_ERROR", // 4 - "SL_RESULT_RESOURCE_LOST", // 5 - "SL_RESULT_IO_ERROR", // 6 - "SL_RESULT_BUFFER_INSUFFICIENT", // 7 - "SL_RESULT_CONTENT_CORRUPTED", // 8 - "SL_RESULT_CONTENT_UNSUPPORTED", // 9 - "SL_RESULT_CONTENT_NOT_FOUND", // 10 - "SL_RESULT_PERMISSION_DENIED", // 11 - "SL_RESULT_FEATURE_UNSUPPORTED", // 12 - "SL_RESULT_INTERNAL_ERROR", // 13 - "SL_RESULT_UNKNOWN_ERROR", // 14 - "SL_RESULT_OPERATION_ABORTED", // 15 - "SL_RESULT_CONTROL_LOST", // 16 - }; - - if (code >= arraysize(sl_error_strings)) { - return "SL_RESULT_UNKNOWN_ERROR"; - } - return sl_error_strings[code]; -} - -SLDataFormat_PCM CreatePCMConfiguration(size_t channels, - int sample_rate, - size_t bits_per_sample) { - RTC_CHECK_EQ(bits_per_sample, SL_PCMSAMPLEFORMAT_FIXED_16); - SLDataFormat_PCM format; - format.formatType = SL_DATAFORMAT_PCM; - format.numChannels = static_cast(channels); - // Note that, the unit of sample rate is actually in milliHertz and not Hertz. - switch (sample_rate) { - case 8000: - format.samplesPerSec = SL_SAMPLINGRATE_8; - break; - case 16000: - format.samplesPerSec = SL_SAMPLINGRATE_16; - break; - case 22050: - format.samplesPerSec = SL_SAMPLINGRATE_22_05; - break; - case 32000: - format.samplesPerSec = SL_SAMPLINGRATE_32; - break; - case 44100: - format.samplesPerSec = SL_SAMPLINGRATE_44_1; - break; - case 48000: - format.samplesPerSec = SL_SAMPLINGRATE_48; - break; - case 64000: - format.samplesPerSec = SL_SAMPLINGRATE_64; - break; - case 88200: - format.samplesPerSec = SL_SAMPLINGRATE_88_2; - break; - case 96000: - format.samplesPerSec = SL_SAMPLINGRATE_96; - break; - default: - RTC_CHECK(false) << "Unsupported sample rate: " << sample_rate; - break; - } - format.bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_16; - format.containerSize = SL_PCMSAMPLEFORMAT_FIXED_16; - format.endianness = SL_BYTEORDER_LITTLEENDIAN; - if (format.numChannels == 1) { - format.channelMask = SL_SPEAKER_FRONT_CENTER; - } else if (format.numChannels == 2) { - format.channelMask = SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT; - } else { - RTC_CHECK(false) << "Unsupported number of channels: " - << format.numChannels; - } - return format; -} - -} // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_device/android/opensles_common.h b/third_party/libwebrtc/modules/audio_device/android/opensles_common.h deleted file mode 100644 index 438c5220722e..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/opensles_common.h +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_AUDIO_DEVICE_ANDROID_OPENSLES_COMMON_H_ -#define MODULES_AUDIO_DEVICE_ANDROID_OPENSLES_COMMON_H_ - -#include -#include - -#include "rtc_base/checks.h" - -namespace webrtc { - -// Returns a string representation given an integer SL_RESULT_XXX code. -// The mapping can be found in . -const char* GetSLErrorString(size_t code); - -// Configures an SL_DATAFORMAT_PCM structure based on native audio parameters. -SLDataFormat_PCM CreatePCMConfiguration(size_t channels, - int sample_rate, - size_t bits_per_sample); - -// Helper class for using SLObjectItf interfaces. -template -class ScopedSLObject { - public: - ScopedSLObject() : obj_(nullptr) {} - - ~ScopedSLObject() { Reset(); } - - SLType* Receive() { - RTC_DCHECK(!obj_); - return &obj_; - } - - SLDerefType operator->() { return *obj_; } - - SLType Get() const { return obj_; } - - void Reset() { - if (obj_) { - (*obj_)->Destroy(obj_); - obj_ = nullptr; - } - } - - private: - SLType obj_; -}; - -typedef ScopedSLObject ScopedSLObjectItf; - -} // namespace webrtc - -#endif // MODULES_AUDIO_DEVICE_ANDROID_OPENSLES_COMMON_H_ diff --git a/third_party/libwebrtc/modules/audio_device/android/opensles_player.cc b/third_party/libwebrtc/modules/audio_device/android/opensles_player.cc deleted file mode 100644 index f2b3a37194b6..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/opensles_player.cc +++ /dev/null @@ -1,434 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_device/android/opensles_player.h" - -#include - -#include - -#include "api/array_view.h" -#include "modules/audio_device/android/audio_common.h" -#include "modules/audio_device/android/audio_manager.h" -#include "modules/audio_device/fine_audio_buffer.h" -#include "rtc_base/arraysize.h" -#include "rtc_base/checks.h" -#include "rtc_base/platform_thread.h" -#include "rtc_base/time_utils.h" - -#define TAG "OpenSLESPlayer" -#define ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__) -#define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__) -#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__) -#define ALOGW(...) __android_log_print(ANDROID_LOG_WARN, TAG, __VA_ARGS__) -#define ALOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__) - -#define RETURN_ON_ERROR(op, ...) \ - do { \ - SLresult err = (op); \ - if (err != SL_RESULT_SUCCESS) { \ - ALOGE("%s failed: %s", #op, GetSLErrorString(err)); \ - return __VA_ARGS__; \ - } \ - } while (0) - -namespace webrtc { - -OpenSLESPlayer::OpenSLESPlayer(AudioManager* audio_manager) - : audio_manager_(audio_manager), - audio_parameters_(audio_manager->GetPlayoutAudioParameters()), - audio_device_buffer_(nullptr), - initialized_(false), - playing_(false), - buffer_index_(0), - engine_(nullptr), - player_(nullptr), - simple_buffer_queue_(nullptr), - volume_(nullptr), - last_play_time_(0) { - ALOGD("ctor[tid=%d]", rtc::CurrentThreadId()); - // Use native audio output parameters provided by the audio manager and - // define the PCM format structure. 
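The ScopedSLObject helper declared in the removed opensles_common.h exists so that OpenSL ES objects are destroyed automatically. Below is a minimal, hedged sketch of how such a wrapper is typically used to create and realize the global engine object and fetch SL_IID_ENGINE; the function name is illustrative and the error handling is simplified compared to the macro-based style used in these files.

#include <SLES/OpenSLES.h>

#include "modules/audio_device/android/opensles_common.h"  // ScopedSLObjectItf

// Hypothetical helper: the wrapper calls (*obj)->Destroy(obj) from
// Reset()/its destructor, so the engine object cannot leak on early returns.
bool CreateEngine(webrtc::ScopedSLObjectItf& engine_object,
                  SLEngineItf* engine_interface) {
  // Receive() hands out the address of the wrapped SLObjectItf for creation.
  if (slCreateEngine(engine_object.Receive(), 0, nullptr, 0, nullptr,
                     nullptr) != SL_RESULT_SUCCESS) {
    return false;
  }
  // Realize synchronously, then fetch the engine interface used to create
  // players, recorders and output mixes.
  if (engine_object->Realize(engine_object.Get(), SL_BOOLEAN_FALSE) !=
      SL_RESULT_SUCCESS) {
    return false;
  }
  return engine_object->GetInterface(engine_object.Get(), SL_IID_ENGINE,
                                     engine_interface) == SL_RESULT_SUCCESS;
}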
- pcm_format_ = CreatePCMConfiguration(audio_parameters_.channels(), - audio_parameters_.sample_rate(), - audio_parameters_.bits_per_sample()); - // Detach from this thread since we want to use the checker to verify calls - // from the internal audio thread. - thread_checker_opensles_.Detach(); -} - -OpenSLESPlayer::~OpenSLESPlayer() { - ALOGD("dtor[tid=%d]", rtc::CurrentThreadId()); - RTC_DCHECK(thread_checker_.IsCurrent()); - Terminate(); - DestroyAudioPlayer(); - DestroyMix(); - engine_ = nullptr; - RTC_DCHECK(!engine_); - RTC_DCHECK(!output_mix_.Get()); - RTC_DCHECK(!player_); - RTC_DCHECK(!simple_buffer_queue_); - RTC_DCHECK(!volume_); -} - -int OpenSLESPlayer::Init() { - ALOGD("Init[tid=%d]", rtc::CurrentThreadId()); - RTC_DCHECK(thread_checker_.IsCurrent()); - if (audio_parameters_.channels() == 2) { - ALOGW("Stereo mode is enabled"); - } - return 0; -} - -int OpenSLESPlayer::Terminate() { - ALOGD("Terminate[tid=%d]", rtc::CurrentThreadId()); - RTC_DCHECK(thread_checker_.IsCurrent()); - StopPlayout(); - return 0; -} - -int OpenSLESPlayer::InitPlayout() { - ALOGD("InitPlayout[tid=%d]", rtc::CurrentThreadId()); - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(!initialized_); - RTC_DCHECK(!playing_); - if (!ObtainEngineInterface()) { - ALOGE("Failed to obtain SL Engine interface"); - return -1; - } - CreateMix(); - initialized_ = true; - buffer_index_ = 0; - return 0; -} - -int OpenSLESPlayer::StartPlayout() { - ALOGD("StartPlayout[tid=%d]", rtc::CurrentThreadId()); - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(initialized_); - RTC_DCHECK(!playing_); - if (fine_audio_buffer_) { - fine_audio_buffer_->ResetPlayout(); - } - // The number of lower latency audio players is limited, hence we create the - // audio player in Start() and destroy it in Stop(). - CreateAudioPlayer(); - // Fill up audio buffers to avoid initial glitch and to ensure that playback - // starts when mode is later changed to SL_PLAYSTATE_PLAYING. - // TODO(henrika): we can save some delay by only making one call to - // EnqueuePlayoutData. Most likely not worth the risk of adding a glitch. - last_play_time_ = rtc::Time(); - for (int i = 0; i < kNumOfOpenSLESBuffers; ++i) { - EnqueuePlayoutData(true); - } - // Start streaming data by setting the play state to SL_PLAYSTATE_PLAYING. - // For a player object, when the object is in the SL_PLAYSTATE_PLAYING - // state, adding buffers will implicitly start playback. - RETURN_ON_ERROR((*player_)->SetPlayState(player_, SL_PLAYSTATE_PLAYING), -1); - playing_ = (GetPlayState() == SL_PLAYSTATE_PLAYING); - RTC_DCHECK(playing_); - return 0; -} - -int OpenSLESPlayer::StopPlayout() { - ALOGD("StopPlayout[tid=%d]", rtc::CurrentThreadId()); - RTC_DCHECK(thread_checker_.IsCurrent()); - if (!initialized_ || !playing_) { - return 0; - } - // Stop playing by setting the play state to SL_PLAYSTATE_STOPPED. - RETURN_ON_ERROR((*player_)->SetPlayState(player_, SL_PLAYSTATE_STOPPED), -1); - // Clear the buffer queue to flush out any remaining data. - RETURN_ON_ERROR((*simple_buffer_queue_)->Clear(simple_buffer_queue_), -1); -#if RTC_DCHECK_IS_ON - // Verify that the buffer queue is in fact cleared as it should. - SLAndroidSimpleBufferQueueState buffer_queue_state; - (*simple_buffer_queue_)->GetState(simple_buffer_queue_, &buffer_queue_state); - RTC_DCHECK_EQ(0, buffer_queue_state.count); - RTC_DCHECK_EQ(0, buffer_queue_state.index); -#endif - // The number of lower latency audio players is limited, hence we create the - // audio player in Start() and destroy it in Stop(). 
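For a concrete picture of what CreatePCMConfiguration() (from opensles_common.cc earlier in this patch) produces, this is roughly the SLDataFormat_PCM it fills in for 48 kHz mono 16-bit audio; note that samplesPerSec is expressed in milliHertz, which is why the SL_SAMPLINGRATE_* constants are used. The helper name below is illustrative.

#include <SLES/OpenSLES.h>

// Roughly what webrtc::CreatePCMConfiguration(1, 48000, 16) fills in, written
// out by hand. SL_SAMPLINGRATE_48 is 48000000 because the field is in mHz.
SLDataFormat_PCM MakeMono48kPcmFormat() {
  SLDataFormat_PCM format;
  format.formatType = SL_DATAFORMAT_PCM;
  format.numChannels = 1;
  format.samplesPerSec = SL_SAMPLINGRATE_48;           // 48 kHz in milliHertz.
  format.bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_16;  // 16-bit samples...
  format.containerSize = SL_PCMSAMPLEFORMAT_FIXED_16;  // ...in 16-bit containers.
  format.endianness = SL_BYTEORDER_LITTLEENDIAN;
  format.channelMask = SL_SPEAKER_FRONT_CENTER;        // Mono layout.
  return format;
}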
- DestroyAudioPlayer(); - thread_checker_opensles_.Detach(); - initialized_ = false; - playing_ = false; - return 0; -} - -int OpenSLESPlayer::SpeakerVolumeIsAvailable(bool& available) { - available = false; - return 0; -} - -int OpenSLESPlayer::MaxSpeakerVolume(uint32_t& maxVolume) const { - return -1; -} - -int OpenSLESPlayer::MinSpeakerVolume(uint32_t& minVolume) const { - return -1; -} - -int OpenSLESPlayer::SetSpeakerVolume(uint32_t volume) { - return -1; -} - -int OpenSLESPlayer::SpeakerVolume(uint32_t& volume) const { - return -1; -} - -void OpenSLESPlayer::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) { - ALOGD("AttachAudioBuffer"); - RTC_DCHECK(thread_checker_.IsCurrent()); - audio_device_buffer_ = audioBuffer; - const int sample_rate_hz = audio_parameters_.sample_rate(); - ALOGD("SetPlayoutSampleRate(%d)", sample_rate_hz); - audio_device_buffer_->SetPlayoutSampleRate(sample_rate_hz); - const size_t channels = audio_parameters_.channels(); - ALOGD("SetPlayoutChannels(%zu)", channels); - audio_device_buffer_->SetPlayoutChannels(channels); - RTC_CHECK(audio_device_buffer_); - AllocateDataBuffers(); -} - -void OpenSLESPlayer::AllocateDataBuffers() { - ALOGD("AllocateDataBuffers"); - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(!simple_buffer_queue_); - RTC_CHECK(audio_device_buffer_); - // Create a modified audio buffer class which allows us to ask for any number - // of samples (and not only multiple of 10ms) to match the native OpenSL ES - // buffer size. The native buffer size corresponds to the - // PROPERTY_OUTPUT_FRAMES_PER_BUFFER property which is the number of audio - // frames that the HAL (Hardware Abstraction Layer) buffer can hold. It is - // recommended to construct audio buffers so that they contain an exact - // multiple of this number. If so, callbacks will occur at regular intervals, - // which reduces jitter. - const size_t buffer_size_in_samples = - audio_parameters_.frames_per_buffer() * audio_parameters_.channels(); - ALOGD("native buffer size: %zu", buffer_size_in_samples); - ALOGD("native buffer size in ms: %.2f", - audio_parameters_.GetBufferSizeInMilliseconds()); - fine_audio_buffer_ = std::make_unique(audio_device_buffer_); - // Allocated memory for audio buffers. - for (int i = 0; i < kNumOfOpenSLESBuffers; ++i) { - audio_buffers_[i].reset(new SLint16[buffer_size_in_samples]); - } -} - -bool OpenSLESPlayer::ObtainEngineInterface() { - ALOGD("ObtainEngineInterface"); - RTC_DCHECK(thread_checker_.IsCurrent()); - if (engine_) - return true; - // Get access to (or create if not already existing) the global OpenSL Engine - // object. - SLObjectItf engine_object = audio_manager_->GetOpenSLEngine(); - if (engine_object == nullptr) { - ALOGE("Failed to access the global OpenSL engine"); - return false; - } - // Get the SL Engine Interface which is implicit. - RETURN_ON_ERROR( - (*engine_object)->GetInterface(engine_object, SL_IID_ENGINE, &engine_), - false); - return true; -} - -bool OpenSLESPlayer::CreateMix() { - ALOGD("CreateMix"); - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(engine_); - if (output_mix_.Get()) - return true; - - // Create the ouput mix on the engine object. No interfaces will be used. 
- RETURN_ON_ERROR((*engine_)->CreateOutputMix(engine_, output_mix_.Receive(), 0, - nullptr, nullptr), - false); - RETURN_ON_ERROR(output_mix_->Realize(output_mix_.Get(), SL_BOOLEAN_FALSE), - false); - return true; -} - -void OpenSLESPlayer::DestroyMix() { - ALOGD("DestroyMix"); - RTC_DCHECK(thread_checker_.IsCurrent()); - if (!output_mix_.Get()) - return; - output_mix_.Reset(); -} - -bool OpenSLESPlayer::CreateAudioPlayer() { - ALOGD("CreateAudioPlayer"); - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(output_mix_.Get()); - if (player_object_.Get()) - return true; - RTC_DCHECK(!player_); - RTC_DCHECK(!simple_buffer_queue_); - RTC_DCHECK(!volume_); - - // source: Android Simple Buffer Queue Data Locator is source. - SLDataLocator_AndroidSimpleBufferQueue simple_buffer_queue = { - SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, - static_cast(kNumOfOpenSLESBuffers)}; - SLDataSource audio_source = {&simple_buffer_queue, &pcm_format_}; - - // sink: OutputMix-based data is sink. - SLDataLocator_OutputMix locator_output_mix = {SL_DATALOCATOR_OUTPUTMIX, - output_mix_.Get()}; - SLDataSink audio_sink = {&locator_output_mix, nullptr}; - - // Define interfaces that we indend to use and realize. - const SLInterfaceID interface_ids[] = {SL_IID_ANDROIDCONFIGURATION, - SL_IID_BUFFERQUEUE, SL_IID_VOLUME}; - const SLboolean interface_required[] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, - SL_BOOLEAN_TRUE}; - - // Create the audio player on the engine interface. - RETURN_ON_ERROR( - (*engine_)->CreateAudioPlayer( - engine_, player_object_.Receive(), &audio_source, &audio_sink, - arraysize(interface_ids), interface_ids, interface_required), - false); - - // Use the Android configuration interface to set platform-specific - // parameters. Should be done before player is realized. - SLAndroidConfigurationItf player_config; - RETURN_ON_ERROR( - player_object_->GetInterface(player_object_.Get(), - SL_IID_ANDROIDCONFIGURATION, &player_config), - false); - // Set audio player configuration to SL_ANDROID_STREAM_VOICE which - // corresponds to android.media.AudioManager.STREAM_VOICE_CALL. - SLint32 stream_type = SL_ANDROID_STREAM_VOICE; - RETURN_ON_ERROR( - (*player_config) - ->SetConfiguration(player_config, SL_ANDROID_KEY_STREAM_TYPE, - &stream_type, sizeof(SLint32)), - false); - - // Realize the audio player object after configuration has been set. - RETURN_ON_ERROR( - player_object_->Realize(player_object_.Get(), SL_BOOLEAN_FALSE), false); - - // Get the SLPlayItf interface on the audio player. - RETURN_ON_ERROR( - player_object_->GetInterface(player_object_.Get(), SL_IID_PLAY, &player_), - false); - - // Get the SLAndroidSimpleBufferQueueItf interface on the audio player. - RETURN_ON_ERROR( - player_object_->GetInterface(player_object_.Get(), SL_IID_BUFFERQUEUE, - &simple_buffer_queue_), - false); - - // Register callback method for the Android Simple Buffer Queue interface. - // This method will be called when the native audio layer needs audio data. - RETURN_ON_ERROR((*simple_buffer_queue_) - ->RegisterCallback(simple_buffer_queue_, - SimpleBufferQueueCallback, this), - false); - - // Get the SLVolumeItf interface on the audio player. - RETURN_ON_ERROR(player_object_->GetInterface(player_object_.Get(), - SL_IID_VOLUME, &volume_), - false); - - // TODO(henrika): might not be required to set volume to max here since it - // seems to be default on most devices. Might be required for unit tests. 
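The player construction above pairs an Android simple buffer queue source with an output-mix sink. The recording side (OpenSLESRecorder, later in this patch) builds the mirror image: an audio-input IODevice source feeding a buffer-queue sink. A hedged sketch of that pattern with simplified error handling and illustrative names; it assumes the app holds the RECORD_AUDIO permission and already has a realized engine interface.

#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>

// Sketch only: create and realize an audio recorder object. `engine` is an
// already-obtained SLEngineItf; `pcm_format` matches the capture parameters.
bool CreateRecorder(SLEngineItf engine,
                    SLDataFormat_PCM* pcm_format,
                    SLObjectItf* recorder_object_out) {
  const SLuint32 kNumBuffers = 2;

  // Source: the default audio input device.
  SLDataLocator_IODevice mic_locator = {SL_DATALOCATOR_IODEVICE,
                                        SL_IODEVICE_AUDIOINPUT,
                                        SL_DEFAULTDEVICEID_AUDIOINPUT, nullptr};
  SLDataSource audio_source = {&mic_locator, nullptr};

  // Sink: an Android simple buffer queue that delivers PCM to the app.
  SLDataLocator_AndroidSimpleBufferQueue buffer_queue = {
      SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, kNumBuffers};
  SLDataSink audio_sink = {&buffer_queue, pcm_format};

  // The simple buffer queue interface must be requested explicitly.
  const SLInterfaceID ids[] = {SL_IID_ANDROIDSIMPLEBUFFERQUEUE};
  const SLboolean required[] = {SL_BOOLEAN_TRUE};

  SLObjectItf recorder = nullptr;
  if ((*engine)->CreateAudioRecorder(engine, &recorder, &audio_source,
                                     &audio_sink, 1, ids, required) !=
      SL_RESULT_SUCCESS) {
    return false;
  }
  if ((*recorder)->Realize(recorder, SL_BOOLEAN_FALSE) != SL_RESULT_SUCCESS) {
    (*recorder)->Destroy(recorder);
    return false;
  }
  *recorder_object_out = recorder;
  return true;
}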
- // RETURN_ON_ERROR((*volume_)->SetVolumeLevel(volume_, 0), false); - - return true; -} - -void OpenSLESPlayer::DestroyAudioPlayer() { - ALOGD("DestroyAudioPlayer"); - RTC_DCHECK(thread_checker_.IsCurrent()); - if (!player_object_.Get()) - return; - (*simple_buffer_queue_) - ->RegisterCallback(simple_buffer_queue_, nullptr, nullptr); - player_object_.Reset(); - player_ = nullptr; - simple_buffer_queue_ = nullptr; - volume_ = nullptr; -} - -// static -void OpenSLESPlayer::SimpleBufferQueueCallback( - SLAndroidSimpleBufferQueueItf caller, - void* context) { - OpenSLESPlayer* stream = reinterpret_cast(context); - stream->FillBufferQueue(); -} - -void OpenSLESPlayer::FillBufferQueue() { - RTC_DCHECK(thread_checker_opensles_.IsCurrent()); - SLuint32 state = GetPlayState(); - if (state != SL_PLAYSTATE_PLAYING) { - ALOGW("Buffer callback in non-playing state!"); - return; - } - EnqueuePlayoutData(false); -} - -void OpenSLESPlayer::EnqueuePlayoutData(bool silence) { - // Check delta time between two successive callbacks and provide a warning - // if it becomes very large. - // TODO(henrika): using 150ms as upper limit but this value is rather random. - const uint32_t current_time = rtc::Time(); - const uint32_t diff = current_time - last_play_time_; - if (diff > 150) { - ALOGW("Bad OpenSL ES playout timing, dT=%u [ms]", diff); - } - last_play_time_ = current_time; - SLint8* audio_ptr8 = - reinterpret_cast(audio_buffers_[buffer_index_].get()); - if (silence) { - RTC_DCHECK(thread_checker_.IsCurrent()); - // Avoid acquiring real audio data from WebRTC and fill the buffer with - // zeros instead. Used to prime the buffer with silence and to avoid asking - // for audio data from two different threads. - memset(audio_ptr8, 0, audio_parameters_.GetBytesPerBuffer()); - } else { - RTC_DCHECK(thread_checker_opensles_.IsCurrent()); - // Read audio data from the WebRTC source using the FineAudioBuffer object - // to adjust for differences in buffer size between WebRTC (10ms) and native - // OpenSL ES. Use hardcoded delay estimate since OpenSL ES does not support - // delay estimation. - fine_audio_buffer_->GetPlayoutData( - rtc::ArrayView(audio_buffers_[buffer_index_].get(), - audio_parameters_.frames_per_buffer() * - audio_parameters_.channels()), - 25); - } - // Enqueue the decoded audio buffer for playback. - SLresult err = (*simple_buffer_queue_) - ->Enqueue(simple_buffer_queue_, audio_ptr8, - audio_parameters_.GetBytesPerBuffer()); - if (SL_RESULT_SUCCESS != err) { - ALOGE("Enqueue failed: %d", err); - } - buffer_index_ = (buffer_index_ + 1) % kNumOfOpenSLESBuffers; -} - -SLuint32 OpenSLESPlayer::GetPlayState() const { - RTC_DCHECK(player_); - SLuint32 state; - SLresult err = (*player_)->GetPlayState(player_, &state); - if (SL_RESULT_SUCCESS != err) { - ALOGE("GetPlayState failed: %d", err); - } - return state; -} - -} // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_device/android/opensles_player.h b/third_party/libwebrtc/modules/audio_device/android/opensles_player.h deleted file mode 100644 index 41593a448fb2..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/opensles_player.h +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_DEVICE_ANDROID_OPENSLES_PLAYER_H_ -#define MODULES_AUDIO_DEVICE_ANDROID_OPENSLES_PLAYER_H_ - -#include -#include -#include - -#include "api/sequence_checker.h" -#include "modules/audio_device/android/audio_common.h" -#include "modules/audio_device/android/audio_manager.h" -#include "modules/audio_device/android/opensles_common.h" -#include "modules/audio_device/audio_device_generic.h" -#include "modules/audio_device/include/audio_device_defines.h" -#include "modules/utility/include/helpers_android.h" - -namespace webrtc { - -class FineAudioBuffer; - -// Implements 16-bit mono PCM audio output support for Android using the -// C based OpenSL ES API. No calls from C/C++ to Java using JNI is done. -// -// An instance must be created and destroyed on one and the same thread. -// All public methods must also be called on the same thread. A thread checker -// will RTC_DCHECK if any method is called on an invalid thread. Decoded audio -// buffers are requested on a dedicated internal thread managed by the OpenSL -// ES layer. -// -// The existing design forces the user to call InitPlayout() after Stoplayout() -// to be able to call StartPlayout() again. This is inline with how the Java- -// based implementation works. -// -// OpenSL ES is a native C API which have no Dalvik-related overhead such as -// garbage collection pauses and it supports reduced audio output latency. -// If the device doesn't claim this feature but supports API level 9 (Android -// platform version 2.3) or later, then we can still use the OpenSL ES APIs but -// the output latency may be higher. -class OpenSLESPlayer { - public: - // Beginning with API level 17 (Android 4.2), a buffer count of 2 or more is - // required for lower latency. Beginning with API level 18 (Android 4.3), a - // buffer count of 1 is sufficient for lower latency. In addition, the buffer - // size and sample rate must be compatible with the device's native output - // configuration provided via the audio manager at construction. - // TODO(henrika): perhaps set this value dynamically based on OS version. - static const int kNumOfOpenSLESBuffers = 2; - - explicit OpenSLESPlayer(AudioManager* audio_manager); - ~OpenSLESPlayer(); - - int Init(); - int Terminate(); - - int InitPlayout(); - bool PlayoutIsInitialized() const { return initialized_; } - - int StartPlayout(); - int StopPlayout(); - bool Playing() const { return playing_; } - - int SpeakerVolumeIsAvailable(bool& available); - int SetSpeakerVolume(uint32_t volume); - int SpeakerVolume(uint32_t& volume) const; - int MaxSpeakerVolume(uint32_t& maxVolume) const; - int MinSpeakerVolume(uint32_t& minVolume) const; - - void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer); - - private: - // These callback methods are called when data is required for playout. - // They are both called from an internal "OpenSL ES thread" which is not - // attached to the Dalvik VM. - static void SimpleBufferQueueCallback(SLAndroidSimpleBufferQueueItf caller, - void* context); - void FillBufferQueue(); - // Reads audio data in PCM format using the AudioDeviceBuffer. - // Can be called both on the main thread (during Start()) and from the - // internal audio thread while output streaming is active. - // If the `silence` flag is set, the audio is filled with zeros instead of - // asking the WebRTC layer for real audio data. This procedure is also known - // as audio priming. 
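Because EnqueuePlayoutData() and the priming behaviour described above are the heart of the playout path, here is a compact, hedged sketch of the enqueue-and-rotate idea: a small ring of preallocated buffers is primed with zeros before playback starts, and each buffer-queue callback refills and re-enqueues the next slot. All names and sizes below are illustrative; the real logic is in opensles_player.cc above.

#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>

#include <cstdint>
#include <cstring>

// Illustrative sizes: two buffers, each holding 10 ms of 48 kHz stereo PCM.
constexpr int kNumBuffers = 2;
constexpr std::size_t kBytesPerBuffer = 1920;

struct PlayoutRing {
  SLAndroidSimpleBufferQueueItf queue = nullptr;  // Obtained from the player.
  std::int8_t buffers[kNumBuffers][kBytesPerBuffer] = {};
  int index = 0;  // Next slot to fill: 0, 1, 0, 1, ...

  // Passing `silence` primes the queue with zeros before the play state is
  // set to SL_PLAYSTATE_PLAYING, as StartPlayout() does in the deleted code.
  bool EnqueueNext(bool silence) {
    std::int8_t* slot = buffers[index];
    if (silence) {
      std::memset(slot, 0, kBytesPerBuffer);
    } else {
      FillWithDecodedAudio(slot, kBytesPerBuffer);  // FineAudioBuffer's job.
    }
    const SLresult err = (*queue)->Enqueue(queue, slot, kBytesPerBuffer);
    index = (index + 1) % kNumBuffers;  // Rotate to the other slot.
    return err == SL_RESULT_SUCCESS;
  }

  // Placeholder for pulling real decoded audio from WebRTC.
  static void FillWithDecodedAudio(std::int8_t* dst, std::size_t size) {
    std::memset(dst, 0, size);
  }
};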
- void EnqueuePlayoutData(bool silence); - - // Allocate memory for audio buffers which will be used to render audio - // via the SLAndroidSimpleBufferQueueItf interface. - void AllocateDataBuffers(); - - // Obtaines the SL Engine Interface from the existing global Engine object. - // The interface exposes creation methods of all the OpenSL ES object types. - // This method defines the `engine_` member variable. - bool ObtainEngineInterface(); - - // Creates/destroys the output mix object. - bool CreateMix(); - void DestroyMix(); - - // Creates/destroys the audio player and the simple-buffer object. - // Also creates the volume object. - bool CreateAudioPlayer(); - void DestroyAudioPlayer(); - - SLuint32 GetPlayState() const; - - // Ensures that methods are called from the same thread as this object is - // created on. - SequenceChecker thread_checker_; - - // Stores thread ID in first call to SimpleBufferQueueCallback() from internal - // non-application thread which is not attached to the Dalvik JVM. - // Detached during construction of this object. - SequenceChecker thread_checker_opensles_; - - // Raw pointer to the audio manager injected at construction. Used to cache - // audio parameters and to access the global SL engine object needed by the - // ObtainEngineInterface() method. The audio manager outlives any instance of - // this class. - AudioManager* audio_manager_; - - // Contains audio parameters provided to this class at construction by the - // AudioManager. - const AudioParameters audio_parameters_; - - // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the - // AudioDeviceModuleImpl class and called by AudioDeviceModule::Create(). - AudioDeviceBuffer* audio_device_buffer_; - - bool initialized_; - bool playing_; - - // PCM-type format definition. - // TODO(henrika): add support for SLAndroidDataFormat_PCM_EX (android-21) if - // 32-bit float representation is needed. - SLDataFormat_PCM pcm_format_; - - // Queue of audio buffers to be used by the player object for rendering - // audio. - std::unique_ptr audio_buffers_[kNumOfOpenSLESBuffers]; - - // FineAudioBuffer takes an AudioDeviceBuffer which delivers audio data - // in chunks of 10ms. It then allows for this data to be pulled in - // a finer or coarser granularity. I.e. interacting with this class instead - // of directly with the AudioDeviceBuffer one can ask for any number of - // audio data samples. - // Example: native buffer size can be 192 audio frames at 48kHz sample rate. - // WebRTC will provide 480 audio frames per 10ms but OpenSL ES asks for 192 - // in each callback (one every 4th ms). This class can then ask for 192 and - // the FineAudioBuffer will ask WebRTC for new data approximately only every - // second callback and also cache non-utilized audio. - std::unique_ptr fine_audio_buffer_; - - // Keeps track of active audio buffer 'n' in the audio_buffers_[n] queue. - // Example (kNumOfOpenSLESBuffers = 2): counts 0, 1, 0, 1, ... - int buffer_index_; - - // This interface exposes creation methods for all the OpenSL ES object types. - // It is the OpenSL ES API entry point. - SLEngineItf engine_; - - // Output mix object to be used by the player object. - webrtc::ScopedSLObjectItf output_mix_; - - // The audio player media object plays out audio to the speakers. It also - // supports volume control. - webrtc::ScopedSLObjectItf player_object_; - - // This interface is supported on the audio player and it controls the state - // of the audio player. 
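The FineAudioBuffer comment above becomes clearer with numbers: with a 192-frame native buffer at 48 kHz, each OpenSL ES callback consumes 4 ms of audio while WebRTC hands out 480-frame (10 ms) chunks, so only some callbacks trigger a new pull and the remainder is cached. A small self-contained simulation of that bookkeeping follows; the figures come from the comment above, and the code itself is illustrative.

#include <cstdio>

int main() {
  const int kNativeFramesPerCallback = 192;  // PROPERTY_OUTPUT_FRAMES_PER_BUFFER
  const int kWebRtcFramesPer10Ms = 480;      // 48 kHz * 10 ms
  int cached_frames = 0;
  int webrtc_requests = 0;

  for (int callback = 1; callback <= 10; ++callback) {
    // Top up the cache with 10 ms chunks until one callback can be served.
    while (cached_frames < kNativeFramesPerCallback) {
      cached_frames += kWebRtcFramesPer10Ms;
      ++webrtc_requests;
    }
    cached_frames -= kNativeFramesPerCallback;
    std::printf("callback %2d: served 192 frames, %3d cached, %d WebRTC pulls\n",
                callback, cached_frames, webrtc_requests);
  }
  // After 10 callbacks (40 ms of playout) only 4 WebRTC pulls were needed.
  return 0;
}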
- SLPlayItf player_; - - // The Android Simple Buffer Queue interface is supported on the audio player - // and it provides methods to send audio data from the source to the audio - // player for rendering. - SLAndroidSimpleBufferQueueItf simple_buffer_queue_; - - // This interface exposes controls for manipulating the object’s audio volume - // properties. This interface is supported on the Audio Player object. - SLVolumeItf volume_; - - // Last time the OpenSL ES layer asked for audio data to play out. - uint32_t last_play_time_; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_DEVICE_ANDROID_OPENSLES_PLAYER_H_ diff --git a/third_party/libwebrtc/modules/audio_device/android/opensles_recorder.cc b/third_party/libwebrtc/modules/audio_device/android/opensles_recorder.cc deleted file mode 100644 index 4e0c26dbf08c..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/opensles_recorder.cc +++ /dev/null @@ -1,431 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_device/android/opensles_recorder.h" - -#include - -#include - -#include "api/array_view.h" -#include "modules/audio_device/android/audio_common.h" -#include "modules/audio_device/android/audio_manager.h" -#include "modules/audio_device/fine_audio_buffer.h" -#include "rtc_base/arraysize.h" -#include "rtc_base/checks.h" -#include "rtc_base/platform_thread.h" -#include "rtc_base/time_utils.h" - -#define TAG "OpenSLESRecorder" -#define ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__) -#define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__) -#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__) -#define ALOGW(...) __android_log_print(ANDROID_LOG_WARN, TAG, __VA_ARGS__) -#define ALOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__) - -#define LOG_ON_ERROR(op) \ - [](SLresult err) { \ - if (err != SL_RESULT_SUCCESS) { \ - ALOGE("%s:%d %s failed: %s", __FILE__, __LINE__, #op, \ - GetSLErrorString(err)); \ - return true; \ - } \ - return false; \ - }(op) - -namespace webrtc { - -OpenSLESRecorder::OpenSLESRecorder(AudioManager* audio_manager) - : audio_manager_(audio_manager), - audio_parameters_(audio_manager->GetRecordAudioParameters()), - audio_device_buffer_(nullptr), - initialized_(false), - recording_(false), - engine_(nullptr), - recorder_(nullptr), - simple_buffer_queue_(nullptr), - buffer_index_(0), - last_rec_time_(0) { - ALOGD("ctor[tid=%d]", rtc::CurrentThreadId()); - // Detach from this thread since we want to use the checker to verify calls - // from the internal audio thread. - thread_checker_opensles_.Detach(); - // Use native audio output parameters provided by the audio manager and - // define the PCM format structure. 
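The LOG_ON_ERROR macro above wraps each OpenSL ES call in an immediately-invoked lambda so every call site stays a plain if-statement. The same pattern in isolation, simplified to a plain int status code and printf instead of SLresult, GetSLErrorString and ALOGE; this is an illustration of the idiom, not the macro from the file.

    #include <cstdio>

    // Same shape as LOG_ON_ERROR above: evaluate the operation once, log on
    // failure, and yield true so the caller can bail out with a plain `if`.
    #define LOG_ON_ERROR(op)                                                    \
      [](int err) {                                                             \
        if (err != 0) {                                                         \
          std::printf("%s:%d %s failed: %d\n", __FILE__, __LINE__, #op, err);   \
          return true;                                                          \
        }                                                                       \
        return false;                                                           \
      }(op)

    int Succeed() { return 0; }
    int Fail() { return -1; }

    int main() {
      if (LOG_ON_ERROR(Succeed())) return 1;  // Not taken.
      if (LOG_ON_ERROR(Fail())) return 0;     // Logs and returns early.
      return 1;
    }
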
- pcm_format_ = CreatePCMConfiguration(audio_parameters_.channels(), - audio_parameters_.sample_rate(), - audio_parameters_.bits_per_sample()); -} - -OpenSLESRecorder::~OpenSLESRecorder() { - ALOGD("dtor[tid=%d]", rtc::CurrentThreadId()); - RTC_DCHECK(thread_checker_.IsCurrent()); - Terminate(); - DestroyAudioRecorder(); - engine_ = nullptr; - RTC_DCHECK(!engine_); - RTC_DCHECK(!recorder_); - RTC_DCHECK(!simple_buffer_queue_); -} - -int OpenSLESRecorder::Init() { - ALOGD("Init[tid=%d]", rtc::CurrentThreadId()); - RTC_DCHECK(thread_checker_.IsCurrent()); - if (audio_parameters_.channels() == 2) { - ALOGD("Stereo mode is enabled"); - } - return 0; -} - -int OpenSLESRecorder::Terminate() { - ALOGD("Terminate[tid=%d]", rtc::CurrentThreadId()); - RTC_DCHECK(thread_checker_.IsCurrent()); - StopRecording(); - return 0; -} - -int OpenSLESRecorder::InitRecording() { - ALOGD("InitRecording[tid=%d]", rtc::CurrentThreadId()); - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(!initialized_); - RTC_DCHECK(!recording_); - if (!ObtainEngineInterface()) { - ALOGE("Failed to obtain SL Engine interface"); - return -1; - } - CreateAudioRecorder(); - initialized_ = true; - buffer_index_ = 0; - return 0; -} - -int OpenSLESRecorder::StartRecording() { - ALOGD("StartRecording[tid=%d]", rtc::CurrentThreadId()); - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(initialized_); - RTC_DCHECK(!recording_); - if (fine_audio_buffer_) { - fine_audio_buffer_->ResetRecord(); - } - // Add buffers to the queue before changing state to SL_RECORDSTATE_RECORDING - // to ensure that recording starts as soon as the state is modified. On some - // devices, SLAndroidSimpleBufferQueue::Clear() used in Stop() does not flush - // the buffers as intended and we therefore check the number of buffers - // already queued first. Enqueue() can return SL_RESULT_BUFFER_INSUFFICIENT - // otherwise. - int num_buffers_in_queue = GetBufferCount(); - for (int i = 0; i < kNumOfOpenSLESBuffers - num_buffers_in_queue; ++i) { - if (!EnqueueAudioBuffer()) { - recording_ = false; - return -1; - } - } - num_buffers_in_queue = GetBufferCount(); - RTC_DCHECK_EQ(num_buffers_in_queue, kNumOfOpenSLESBuffers); - LogBufferState(); - // Start audio recording by changing the state to SL_RECORDSTATE_RECORDING. - // Given that buffers are already enqueued, recording should start at once. - // The macro returns -1 if recording fails to start. - last_rec_time_ = rtc::Time(); - if (LOG_ON_ERROR( - (*recorder_)->SetRecordState(recorder_, SL_RECORDSTATE_RECORDING))) { - return -1; - } - recording_ = (GetRecordState() == SL_RECORDSTATE_RECORDING); - RTC_DCHECK(recording_); - return 0; -} - -int OpenSLESRecorder::StopRecording() { - ALOGD("StopRecording[tid=%d]", rtc::CurrentThreadId()); - RTC_DCHECK(thread_checker_.IsCurrent()); - if (!initialized_ || !recording_) { - return 0; - } - // Stop recording by setting the record state to SL_RECORDSTATE_STOPPED. - if (LOG_ON_ERROR( - (*recorder_)->SetRecordState(recorder_, SL_RECORDSTATE_STOPPED))) { - return -1; - } - // Clear the buffer queue to get rid of old data when resuming recording. 
- if (LOG_ON_ERROR((*simple_buffer_queue_)->Clear(simple_buffer_queue_))) { - return -1; - } - thread_checker_opensles_.Detach(); - initialized_ = false; - recording_ = false; - return 0; -} - -void OpenSLESRecorder::AttachAudioBuffer(AudioDeviceBuffer* audio_buffer) { - ALOGD("AttachAudioBuffer"); - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_CHECK(audio_buffer); - audio_device_buffer_ = audio_buffer; - // Ensure that the audio device buffer is informed about the native sample - // rate used on the recording side. - const int sample_rate_hz = audio_parameters_.sample_rate(); - ALOGD("SetRecordingSampleRate(%d)", sample_rate_hz); - audio_device_buffer_->SetRecordingSampleRate(sample_rate_hz); - // Ensure that the audio device buffer is informed about the number of - // channels preferred by the OS on the recording side. - const size_t channels = audio_parameters_.channels(); - ALOGD("SetRecordingChannels(%zu)", channels); - audio_device_buffer_->SetRecordingChannels(channels); - // Allocated memory for internal data buffers given existing audio parameters. - AllocateDataBuffers(); -} - -int OpenSLESRecorder::EnableBuiltInAEC(bool enable) { - ALOGD("EnableBuiltInAEC(%d)", enable); - RTC_DCHECK(thread_checker_.IsCurrent()); - ALOGE("Not implemented"); - return 0; -} - -int OpenSLESRecorder::EnableBuiltInAGC(bool enable) { - ALOGD("EnableBuiltInAGC(%d)", enable); - RTC_DCHECK(thread_checker_.IsCurrent()); - ALOGE("Not implemented"); - return 0; -} - -int OpenSLESRecorder::EnableBuiltInNS(bool enable) { - ALOGD("EnableBuiltInNS(%d)", enable); - RTC_DCHECK(thread_checker_.IsCurrent()); - ALOGE("Not implemented"); - return 0; -} - -bool OpenSLESRecorder::ObtainEngineInterface() { - ALOGD("ObtainEngineInterface"); - RTC_DCHECK(thread_checker_.IsCurrent()); - if (engine_) - return true; - // Get access to (or create if not already existing) the global OpenSL Engine - // object. - SLObjectItf engine_object = audio_manager_->GetOpenSLEngine(); - if (engine_object == nullptr) { - ALOGE("Failed to access the global OpenSL engine"); - return false; - } - // Get the SL Engine Interface which is implicit. - if (LOG_ON_ERROR( - (*engine_object) - ->GetInterface(engine_object, SL_IID_ENGINE, &engine_))) { - return false; - } - return true; -} - -bool OpenSLESRecorder::CreateAudioRecorder() { - ALOGD("CreateAudioRecorder"); - RTC_DCHECK(thread_checker_.IsCurrent()); - if (recorder_object_.Get()) - return true; - RTC_DCHECK(!recorder_); - RTC_DCHECK(!simple_buffer_queue_); - - // Audio source configuration. - SLDataLocator_IODevice mic_locator = {SL_DATALOCATOR_IODEVICE, - SL_IODEVICE_AUDIOINPUT, - SL_DEFAULTDEVICEID_AUDIOINPUT, NULL}; - SLDataSource audio_source = {&mic_locator, NULL}; - - // Audio sink configuration. - SLDataLocator_AndroidSimpleBufferQueue buffer_queue = { - SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, - static_cast(kNumOfOpenSLESBuffers)}; - SLDataSink audio_sink = {&buffer_queue, &pcm_format_}; - - // Create the audio recorder object (requires the RECORD_AUDIO permission). - // Do not realize the recorder yet. Set the configuration first. - const SLInterfaceID interface_id[] = {SL_IID_ANDROIDSIMPLEBUFFERQUEUE, - SL_IID_ANDROIDCONFIGURATION}; - const SLboolean interface_required[] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE}; - if (LOG_ON_ERROR((*engine_)->CreateAudioRecorder( - engine_, recorder_object_.Receive(), &audio_source, &audio_sink, - arraysize(interface_id), interface_id, interface_required))) { - return false; - } - - // Configure the audio recorder (before it is realized). 
- SLAndroidConfigurationItf recorder_config; - if (LOG_ON_ERROR((recorder_object_->GetInterface(recorder_object_.Get(), - SL_IID_ANDROIDCONFIGURATION, - &recorder_config)))) { - return false; - } - - // Uses the default microphone tuned for audio communication. - // Note that, SL_ANDROID_RECORDING_PRESET_VOICE_RECOGNITION leads to a fast - // track but also excludes usage of required effects like AEC, AGC and NS. - // SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION - SLint32 stream_type = SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION; - if (LOG_ON_ERROR(((*recorder_config) - ->SetConfiguration(recorder_config, - SL_ANDROID_KEY_RECORDING_PRESET, - &stream_type, sizeof(SLint32))))) { - return false; - } - - // The audio recorder can now be realized (in synchronous mode). - if (LOG_ON_ERROR((recorder_object_->Realize(recorder_object_.Get(), - SL_BOOLEAN_FALSE)))) { - return false; - } - - // Get the implicit recorder interface (SL_IID_RECORD). - if (LOG_ON_ERROR((recorder_object_->GetInterface( - recorder_object_.Get(), SL_IID_RECORD, &recorder_)))) { - return false; - } - - // Get the simple buffer queue interface (SL_IID_ANDROIDSIMPLEBUFFERQUEUE). - // It was explicitly requested. - if (LOG_ON_ERROR((recorder_object_->GetInterface( - recorder_object_.Get(), SL_IID_ANDROIDSIMPLEBUFFERQUEUE, - &simple_buffer_queue_)))) { - return false; - } - - // Register the input callback for the simple buffer queue. - // This callback will be called when receiving new data from the device. - if (LOG_ON_ERROR(((*simple_buffer_queue_) - ->RegisterCallback(simple_buffer_queue_, - SimpleBufferQueueCallback, this)))) { - return false; - } - return true; -} - -void OpenSLESRecorder::DestroyAudioRecorder() { - ALOGD("DestroyAudioRecorder"); - RTC_DCHECK(thread_checker_.IsCurrent()); - if (!recorder_object_.Get()) - return; - (*simple_buffer_queue_) - ->RegisterCallback(simple_buffer_queue_, nullptr, nullptr); - recorder_object_.Reset(); - recorder_ = nullptr; - simple_buffer_queue_ = nullptr; -} - -void OpenSLESRecorder::SimpleBufferQueueCallback( - SLAndroidSimpleBufferQueueItf buffer_queue, - void* context) { - OpenSLESRecorder* stream = static_cast(context); - stream->ReadBufferQueue(); -} - -void OpenSLESRecorder::AllocateDataBuffers() { - ALOGD("AllocateDataBuffers"); - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(!simple_buffer_queue_); - RTC_CHECK(audio_device_buffer_); - // Create a modified audio buffer class which allows us to deliver any number - // of samples (and not only multiple of 10ms) to match the native audio unit - // buffer size. - ALOGD("frames per native buffer: %zu", audio_parameters_.frames_per_buffer()); - ALOGD("frames per 10ms buffer: %zu", - audio_parameters_.frames_per_10ms_buffer()); - ALOGD("bytes per native buffer: %zu", audio_parameters_.GetBytesPerBuffer()); - ALOGD("native sample rate: %d", audio_parameters_.sample_rate()); - RTC_DCHECK(audio_device_buffer_); - fine_audio_buffer_ = std::make_unique(audio_device_buffer_); - // Allocate queue of audio buffers that stores recorded audio samples. 
- const int buffer_size_samples = - audio_parameters_.frames_per_buffer() * audio_parameters_.channels(); - audio_buffers_.reset(new std::unique_ptr[kNumOfOpenSLESBuffers]); - for (int i = 0; i < kNumOfOpenSLESBuffers; ++i) { - audio_buffers_[i].reset(new SLint16[buffer_size_samples]); - } -} - -void OpenSLESRecorder::ReadBufferQueue() { - RTC_DCHECK(thread_checker_opensles_.IsCurrent()); - SLuint32 state = GetRecordState(); - if (state != SL_RECORDSTATE_RECORDING) { - ALOGW("Buffer callback in non-recording state!"); - return; - } - // Check delta time between two successive callbacks and provide a warning - // if it becomes very large. - // TODO(henrika): using 150ms as upper limit but this value is rather random. - const uint32_t current_time = rtc::Time(); - const uint32_t diff = current_time - last_rec_time_; - if (diff > 150) { - ALOGW("Bad OpenSL ES record timing, dT=%u [ms]", diff); - } - last_rec_time_ = current_time; - // Send recorded audio data to the WebRTC sink. - // TODO(henrika): fix delay estimates. It is OK to use fixed values for now - // since there is no support to turn off built-in EC in combination with - // OpenSL ES anyhow. Hence, as is, the WebRTC based AEC (which would use - // these estimates) will never be active. - fine_audio_buffer_->DeliverRecordedData( - rtc::ArrayView( - audio_buffers_[buffer_index_].get(), - audio_parameters_.frames_per_buffer() * audio_parameters_.channels()), - 25); - // Enqueue the utilized audio buffer and use if for recording again. - EnqueueAudioBuffer(); -} - -bool OpenSLESRecorder::EnqueueAudioBuffer() { - SLresult err = - (*simple_buffer_queue_) - ->Enqueue( - simple_buffer_queue_, - reinterpret_cast(audio_buffers_[buffer_index_].get()), - audio_parameters_.GetBytesPerBuffer()); - if (SL_RESULT_SUCCESS != err) { - ALOGE("Enqueue failed: %s", GetSLErrorString(err)); - return false; - } - buffer_index_ = (buffer_index_ + 1) % kNumOfOpenSLESBuffers; - return true; -} - -SLuint32 OpenSLESRecorder::GetRecordState() const { - RTC_DCHECK(recorder_); - SLuint32 state; - SLresult err = (*recorder_)->GetRecordState(recorder_, &state); - if (SL_RESULT_SUCCESS != err) { - ALOGE("GetRecordState failed: %s", GetSLErrorString(err)); - } - return state; -} - -SLAndroidSimpleBufferQueueState OpenSLESRecorder::GetBufferQueueState() const { - RTC_DCHECK(simple_buffer_queue_); - // state.count: Number of buffers currently in the queue. - // state.index: Index of the currently filling buffer. This is a linear index - // that keeps a cumulative count of the number of buffers recorded. - SLAndroidSimpleBufferQueueState state; - SLresult err = - (*simple_buffer_queue_)->GetState(simple_buffer_queue_, &state); - if (SL_RESULT_SUCCESS != err) { - ALOGE("GetState failed: %s", GetSLErrorString(err)); - } - return state; -} - -void OpenSLESRecorder::LogBufferState() const { - SLAndroidSimpleBufferQueueState state = GetBufferQueueState(); - ALOGD("state.count:%d state.index:%d", state.count, state.index); -} - -SLuint32 OpenSLESRecorder::GetBufferCount() { - SLAndroidSimpleBufferQueueState state = GetBufferQueueState(); - return state.count; -} - -} // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_device/android/opensles_recorder.h b/third_party/libwebrtc/modules/audio_device/android/opensles_recorder.h deleted file mode 100644 index e659c3c15775..000000000000 --- a/third_party/libwebrtc/modules/audio_device/android/opensles_recorder.h +++ /dev/null @@ -1,193 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. 
All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_DEVICE_ANDROID_OPENSLES_RECORDER_H_ -#define MODULES_AUDIO_DEVICE_ANDROID_OPENSLES_RECORDER_H_ - -#include -#include -#include - -#include - -#include "api/sequence_checker.h" -#include "modules/audio_device/android/audio_common.h" -#include "modules/audio_device/android/audio_manager.h" -#include "modules/audio_device/android/opensles_common.h" -#include "modules/audio_device/audio_device_generic.h" -#include "modules/audio_device/include/audio_device_defines.h" -#include "modules/utility/include/helpers_android.h" - -namespace webrtc { - -class FineAudioBuffer; - -// Implements 16-bit mono PCM audio input support for Android using the -// C based OpenSL ES API. No calls from C/C++ to Java using JNI is done. -// -// An instance must be created and destroyed on one and the same thread. -// All public methods must also be called on the same thread. A thread checker -// will RTC_DCHECK if any method is called on an invalid thread. Recorded audio -// buffers are provided on a dedicated internal thread managed by the OpenSL -// ES layer. -// -// The existing design forces the user to call InitRecording() after -// StopRecording() to be able to call StartRecording() again. This is inline -// with how the Java-based implementation works. -// -// As of API level 21, lower latency audio input is supported on select devices. -// To take advantage of this feature, first confirm that lower latency output is -// available. The capability for lower latency output is a prerequisite for the -// lower latency input feature. Then, create an AudioRecorder with the same -// sample rate and buffer size as would be used for output. OpenSL ES interfaces -// for input effects preclude the lower latency path. -// See https://developer.android.com/ndk/guides/audio/opensl-prog-notes.html -// for more details. -class OpenSLESRecorder { - public: - // Beginning with API level 17 (Android 4.2), a buffer count of 2 or more is - // required for lower latency. Beginning with API level 18 (Android 4.3), a - // buffer count of 1 is sufficient for lower latency. In addition, the buffer - // size and sample rate must be compatible with the device's native input - // configuration provided via the audio manager at construction. - // TODO(henrika): perhaps set this value dynamically based on OS version. - static const int kNumOfOpenSLESBuffers = 2; - - explicit OpenSLESRecorder(AudioManager* audio_manager); - ~OpenSLESRecorder(); - - int Init(); - int Terminate(); - - int InitRecording(); - bool RecordingIsInitialized() const { return initialized_; } - - int StartRecording(); - int StopRecording(); - bool Recording() const { return recording_; } - - void AttachAudioBuffer(AudioDeviceBuffer* audio_buffer); - - // TODO(henrika): add support using OpenSL ES APIs when available. - int EnableBuiltInAEC(bool enable); - int EnableBuiltInAGC(bool enable); - int EnableBuiltInNS(bool enable); - - private: - // Obtaines the SL Engine Interface from the existing global Engine object. - // The interface exposes creation methods of all the OpenSL ES object types. - // This method defines the `engine_` member variable. 
- bool ObtainEngineInterface(); - - // Creates/destroys the audio recorder and the simple-buffer queue object. - bool CreateAudioRecorder(); - void DestroyAudioRecorder(); - - // Allocate memory for audio buffers which will be used to capture audio - // via the SLAndroidSimpleBufferQueueItf interface. - void AllocateDataBuffers(); - - // These callback methods are called when data has been written to the input - // buffer queue. They are both called from an internal "OpenSL ES thread" - // which is not attached to the Dalvik VM. - static void SimpleBufferQueueCallback(SLAndroidSimpleBufferQueueItf caller, - void* context); - void ReadBufferQueue(); - - // Wraps calls to SLAndroidSimpleBufferQueueState::Enqueue() and it can be - // called both on the main thread (but before recording has started) and from - // the internal audio thread while input streaming is active. It uses - // `simple_buffer_queue_` but no lock is needed since the initial calls from - // the main thread and the native callback thread are mutually exclusive. - bool EnqueueAudioBuffer(); - - // Returns the current recorder state. - SLuint32 GetRecordState() const; - - // Returns the current buffer queue state. - SLAndroidSimpleBufferQueueState GetBufferQueueState() const; - - // Number of buffers currently in the queue. - SLuint32 GetBufferCount(); - - // Prints a log message of the current queue state. Can be used for debugging - // purposes. - void LogBufferState() const; - - // Ensures that methods are called from the same thread as this object is - // created on. - SequenceChecker thread_checker_; - - // Stores thread ID in first call to SimpleBufferQueueCallback() from internal - // non-application thread which is not attached to the Dalvik JVM. - // Detached during construction of this object. - SequenceChecker thread_checker_opensles_; - - // Raw pointer to the audio manager injected at construction. Used to cache - // audio parameters and to access the global SL engine object needed by the - // ObtainEngineInterface() method. The audio manager outlives any instance of - // this class. - AudioManager* const audio_manager_; - - // Contains audio parameters provided to this class at construction by the - // AudioManager. - const AudioParameters audio_parameters_; - - // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the - // AudioDeviceModuleImpl class and called by AudioDeviceModule::Create(). - AudioDeviceBuffer* audio_device_buffer_; - - // PCM-type format definition. - // TODO(henrika): add support for SLAndroidDataFormat_PCM_EX (android-21) if - // 32-bit float representation is needed. - SLDataFormat_PCM pcm_format_; - - bool initialized_; - bool recording_; - - // This interface exposes creation methods for all the OpenSL ES object types. - // It is the OpenSL ES API entry point. - SLEngineItf engine_; - - // The audio recorder media object records audio to the destination specified - // by the data sink capturing it from the input specified by the data source. - webrtc::ScopedSLObjectItf recorder_object_; - - // This interface is supported on the audio recorder object and it controls - // the state of the audio recorder. - SLRecordItf recorder_; - - // The Android Simple Buffer Queue interface is supported on the audio - // recorder. For recording, an app should enqueue empty buffers. When a - // registered callback sends notification that the system has finished writing - // data to the buffer, the app can read the buffer. 
- SLAndroidSimpleBufferQueueItf simple_buffer_queue_; - - // Consumes audio of native buffer size and feeds the WebRTC layer with 10ms - // chunks of audio. - std::unique_ptr fine_audio_buffer_; - - // Queue of audio buffers to be used by the recorder object for capturing - // audio. They will be used in a Round-robin way and the size of each buffer - // is given by AudioParameters::frames_per_buffer(), i.e., it corresponds to - // the native OpenSL ES buffer size. - std::unique_ptr[]> audio_buffers_; - - // Keeps track of active audio buffer 'n' in the audio_buffers_[n] queue. - // Example (kNumOfOpenSLESBuffers = 2): counts 0, 1, 0, 1, ... - int buffer_index_; - - // Last time the OpenSL ES layer delivered recorded audio data. - uint32_t last_rec_time_; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_DEVICE_ANDROID_OPENSLES_RECORDER_H_ diff --git a/third_party/libwebrtc/modules/audio_device/audio_device_buffer.cc b/third_party/libwebrtc/modules/audio_device/audio_device_buffer.cc index 91964d1eddc6..f1bd8e823b13 100644 --- a/third_party/libwebrtc/modules/audio_device/audio_device_buffer.cc +++ b/third_party/libwebrtc/modules/audio_device/audio_device_buffer.cc @@ -41,7 +41,8 @@ static const size_t kMinValidCallTimeTimeInMilliseconds = static const double k2Pi = 6.28318530717959; #endif -AudioDeviceBuffer::AudioDeviceBuffer(TaskQueueFactory* task_queue_factory) +AudioDeviceBuffer::AudioDeviceBuffer(TaskQueueFactory* task_queue_factory, + bool create_detached) : task_queue_(task_queue_factory->CreateTaskQueue( kTimerQueueName, TaskQueueFactory::Priority::NORMAL)), @@ -67,6 +68,9 @@ AudioDeviceBuffer::AudioDeviceBuffer(TaskQueueFactory* task_queue_factory) phase_ = 0.0; RTC_LOG(LS_WARNING) << "AUDIO_DEVICE_PLAYS_SINUS_TONE is defined!"; #endif + if (create_detached) { + main_thread_checker_.Detach(); + } } AudioDeviceBuffer::~AudioDeviceBuffer() { diff --git a/third_party/libwebrtc/modules/audio_device/audio_device_buffer.h b/third_party/libwebrtc/modules/audio_device/audio_device_buffer.h index f7c4ecdcff75..1260a24c6103 100644 --- a/third_party/libwebrtc/modules/audio_device/audio_device_buffer.h +++ b/third_party/libwebrtc/modules/audio_device/audio_device_buffer.h @@ -78,7 +78,11 @@ class AudioDeviceBuffer { int16_t max_play_level = 0; }; - explicit AudioDeviceBuffer(TaskQueueFactory* task_queue_factory); + // If `create_detached` is true, the created buffer can be used on another + // thread compared to the one on which it was created. It's useful for + // testing. 
+ explicit AudioDeviceBuffer(TaskQueueFactory* task_queue_factory, + bool create_detached = false); virtual ~AudioDeviceBuffer(); int32_t RegisterAudioCallback(AudioTransport* audio_callback); diff --git a/third_party/libwebrtc/modules/audio_device/audio_device_impl.cc b/third_party/libwebrtc/modules/audio_device/audio_device_impl.cc index 092b98f2bf24..c0e0de6fecce 100644 --- a/third_party/libwebrtc/modules/audio_device/audio_device_impl.cc +++ b/third_party/libwebrtc/modules/audio_device/audio_device_impl.cc @@ -26,16 +26,7 @@ #endif #elif defined(WEBRTC_ANDROID) #include -#if defined(WEBRTC_AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO) -#include "modules/audio_device/android/aaudio_player.h" -#include "modules/audio_device/android/aaudio_recorder.h" -#endif -#include "modules/audio_device/android/audio_device_template.h" -#include "modules/audio_device/android/audio_manager.h" -#include "modules/audio_device/android/audio_record_jni.h" -#include "modules/audio_device/android/audio_track_jni.h" -#include "modules/audio_device/android/opensles_player.h" -#include "modules/audio_device/android/opensles_recorder.h" +#include "sdk/android/native_api/audio_device_module/audio_device_android.h" #elif defined(WEBRTC_LINUX) #if defined(WEBRTC_ENABLE_LINUX_ALSA) #include "modules/audio_device/linux/audio_device_alsa_linux.h" @@ -89,6 +80,14 @@ rtc::scoped_refptr AudioDeviceModule::CreateForTest( RTC_LOG(LS_ERROR) << "Use the CreateWindowsCoreAudioAudioDeviceModule() " "factory method instead for this option."; return nullptr; + } else if (audio_layer == AudioDeviceModule::kAndroidJavaAudio || + audio_layer == AudioDeviceModule::kAndroidOpenSLESAudio || + audio_layer == AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio || + audio_layer == kAndroidAAudioAudio || + audio_layer == kAndroidJavaInputAndAAudioOutputAudio) { + RTC_LOG(LS_ERROR) << "Use the CreateAndroidAudioDeviceModule() " + "factory method instead for this option."; + return nullptr; } // Create the generic reference counted (platform independent) implementation. @@ -121,6 +120,17 @@ AudioDeviceModuleImpl::AudioDeviceModuleImpl( RTC_DLOG(LS_INFO) << __FUNCTION__; } +AudioDeviceModuleImpl::AudioDeviceModuleImpl( + AudioLayer audio_layer, + std::unique_ptr audio_device, + TaskQueueFactory* task_queue_factory, + bool create_detached) + : audio_layer_(audio_layer), + audio_device_buffer_(task_queue_factory, create_detached), + audio_device_(std::move(audio_device)) { + RTC_DLOG(LS_INFO) << __FUNCTION__; +} + int32_t AudioDeviceModuleImpl::CheckPlatform() { RTC_DLOG(LS_INFO) << __FUNCTION__; // Ensure that the current platform is supported @@ -140,6 +150,9 @@ int32_t AudioDeviceModuleImpl::CheckPlatform() { #elif defined(WEBRTC_MAC) platform = kPlatformMac; RTC_LOG(LS_INFO) << "current platform is Mac"; +#elif defined(WEBRTC_FUCHSIA) + platform = kPlatformFuchsia; + RTC_LOG(LS_INFO) << "current platform is Fuchsia"; #endif if (platform == kPlatformNotSupported) { RTC_LOG(LS_ERROR) @@ -153,6 +166,10 @@ int32_t AudioDeviceModuleImpl::CheckPlatform() { int32_t AudioDeviceModuleImpl::CreatePlatformSpecificObjects() { RTC_LOG(LS_INFO) << __FUNCTION__; + if (audio_device_ != nullptr) { + RTC_LOG(LS_INFO) << "Reusing provided audio device"; + return 0; + } // Dummy ADM implementations if build flags are set. 
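The new create_detached flag, and the AudioDeviceModuleImpl constructor above that forwards it, rely on webrtc::SequenceChecker::Detach(): a detached checker re-binds to whichever thread uses it next, which is what lets a test drive a module from a thread other than the one that built it. A minimal sketch of that behaviour on its own, assuming the webrtc headers are on the include path; not code from this patch.

    #include <thread>

    #include "api/sequence_checker.h"
    #include "rtc_base/checks.h"

    int main() {
      webrtc::SequenceChecker checker;  // Initially bound to this thread...
      checker.Detach();                 // ...until detached, as create_detached does.

      std::thread worker([&checker] {
        // The first call after Detach() re-binds the checker to this worker
        // thread, so the check passes even though the object was created
        // elsewhere; any other thread touching it now would trip the DCHECK.
        RTC_DCHECK(checker.IsCurrent());
      });
      worker.join();
      return 0;
    }
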
#if defined(WEBRTC_DUMMY_AUDIO_BUILD) audio_device_.reset(new AudioDeviceDummy()); @@ -182,70 +199,13 @@ int32_t AudioDeviceModuleImpl::CreatePlatformSpecificObjects() { } #endif // defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD) -#if defined(WEBRTC_ANDROID) - // Create an Android audio manager. - audio_manager_android_.reset(new AudioManager()); - // Select best possible combination of audio layers. - if (audio_layer == kPlatformDefaultAudio) { - if (audio_manager_android_->IsAAudioSupported()) { - // Use of AAudio for both playout and recording has highest priority. - audio_layer = kAndroidAAudioAudio; - } else if (audio_manager_android_->IsLowLatencyPlayoutSupported() && - audio_manager_android_->IsLowLatencyRecordSupported()) { - // Use OpenSL ES for both playout and recording. - audio_layer = kAndroidOpenSLESAudio; - } else if (audio_manager_android_->IsLowLatencyPlayoutSupported() && - !audio_manager_android_->IsLowLatencyRecordSupported()) { - // Use OpenSL ES for output on devices that only supports the - // low-latency output audio path. - audio_layer = kAndroidJavaInputAndOpenSLESOutputAudio; - } else { - // Use Java-based audio in both directions when low-latency output is - // not supported. - audio_layer = kAndroidJavaAudio; - } - } - AudioManager* audio_manager = audio_manager_android_.get(); - if (audio_layer == kAndroidJavaAudio) { - // Java audio for both input and output audio. - audio_device_.reset(new AudioDeviceTemplate( - audio_layer, audio_manager)); - } else if (audio_layer == kAndroidOpenSLESAudio) { - // OpenSL ES based audio for both input and output audio. - audio_device_.reset( - new AudioDeviceTemplate( - audio_layer, audio_manager)); - } else if (audio_layer == kAndroidJavaInputAndOpenSLESOutputAudio) { - // Java audio for input and OpenSL ES for output audio (i.e. mixed APIs). - // This combination provides low-latency output audio and at the same - // time support for HW AEC using the AudioRecord Java API. - audio_device_.reset(new AudioDeviceTemplate( - audio_layer, audio_manager)); - } else if (audio_layer == kAndroidAAudioAudio) { -#if defined(WEBRTC_AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO) - // AAudio based audio for both input and output. - audio_device_.reset(new AudioDeviceTemplate( - audio_layer, audio_manager)); -#endif - } else if (audio_layer == kAndroidJavaInputAndAAudioOutputAudio) { -#if defined(WEBRTC_AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO) - // Java audio for input and AAudio for output audio (i.e. mixed APIs). - audio_device_.reset(new AudioDeviceTemplate( - audio_layer, audio_manager)); -#endif - } else { - RTC_LOG(LS_ERROR) << "The requested audio layer is not supported"; - audio_device_.reset(nullptr); - } -// END #if defined(WEBRTC_ANDROID) - // Linux ADM implementation. // Note that, WEBRTC_ENABLE_LINUX_ALSA is always defined by default when // WEBRTC_LINUX is defined. WEBRTC_ENABLE_LINUX_PULSE depends on the // 'rtc_include_pulse_audio' build flag. // TODO(bugs.webrtc.org/9127): improve support and make it more clear that // PulseAudio is the default selection. -#elif defined(WEBRTC_LINUX) +#if !defined(WEBRTC_ANDROID) && defined(WEBRTC_LINUX) #if !defined(WEBRTC_ENABLE_LINUX_PULSE) // Build flag 'rtc_include_pulse_audio' is set to false. 
In this mode: // - kPlatformDefaultAudio => ALSA, and diff --git a/third_party/libwebrtc/modules/audio_device/audio_device_impl.h b/third_party/libwebrtc/modules/audio_device/audio_device_impl.h index 45f73dcd655f..46d91a46c831 100644 --- a/third_party/libwebrtc/modules/audio_device/audio_device_impl.h +++ b/third_party/libwebrtc/modules/audio_device/audio_device_impl.h @@ -24,7 +24,6 @@ namespace webrtc { class AudioDeviceGeneric; -class AudioManager; class AudioDeviceModuleImpl : public AudioDeviceModuleForTest { public: @@ -35,7 +34,12 @@ class AudioDeviceModuleImpl : public AudioDeviceModuleForTest { kPlatformLinux = 3, kPlatformMac = 4, kPlatformAndroid = 5, - kPlatformIOS = 6 + kPlatformIOS = 6, + // Fuchsia isn't fully supported, as there is no implementation for + // AudioDeviceGeneric which will be created for Fuchsia, so + // `CreatePlatformSpecificObjects()` call will fail unless usable + // implementation will be provided by the user. + kPlatformFuchsia = 7, }; int32_t CheckPlatform(); @@ -44,6 +48,12 @@ class AudioDeviceModuleImpl : public AudioDeviceModuleForTest { AudioDeviceModuleImpl(AudioLayer audio_layer, TaskQueueFactory* task_queue_factory); + // If `create_detached` is true, created ADM can be used on another thread + // compared to the one on which it was created. It's useful for testing. + AudioDeviceModuleImpl(AudioLayer audio_layer, + std::unique_ptr audio_device, + TaskQueueFactory* task_queue_factory, + bool create_detached); ~AudioDeviceModuleImpl() override; // Retrieve the currently utilized audio layer @@ -145,12 +155,6 @@ class AudioDeviceModuleImpl : public AudioDeviceModuleForTest { int GetRecordAudioParameters(AudioParameters* params) const override; #endif // WEBRTC_IOS -#if defined(WEBRTC_ANDROID) - // Only use this acccessor for test purposes on Android. - AudioManager* GetAndroidAudioManagerForTest() { - return audio_manager_android_.get(); - } -#endif AudioDeviceBuffer* GetAudioDeviceBuffer() { return &audio_device_buffer_; } int RestartPlayoutInternally() override { return -1; } @@ -165,10 +169,6 @@ class AudioDeviceModuleImpl : public AudioDeviceModuleForTest { AudioLayer audio_layer_; PlatformType platform_type_ = kPlatformNotSupported; bool initialized_ = false; -#if defined(WEBRTC_ANDROID) - // Should be declared first to ensure that it outlives other resources. - std::unique_ptr audio_manager_android_; -#endif AudioDeviceBuffer audio_device_buffer_; std::unique_ptr audio_device_; }; diff --git a/third_party/libwebrtc/modules/audio_device/g3doc/audio_device_module.md b/third_party/libwebrtc/modules/audio_device/g3doc/audio_device_module.md index e325faacad8b..93e9aca7415a 100644 --- a/third_party/libwebrtc/modules/audio_device/g3doc/audio_device_module.md +++ b/third_party/libwebrtc/modules/audio_device/g3doc/audio_device_module.md @@ -5,8 +5,8 @@ ## Overview -The ADM is responsible for driving input (microphone) and output (speaker) audio -in WebRTC and the API is defined in [audio_device.h][19]. +The ADM(AudioDeviceModule) is responsible for driving input (microphone) and +output (speaker) audio in WebRTC and the API is defined in [audio_device.h][19]. 
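For orientation, the call order a client typically goes through against this API is the one the new unit tests at the end of this patch exercise. A compact sketch using the test factory (the platform factories differ per OS); return codes and error handling are omitted, and the function name is illustrative only.

    #include "api/task_queue/default_task_queue_factory.h"
    #include "modules/audio_device/include/audio_device_defines.h"
    #include "modules/audio_device/include/test_audio_device.h"

    // Sketch of the usual ADM playout lifecycle.
    void RunPlayoutOnce(webrtc::AudioTransport* transport) {
      auto task_queue_factory = webrtc::CreateDefaultTaskQueueFactory();
      auto adm = webrtc::TestAudioDeviceModule::Create(
          task_queue_factory.get(), /*capturer=*/nullptr,
          webrtc::TestAudioDeviceModule::CreateDiscardRenderer(
              /*sampling_frequency_in_hz=*/48000, /*num_channels=*/2));
      adm->RegisterAudioCallback(transport);  // Sink/source for the audio data.
      adm->Init();
      adm->InitPlayout();
      adm->StartPlayout();  // NeedMorePlayData() now fires every 10 ms.
      adm->StopPlayout();
      adm->Terminate();
    }
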
Main functions of the ADM are: diff --git a/third_party/libwebrtc/modules/audio_device/include/test_audio_device.cc b/third_party/libwebrtc/modules/audio_device/include/test_audio_device.cc index 76406448a085..4c29c98f2cb2 100644 --- a/third_party/libwebrtc/modules/audio_device/include/test_audio_device.cc +++ b/third_party/libwebrtc/modules/audio_device/include/test_audio_device.cc @@ -22,7 +22,9 @@ #include "api/array_view.h" #include "api/make_ref_counted.h" #include "common_audio/wav_file.h" +#include "modules/audio_device/audio_device_impl.h" #include "modules/audio_device/include/audio_device_default.h" +#include "modules/audio_device/test_audio_device_impl.h" #include "rtc_base/buffer.h" #include "rtc_base/checks.h" #include "rtc_base/event.h" @@ -43,164 +45,23 @@ namespace { constexpr int kFrameLengthUs = 10000; constexpr int kFramesPerSecond = rtc::kNumMicrosecsPerSec / kFrameLengthUs; -// TestAudioDeviceModule implements an AudioDevice module that can act both as a -// capturer and a renderer. It will use 10ms audio frames. -class TestAudioDeviceModuleImpl - : public webrtc_impl::AudioDeviceModuleDefault { +class TestAudioDeviceModuleImpl : public AudioDeviceModuleImpl { public: - // Creates a new TestAudioDeviceModule. When capturing or playing, 10 ms audio - // frames will be processed every 10ms / `speed`. - // `capturer` is an object that produces audio data. Can be nullptr if this - // device is never used for recording. - // `renderer` is an object that receives audio data that would have been - // played out. Can be nullptr if this device is never used for playing. - // Use one of the Create... functions to get these instances. - TestAudioDeviceModuleImpl(TaskQueueFactory* task_queue_factory, - std::unique_ptr capturer, - std::unique_ptr renderer, - float speed = 1) - : task_queue_factory_(task_queue_factory), - capturer_(std::move(capturer)), - renderer_(std::move(renderer)), - process_interval_us_(kFrameLengthUs / speed), - audio_callback_(nullptr), - rendering_(false), - capturing_(false) { - auto good_sample_rate = [](int sr) { - return sr == 8000 || sr == 16000 || sr == 32000 || sr == 44100 || - sr == 48000; - }; + TestAudioDeviceModuleImpl( + TaskQueueFactory* task_queue_factory, + std::unique_ptr capturer, + std::unique_ptr renderer, + float speed = 1) + : AudioDeviceModuleImpl( + AudioLayer::kDummyAudio, + std::make_unique(task_queue_factory, + std::move(capturer), + std::move(renderer), + speed), + task_queue_factory, + /*create_detached=*/true) {} - if (renderer_) { - const int sample_rate = renderer_->SamplingFrequency(); - playout_buffer_.resize( - SamplesPerFrame(sample_rate) * renderer_->NumChannels(), 0); - RTC_CHECK(good_sample_rate(sample_rate)); - } - if (capturer_) { - RTC_CHECK(good_sample_rate(capturer_->SamplingFrequency())); - } - } - - ~TestAudioDeviceModuleImpl() override { - StopPlayout(); - StopRecording(); - } - - int32_t Init() override { - task_queue_ = - std::make_unique(task_queue_factory_->CreateTaskQueue( - "TestAudioDeviceModuleImpl", TaskQueueFactory::Priority::NORMAL)); - - RepeatingTaskHandle::Start(task_queue_->Get(), [this]() { - ProcessAudio(); - return TimeDelta::Micros(process_interval_us_); - }); - return 0; - } - - int32_t RegisterAudioCallback(AudioTransport* callback) override { - MutexLock lock(&lock_); - RTC_DCHECK(callback || audio_callback_); - audio_callback_ = callback; - return 0; - } - - int32_t StartPlayout() override { - MutexLock lock(&lock_); - RTC_CHECK(renderer_); - rendering_ = true; - return 0; - } - - int32_t 
StopPlayout() override { - MutexLock lock(&lock_); - rendering_ = false; - return 0; - } - - int32_t StartRecording() override { - MutexLock lock(&lock_); - RTC_CHECK(capturer_); - capturing_ = true; - return 0; - } - - int32_t StopRecording() override { - MutexLock lock(&lock_); - capturing_ = false; - return 0; - } - - bool Playing() const override { - MutexLock lock(&lock_); - return rendering_; - } - - bool Recording() const override { - MutexLock lock(&lock_); - return capturing_; - } - - // Blocks forever until the Recorder stops producing data. - void WaitForRecordingEnd() override { - done_capturing_.Wait(rtc::Event::kForever); - } - - private: - void ProcessAudio() { - MutexLock lock(&lock_); - if (capturing_) { - // Capture 10ms of audio. 2 bytes per sample. - const bool keep_capturing = capturer_->Capture(&recording_buffer_); - uint32_t new_mic_level = 0; - if (recording_buffer_.size() > 0) { - audio_callback_->RecordedDataIsAvailable( - recording_buffer_.data(), - recording_buffer_.size() / capturer_->NumChannels(), - 2 * capturer_->NumChannels(), capturer_->NumChannels(), - capturer_->SamplingFrequency(), /*totalDelayMS=*/0, - /*clockDrift=*/0, - /*currentMicLevel=*/0, /*keyPressed=*/false, new_mic_level, - absl::make_optional(rtc::TimeNanos())); - } - if (!keep_capturing) { - capturing_ = false; - done_capturing_.Set(); - } - } - if (rendering_) { - size_t samples_out = 0; - int64_t elapsed_time_ms = -1; - int64_t ntp_time_ms = -1; - const int sampling_frequency = renderer_->SamplingFrequency(); - audio_callback_->NeedMorePlayData( - SamplesPerFrame(sampling_frequency), 2 * renderer_->NumChannels(), - renderer_->NumChannels(), sampling_frequency, playout_buffer_.data(), - samples_out, &elapsed_time_ms, &ntp_time_ms); - const bool keep_rendering = renderer_->Render( - rtc::ArrayView(playout_buffer_.data(), samples_out)); - if (!keep_rendering) { - rendering_ = false; - done_rendering_.Set(); - } - } - } - TaskQueueFactory* const task_queue_factory_; - const std::unique_ptr capturer_ RTC_GUARDED_BY(lock_); - const std::unique_ptr renderer_ RTC_GUARDED_BY(lock_); - const int64_t process_interval_us_; - - mutable Mutex lock_; - AudioTransport* audio_callback_ RTC_GUARDED_BY(lock_); - bool rendering_ RTC_GUARDED_BY(lock_); - bool capturing_ RTC_GUARDED_BY(lock_); - rtc::Event done_rendering_; - rtc::Event done_capturing_; - - std::vector playout_buffer_ RTC_GUARDED_BY(lock_); - rtc::BufferT recording_buffer_ RTC_GUARDED_BY(lock_); - std::unique_ptr task_queue_; + ~TestAudioDeviceModuleImpl() override = default; }; // A fake capturer that generates pulses with random samples between @@ -433,6 +294,150 @@ class DiscardRenderer final : public TestAudioDeviceModule::Renderer { const int num_channels_; }; +class RawFileReader final : public TestAudioDeviceModule::Capturer { + public: + RawFileReader(absl::string_view input_file_name, + int sampling_frequency_in_hz, + int num_channels, + bool repeat) + : input_file_name_(input_file_name), + sampling_frequency_in_hz_(sampling_frequency_in_hz), + num_channels_(num_channels), + repeat_(repeat), + read_buffer_( + TestAudioDeviceModule::SamplesPerFrame(sampling_frequency_in_hz) * + num_channels * 2, + 0) { + input_file_ = FileWrapper::OpenReadOnly(input_file_name_); + RTC_CHECK(input_file_.is_open()) + << "Failed to open audio input file: " << input_file_name_; + } + + ~RawFileReader() override { input_file_.Close(); } + + int SamplingFrequency() const override { return sampling_frequency_in_hz_; } + + int NumChannels() const override { 
return num_channels_; } + + bool Capture(rtc::BufferT* buffer) override { + buffer->SetData( + TestAudioDeviceModule::SamplesPerFrame(SamplingFrequency()) * + NumChannels(), + [&](rtc::ArrayView data) { + rtc::ArrayView read_buffer_view = ReadBufferView(); + size_t size = data.size() * 2; + size_t read = input_file_.Read(read_buffer_view.data(), size); + if (read < size && repeat_) { + do { + input_file_.Rewind(); + size_t delta = input_file_.Read( + read_buffer_view.subview(read).data(), size - read); + RTC_CHECK_GT(delta, 0) << "No new data to read from file"; + read += delta; + } while (read < size); + } + memcpy(data.data(), read_buffer_view.data(), size); + return read / 2; + }); + return buffer->size() > 0; + } + + private: + rtc::ArrayView ReadBufferView() { return read_buffer_; } + + const std::string input_file_name_; + const int sampling_frequency_in_hz_; + const int num_channels_; + const bool repeat_; + FileWrapper input_file_; + std::vector read_buffer_; +}; + +class RawFileWriter : public TestAudioDeviceModule::Renderer { + public: + RawFileWriter(absl::string_view output_file_name, + int sampling_frequency_in_hz, + int num_channels) + : output_file_name_(output_file_name), + sampling_frequency_in_hz_(sampling_frequency_in_hz), + num_channels_(num_channels), + silent_audio_( + TestAudioDeviceModule::SamplesPerFrame(sampling_frequency_in_hz) * + num_channels * 2, + 0), + write_buffer_( + TestAudioDeviceModule::SamplesPerFrame(sampling_frequency_in_hz) * + num_channels * 2, + 0), + started_writing_(false), + trailing_zeros_(0) { + output_file_ = FileWrapper::OpenWriteOnly(output_file_name_); + RTC_CHECK(output_file_.is_open()) + << "Failed to open playout file" << output_file_name_; + } + ~RawFileWriter() override { output_file_.Close(); } + + int SamplingFrequency() const override { return sampling_frequency_in_hz_; } + + int NumChannels() const override { return num_channels_; } + + bool Render(rtc::ArrayView data) override { + const int16_t kAmplitudeThreshold = 5; + + const int16_t* begin = data.begin(); + const int16_t* end = data.end(); + if (!started_writing_) { + // Cut off silence at the beginning. + while (begin < end) { + if (std::abs(*begin) > kAmplitudeThreshold) { + started_writing_ = true; + break; + } + ++begin; + } + } + if (started_writing_) { + // Cut off silence at the end. + while (begin < end) { + if (*(end - 1) != 0) { + break; + } + --end; + } + if (begin < end) { + // If it turns out that the silence was not final, need to write all the + // skipped zeros and continue writing audio. + while (trailing_zeros_ > 0) { + const size_t zeros_to_write = + std::min(trailing_zeros_, silent_audio_.size()); + output_file_.Write(silent_audio_.data(), zeros_to_write * 2); + trailing_zeros_ -= zeros_to_write; + } + WriteInt16(begin, end); + } + // Save the number of zeros we skipped in case this needs to be restored. 
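The comments in Render() above spell out the trimming rule: leading samples are dropped until one exceeds a small amplitude threshold, exact zeros at the tail are withheld, and withheld zeros are written back if more audio follows. A stand-alone model of that rule for a single, final buffer, with the threshold value taken from the code above; this is an illustration, not the class itself.

    #include <cassert>
    #include <cstdint>
    #include <cstdlib>
    #include <vector>

    // Models RawFileWriter's trimming for one final buffer: drop leading
    // near-silence (|sample| <= threshold) and trailing exact zeros.
    std::vector<int16_t> TrimForWrite(const std::vector<int16_t>& in,
                                      int16_t amplitude_threshold = 5) {
      size_t begin = 0;
      while (begin < in.size() && std::abs(in[begin]) <= amplitude_threshold) {
        ++begin;
      }
      size_t end = in.size();
      while (end > begin && in[end - 1] == 0) {
        --end;
      }
      return std::vector<int16_t>(in.begin() + begin, in.begin() + end);
    }

    int main() {
      // Mirrors the "SomeStartSilence" / "SomeEndSilence" style cases in the
      // tests below: leading zeros and small values go, trailing zeros go.
      const std::vector<int16_t> input = {0, 0, 3, 0, 75, 1234, -22222, 0, 0};
      const std::vector<int16_t> expected = {75, 1234, -22222};
      assert(TrimForWrite(input) == expected);
      return 0;
    }
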
+ trailing_zeros_ += data.end() - end; + } + return true; + } + + private: + void WriteInt16(const int16_t* begin, const int16_t* end) { + int size = (end - begin) * sizeof(int16_t); + memcpy(write_buffer_.data(), begin, size); + output_file_.Write(write_buffer_.data(), size); + } + + const std::string output_file_name_; + const int sampling_frequency_in_hz_; + const int num_channels_; + FileWrapper output_file_; + std::vector silent_audio_; + std::vector write_buffer_; + bool started_writing_; + size_t trailing_zeros_; +}; + } // namespace size_t TestAudioDeviceModule::SamplesPerFrame(int sampling_frequency_in_hz) { @@ -444,8 +449,26 @@ rtc::scoped_refptr TestAudioDeviceModule::Create( std::unique_ptr capturer, std::unique_ptr renderer, float speed) { - return rtc::make_ref_counted( + auto audio_device = rtc::make_ref_counted( task_queue_factory, std::move(capturer), std::move(renderer), speed); + + // Ensure that the current platform is supported. + if (audio_device->CheckPlatform() == -1) { + return nullptr; + } + + // Create the platform-dependent implementation. + if (audio_device->CreatePlatformSpecificObjects() == -1) { + return nullptr; + } + + // Ensure that the generic audio buffer can communicate with the platform + // specific parts. + if (audio_device->AttachAudioBuffer() == -1) { + return nullptr; + } + + return audio_device; } std::unique_ptr @@ -497,4 +520,21 @@ TestAudioDeviceModule::CreateBoundedWavFileWriter(absl::string_view filename, filename, sampling_frequency_in_hz, num_channels); } +std::unique_ptr +TestAudioDeviceModule::CreateRawFileReader(absl::string_view filename, + int sampling_frequency_in_hz, + int num_channels, + bool repeat) { + return std::make_unique(filename, sampling_frequency_in_hz, + num_channels, repeat); +} + +std::unique_ptr +TestAudioDeviceModule::CreateRawFileWriter(absl::string_view filename, + int sampling_frequency_in_hz, + int num_channels) { + return std::make_unique(filename, sampling_frequency_in_hz, + num_channels); +} + } // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_device/include/test_audio_device.h b/third_party/libwebrtc/modules/audio_device/include/test_audio_device.h index 87ba9cfbae79..4b2d755ae1f9 100644 --- a/third_party/libwebrtc/modules/audio_device/include/test_audio_device.h +++ b/third_party/libwebrtc/modules/audio_device/include/test_audio_device.h @@ -29,9 +29,10 @@ namespace webrtc { // This is test API and is in development, so it can be changed/removed without // notice. -// TestAudioDeviceModule implements an AudioDevice module that can act both as a -// capturer and a renderer. It will use 10ms audio frames. -class TestAudioDeviceModule : public AudioDeviceModule { +// This class exists for historical reasons. For now it only contains static +// methods to create test AudioDeviceModule. Implementation details of that +// module are considered private. This class isn't intended to be instantiated. +class TestAudioDeviceModule { public: // Returns the number of samples that Capturers and Renderers with this // sampling frequency will work with every time Capture or Render is called. @@ -73,8 +74,6 @@ class TestAudioDeviceModule : public AudioDeviceModule { virtual void SetMaxAmplitude(int16_t amplitude) = 0; }; - ~TestAudioDeviceModule() override {} - // Creates a new TestAudioDeviceModule. When capturing or playing, 10 ms audio // frames will be processed every 10ms / `speed`. // `capturer` is an object that produces audio data. 
Can be nullptr if this @@ -103,8 +102,8 @@ class TestAudioDeviceModule : public AudioDeviceModule { // WavReader and WavWriter creation based on file name. - // Returns a Capturer instance that gets its data from a file. The sample rate - // and channels will be checked against the Wav file. + // Returns a Capturer instance that gets its data from a WAV file. The sample + // rate and channels will be checked against the Wav file. static std::unique_ptr CreateWavFileReader( absl::string_view filename, int sampling_frequency_in_hz, @@ -132,19 +131,23 @@ class TestAudioDeviceModule : public AudioDeviceModule { int sampling_frequency_in_hz, int num_channels = 1); - int32_t Init() override = 0; - int32_t RegisterAudioCallback(AudioTransport* callback) override = 0; + // Returns a Capturer instance that gets its data from a raw file (*.raw). + static std::unique_ptr CreateRawFileReader( + absl::string_view filename, + int sampling_frequency_in_hz = 48000, + int num_channels = 2, + bool repeat = true); - int32_t StartPlayout() override = 0; - int32_t StopPlayout() override = 0; - int32_t StartRecording() override = 0; - int32_t StopRecording() override = 0; + // Returns a Renderer instance that writes its data to a raw file (*.raw), + // cutting off silence at the beginning (not necessarily perfect silence, see + // kAmplitudeThreshold) and at the end (only actual 0 samples in this case). + static std::unique_ptr CreateRawFileWriter( + absl::string_view filename, + int sampling_frequency_in_hz = 48000, + int num_channels = 2); - bool Playing() const override = 0; - bool Recording() const override = 0; - - // Blocks forever until the Recorder stops producing data. - virtual void WaitForRecordingEnd() = 0; + private: + TestAudioDeviceModule() = default; }; } // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_device/include/test_audio_device_unittest.cc b/third_party/libwebrtc/modules/audio_device/include/test_audio_device_unittest.cc index 2975b11325c4..7a122ca84b32 100644 --- a/third_party/libwebrtc/modules/audio_device/include/test_audio_device_unittest.cc +++ b/third_party/libwebrtc/modules/audio_device/include/test_audio_device_unittest.cc @@ -12,22 +12,30 @@ #include #include +#include +#include +#include "absl/types/optional.h" #include "api/array_view.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "common_audio/wav_file.h" #include "common_audio/wav_header.h" +#include "modules/audio_device/include/audio_device_defines.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/synchronization/mutex.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/testsupport/file_utils.h" +#include "test/time_controller/simulated_time_controller.h" namespace webrtc { - namespace { -void RunTest(const std::vector& input_samples, - const std::vector& expected_samples, - size_t samples_per_frame) { +void RunWavTest(const std::vector& input_samples, + const std::vector& expected_samples) { const ::testing::TestInfo* const test_info = ::testing::UnitTest::GetInstance()->current_test_info(); @@ -64,14 +72,13 @@ void RunTest(const std::vector& input_samples, remove(output_filename.c_str()); } -} // namespace TEST(BoundedWavFileWriterTest, NoSilence) { static const std::vector kInputSamples = { 75, 1234, 243, -1231, -22222, 0, 3, 88, 1222, -1213, -13222, -7, -3525, 5787, -25247, 8}; static const std::vector kExpectedSamples = kInputSamples; - RunTest(kInputSamples, 
kExpectedSamples, 8); + RunWavTest(kInputSamples, kExpectedSamples); } TEST(BoundedWavFileWriterTest, SomeStartSilence) { @@ -79,7 +86,7 @@ TEST(BoundedWavFileWriterTest, SomeStartSilence) { 0, 0, 0, 0, 3, 0, 0, 0, 0, 3, -13222, -7, -3525, 5787, -25247, 8}; static const std::vector kExpectedSamples(kInputSamples.begin() + 10, kInputSamples.end()); - RunTest(kInputSamples, kExpectedSamples, 8); + RunWavTest(kInputSamples, kExpectedSamples); } TEST(BoundedWavFileWriterTest, NegativeStartSilence) { @@ -87,7 +94,7 @@ TEST(BoundedWavFileWriterTest, NegativeStartSilence) { 0, -4, -6, 0, 3, 0, 0, 0, 0, 3, -13222, -7, -3525, 5787, -25247, 8}; static const std::vector kExpectedSamples(kInputSamples.begin() + 2, kInputSamples.end()); - RunTest(kInputSamples, kExpectedSamples, 8); + RunWavTest(kInputSamples, kExpectedSamples); } TEST(BoundedWavFileWriterTest, SomeEndSilence) { @@ -95,7 +102,7 @@ TEST(BoundedWavFileWriterTest, SomeEndSilence) { 75, 1234, 243, -1231, -22222, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0}; static const std::vector kExpectedSamples(kInputSamples.begin(), kInputSamples.end() - 9); - RunTest(kInputSamples, kExpectedSamples, 8); + RunWavTest(kInputSamples, kExpectedSamples); } TEST(BoundedWavFileWriterTest, DoubleEndSilence) { @@ -104,7 +111,7 @@ TEST(BoundedWavFileWriterTest, DoubleEndSilence) { 0, -1213, -13222, -7, -3525, 5787, 0, 0}; static const std::vector kExpectedSamples(kInputSamples.begin(), kInputSamples.end() - 2); - RunTest(kInputSamples, kExpectedSamples, 8); + RunWavTest(kInputSamples, kExpectedSamples); } TEST(BoundedWavFileWriterTest, DoubleSilence) { @@ -112,7 +119,7 @@ TEST(BoundedWavFileWriterTest, DoubleSilence) { -3525, 5787, 0, 0}; static const std::vector kExpectedSamples(kInputSamples.begin() + 1, kInputSamples.end() - 2); - RunTest(kInputSamples, kExpectedSamples, 8); + RunWavTest(kInputSamples, kExpectedSamples); } TEST(BoundedWavFileWriterTest, EndSilenceCutoff) { @@ -120,7 +127,7 @@ TEST(BoundedWavFileWriterTest, EndSilenceCutoff) { 75, 1234, 243, -1231, -22222, 0, 1, 0, 0, 0, 0}; static const std::vector kExpectedSamples(kInputSamples.begin(), kInputSamples.end() - 4); - RunTest(kInputSamples, kExpectedSamples, 8); + RunWavTest(kInputSamples, kExpectedSamples); } TEST(WavFileReaderTest, RepeatedTrueWithSingleFrameFileReadTwice) { @@ -138,7 +145,7 @@ TEST(WavFileReaderTest, RepeatedTrueWithSingleFrameFileReadTwice) { EXPECT_EQ(TestAudioDeviceModule::SamplesPerFrame(kSampleRate), kSamplesPerFrame); - // Create wav file to read. + // Create raw file to read. { std::unique_ptr writer = TestAudioDeviceModule::CreateWavFileWriter(output_filename, 800); @@ -163,6 +170,154 @@ TEST(WavFileReaderTest, RepeatedTrueWithSingleFrameFileReadTwice) { remove(output_filename.c_str()); } +void RunRawTestNoRepeat(const std::vector& input_samples, + const std::vector& expected_samples) { + const ::testing::TestInfo* const test_info = + ::testing::UnitTest::GetInstance()->current_test_info(); + + const std::string output_filename = test::OutputPath() + "RawFileTest_" + + test_info->name() + "_" + + std::to_string(std::rand()) + ".raw"; + + static const size_t kSamplesPerFrame = 8; + static const int kSampleRate = kSamplesPerFrame * 100; + EXPECT_EQ(TestAudioDeviceModule::SamplesPerFrame(kSampleRate), + kSamplesPerFrame); + + // Test through file name API. 
+ { + std::unique_ptr writer = + TestAudioDeviceModule::CreateRawFileWriter( + output_filename, /*sampling_frequency_in_hz=*/800); + + for (size_t i = 0; i < input_samples.size(); i += kSamplesPerFrame) { + EXPECT_TRUE(writer->Render(rtc::ArrayView( + &input_samples[i], + std::min(kSamplesPerFrame, input_samples.size() - i)))); + } + } + + { + std::unique_ptr reader = + TestAudioDeviceModule::CreateRawFileReader( + output_filename, /*sampling_frequency_in_hz=*/800, + /*num_channels=*/2, /*repeat=*/false); + rtc::BufferT buffer(expected_samples.size()); + rtc::BufferT expected_buffer(expected_samples.size()); + expected_buffer.SetData(expected_samples); + EXPECT_TRUE(reader->Capture(&buffer)); + EXPECT_EQ(expected_buffer, buffer); + EXPECT_FALSE(reader->Capture(&buffer)); + EXPECT_TRUE(buffer.empty()); + } + + remove(output_filename.c_str()); +} + +TEST(RawFileWriterTest, NoSilence) { + static const std::vector kInputSamples = { + 75, 1234, 243, -1231, -22222, 0, 3, 88, + 1222, -1213, -13222, -7, -3525, 5787, -25247, 8}; + static const std::vector kExpectedSamples = kInputSamples; + RunRawTestNoRepeat(kInputSamples, kExpectedSamples); +} + +TEST(RawFileWriterTest, SomeStartSilence) { + static const std::vector kInputSamples = { + 0, 0, 0, 0, 3, 0, 0, 0, 0, 3, -13222, -7, -3525, 5787, -25247, 8}; + static const std::vector kExpectedSamples(kInputSamples.begin() + 10, + kInputSamples.end()); + RunRawTestNoRepeat(kInputSamples, kExpectedSamples); +} + +TEST(RawFileWriterTest, NegativeStartSilence) { + static const std::vector kInputSamples = { + 0, -4, -6, 0, 3, 0, 0, 0, 0, 3, -13222, -7, -3525, 5787, -25247, 8}; + static const std::vector kExpectedSamples(kInputSamples.begin() + 2, + kInputSamples.end()); + RunRawTestNoRepeat(kInputSamples, kExpectedSamples); +} + +TEST(RawFileWriterTest, SomeEndSilence) { + static const std::vector kInputSamples = { + 75, 1234, 243, -1231, -22222, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0}; + static const std::vector kExpectedSamples(kInputSamples.begin(), + kInputSamples.end() - 9); + RunRawTestNoRepeat(kInputSamples, kExpectedSamples); +} + +TEST(RawFileWriterTest, DoubleEndSilence) { + static const std::vector kInputSamples = { + 75, 1234, 243, -1231, -22222, 0, 0, 0, + 0, -1213, -13222, -7, -3525, 5787, 0, 0}; + static const std::vector kExpectedSamples(kInputSamples.begin(), + kInputSamples.end() - 2); + RunRawTestNoRepeat(kInputSamples, kExpectedSamples); +} + +TEST(RawFileWriterTest, DoubleSilence) { + static const std::vector kInputSamples = {0, -1213, -13222, -7, + -3525, 5787, 0, 0}; + static const std::vector kExpectedSamples(kInputSamples.begin() + 1, + kInputSamples.end() - 2); + RunRawTestNoRepeat(kInputSamples, kExpectedSamples); +} + +TEST(RawFileWriterTest, EndSilenceCutoff) { + static const std::vector kInputSamples = { + 75, 1234, 243, -1231, -22222, 0, 1, 0, 0, 0, 0}; + static const std::vector kExpectedSamples(kInputSamples.begin(), + kInputSamples.end() - 4); + RunRawTestNoRepeat(kInputSamples, kExpectedSamples); +} + +TEST(RawFileWriterTest, Repeat) { + static const std::vector kInputSamples = { + 75, 1234, 243, -1231, -22222, 0, 3, 88, + 1222, -1213, -13222, -7, -3525, 5787, -25247, 8}; + static const rtc::BufferT kExpectedSamples(kInputSamples.data(), + kInputSamples.size()); + + const ::testing::TestInfo* const test_info = + ::testing::UnitTest::GetInstance()->current_test_info(); + + const std::string output_filename = test::OutputPath() + "RawFileTest_" + + test_info->name() + "_" + + std::to_string(std::rand()) + ".raw"; + + static const 
size_t kSamplesPerFrame = 8; + static const int kSampleRate = kSamplesPerFrame * 100; + EXPECT_EQ(TestAudioDeviceModule::SamplesPerFrame(kSampleRate), + kSamplesPerFrame); + + // Test through file name API. + { + std::unique_ptr writer = + TestAudioDeviceModule::CreateRawFileWriter( + output_filename, /*sampling_frequency_in_hz=*/800); + + for (size_t i = 0; i < kInputSamples.size(); i += kSamplesPerFrame) { + EXPECT_TRUE(writer->Render(rtc::ArrayView( + &kInputSamples[i], + std::min(kSamplesPerFrame, kInputSamples.size() - i)))); + } + } + + { + std::unique_ptr reader = + TestAudioDeviceModule::CreateRawFileReader( + output_filename, /*sampling_frequency_in_hz=*/800, + /*num_channels=*/2, /*repeat=*/true); + rtc::BufferT buffer(kExpectedSamples.size()); + EXPECT_TRUE(reader->Capture(&buffer)); + EXPECT_EQ(kExpectedSamples, buffer); + EXPECT_TRUE(reader->Capture(&buffer)); + EXPECT_EQ(kExpectedSamples, buffer); + } + + remove(output_filename.c_str()); +} + TEST(PulsedNoiseCapturerTest, SetMaxAmplitude) { const int16_t kAmplitude = 50; std::unique_ptr capturer = @@ -189,4 +344,185 @@ TEST(PulsedNoiseCapturerTest, SetMaxAmplitude) { EXPECT_GT(max_sample, kAmplitude); } +using ::testing::ElementsAre; + +constexpr Timestamp kStartTime = Timestamp::Millis(10000); + +class TestAudioTransport : public AudioTransport { + public: + enum class Mode { kPlaying, kRecording }; + + explicit TestAudioTransport(Mode mode) : mode_(mode) {} + ~TestAudioTransport() override = default; + + int32_t RecordedDataIsAvailable( + const void* audioSamples, + size_t samples_per_channel, + size_t bytes_per_sample, + size_t number_of_channels, + uint32_t samples_per_second, + uint32_t total_delay_ms, + int32_t clock_drift, + uint32_t current_mic_level, + bool key_pressed, + uint32_t& new_mic_level, + absl::optional estimated_capture_time_ns) override { + new_mic_level = 1; + + if (mode_ != Mode::kRecording) { + EXPECT_TRUE(false) + << "NeedMorePlayData mustn't be called when mode isn't kRecording"; + return -1; + } + + MutexLock lock(&mutex_); + samples_per_channel_.push_back(samples_per_channel); + number_of_channels_.push_back(number_of_channels); + bytes_per_sample_.push_back(bytes_per_sample); + samples_per_second_.push_back(samples_per_second); + return 0; + } + + int32_t NeedMorePlayData(size_t samples_per_channel, + size_t bytes_per_sample, + size_t number_of_channels, + uint32_t samples_per_second, + void* audio_samples, + size_t& samples_out, + int64_t* elapsed_time_ms, + int64_t* ntp_time_ms) override { + const size_t num_bytes = samples_per_channel * number_of_channels; + std::memset(audio_samples, 1, num_bytes); + samples_out = samples_per_channel * number_of_channels; + *elapsed_time_ms = 0; + *ntp_time_ms = 0; + + if (mode_ != Mode::kPlaying) { + EXPECT_TRUE(false) + << "NeedMorePlayData mustn't be called when mode isn't kPlaying"; + return -1; + } + + MutexLock lock(&mutex_); + samples_per_channel_.push_back(samples_per_channel); + number_of_channels_.push_back(number_of_channels); + bytes_per_sample_.push_back(bytes_per_sample); + samples_per_second_.push_back(samples_per_second); + return 0; + } + + int32_t RecordedDataIsAvailable(const void* audio_samples, + size_t samples_per_channel, + size_t bytes_per_sample, + size_t number_of_channels, + uint32_t samples_per_second, + uint32_t total_delay_ms, + int32_t clockDrift, + uint32_t current_mic_level, + bool key_pressed, + uint32_t& new_mic_level) override { + RTC_CHECK(false) << "This methods should be never executed"; + } + + void PullRenderData(int 
bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames, + void* audio_data, + int64_t* elapsed_time_ms, + int64_t* ntp_time_ms) override { + RTC_CHECK(false) << "This methods should be never executed"; + } + + std::vector samples_per_channel() const { + MutexLock lock(&mutex_); + return samples_per_channel_; + } + std::vector number_of_channels() const { + MutexLock lock(&mutex_); + return number_of_channels_; + } + std::vector bytes_per_sample() const { + MutexLock lock(&mutex_); + return bytes_per_sample_; + } + std::vector samples_per_second() const { + MutexLock lock(&mutex_); + return samples_per_second_; + } + + private: + const Mode mode_; + + mutable Mutex mutex_; + std::vector samples_per_channel_ RTC_GUARDED_BY(mutex_); + std::vector number_of_channels_ RTC_GUARDED_BY(mutex_); + std::vector bytes_per_sample_ RTC_GUARDED_BY(mutex_); + std::vector samples_per_second_ RTC_GUARDED_BY(mutex_); +}; + +TEST(TestAudioDeviceModuleTest, CreatedADMCanRecord) { + GlobalSimulatedTimeController time_controller(kStartTime); + TestAudioTransport audio_transport(TestAudioTransport::Mode::kRecording); + std::unique_ptr capturer = + TestAudioDeviceModule::CreatePulsedNoiseCapturer( + /*max_amplitude=*/1000, + /*sampling_frequency_in_hz=*/48000, /*num_channels=*/2); + + rtc::scoped_refptr adm = TestAudioDeviceModule::Create( + time_controller.GetTaskQueueFactory(), std::move(capturer), + /*renderer=*/nullptr); + + ASSERT_EQ(adm->RegisterAudioCallback(&audio_transport), 0); + ASSERT_EQ(adm->Init(), 0); + + EXPECT_FALSE(adm->RecordingIsInitialized()); + ASSERT_EQ(adm->InitRecording(), 0); + EXPECT_TRUE(adm->RecordingIsInitialized()); + ASSERT_EQ(adm->StartRecording(), 0); + time_controller.AdvanceTime(TimeDelta::Millis(10)); + ASSERT_TRUE(adm->Recording()); + time_controller.AdvanceTime(TimeDelta::Millis(10)); + ASSERT_EQ(adm->StopRecording(), 0); + + EXPECT_THAT(audio_transport.samples_per_channel(), + ElementsAre(480, 480, 480)); + EXPECT_THAT(audio_transport.number_of_channels(), ElementsAre(2, 2, 2)); + EXPECT_THAT(audio_transport.bytes_per_sample(), ElementsAre(4, 4, 4)); + EXPECT_THAT(audio_transport.samples_per_second(), + ElementsAre(48000, 48000, 48000)); +} + +TEST(TestAudioDeviceModuleTest, CreatedADMCanPlay) { + GlobalSimulatedTimeController time_controller(kStartTime); + TestAudioTransport audio_transport(TestAudioTransport::Mode::kPlaying); + std::unique_ptr renderer = + TestAudioDeviceModule::CreateDiscardRenderer( + /*sampling_frequency_in_hz=*/48000, /*num_channels=*/2); + + rtc::scoped_refptr adm = + TestAudioDeviceModule::Create(time_controller.GetTaskQueueFactory(), + /*capturer=*/nullptr, std::move(renderer)); + + ASSERT_EQ(adm->RegisterAudioCallback(&audio_transport), 0); + ASSERT_EQ(adm->Init(), 0); + + EXPECT_FALSE(adm->PlayoutIsInitialized()); + ASSERT_EQ(adm->InitPlayout(), 0); + EXPECT_TRUE(adm->PlayoutIsInitialized()); + ASSERT_EQ(adm->StartPlayout(), 0); + time_controller.AdvanceTime(TimeDelta::Millis(10)); + ASSERT_TRUE(adm->Playing()); + time_controller.AdvanceTime(TimeDelta::Millis(10)); + ASSERT_EQ(adm->StopPlayout(), 0); + + EXPECT_THAT(audio_transport.samples_per_channel(), + ElementsAre(480, 480, 480)); + EXPECT_THAT(audio_transport.number_of_channels(), ElementsAre(2, 2, 2)); + EXPECT_THAT(audio_transport.bytes_per_sample(), ElementsAre(4, 4, 4)); + EXPECT_THAT(audio_transport.samples_per_second(), + ElementsAre(48000, 48000, 48000)); +} + +} // namespace } // namespace webrtc diff --git 
a/third_party/libwebrtc/modules/audio_device/test_audio_device_impl.cc b/third_party/libwebrtc/modules/audio_device/test_audio_device_impl.cc new file mode 100644 index 000000000000..627e68b36f4d --- /dev/null +++ b/third_party/libwebrtc/modules/audio_device/test_audio_device_impl.cc @@ -0,0 +1,211 @@ +/* + * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "modules/audio_device/test_audio_device_impl.h" + +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/units/time_delta.h" +#include "modules/audio_device/include/test_audio_device.h" +#include "rtc_base/checks.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/task_queue.h" +#include "rtc_base/task_utils/repeating_task.h" + +namespace webrtc { +namespace { + +constexpr int kFrameLengthUs = 10000; + +} + +TestAudioDevice::TestAudioDevice( + TaskQueueFactory* task_queue_factory, + std::unique_ptr capturer, + std::unique_ptr renderer, + float speed) + : task_queue_factory_(task_queue_factory), + capturer_(std::move(capturer)), + renderer_(std::move(renderer)), + process_interval_us_(kFrameLengthUs / speed), + audio_buffer_(nullptr), + rendering_(false), + capturing_(false) { + auto good_sample_rate = [](int sr) { + return sr == 8000 || sr == 16000 || sr == 32000 || sr == 44100 || + sr == 48000; + }; + + if (renderer_) { + const int sample_rate = renderer_->SamplingFrequency(); + playout_buffer_.resize(TestAudioDeviceModule::SamplesPerFrame(sample_rate) * + renderer_->NumChannels(), + 0); + RTC_CHECK(good_sample_rate(sample_rate)); + } + if (capturer_) { + RTC_CHECK(good_sample_rate(capturer_->SamplingFrequency())); + } +} + +AudioDeviceGeneric::InitStatus TestAudioDevice::Init() { + task_queue_ = + std::make_unique(task_queue_factory_->CreateTaskQueue( + "TestAudioDeviceModuleImpl", TaskQueueFactory::Priority::NORMAL)); + + RepeatingTaskHandle::Start(task_queue_->Get(), [this]() { + ProcessAudio(); + return TimeDelta::Micros(process_interval_us_); + }); + return InitStatus::OK; +} + +int32_t TestAudioDevice::PlayoutIsAvailable(bool& available) { + MutexLock lock(&lock_); + available = renderer_ != nullptr; + return 0; +} + +int32_t TestAudioDevice::InitPlayout() { + MutexLock lock(&lock_); + + if (rendering_) { + return -1; + } + + if (audio_buffer_ != nullptr && renderer_ != nullptr) { + // Update webrtc audio buffer with the selected parameters + audio_buffer_->SetPlayoutSampleRate(renderer_->SamplingFrequency()); + audio_buffer_->SetPlayoutChannels(renderer_->NumChannels()); + } + rendering_initialized_ = true; + return 0; +} + +bool TestAudioDevice::PlayoutIsInitialized() const { + MutexLock lock(&lock_); + return rendering_initialized_; +} + +int32_t TestAudioDevice::StartPlayout() { + MutexLock lock(&lock_); + RTC_CHECK(renderer_); + rendering_ = true; + return 0; +} + +int32_t TestAudioDevice::StopPlayout() { + MutexLock lock(&lock_); + rendering_ = false; + return 0; +} + +int32_t TestAudioDevice::RecordingIsAvailable(bool& available) { + MutexLock lock(&lock_); + available = capturer_ != nullptr; + return 0; +} + +int32_t TestAudioDevice::InitRecording() { + 
MutexLock lock(&lock_); + + if (capturing_) { + return -1; + } + + if (audio_buffer_ != nullptr && capturer_ != nullptr) { + // Update webrtc audio buffer with the selected parameters + audio_buffer_->SetRecordingSampleRate(capturer_->SamplingFrequency()); + audio_buffer_->SetRecordingChannels(capturer_->NumChannels()); + } + capturing_initialized_ = true; + return 0; +} + +bool TestAudioDevice::RecordingIsInitialized() const { + MutexLock lock(&lock_); + return capturing_initialized_; +} + +int32_t TestAudioDevice::StartRecording() { + MutexLock lock(&lock_); + capturing_ = true; + return 0; +} + +int32_t TestAudioDevice::StopRecording() { + MutexLock lock(&lock_); + capturing_ = false; + return 0; +} + +bool TestAudioDevice::Playing() const { + MutexLock lock(&lock_); + return rendering_; +} + +bool TestAudioDevice::Recording() const { + MutexLock lock(&lock_); + return capturing_; +} + +void TestAudioDevice::ProcessAudio() { + MutexLock lock(&lock_); + if (audio_buffer_ == nullptr) { + return; + } + if (capturing_ && capturer_ != nullptr) { + // Capture 10ms of audio. 2 bytes per sample. + const bool keep_capturing = capturer_->Capture(&recording_buffer_); + if (recording_buffer_.size() > 0) { + audio_buffer_->SetRecordedBuffer( + recording_buffer_.data(), + recording_buffer_.size() / capturer_->NumChannels(), + absl::make_optional(rtc::TimeNanos())); + audio_buffer_->DeliverRecordedData(); + } + if (!keep_capturing) { + capturing_ = false; + } + } + if (rendering_) { + const int sampling_frequency = renderer_->SamplingFrequency(); + int32_t samples_per_channel = audio_buffer_->RequestPlayoutData( + TestAudioDeviceModule::SamplesPerFrame(sampling_frequency)); + audio_buffer_->GetPlayoutData(playout_buffer_.data()); + size_t samples_out = samples_per_channel * renderer_->NumChannels(); + RTC_CHECK_LE(samples_out, playout_buffer_.size()); + const bool keep_rendering = renderer_->Render( + rtc::ArrayView(playout_buffer_.data(), samples_out)); + if (!keep_rendering) { + rendering_ = false; + } + } +} + +void TestAudioDevice::AttachAudioBuffer(AudioDeviceBuffer* audio_buffer) { + MutexLock lock(&lock_); + RTC_DCHECK(audio_buffer || audio_buffer_); + audio_buffer_ = audio_buffer; + + if (renderer_ != nullptr) { + audio_buffer_->SetPlayoutSampleRate(renderer_->SamplingFrequency()); + audio_buffer_->SetPlayoutChannels(renderer_->NumChannels()); + } + if (capturer_ != nullptr) { + audio_buffer_->SetRecordingSampleRate(capturer_->SamplingFrequency()); + audio_buffer_->SetRecordingChannels(capturer_->NumChannels()); + } +} + +} // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_device/test_audio_device_impl.h b/third_party/libwebrtc/modules/audio_device/test_audio_device_impl.h new file mode 100644 index 000000000000..36192b7f7f67 --- /dev/null +++ b/third_party/libwebrtc/modules/audio_device/test_audio_device_impl.h @@ -0,0 +1,198 @@ +/* + * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_AUDIO_DEVICE_TEST_AUDIO_DEVICE_IMPL_H_ +#define MODULES_AUDIO_DEVICE_TEST_AUDIO_DEVICE_IMPL_H_ + +#include +#include + +#include "api/task_queue/task_queue_factory.h" +#include "modules/audio_device/audio_device_buffer.h" +#include "modules/audio_device/audio_device_generic.h" +#include "modules/audio_device/include/audio_device.h" +#include "modules/audio_device/include/audio_device_defines.h" +#include "modules/audio_device/include/test_audio_device.h" +#include "rtc_base/buffer.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/task_queue.h" + +namespace webrtc { + +class TestAudioDevice : public AudioDeviceGeneric { + public: + // Creates a new TestAudioDevice. When capturing or playing, 10 ms audio + // frames will be processed every 10ms / `speed`. + // `capturer` is an object that produces audio data. Can be nullptr if this + // device is never used for recording. + // `renderer` is an object that receives audio data that would have been + // played out. Can be nullptr if this device is never used for playing. + TestAudioDevice(TaskQueueFactory* task_queue_factory, + std::unique_ptr capturer, + std::unique_ptr renderer, + float speed = 1); + TestAudioDevice(const TestAudioDevice&) = delete; + TestAudioDevice& operator=(const TestAudioDevice&) = delete; + ~TestAudioDevice() override = default; + + // Retrieve the currently utilized audio layer + int32_t ActiveAudioLayer( + AudioDeviceModule::AudioLayer& audioLayer) const override { + return 0; + } + + // Main initializaton and termination + InitStatus Init() override; + int32_t Terminate() override { return 0; } + bool Initialized() const override { return true; } + + // Device enumeration + int16_t PlayoutDevices() override { return 0; } + int16_t RecordingDevices() override { return 0; } + int32_t PlayoutDeviceName(uint16_t index, + char name[kAdmMaxDeviceNameSize], + char guid[kAdmMaxGuidSize]) override { + return 0; + } + int32_t RecordingDeviceName(uint16_t index, + char name[kAdmMaxDeviceNameSize], + char guid[kAdmMaxGuidSize]) override { + return 0; + } + + // Device selection + int32_t SetPlayoutDevice(uint16_t index) override { return 0; } + int32_t SetPlayoutDevice( + AudioDeviceModule::WindowsDeviceType device) override { + return 0; + } + int32_t SetRecordingDevice(uint16_t index) override { return 0; } + int32_t SetRecordingDevice( + AudioDeviceModule::WindowsDeviceType device) override { + return 0; + } + + // Audio transport initialization + int32_t PlayoutIsAvailable(bool& available) override; + int32_t InitPlayout() override; + bool PlayoutIsInitialized() const override; + int32_t RecordingIsAvailable(bool& available) override; + int32_t InitRecording() override; + bool RecordingIsInitialized() const override; + + // Audio transport control + int32_t StartPlayout() override; + int32_t StopPlayout() override; + bool Playing() const override; + int32_t StartRecording() override; + int32_t StopRecording() override; + bool Recording() const override; + + // Audio mixer initialization + int32_t InitSpeaker() override { return 0; } + bool SpeakerIsInitialized() const override { return true; } + int32_t InitMicrophone() override { return 0; } + bool MicrophoneIsInitialized() const override { return true; } + + // Speaker volume controls + int32_t SpeakerVolumeIsAvailable(bool& available) override { return 0; } + int32_t SetSpeakerVolume(uint32_t volume) override { return 0; } + int32_t SpeakerVolume(uint32_t& volume) const override { return 0; } + int32_t MaxSpeakerVolume(uint32_t& 
maxVolume) const override { return 0; } + int32_t MinSpeakerVolume(uint32_t& minVolume) const override { return 0; } + + // Microphone volume controls + int32_t MicrophoneVolumeIsAvailable(bool& available) override { return 0; } + int32_t SetMicrophoneVolume(uint32_t volume) override { return 0; } + int32_t MicrophoneVolume(uint32_t& volume) const override { return 0; } + int32_t MaxMicrophoneVolume(uint32_t& maxVolume) const override { return 0; } + int32_t MinMicrophoneVolume(uint32_t& minVolume) const override { return 0; } + + // Speaker mute control + int32_t SpeakerMuteIsAvailable(bool& available) override { return 0; } + int32_t SetSpeakerMute(bool enable) override { return 0; } + int32_t SpeakerMute(bool& enabled) const override { return 0; } + + // Microphone mute control + int32_t MicrophoneMuteIsAvailable(bool& available) override { return 0; } + int32_t SetMicrophoneMute(bool enable) override { return 0; } + int32_t MicrophoneMute(bool& enabled) const override { return 0; } + + // Stereo support + int32_t StereoPlayoutIsAvailable(bool& available) override { + available = false; + return 0; + } + int32_t SetStereoPlayout(bool enable) override { return 0; } + int32_t StereoPlayout(bool& enabled) const override { return 0; } + int32_t StereoRecordingIsAvailable(bool& available) override { + available = false; + return 0; + } + int32_t SetStereoRecording(bool enable) override { return 0; } + int32_t StereoRecording(bool& enabled) const override { return 0; } + + // Delay information and control + int32_t PlayoutDelay(uint16_t& delayMS) const override { + delayMS = 0; + return 0; + } + + // Android only + bool BuiltInAECIsAvailable() const override { return false; } + bool BuiltInAGCIsAvailable() const override { return false; } + bool BuiltInNSIsAvailable() const override { return false; } + + // Windows Core Audio and Android only. + int32_t EnableBuiltInAEC(bool enable) override { return -1; } + int32_t EnableBuiltInAGC(bool enable) override { return -1; } + int32_t EnableBuiltInNS(bool enable) override { return -1; } + + // Play underrun count. + int32_t GetPlayoutUnderrunCount() const override { return -1; } + +// iOS only. +// TODO(henrika): add Android support. 
+#if defined(WEBRTC_IOS) + int GetPlayoutAudioParameters(AudioParameters* params) const override { + return -1; + } + int GetRecordAudioParameters(AudioParameters* params) const override { + return -1; + } +#endif // WEBRTC_IOS + + void AttachAudioBuffer(AudioDeviceBuffer* audio_buffer) override; + + private: + void ProcessAudio(); + + TaskQueueFactory* const task_queue_factory_; + const std::unique_ptr capturer_ + RTC_GUARDED_BY(lock_); + const std::unique_ptr renderer_ + RTC_GUARDED_BY(lock_); + const int64_t process_interval_us_; + + mutable Mutex lock_; + AudioDeviceBuffer* audio_buffer_ RTC_GUARDED_BY(lock_) = nullptr; + bool rendering_ RTC_GUARDED_BY(lock_) = false; + bool capturing_ RTC_GUARDED_BY(lock_) = false; + bool rendering_initialized_ RTC_GUARDED_BY(lock_) = false; + bool capturing_initialized_ RTC_GUARDED_BY(lock_) = false; + + std::vector playout_buffer_ RTC_GUARDED_BY(lock_); + rtc::BufferT recording_buffer_ RTC_GUARDED_BY(lock_); + std::unique_ptr task_queue_; +}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_DEVICE_TEST_AUDIO_DEVICE_IMPL_H_ diff --git a/third_party/libwebrtc/modules/audio_device/test_audio_device_impl_test.cc b/third_party/libwebrtc/modules/audio_device/test_audio_device_impl_test.cc new file mode 100644 index 000000000000..e81bb2f807c9 --- /dev/null +++ b/third_party/libwebrtc/modules/audio_device/test_audio_device_impl_test.cc @@ -0,0 +1,275 @@ +/* + * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/audio_device/test_audio_device_impl.h" + +#include +#include + +#include "absl/types/optional.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "modules/audio_device/audio_device_buffer.h" +#include "modules/audio_device/audio_device_generic.h" +#include "modules/audio_device/include/audio_device.h" +#include "modules/audio_device/include/audio_device_defines.h" +#include "modules/audio_device/include/test_audio_device.h" +#include "rtc_base/checks.h" +#include "rtc_base/synchronization/mutex.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/time_controller/simulated_time_controller.h" + +namespace webrtc { +namespace { + +using ::testing::ElementsAre; + +constexpr Timestamp kStartTime = Timestamp::Millis(10000); + +class TestAudioTransport : public AudioTransport { + public: + enum class Mode { kPlaying, kRecording }; + + explicit TestAudioTransport(Mode mode) : mode_(mode) {} + ~TestAudioTransport() override = default; + + int32_t RecordedDataIsAvailable( + const void* audioSamples, + size_t samples_per_channel, + size_t bytes_per_sample, + size_t number_of_channels, + uint32_t samples_per_second, + uint32_t total_delay_ms, + int32_t clock_drift, + uint32_t current_mic_level, + bool key_pressed, + uint32_t& new_mic_level, + absl::optional estimated_capture_time_ns) override { + new_mic_level = 1; + + if (mode_ != Mode::kRecording) { + EXPECT_TRUE(false) << "RecordedDataIsAvailable mustn't be called when " + "mode isn't kRecording"; + return -1; + } + + MutexLock lock(&mutex_); + samples_per_channel_.push_back(samples_per_channel); + number_of_channels_.push_back(number_of_channels); + bytes_per_sample_.push_back(bytes_per_sample); + samples_per_second_.push_back(samples_per_second); + return 0; + } + + int32_t NeedMorePlayData(size_t samples_per_channel, + size_t bytes_per_sample, + size_t number_of_channels, + uint32_t samples_per_second, + void* audio_samples, + size_t& samples_out, + int64_t* elapsed_time_ms, + int64_t* ntp_time_ms) override { + const size_t num_bytes = samples_per_channel * number_of_channels; + std::memset(audio_samples, 1, num_bytes); + samples_out = samples_per_channel * number_of_channels; + *elapsed_time_ms = 0; + *ntp_time_ms = 0; + + if (mode_ != Mode::kPlaying) { + EXPECT_TRUE(false) + << "NeedMorePlayData mustn't be called when mode isn't kPlaying"; + return -1; + } + + MutexLock lock(&mutex_); + samples_per_channel_.push_back(samples_per_channel); + number_of_channels_.push_back(number_of_channels); + bytes_per_sample_.push_back(bytes_per_sample); + samples_per_second_.push_back(samples_per_second); + return 0; + } + + int32_t RecordedDataIsAvailable(const void* audio_samples, + size_t samples_per_channel, + size_t bytes_per_sample, + size_t number_of_channels, + uint32_t samples_per_second, + uint32_t total_delay_ms, + int32_t clockDrift, + uint32_t current_mic_level, + bool key_pressed, + uint32_t& new_mic_level) override { + RTC_CHECK(false) << "This methods should be never executed"; + } + + void PullRenderData(int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames, + void* audio_data, + int64_t* elapsed_time_ms, + int64_t* ntp_time_ms) override { + RTC_CHECK(false) << "This methods should be never executed"; + } + + std::vector samples_per_channel() const { + MutexLock lock(&mutex_); + return samples_per_channel_; + } + std::vector number_of_channels() const { + MutexLock lock(&mutex_); + 
return number_of_channels_; + } + std::vector bytes_per_sample() const { + MutexLock lock(&mutex_); + return bytes_per_sample_; + } + std::vector samples_per_second() const { + MutexLock lock(&mutex_); + return samples_per_second_; + } + + private: + const Mode mode_; + + mutable Mutex mutex_; + std::vector samples_per_channel_ RTC_GUARDED_BY(mutex_); + std::vector number_of_channels_ RTC_GUARDED_BY(mutex_); + std::vector bytes_per_sample_ RTC_GUARDED_BY(mutex_); + std::vector samples_per_second_ RTC_GUARDED_BY(mutex_); +}; + +TEST(TestAudioDeviceTest, EnablingRecordingProducesAudio) { + GlobalSimulatedTimeController time_controller(kStartTime); + TestAudioTransport audio_transport(TestAudioTransport::Mode::kRecording); + AudioDeviceBuffer audio_buffer(time_controller.GetTaskQueueFactory()); + ASSERT_EQ(audio_buffer.RegisterAudioCallback(&audio_transport), 0); + std::unique_ptr capturer = + TestAudioDeviceModule::CreatePulsedNoiseCapturer( + /*max_amplitude=*/1000, + /*sampling_frequency_in_hz=*/48000, /*num_channels=*/2); + + TestAudioDevice audio_device(time_controller.GetTaskQueueFactory(), + std::move(capturer), + /*renderer=*/nullptr); + ASSERT_EQ(audio_device.Init(), AudioDeviceGeneric::InitStatus::OK); + audio_device.AttachAudioBuffer(&audio_buffer); + + EXPECT_FALSE(audio_device.RecordingIsInitialized()); + ASSERT_EQ(audio_device.InitRecording(), 0); + EXPECT_TRUE(audio_device.RecordingIsInitialized()); + audio_buffer.StartRecording(); + ASSERT_EQ(audio_device.StartRecording(), 0); + time_controller.AdvanceTime(TimeDelta::Millis(10)); + ASSERT_TRUE(audio_device.Recording()); + time_controller.AdvanceTime(TimeDelta::Millis(10)); + ASSERT_EQ(audio_device.StopRecording(), 0); + audio_buffer.StopRecording(); + + EXPECT_THAT(audio_transport.samples_per_channel(), + ElementsAre(480, 480, 480)); + EXPECT_THAT(audio_transport.number_of_channels(), ElementsAre(2, 2, 2)); + EXPECT_THAT(audio_transport.bytes_per_sample(), ElementsAre(4, 4, 4)); + EXPECT_THAT(audio_transport.samples_per_second(), + ElementsAre(48000, 48000, 48000)); +} + +TEST(TestAudioDeviceTest, RecordingIsAvailableWhenCapturerIsSet) { + GlobalSimulatedTimeController time_controller(kStartTime); + std::unique_ptr capturer = + TestAudioDeviceModule::CreatePulsedNoiseCapturer( + /*max_amplitude=*/1000, + /*sampling_frequency_in_hz=*/48000, /*num_channels=*/2); + + TestAudioDevice audio_device(time_controller.GetTaskQueueFactory(), + std::move(capturer), + /*renderer=*/nullptr); + ASSERT_EQ(audio_device.Init(), AudioDeviceGeneric::InitStatus::OK); + + bool available; + EXPECT_EQ(audio_device.RecordingIsAvailable(available), 0); + EXPECT_TRUE(available); +} + +TEST(TestAudioDeviceTest, RecordingIsNotAvailableWhenCapturerIsNotSet) { + GlobalSimulatedTimeController time_controller(kStartTime); + TestAudioDevice audio_device(time_controller.GetTaskQueueFactory(), + /*capturer=*/nullptr, + /*renderer=*/nullptr); + ASSERT_EQ(audio_device.Init(), AudioDeviceGeneric::InitStatus::OK); + + bool available; + EXPECT_EQ(audio_device.RecordingIsAvailable(available), 0); + EXPECT_FALSE(available); +} + +TEST(TestAudioDeviceTest, EnablingPlayoutProducesAudio) { + GlobalSimulatedTimeController time_controller(kStartTime); + TestAudioTransport audio_transport(TestAudioTransport::Mode::kPlaying); + AudioDeviceBuffer audio_buffer(time_controller.GetTaskQueueFactory()); + ASSERT_EQ(audio_buffer.RegisterAudioCallback(&audio_transport), 0); + std::unique_ptr renderer = + TestAudioDeviceModule::CreateDiscardRenderer( + 
/*sampling_frequency_in_hz=*/48000, /*num_channels=*/2); + + TestAudioDevice audio_device(time_controller.GetTaskQueueFactory(), + /*capturer=*/nullptr, std::move(renderer)); + ASSERT_EQ(audio_device.Init(), AudioDeviceGeneric::InitStatus::OK); + audio_device.AttachAudioBuffer(&audio_buffer); + + EXPECT_FALSE(audio_device.PlayoutIsInitialized()); + ASSERT_EQ(audio_device.InitPlayout(), 0); + EXPECT_TRUE(audio_device.PlayoutIsInitialized()); + audio_buffer.StartPlayout(); + ASSERT_EQ(audio_device.StartPlayout(), 0); + time_controller.AdvanceTime(TimeDelta::Millis(10)); + ASSERT_TRUE(audio_device.Playing()); + time_controller.AdvanceTime(TimeDelta::Millis(10)); + ASSERT_EQ(audio_device.StopPlayout(), 0); + audio_buffer.StopPlayout(); + + EXPECT_THAT(audio_transport.samples_per_channel(), + ElementsAre(480, 480, 480)); + EXPECT_THAT(audio_transport.number_of_channels(), ElementsAre(2, 2, 2)); + EXPECT_THAT(audio_transport.bytes_per_sample(), ElementsAre(4, 4, 4)); + EXPECT_THAT(audio_transport.samples_per_second(), + ElementsAre(48000, 48000, 48000)); +} + +TEST(TestAudioDeviceTest, PlayoutIsAvailableWhenRendererIsSet) { + GlobalSimulatedTimeController time_controller(kStartTime); + std::unique_ptr renderer = + TestAudioDeviceModule::CreateDiscardRenderer( + /*sampling_frequency_in_hz=*/48000, /*num_channels=*/2); + + TestAudioDevice audio_device(time_controller.GetTaskQueueFactory(), + /*capturer=*/nullptr, std::move(renderer)); + ASSERT_EQ(audio_device.Init(), AudioDeviceGeneric::InitStatus::OK); + + bool available; + EXPECT_EQ(audio_device.PlayoutIsAvailable(available), 0); + EXPECT_TRUE(available); +} + +TEST(TestAudioDeviceTest, PlayoutIsNotAvailableWhenRendererIsNotSet) { + GlobalSimulatedTimeController time_controller(kStartTime); + TestAudioDevice audio_device(time_controller.GetTaskQueueFactory(), + /*capturer=*/nullptr, + /*renderer=*/nullptr); + ASSERT_EQ(audio_device.Init(), AudioDeviceGeneric::InitStatus::OK); + + bool available; + EXPECT_EQ(audio_device.PlayoutIsAvailable(available), 0); + EXPECT_FALSE(available); +} + +} // namespace +} // namespace webrtc diff --git a/third_party/libwebrtc/modules/audio_mixer/audio_mixer_impl.cc b/third_party/libwebrtc/modules/audio_mixer/audio_mixer_impl.cc index a1247d733801..faa2b1e1eeb3 100644 --- a/third_party/libwebrtc/modules/audio_mixer/audio_mixer_impl.cc +++ b/third_party/libwebrtc/modules/audio_mixer/audio_mixer_impl.cc @@ -27,11 +27,8 @@ namespace webrtc { struct AudioMixerImpl::SourceStatus { - SourceStatus(Source* audio_source, bool is_mixed, float gain) - : audio_source(audio_source), is_mixed(is_mixed), gain(gain) {} + explicit SourceStatus(Source* audio_source) : audio_source(audio_source) {} Source* audio_source = nullptr; - bool is_mixed = false; - float gain = 0.0f; // A frame that will be passed to audio_source->GetAudioFrameWithInfo. AudioFrame audio_frame; @@ -39,74 +36,6 @@ struct AudioMixerImpl::SourceStatus { namespace { -class SourceFrame { - public: - // Default constructor required by call to `vector::resize()` below. - SourceFrame() = default; - - SourceFrame(AudioMixerImpl::SourceStatus* source_status, - AudioFrame* audio_frame, - bool muted) - : SourceFrame(source_status, - audio_frame, - muted, - muted ? 
0u : AudioMixerCalculateEnergy(*audio_frame)) {} - - SourceFrame(AudioMixerImpl::SourceStatus* source_status, - AudioFrame* audio_frame, - bool muted, - uint32_t energy) - : source_status_(source_status), - audio_frame_(audio_frame), - muted_(muted), - energy_(energy) { - RTC_DCHECK(source_status); - RTC_DCHECK(audio_frame_); - } - - AudioMixerImpl::SourceStatus* source_status() { return source_status_; } - const AudioFrame* audio_frame() const { return audio_frame_; } - AudioFrame* mutable_audio_frame() { return audio_frame_; } - bool muted() const { return muted_; } - uint32_t energy() const { return energy_; } - - private: - // The below values are never changed directly, hence only accessors are - // offered. The values can change though via implicit assignment when sorting - // vectors. Pointer values will be nullptr when default constructed as a - // result of calling `vector::resize()`. - AudioMixerImpl::SourceStatus* source_status_ = nullptr; - AudioFrame* audio_frame_ = nullptr; - bool muted_ = true; - uint32_t energy_ = 0u; -}; - -// ShouldMixBefore(a, b) is used to select mixer sources. -// Returns true if `a` is preferred over `b` as a source to be mixed. -bool ShouldMixBefore(const SourceFrame& a, const SourceFrame& b) { - if (a.muted() != b.muted()) { - return b.muted(); - } - - const auto a_activity = a.audio_frame()->vad_activity_; - const auto b_activity = b.audio_frame()->vad_activity_; - - if (a_activity != b_activity) { - return a_activity == AudioFrame::kVadActive; - } - - return a.energy() > b.energy(); -} - -void RampAndUpdateGain(rtc::ArrayView mixed_sources_and_frames) { - for (auto& source_frame : mixed_sources_and_frames) { - float target_gain = source_frame.source_status()->is_mixed ? 1.0f : 0.0f; - Ramp(source_frame.source_status()->gain, target_gain, - source_frame.mutable_audio_frame()); - source_frame.source_status()->gain = target_gain; - } -} - std::vector>::const_iterator FindSourceInList( AudioMixerImpl::Source const* audio_source, @@ -123,46 +52,34 @@ FindSourceInList( struct AudioMixerImpl::HelperContainers { void resize(size_t size) { audio_to_mix.resize(size); - audio_source_mixing_data_list.resize(size); - ramp_list.resize(size); preferred_rates.resize(size); } std::vector audio_to_mix; - std::vector audio_source_mixing_data_list; - std::vector ramp_list; std::vector preferred_rates; }; AudioMixerImpl::AudioMixerImpl( std::unique_ptr output_rate_calculator, - bool use_limiter, - int max_sources_to_mix) - : max_sources_to_mix_(max_sources_to_mix), - output_rate_calculator_(std::move(output_rate_calculator)), + bool use_limiter) + : output_rate_calculator_(std::move(output_rate_calculator)), audio_source_list_(), helper_containers_(std::make_unique()), - frame_combiner_(use_limiter) { - RTC_CHECK_GE(max_sources_to_mix, 1) << "At least one source must be mixed"; - audio_source_list_.reserve(max_sources_to_mix); - helper_containers_->resize(max_sources_to_mix); -} + frame_combiner_(use_limiter) {} AudioMixerImpl::~AudioMixerImpl() {} -rtc::scoped_refptr AudioMixerImpl::Create( - int max_sources_to_mix) { +rtc::scoped_refptr AudioMixerImpl::Create() { return Create(std::unique_ptr( new DefaultOutputRateCalculator()), - /*use_limiter=*/true, max_sources_to_mix); + /*use_limiter=*/true); } rtc::scoped_refptr AudioMixerImpl::Create( std::unique_ptr output_rate_calculator, - bool use_limiter, - int max_sources_to_mix) { + bool use_limiter) { return rtc::make_ref_counted( - std::move(output_rate_calculator), use_limiter, max_sources_to_mix); + 
std::move(output_rate_calculator), use_limiter); } void AudioMixerImpl::Mix(size_t number_of_channels, @@ -194,7 +111,7 @@ bool AudioMixerImpl::AddSource(Source* audio_source) { RTC_DCHECK(FindSourceInList(audio_source, &audio_source_list_) == audio_source_list_.end()) << "Source already added to mixer"; - audio_source_list_.emplace_back(new SourceStatus(audio_source, false, 0)); + audio_source_list_.emplace_back(new SourceStatus(audio_source)); helper_containers_->resize(audio_source_list_.size()); UpdateSourceCountStats(); return true; @@ -210,72 +127,27 @@ void AudioMixerImpl::RemoveSource(Source* audio_source) { rtc::ArrayView AudioMixerImpl::GetAudioFromSources( int output_frequency) { - // Get audio from the audio sources and put it in the SourceFrame vector. - int audio_source_mixing_data_count = 0; + int audio_to_mix_count = 0; for (auto& source_and_status : audio_source_list_) { const auto audio_frame_info = source_and_status->audio_source->GetAudioFrameWithInfo( output_frequency, &source_and_status->audio_frame); - - if (audio_frame_info == Source::AudioFrameInfo::kError) { - RTC_LOG_F(LS_WARNING) << "failed to GetAudioFrameWithInfo() from source"; - continue; + switch (audio_frame_info) { + case Source::AudioFrameInfo::kError: + RTC_LOG_F(LS_WARNING) + << "failed to GetAudioFrameWithInfo() from source"; + break; + case Source::AudioFrameInfo::kMuted: + break; + case Source::AudioFrameInfo::kNormal: + helper_containers_->audio_to_mix[audio_to_mix_count++] = + &source_and_status->audio_frame; } - helper_containers_ - ->audio_source_mixing_data_list[audio_source_mixing_data_count++] = - SourceFrame(source_and_status.get(), &source_and_status->audio_frame, - audio_frame_info == Source::AudioFrameInfo::kMuted); } - rtc::ArrayView audio_source_mixing_data_view( - helper_containers_->audio_source_mixing_data_list.data(), - audio_source_mixing_data_count); - - // Sort frames by sorting function. - std::sort(audio_source_mixing_data_view.begin(), - audio_source_mixing_data_view.end(), ShouldMixBefore); - - int max_audio_frame_counter = max_sources_to_mix_; - int ramp_list_length = 0; - int audio_to_mix_count = 0; - // Go through list in order and put unmuted frames in result list. - for (auto& p : audio_source_mixing_data_view) { - // Filter muted. - if (p.muted()) { - p.source_status()->is_mixed = false; - continue; - } - - // Add frame to result vector for mixing. - bool is_mixed = false; - if (max_audio_frame_counter > 0) { - --max_audio_frame_counter; - helper_containers_->audio_to_mix[audio_to_mix_count++] = - p.mutable_audio_frame(); - helper_containers_->ramp_list[ramp_list_length++] = - SourceFrame(p.source_status(), p.mutable_audio_frame(), false, -1); - is_mixed = true; - } - p.source_status()->is_mixed = is_mixed; - } - RampAndUpdateGain(rtc::ArrayView( - helper_containers_->ramp_list.data(), ramp_list_length)); return rtc::ArrayView( helper_containers_->audio_to_mix.data(), audio_to_mix_count); } -bool AudioMixerImpl::GetAudioSourceMixabilityStatusForTest( - AudioMixerImpl::Source* audio_source) const { - MutexLock lock(&mutex_); - - const auto iter = FindSourceInList(audio_source, &audio_source_list_); - if (iter != audio_source_list_.end()) { - return (*iter)->is_mixed; - } - - RTC_LOG(LS_ERROR) << "Audio source unknown"; - return false; -} - void AudioMixerImpl::UpdateSourceCountStats() { size_t current_source_count = audio_source_list_.size(); // Log to the histogram whenever the maximum number of sources increases. 
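For orientation, after the simplification above AudioMixerImpl no longer caps how many sources are mixed per call and no longer exposes a per-source mixability query; every added source whose GetAudioFrameWithInfo() returns kNormal is handed straight to the frame combiner. The following is a minimal usage sketch under that assumption; FakeConstantSource is a hypothetical stand-in for any AudioMixer::Source implementation and is not part of this patch.

#include "api/audio/audio_frame.h"
#include "api/audio/audio_mixer.h"
#include "modules/audio_mixer/audio_mixer_impl.h"

namespace example {

// Hypothetical source used only for illustration; any AudioMixer::Source
// implementation is handled the same way by the mixer.
class FakeConstantSource : public webrtc::AudioMixer::Source {
 public:
  AudioFrameInfo GetAudioFrameWithInfo(
      int sample_rate_hz, webrtc::AudioFrame* audio_frame) override {
    // Hand back a 10 ms muted frame at the requested rate.
    audio_frame->UpdateFrame(/*timestamp=*/0, /*data=*/nullptr,
                             /*samples_per_channel=*/sample_rate_hz / 100,
                             sample_rate_hz, webrtc::AudioFrame::kNormalSpeech,
                             webrtc::AudioFrame::kVadActive,
                             /*num_channels=*/1);
    return AudioFrameInfo::kNormal;
  }
  int Ssrc() const override { return 4711; }
  int PreferredSampleRate() const override { return 48000; }
};

void MixOnce() {
  // Create() no longer takes a max_sources_to_mix argument.
  rtc::scoped_refptr<webrtc::AudioMixerImpl> mixer =
      webrtc::AudioMixerImpl::Create();
  FakeConstantSource source;
  mixer->AddSource(&source);

  webrtc::AudioFrame mixed;
  mixer->Mix(/*number_of_channels=*/1, &mixed);
  mixer->RemoveSource(&source);
}

}  // namespace example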
diff --git a/third_party/libwebrtc/modules/audio_mixer/audio_mixer_impl.h b/third_party/libwebrtc/modules/audio_mixer/audio_mixer_impl.h index e1040defd793..831948701800 100644 --- a/third_party/libwebrtc/modules/audio_mixer/audio_mixer_impl.h +++ b/third_party/libwebrtc/modules/audio_mixer/audio_mixer_impl.h @@ -35,15 +35,11 @@ class AudioMixerImpl : public AudioMixer { // AudioProcessing only accepts 10 ms frames. static const int kFrameDurationInMs = 10; - static const int kDefaultNumberOfMixedAudioSources = 3; - - static rtc::scoped_refptr Create( - int max_sources_to_mix = kDefaultNumberOfMixedAudioSources); + static rtc::scoped_refptr Create(); static rtc::scoped_refptr Create( std::unique_ptr output_rate_calculator, - bool use_limiter, - int max_sources_to_mix = kDefaultNumberOfMixedAudioSources); + bool use_limiter); ~AudioMixerImpl() override; @@ -58,24 +54,16 @@ class AudioMixerImpl : public AudioMixer { AudioFrame* audio_frame_for_mixing) override RTC_LOCKS_EXCLUDED(mutex_); - // Returns true if the source was mixed last round. Returns - // false and logs an error if the source was never added to the - // mixer. - bool GetAudioSourceMixabilityStatusForTest(Source* audio_source) const; - protected: AudioMixerImpl(std::unique_ptr output_rate_calculator, - bool use_limiter, - int max_sources_to_mix); + bool use_limiter); private: struct HelperContainers; void UpdateSourceCountStats() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - // Compute what audio sources to mix from audio_source_list_. Ramp - // in and out. Update mixed status. Mixes up to - // kMaximumAmountOfMixedAudioSources audio sources. + // Fetches audio frames to mix from sources. rtc::ArrayView GetAudioFromSources(int output_frequency) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); @@ -84,8 +72,6 @@ class AudioMixerImpl : public AudioMixer { // checks that mixing is done sequentially. mutable Mutex mutex_; - const int max_sources_to_mix_; - std::unique_ptr output_rate_calculator_; // List of all audio sources. diff --git a/third_party/libwebrtc/modules/audio_mixer/audio_mixer_impl_unittest.cc b/third_party/libwebrtc/modules/audio_mixer/audio_mixer_impl_unittest.cc index 8022332b2780..641c96657026 100644 --- a/third_party/libwebrtc/modules/audio_mixer/audio_mixer_impl_unittest.cc +++ b/third_party/libwebrtc/modules/audio_mixer/audio_mixer_impl_unittest.cc @@ -129,39 +129,6 @@ class CustomRateCalculator : public OutputRateCalculator { const int rate_; }; -// Creates participants from `frames` and `frame_info` and adds them -// to the mixer. 
Compares mixed status with `expected_status` -void MixAndCompare( - const std::vector& frames, - const std::vector& frame_info, - const std::vector& expected_status) { - const size_t num_audio_sources = frames.size(); - RTC_DCHECK(frames.size() == frame_info.size()); - RTC_DCHECK(frame_info.size() == expected_status.size()); - - const auto mixer = AudioMixerImpl::Create(); - std::vector participants(num_audio_sources); - - for (size_t i = 0; i < num_audio_sources; ++i) { - participants[i].fake_frame()->CopyFrom(frames[i]); - participants[i].set_fake_info(frame_info[i]); - } - - for (size_t i = 0; i < num_audio_sources; ++i) { - EXPECT_TRUE(mixer->AddSource(&participants[i])); - EXPECT_CALL(participants[i], GetAudioFrameWithInfo(kDefaultSampleRateHz, _)) - .Times(Exactly(1)); - } - - mixer->Mix(1, &frame_for_mixing); - - for (size_t i = 0; i < num_audio_sources; ++i) { - EXPECT_EQ(expected_status[i], - mixer->GetAudioSourceMixabilityStatusForTest(&participants[i])) - << "Mixed status of AudioSource #" << i << " wrong."; - } -} - void MixMonoAtGivenNativeRate(int native_sample_rate, AudioFrame* mix_frame, rtc::scoped_refptr mixer, @@ -174,49 +141,6 @@ void MixMonoAtGivenNativeRate(int native_sample_rate, mixer->Mix(1, mix_frame); } -TEST(AudioMixer, LargestEnergyVadActiveMixed) { - constexpr int kAudioSources = - AudioMixerImpl::kDefaultNumberOfMixedAudioSources + 3; - - const auto mixer = AudioMixerImpl::Create(); - - MockMixerAudioSource participants[kAudioSources]; - - for (int i = 0; i < kAudioSources; ++i) { - ResetFrame(participants[i].fake_frame()); - - // We set the 80-th sample value since the first 80 samples may be - // modified by a ramped-in window. - participants[i].fake_frame()->mutable_data()[80] = i; - - EXPECT_TRUE(mixer->AddSource(&participants[i])); - EXPECT_CALL(participants[i], GetAudioFrameWithInfo(_, _)).Times(Exactly(1)); - } - - // Last participant gives audio frame with passive VAD, although it has the - // largest energy. - participants[kAudioSources - 1].fake_frame()->vad_activity_ = - AudioFrame::kVadPassive; - - AudioFrame audio_frame; - mixer->Mix(1, // number of channels - &audio_frame); - - for (int i = 0; i < kAudioSources; ++i) { - bool is_mixed = - mixer->GetAudioSourceMixabilityStatusForTest(&participants[i]); - if (i == kAudioSources - 1 || - i < kAudioSources - 1 - - AudioMixerImpl::kDefaultNumberOfMixedAudioSources) { - EXPECT_FALSE(is_mixed) - << "Mixing status of AudioSource #" << i << " wrong."; - } else { - EXPECT_TRUE(is_mixed) - << "Mixing status of AudioSource #" << i << " wrong."; - } - } -} - TEST(AudioMixer, UpdatesSourceCountHistogram) { constexpr int kAudioSourcesGroup1 = 5; constexpr int kAudioSourcesGroup2 = 3; @@ -369,59 +293,6 @@ TEST(AudioMixer, ParticipantNumberOfChannels) { } } -// Maximal amount of participants are mixed one iteration, then -// another participant with higher energy is added. -TEST(AudioMixer, RampedOutSourcesShouldNotBeMarkedMixed) { - constexpr int kAudioSources = - AudioMixerImpl::kDefaultNumberOfMixedAudioSources + 1; - - const auto mixer = AudioMixerImpl::Create(); - MockMixerAudioSource participants[kAudioSources]; - - for (int i = 0; i < kAudioSources; ++i) { - ResetFrame(participants[i].fake_frame()); - // Set the participant audio energy to increase with the index - // `i`. - participants[i].fake_frame()->mutable_data()[0] = 100 * i; - } - - // Add all participants but the loudest for mixing. 
- for (int i = 0; i < kAudioSources - 1; ++i) { - EXPECT_TRUE(mixer->AddSource(&participants[i])); - EXPECT_CALL(participants[i], GetAudioFrameWithInfo(kDefaultSampleRateHz, _)) - .Times(Exactly(1)); - } - - // First mixer iteration - mixer->Mix(1, &frame_for_mixing); - - // All participants but the loudest should have been mixed. - for (int i = 0; i < kAudioSources - 1; ++i) { - EXPECT_TRUE(mixer->GetAudioSourceMixabilityStatusForTest(&participants[i])) - << "Mixed status of AudioSource #" << i << " wrong."; - } - - // Add new participant with higher energy. - EXPECT_TRUE(mixer->AddSource(&participants[kAudioSources - 1])); - for (int i = 0; i < kAudioSources; ++i) { - EXPECT_CALL(participants[i], GetAudioFrameWithInfo(kDefaultSampleRateHz, _)) - .Times(Exactly(1)); - } - - mixer->Mix(1, &frame_for_mixing); - - // The most quiet participant should not have been mixed. - EXPECT_FALSE(mixer->GetAudioSourceMixabilityStatusForTest(&participants[0])) - << "Mixed status of AudioSource #0 wrong."; - - // The loudest participants should have been mixed. - for (int i = 1; i < kAudioSources; ++i) { - EXPECT_EQ(true, - mixer->GetAudioSourceMixabilityStatusForTest(&participants[i])) - << "Mixed status of AudioSource #" << i << " wrong."; - } -} - // This test checks that the initialization and participant addition // can be done on a different thread. TEST(AudioMixer, ConstructFromOtherThread) { @@ -446,127 +317,6 @@ TEST(AudioMixer, ConstructFromOtherThread) { mixer->Mix(1, &frame_for_mixing); } -TEST(AudioMixer, MutedShouldMixAfterUnmuted) { - constexpr int kAudioSources = - AudioMixerImpl::kDefaultNumberOfMixedAudioSources + 1; - - std::vector frames(kAudioSources); - for (auto& frame : frames) { - ResetFrame(&frame); - } - - std::vector frame_info( - kAudioSources, AudioMixer::Source::AudioFrameInfo::kNormal); - frame_info[0] = AudioMixer::Source::AudioFrameInfo::kMuted; - std::vector expected_status(kAudioSources, true); - expected_status[0] = false; - - MixAndCompare(frames, frame_info, expected_status); -} - -TEST(AudioMixer, PassiveShouldMixAfterNormal) { - constexpr int kAudioSources = - AudioMixerImpl::kDefaultNumberOfMixedAudioSources + 1; - - std::vector frames(kAudioSources); - for (auto& frame : frames) { - ResetFrame(&frame); - } - - std::vector frame_info( - kAudioSources, AudioMixer::Source::AudioFrameInfo::kNormal); - frames[0].vad_activity_ = AudioFrame::kVadPassive; - std::vector expected_status(kAudioSources, true); - expected_status[0] = false; - - MixAndCompare(frames, frame_info, expected_status); -} - -TEST(AudioMixer, ActiveShouldMixBeforeLoud) { - constexpr int kAudioSources = - AudioMixerImpl::kDefaultNumberOfMixedAudioSources + 1; - - std::vector frames(kAudioSources); - for (auto& frame : frames) { - ResetFrame(&frame); - } - - std::vector frame_info( - kAudioSources, AudioMixer::Source::AudioFrameInfo::kNormal); - frames[0].vad_activity_ = AudioFrame::kVadPassive; - int16_t* frame_data = frames[0].mutable_data(); - std::fill(frame_data, frame_data + kDefaultSampleRateHz / 100, - std::numeric_limits::max()); - std::vector expected_status(kAudioSources, true); - expected_status[0] = false; - - MixAndCompare(frames, frame_info, expected_status); -} - -TEST(AudioMixer, ShouldMixUpToSpecifiedNumberOfSourcesToMix) { - constexpr int kAudioSources = 5; - constexpr int kSourcesToMix = 2; - - std::vector frames(kAudioSources); - for (auto& frame : frames) { - ResetFrame(&frame); - } - - std::vector frame_info( - kAudioSources, AudioMixer::Source::AudioFrameInfo::kNormal); - // 
Set up to kSourceToMix sources with kVadActive so that they're mixed. - const std::vector kVadActivities = { - AudioFrame::kVadUnknown, AudioFrame::kVadPassive, AudioFrame::kVadPassive, - AudioFrame::kVadActive, AudioFrame::kVadActive}; - // Populate VAD and frame for all sources. - for (int i = 0; i < kAudioSources; i++) { - frames[i].vad_activity_ = kVadActivities[i]; - } - - std::vector participants(kAudioSources); - for (int i = 0; i < kAudioSources; ++i) { - participants[i].fake_frame()->CopyFrom(frames[i]); - participants[i].set_fake_info(frame_info[i]); - } - - const auto mixer = AudioMixerImpl::Create(kSourcesToMix); - for (int i = 0; i < kAudioSources; ++i) { - EXPECT_TRUE(mixer->AddSource(&participants[i])); - EXPECT_CALL(participants[i], GetAudioFrameWithInfo(kDefaultSampleRateHz, _)) - .Times(Exactly(1)); - } - - mixer->Mix(1, &frame_for_mixing); - - std::vector expected_status = {false, false, false, true, true}; - for (int i = 0; i < kAudioSources; ++i) { - EXPECT_EQ(expected_status[i], - mixer->GetAudioSourceMixabilityStatusForTest(&participants[i])) - << "Wrong mix status for source #" << i << " is wrong"; - } -} - -TEST(AudioMixer, UnmutedShouldMixBeforeLoud) { - constexpr int kAudioSources = - AudioMixerImpl::kDefaultNumberOfMixedAudioSources + 1; - - std::vector frames(kAudioSources); - for (auto& frame : frames) { - ResetFrame(&frame); - } - - std::vector frame_info( - kAudioSources, AudioMixer::Source::AudioFrameInfo::kNormal); - frame_info[0] = AudioMixer::Source::AudioFrameInfo::kMuted; - int16_t* frame_data = frames[0].mutable_data(); - std::fill(frame_data, frame_data + kDefaultSampleRateHz / 100, - std::numeric_limits::max()); - std::vector expected_status(kAudioSources, true); - expected_status[0] = false; - - MixAndCompare(frames, frame_info, expected_status); -} - TEST(AudioMixer, MixingRateShouldBeDecidedByRateCalculator) { constexpr int kOutputRate = 22000; const auto mixer = @@ -728,55 +478,6 @@ TEST(AudioMixer, ShouldIncludeRtpPacketInfoFromAllMixedSources) { EXPECT_THAT(frame_for_mixing.packet_infos_, UnorderedElementsAre(p0, p1, p2)); } -TEST(AudioMixer, MixerShouldIncludeRtpPacketInfoFromMixedSourcesOnly) { - const uint32_t kSsrc0 = 10; - const uint32_t kSsrc1 = 11; - const uint32_t kSsrc2 = 21; - const uint32_t kCsrc0 = 30; - const uint32_t kCsrc1 = 31; - const uint32_t kCsrc2 = 32; - const uint32_t kCsrc3 = 33; - const int kAudioLevel0 = 10; - const absl::optional kAudioLevelMissing = absl::nullopt; - const uint32_t kRtpTimestamp0 = 300; - const uint32_t kRtpTimestamp1 = 400; - const Timestamp kReceiveTime0 = Timestamp::Millis(10); - const Timestamp kReceiveTime1 = Timestamp::Millis(20); - - RtpPacketInfo p0(kSsrc0, {kCsrc0, kCsrc1}, kRtpTimestamp0, kReceiveTime0); - p0.set_audio_level(kAudioLevel0); - RtpPacketInfo p1(kSsrc1, {kCsrc2}, kRtpTimestamp1, kReceiveTime1); - p1.set_audio_level(kAudioLevelMissing); - RtpPacketInfo p2(kSsrc2, {kCsrc3}, kRtpTimestamp1, kReceiveTime1); - p2.set_audio_level(kAudioLevelMissing); - - const auto mixer = AudioMixerImpl::Create(/*max_sources_to_mix=*/2); - - MockMixerAudioSource source1; - source1.set_packet_infos(RtpPacketInfos({p0})); - mixer->AddSource(&source1); - ResetFrame(source1.fake_frame()); - mixer->Mix(1, &frame_for_mixing); - - MockMixerAudioSource source2; - source2.set_packet_infos(RtpPacketInfos({p1})); - ResetFrame(source2.fake_frame()); - mixer->AddSource(&source2); - - // The mixer prioritizes kVadActive over kVadPassive. 
- // We limit the number of sources to mix to 2 and set the third source's VAD - // activity to kVadPassive so that it will not be added to the mix. - MockMixerAudioSource source3; - source3.set_packet_infos(RtpPacketInfos({p2})); - ResetFrame(source3.fake_frame()); - source3.fake_frame()->vad_activity_ = AudioFrame::kVadPassive; - mixer->AddSource(&source3); - - mixer->Mix(/*number_of_channels=*/1, &frame_for_mixing); - - EXPECT_THAT(frame_for_mixing.packet_infos_, UnorderedElementsAre(p0, p1)); -} - class HighOutputRateCalculator : public OutputRateCalculator { public: static const int kDefaultFrequency = 76000; diff --git a/third_party/libwebrtc/modules/audio_processing/aec_dump/aec_dump_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/aec_dump/aec_dump_gn/moz.build index 003b6457cdcb..f4a8bed8153e 100644 --- a/third_party/libwebrtc/modules/audio_processing/aec_dump/aec_dump_gn/moz.build +++ b/third_party/libwebrtc/modules/audio_processing/aec_dump/aec_dump_gn/moz.build @@ -122,6 +122,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/modules/audio_processing/aec_dump/null_aec_dump_factory_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/aec_dump/null_aec_dump_factory_gn/moz.build index 7c41104199e4..60cb54230758 100644 --- a/third_party/libwebrtc/modules/audio_processing/aec_dump/null_aec_dump_factory_gn/moz.build +++ b/third_party/libwebrtc/modules/audio_processing/aec_dump/null_aec_dump_factory_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/modules/audio_processing/aec_dump_interface_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/aec_dump_interface_gn/moz.build index 3e6e8171e9d6..f970052fd188 100644 --- a/third_party/libwebrtc/modules/audio_processing/aec_dump_interface_gn/moz.build +++ b/third_party/libwebrtc/modules/audio_processing/aec_dump_interface_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_stats_reporter_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_stats_reporter_gn/moz.build index 3e808b4bc055..6e75e51801e5 100644 --- a/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_stats_reporter_gn/moz.build +++ b/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_stats_reporter_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/modules/audio_processing/api_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/api_gn/moz.build index e88194c51d8d..da4c7723a0a7 100644 --- a/third_party/libwebrtc/modules/audio_processing/api_gn/moz.build +++ b/third_party/libwebrtc/modules/audio_processing/api_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/modules/audio_processing/audio_frame_proxies_gn/moz.build b/third_party/libwebrtc/modules/audio_processing/audio_frame_proxies_gn/moz.build index 58380fe34bf3..594ed5335c0d 100644 --- 
a/third_party/libwebrtc/modules/audio_processing/audio_frame_proxies_gn/moz.build +++ b/third_party/libwebrtc/modules/audio_processing/audio_frame_proxies_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/modules/audio_processing/vad/vad_audio_proc.h b/third_party/libwebrtc/modules/audio_processing/vad/vad_audio_proc.h index cbdd7071296c..905c687e0604 100644 --- a/third_party/libwebrtc/modules/audio_processing/vad/vad_audio_proc.h +++ b/third_party/libwebrtc/modules/audio_processing/vad/vad_audio_proc.h @@ -51,21 +51,19 @@ class VadAudioProc { // For every 30 ms we compute 3 spectral peak there for 3 LPC analysis. // LPC is computed over 15 ms of windowed audio. For every 10 ms sub-frame // we need 5 ms of past signal to create the input of LPC analysis. - static constexpr size_t kNumPastSignalSamples = - static_cast(kSampleRateHz / 200); + static constexpr size_t kNumPastSignalSamples = size_t{kSampleRateHz / 200}; // TODO(turajs): maybe defining this at a higher level (maybe enum) so that // all the code recognize it as "no-error." static constexpr int kNoError = 0; static constexpr size_t kNum10msSubframes = 3; - static constexpr size_t kNumSubframeSamples = - static_cast(kSampleRateHz / 100); + static constexpr size_t kNumSubframeSamples = size_t{kSampleRateHz / 100}; // Samples in 30 ms @ given sampling rate. static constexpr size_t kNumSamplesToProcess = - size_t{kNum10msSubframes} * kNumSubframeSamples; + kNum10msSubframes * kNumSubframeSamples; static constexpr size_t kBufferLength = - size_t{kNumPastSignalSamples} + kNumSamplesToProcess; + kNumPastSignalSamples + kNumSamplesToProcess; static constexpr size_t kIpLength = kDftSize >> 1; static constexpr size_t kWLength = kDftSize >> 1; static constexpr size_t kLpcOrder = 16; diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/alr_detector_gn/moz.build b/third_party/libwebrtc/modules/congestion_controller/goog_cc/alr_detector_gn/moz.build index e889e0aa39c2..75b3630fd62a 100644 --- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/alr_detector_gn/moz.build +++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/alr_detector_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v1_gn/moz.build b/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v1_gn/moz.build index cbc5a4acac4d..43b6e64293be 100644 --- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v1_gn/moz.build +++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/loss_based_bwe_v1_gn/moz.build @@ -127,6 +127,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/probe_controller_gn/moz.build b/third_party/libwebrtc/modules/congestion_controller/goog_cc/probe_controller_gn/moz.build index 12f16357dc0a..bc084b516a39 100644 --- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/probe_controller_gn/moz.build +++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/probe_controller_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ 
+ "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/pushback_controller_gn/moz.build b/third_party/libwebrtc/modules/congestion_controller/goog_cc/pushback_controller_gn/moz.build index a7da1d6e698b..8d0a5ea0759d 100644 --- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/pushback_controller_gn/moz.build +++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/pushback_controller_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/modules/congestion_controller/receive_side_congestion_controller.cc b/third_party/libwebrtc/modules/congestion_controller/receive_side_congestion_controller.cc index d4c745b5465c..e0426788971b 100644 --- a/third_party/libwebrtc/modules/congestion_controller/receive_side_congestion_controller.cc +++ b/third_party/libwebrtc/modules/congestion_controller/receive_side_congestion_controller.cc @@ -98,11 +98,8 @@ void ReceiveSideCongestionController::OnReceivedPacket( } else { // Receive-side BWE. MutexLock lock(&mutex_); - RTPHeader header; - packet.GetHeader(&header); PickEstimator(packet.HasExtension()); - rbe_->IncomingPacket(packet.arrival_time().ms(), - packet.payload_size() + packet.padding_size(), header); + rbe_->IncomingPacket(packet); } } diff --git a/third_party/libwebrtc/modules/desktop_capture/desktop_capture_options.h b/third_party/libwebrtc/modules/desktop_capture/desktop_capture_options.h index 775531081ba5..ee823d296f55 100644 --- a/third_party/libwebrtc/modules/desktop_capture/desktop_capture_options.h +++ b/third_party/libwebrtc/modules/desktop_capture/desktop_capture_options.h @@ -159,15 +159,24 @@ class RTC_EXPORT DesktopCaptureOptions { } #if defined(RTC_ENABLE_WIN_WGC) - // This flag enables the WGC capturer for both window and screen capture. + // This flag enables the WGC capturer for capturing the screen. // This capturer should offer similar or better performance than the cropping // capturer without the disadvantages listed above. However, the WGC capturer // is only available on Windows 10 version 1809 (Redstone 5) and up. This flag // will have no affect on older versions. // If set, and running a supported version of Win10, this flag will take // precedence over the cropping, directx, and magnification flags. - bool allow_wgc_capturer() const { return allow_wgc_capturer_; } - void set_allow_wgc_capturer(bool allow) { allow_wgc_capturer_ = allow; } + bool allow_wgc_screen_capturer() const { return allow_wgc_screen_capturer_; } + void set_allow_wgc_screen_capturer(bool allow) { + allow_wgc_screen_capturer_ = allow; + } + + // This flag has the same effect as allow_wgc_screen_capturer but it only + // enables or disables WGC for window capturing (not screen). + bool allow_wgc_window_capturer() const { return allow_wgc_window_capturer_; } + void set_allow_wgc_window_capturer(bool allow) { + allow_wgc_window_capturer_ = allow; + } // This flag enables the WGC capturer for fallback capturer. // The flag is useful when the first capturer (eg. 
WindowCapturerWinGdi) is @@ -235,7 +244,8 @@ class RTC_EXPORT DesktopCaptureOptions { bool allow_directx_capturer_ = false; bool allow_cropping_window_capturer_ = false; #if defined(RTC_ENABLE_WIN_WGC) - bool allow_wgc_capturer_ = false; + bool allow_wgc_screen_capturer_ = false; + bool allow_wgc_window_capturer_ = false; bool allow_wgc_capturer_fallback_ = false; bool allow_wgc_zero_hertz_ = false; #endif diff --git a/third_party/libwebrtc/modules/desktop_capture/desktop_capturer.cc b/third_party/libwebrtc/modules/desktop_capture/desktop_capturer.cc index b99f5ecb803a..a52a76c2625c 100644 --- a/third_party/libwebrtc/modules/desktop_capture/desktop_capturer.cc +++ b/third_party/libwebrtc/modules/desktop_capture/desktop_capturer.cc @@ -69,7 +69,8 @@ bool DesktopCapturer::IsOccluded(const DesktopVector& pos) { std::unique_ptr DesktopCapturer::CreateWindowCapturer( const DesktopCaptureOptions& options) { #if defined(RTC_ENABLE_WIN_WGC) - if (options.allow_wgc_capturer() && IsWgcSupported(CaptureType::kWindow)) { + if (options.allow_wgc_window_capturer() && + IsWgcSupported(CaptureType::kWindow)) { return WgcCapturerWin::CreateRawWindowCapturer(options); } #endif // defined(RTC_ENABLE_WIN_WGC) @@ -92,7 +93,8 @@ std::unique_ptr DesktopCapturer::CreateWindowCapturer( std::unique_ptr DesktopCapturer::CreateScreenCapturer( const DesktopCaptureOptions& options) { #if defined(RTC_ENABLE_WIN_WGC) - if (options.allow_wgc_capturer() && IsWgcSupported(CaptureType::kScreen)) { + if (options.allow_wgc_screen_capturer() && + IsWgcSupported(CaptureType::kScreen)) { return WgcCapturerWin::CreateRawScreenCapturer(options); } #endif // defined(RTC_ENABLE_WIN_WGC) diff --git a/third_party/libwebrtc/modules/desktop_capture/screen_capturer_fuchsia.cc b/third_party/libwebrtc/modules/desktop_capture/screen_capturer_fuchsia.cc index 19c4c735ad8b..b67632d1f933 100644 --- a/third_party/libwebrtc/modules/desktop_capture/screen_capturer_fuchsia.cc +++ b/third_party/libwebrtc/modules/desktop_capture/screen_capturer_fuchsia.cc @@ -56,18 +56,12 @@ size_t RoundUpToMultiple(size_t value, size_t multiple) { std::unique_ptr DesktopCapturer::CreateRawScreenCapturer( const DesktopCaptureOptions& options) { - if (ScreenCapturerFuchsia::CheckRequirements()) { - std::unique_ptr capturer( - new ScreenCapturerFuchsia()); - return capturer; - } - return nullptr; + std::unique_ptr capturer(new ScreenCapturerFuchsia()); + return capturer; } ScreenCapturerFuchsia::ScreenCapturerFuchsia() - : component_context_(sys::ComponentContext::Create()) { - RTC_DCHECK(CheckRequirements()); -} + : component_context_(sys::ComponentContext::Create()) {} ScreenCapturerFuchsia::~ScreenCapturerFuchsia() { // unmap virtual memory mapped pointers @@ -82,26 +76,6 @@ ScreenCapturerFuchsia::~ScreenCapturerFuchsia() { } } -// TODO(fxbug.dev/100303): Remove this function when Flatland is the only API. 
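The DesktopCaptureOptions and DesktopCapturer hunks above split the single WGC flag into independent screen and window variants. A minimal sketch of how a caller might configure the split flags, assuming only the setters introduced in those hunks; the enable/disable policy shown is illustrative, not the patch's own behavior:

#include <memory>

#include "modules/desktop_capture/desktop_capture_options.h"
#include "modules/desktop_capture/desktop_capturer.h"

// Illustrative policy only: prefer WGC for screen capture, keep the legacy
// path for window capture. The two flags are now independent.
std::unique_ptr<webrtc::DesktopCapturer> MakeScreenCapturer() {
  webrtc::DesktopCaptureOptions options =
      webrtc::DesktopCaptureOptions::CreateDefault();
#if defined(RTC_ENABLE_WIN_WGC)
  options.set_allow_wgc_screen_capturer(true);
  options.set_allow_wgc_window_capturer(false);
#endif
  // CreateScreenCapturer() consults allow_wgc_screen_capturer();
  // CreateWindowCapturer() consults allow_wgc_window_capturer().
  return webrtc::DesktopCapturer::CreateScreenCapturer(options);
}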
-bool ScreenCapturerFuchsia::CheckRequirements() { - std::unique_ptr component_context = - sys::ComponentContext::Create(); - fuchsia::ui::scenic::ScenicSyncPtr scenic; - zx_status_t status = component_context->svc()->Connect(scenic.NewRequest()); - if (status != ZX_OK) { - RTC_LOG(LS_ERROR) << "Failed to connect to Scenic: " << status; - return false; - } - - bool scenic_uses_flatland = false; - scenic->UsesFlatland(&scenic_uses_flatland); - if (!scenic_uses_flatland) { - RTC_LOG(LS_ERROR) << "Screen capture not supported without Flatland."; - } - - return scenic_uses_flatland; -} - void ScreenCapturerFuchsia::Start(Callback* callback) { RTC_DCHECK(!callback_); RTC_DCHECK(callback); diff --git a/third_party/libwebrtc/modules/desktop_capture/screen_capturer_fuchsia.h b/third_party/libwebrtc/modules/desktop_capture/screen_capturer_fuchsia.h index 444930963f55..6e0f87cc58dc 100644 --- a/third_party/libwebrtc/modules/desktop_capture/screen_capturer_fuchsia.h +++ b/third_party/libwebrtc/modules/desktop_capture/screen_capturer_fuchsia.h @@ -29,8 +29,6 @@ class ScreenCapturerFuchsia final : public DesktopCapturer { ScreenCapturerFuchsia(); ~ScreenCapturerFuchsia() override; - static bool CheckRequirements(); - // DesktopCapturer interface. void Start(Callback* callback) override; void CaptureFrame() override; diff --git a/third_party/libwebrtc/modules/pacing/pacing_controller.cc b/third_party/libwebrtc/modules/pacing/pacing_controller.cc index 495aaebffa61..74def9c5387c 100644 --- a/third_party/libwebrtc/modules/pacing/pacing_controller.cc +++ b/third_party/libwebrtc/modules/pacing/pacing_controller.cc @@ -291,9 +291,9 @@ TimeDelta PacingController::UpdateTimeAndGetElapsed(Timestamp now) { TimeDelta elapsed_time = now - last_process_time_; last_process_time_ = now; if (elapsed_time > kMaxElapsedTime) { - RTC_LOG(LS_WARNING) << "Elapsed time (" << elapsed_time.ms() - << " ms) longer than expected, limiting to " - << kMaxElapsedTime.ms(); + RTC_LOG(LS_WARNING) << "Elapsed time (" << ToLogString(elapsed_time) + << ") longer than expected, limiting to " + << ToLogString(kMaxElapsedTime); elapsed_time = kMaxElapsedTime; } return elapsed_time; diff --git a/third_party/libwebrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h b/third_party/libwebrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h index 0d4e15e9e15b..93a05540591a 100644 --- a/third_party/libwebrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h +++ b/third_party/libwebrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h @@ -13,15 +13,13 @@ #ifndef MODULES_REMOTE_BITRATE_ESTIMATOR_INCLUDE_REMOTE_BITRATE_ESTIMATOR_H_ #define MODULES_REMOTE_BITRATE_ESTIMATOR_INCLUDE_REMOTE_BITRATE_ESTIMATOR_H_ -#include -#include +#include #include #include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "modules/include/module_common_types.h" -#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "modules/rtp_rtcp/source/rtcp_packet.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" namespace webrtc { @@ -45,12 +43,8 @@ class RemoteBitrateEstimator : public CallStatsObserver { // Called for each incoming packet. Updates the incoming payload bitrate // estimate and the over-use detector. If an over-use is detected the - // remote bitrate estimate will be updated. Note that `payload_size` is the - // packet size excluding headers. - // Note that `arrival_time_ms` can be of an arbitrary time base. 
- virtual void IncomingPacket(int64_t arrival_time_ms, - size_t payload_size, - const RTPHeader& header) = 0; + // remote bitrate estimate will be updated. + virtual void IncomingPacket(const RtpPacketReceived& rtp_packet) = 0; // Removes all data for `ssrc`. virtual void RemoveStream(uint32_t ssrc) = 0; diff --git a/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc b/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc index 135f5f34246d..b4df1d7a77a7 100644 --- a/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc +++ b/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc @@ -23,6 +23,8 @@ #include "api/units/timestamp.h" #include "modules/remote_bitrate_estimator/include/bwe_defines.h" #include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" +#include "modules/rtp_rtcp/source/rtp_header_extensions.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "system_wrappers/include/metrics.h" @@ -208,26 +210,19 @@ bool RemoteBitrateEstimatorAbsSendTime::IsBitrateImproving( } void RemoteBitrateEstimatorAbsSendTime::IncomingPacket( - int64_t arrival_time_ms, - size_t payload_size, - const RTPHeader& header) { - if (!header.extension.hasAbsoluteSendTime) { + const RtpPacketReceived& rtp_packet) { + uint32_t send_time_24bits; + if (!rtp_packet.GetExtension(&send_time_24bits)) { RTC_LOG(LS_WARNING) << "RemoteBitrateEstimatorAbsSendTimeImpl: Incoming packet " "is missing absolute send time extension!"; return; } - IncomingPacketInfo(Timestamp::Millis(arrival_time_ms), - header.extension.absoluteSendTime, - DataSize::Bytes(payload_size), header.ssrc); -} -void RemoteBitrateEstimatorAbsSendTime::IncomingPacketInfo( - Timestamp arrival_time, - uint32_t send_time_24bits, - DataSize payload_size, - uint32_t ssrc) { - RTC_CHECK(send_time_24bits < (1ul << 24)); + Timestamp arrival_time = rtp_packet.arrival_time(); + DataSize payload_size = + DataSize::Bytes(rtp_packet.payload_size() + rtp_packet.padding_size()); + if (!uma_recorded_) { RTC_HISTOGRAM_ENUMERATION(kBweTypeHistogram, BweNames::kReceiverAbsSendTime, BweNames::kBweNamesMax); @@ -270,7 +265,7 @@ void RemoteBitrateEstimatorAbsSendTime::IncomingPacketInfo( TimeoutStreams(now); RTC_DCHECK(inter_arrival_); RTC_DCHECK(estimator_); - ssrcs_.insert_or_assign(ssrc, now); + ssrcs_.insert_or_assign(rtp_packet.Ssrc(), now); // For now only try to detect probes while we don't have a valid estimate. 
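With the header-parsing overload removed, callers hand the estimator a parsed RtpPacketReceived and the estimator reads the abs-send-time extension itself. A minimal sketch of feeding the new overload, mirroring the unit-test helper changed later in this patch; the extension id 1 and the literal SSRC and sizes are illustrative only:

#include "api/units/timestamp.h"
#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"

void FeedEstimator(webrtc::RemoteBitrateEstimator& estimator,
                   webrtc::Timestamp arrival_time) {
  // Register the extension so SetExtension()/GetExtension() can map
  // AbsoluteSendTime onto an id carried in the packet.
  webrtc::RtpHeaderExtensionMap extensions;
  extensions.Register<webrtc::AbsoluteSendTime>(/*id=*/1);

  webrtc::RtpPacketReceived packet(&extensions);
  packet.SetSsrc(0x12345678);
  packet.SetExtension<webrtc::AbsoluteSendTime>(/*send_time_24bits=*/0);
  packet.SetPayloadSize(1200);
  packet.set_arrival_time(arrival_time);

  // SSRC, payload+padding size and abs-send-time are now pulled from the
  // packet inside IncomingPacket().
  estimator.IncomingPacket(packet);
}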
// We currently assume that only packets larger than 200 bytes are paced by diff --git a/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h b/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h index 7db2aea67df7..5924f2be8164 100644 --- a/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h +++ b/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h @@ -49,9 +49,7 @@ class RemoteBitrateEstimatorAbsSendTime : public RemoteBitrateEstimator { ~RemoteBitrateEstimatorAbsSendTime() override; - void IncomingPacket(int64_t arrival_time_ms, - size_t payload_size, - const RTPHeader& header) override; + void IncomingPacket(const RtpPacketReceived& rtp_packet) override; TimeDelta Process() override; void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override; void RemoveStream(uint32_t ssrc) override; @@ -89,11 +87,6 @@ class RemoteBitrateEstimatorAbsSendTime : public RemoteBitrateEstimator { static void MaybeAddCluster(const Cluster& cluster_aggregate, std::list& clusters); - void IncomingPacketInfo(Timestamp arrival_time, - uint32_t send_time_24bits, - DataSize payload_size, - uint32_t ssrc); - std::list ComputeClusters() const; const Cluster* FindBestProbe(const std::list& clusters) const; diff --git a/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc b/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc index 20326286fa30..070b2fc11bfe 100644 --- a/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc +++ b/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc @@ -19,6 +19,8 @@ #include "modules/remote_bitrate_estimator/inter_arrival.h" #include "modules/remote_bitrate_estimator/overuse_detector.h" #include "modules/remote_bitrate_estimator/overuse_estimator.h" +#include "modules/rtp_rtcp/source/rtp_header_extensions.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "system_wrappers/include/clock.h" @@ -26,6 +28,10 @@ namespace webrtc { namespace { + +constexpr int kTimestampGroupLengthMs = 5; +constexpr double kTimestampToMs = 1.0 / 90.0; + absl::optional OptionalRateFromOptionalBps( absl::optional bitrate_bps) { if (bitrate_bps) { @@ -36,19 +42,8 @@ absl::optional OptionalRateFromOptionalBps( } } // namespace -enum { kTimestampGroupLengthMs = 5 }; -static const double kTimestampToMs = 1.0 / 90.0; - -struct RemoteBitrateEstimatorSingleStream::Detector { - explicit Detector(int64_t last_packet_time_ms) - : last_packet_time_ms(last_packet_time_ms), - inter_arrival(90 * kTimestampGroupLengthMs, kTimestampToMs) {} - - int64_t last_packet_time_ms; - InterArrival inter_arrival; - OveruseEstimator estimator; - OveruseDetector detector; -}; +RemoteBitrateEstimatorSingleStream::Detector::Detector() + : inter_arrival(90 * kTimestampGroupLengthMs, kTimestampToMs) {} RemoteBitrateEstimatorSingleStream::RemoteBitrateEstimatorSingleStream( RemoteBitrateObserver* observer, @@ -64,43 +59,26 @@ RemoteBitrateEstimatorSingleStream::RemoteBitrateEstimatorSingleStream( RTC_LOG(LS_INFO) << "RemoteBitrateEstimatorSingleStream: Instantiating."; } -RemoteBitrateEstimatorSingleStream::~RemoteBitrateEstimatorSingleStream() { - while (!overuse_detectors_.empty()) { - 
SsrcOveruseEstimatorMap::iterator it = overuse_detectors_.begin(); - delete it->second; - overuse_detectors_.erase(it); - } -} +RemoteBitrateEstimatorSingleStream::~RemoteBitrateEstimatorSingleStream() = + default; void RemoteBitrateEstimatorSingleStream::IncomingPacket( - int64_t arrival_time_ms, - size_t payload_size, - const RTPHeader& header) { + const RtpPacketReceived& rtp_packet) { + absl::optional transmission_time_offset = + rtp_packet.GetExtension(); if (!uma_recorded_) { - BweNames type = BweNames::kReceiverTOffset; - if (!header.extension.hasTransmissionTimeOffset) - type = BweNames::kReceiverNoExtension; + BweNames type = transmission_time_offset.has_value() + ? BweNames::kReceiverTOffset + : BweNames::kReceiverNoExtension; RTC_HISTOGRAM_ENUMERATION(kBweTypeHistogram, type, BweNames::kBweNamesMax); uma_recorded_ = true; } - uint32_t ssrc = header.ssrc; + uint32_t ssrc = rtp_packet.Ssrc(); uint32_t rtp_timestamp = - header.timestamp + header.extension.transmissionTimeOffset; + rtp_packet.Timestamp() + transmission_time_offset.value_or(0); int64_t now_ms = clock_->TimeInMilliseconds(); - SsrcOveruseEstimatorMap::iterator it = overuse_detectors_.find(ssrc); - if (it == overuse_detectors_.end()) { - // This is a new SSRC. Adding to map. - // TODO(holmer): If the channel changes SSRC the old SSRC will still be - // around in this map until the channel is deleted. This is OK since the - // callback will no longer be called for the old SSRC. This will be - // automatically cleaned up when we have one RemoteBitrateEstimator per REMB - // group. - std::pair insert_result = - overuse_detectors_.insert(std::make_pair(ssrc, new Detector(now_ms))); - it = insert_result.first; - } - Detector* estimator = it->second; - estimator->last_packet_time_ms = now_ms; + Detector& estimator = overuse_detectors_[ssrc]; + estimator.last_packet_time_ms = now_ms; // Check if incoming bitrate estimate is valid, and if it needs to be reset. 
absl::optional incoming_bitrate = incoming_bitrate_.Rate(now_ms); @@ -113,23 +91,23 @@ void RemoteBitrateEstimatorSingleStream::IncomingPacket( incoming_bitrate_.Reset(); last_valid_incoming_bitrate_ = 0; } + size_t payload_size = rtp_packet.payload_size() + rtp_packet.padding_size(); incoming_bitrate_.Update(payload_size, now_ms); - const BandwidthUsage prior_state = estimator->detector.State(); + const BandwidthUsage prior_state = estimator.detector.State(); uint32_t timestamp_delta = 0; int64_t time_delta = 0; int size_delta = 0; - if (estimator->inter_arrival.ComputeDeltas( - rtp_timestamp, arrival_time_ms, now_ms, payload_size, + if (estimator.inter_arrival.ComputeDeltas( + rtp_timestamp, rtp_packet.arrival_time().ms(), now_ms, payload_size, ×tamp_delta, &time_delta, &size_delta)) { double timestamp_delta_ms = timestamp_delta * kTimestampToMs; - estimator->estimator.Update(time_delta, timestamp_delta_ms, size_delta, - estimator->detector.State(), now_ms); - estimator->detector.Detect(estimator->estimator.offset(), - timestamp_delta_ms, - estimator->estimator.num_of_deltas(), now_ms); + estimator.estimator.Update(time_delta, timestamp_delta_ms, size_delta, + estimator.detector.State(), now_ms); + estimator.detector.Detect(estimator.estimator.offset(), timestamp_delta_ms, + estimator.estimator.num_of_deltas(), now_ms); } - if (estimator->detector.State() == BandwidthUsage::kBwOverusing) { + if (estimator.detector.State() == BandwidthUsage::kBwOverusing) { absl::optional incoming_bitrate_bps = incoming_bitrate_.Rate(now_ms); if (incoming_bitrate_bps && @@ -159,21 +137,19 @@ TimeDelta RemoteBitrateEstimatorSingleStream::Process() { void RemoteBitrateEstimatorSingleStream::UpdateEstimate(int64_t now_ms) { BandwidthUsage bw_state = BandwidthUsage::kBwNormal; - SsrcOveruseEstimatorMap::iterator it = overuse_detectors_.begin(); + auto it = overuse_detectors_.begin(); while (it != overuse_detectors_.end()) { - const int64_t time_of_last_received_packet = - it->second->last_packet_time_ms; + const int64_t time_of_last_received_packet = it->second.last_packet_time_ms; if (time_of_last_received_packet >= 0 && now_ms - time_of_last_received_packet > kStreamTimeOutMs) { // This over-use detector hasn't received packets for `kStreamTimeOutMs` // milliseconds and is considered stale. - delete it->second; overuse_detectors_.erase(it++); } else { // Make sure that we trigger an over-use if any of the over-use detectors // is detecting over-use. 
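Storing Detector by value, as in the hunk above, removes the manual new/delete bookkeeping around the SSRC map. A small sketch of the resulting usage pattern; the Detector struct below is a simplified stand-in for the one this patch moves into the header:

#include <cstdint>
#include <map>

// Simplified stand-in for the Detector struct defined in the header above.
struct Detector {
  int64_t last_packet_time_ms = 0;
};

std::map<uint32_t, Detector> overuse_detectors;

void OnPacket(uint32_t ssrc, int64_t now_ms) {
  // operator[] default-constructs an entry for a previously unseen SSRC,
  // replacing the old find() / new Detector / insert() sequence.
  Detector& detector = overuse_detectors[ssrc];
  detector.last_packet_time_ms = now_ms;
}

void OnStreamTimeout(uint32_t ssrc) {
  // Stale streams are dropped without a manual delete.
  overuse_detectors.erase(ssrc);
}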
- if (it->second->detector.State() > bw_state) { - bw_state = it->second->detector.State(); + if (it->second.detector.State() > bw_state) { + bw_state = it->second.detector.State(); } ++it; } @@ -190,10 +166,8 @@ void RemoteBitrateEstimatorSingleStream::UpdateEstimate(int64_t now_ms) { if (remote_rate_.ValidEstimate()) { process_interval_ms_ = remote_rate_.GetFeedbackInterval().ms(); RTC_DCHECK_GT(process_interval_ms_, 0); - std::vector ssrcs; - GetSsrcs(&ssrcs); if (observer_) - observer_->OnReceiveBitrateChanged(ssrcs, target_bitrate); + observer_->OnReceiveBitrateChanged(GetSsrcs(), target_bitrate); } } @@ -202,12 +176,8 @@ void RemoteBitrateEstimatorSingleStream::OnRttUpdate(int64_t avg_rtt_ms, remote_rate_.SetRtt(TimeDelta::Millis(avg_rtt_ms)); } -void RemoteBitrateEstimatorSingleStream::RemoveStream(unsigned int ssrc) { - SsrcOveruseEstimatorMap::iterator it = overuse_detectors_.find(ssrc); - if (it != overuse_detectors_.end()) { - delete it->second; - overuse_detectors_.erase(it); - } +void RemoteBitrateEstimatorSingleStream::RemoveStream(uint32_t ssrc) { + overuse_detectors_.erase(ssrc); } DataRate RemoteBitrateEstimatorSingleStream::LatestEstimate() const { @@ -217,15 +187,13 @@ DataRate RemoteBitrateEstimatorSingleStream::LatestEstimate() const { return remote_rate_.LatestEstimate(); } -void RemoteBitrateEstimatorSingleStream::GetSsrcs( - std::vector* ssrcs) const { - RTC_DCHECK(ssrcs); - ssrcs->resize(overuse_detectors_.size()); - int i = 0; - for (SsrcOveruseEstimatorMap::const_iterator it = overuse_detectors_.begin(); - it != overuse_detectors_.end(); ++it, ++i) { - (*ssrcs)[i] = it->first; +std::vector RemoteBitrateEstimatorSingleStream::GetSsrcs() const { + std::vector ssrcs; + ssrcs.reserve(overuse_detectors_.size()); + for (const auto& [ssrc, unused] : overuse_detectors_) { + ssrcs.push_back(ssrc); } + return ssrcs; } } // namespace webrtc diff --git a/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h b/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h index 5e2961d90194..d0ca675cb39f 100644 --- a/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h +++ b/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h @@ -23,6 +23,9 @@ #include "api/units/timestamp.h" #include "modules/remote_bitrate_estimator/aimd_rate_control.h" #include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" +#include "modules/remote_bitrate_estimator/inter_arrival.h" +#include "modules/remote_bitrate_estimator/overuse_detector.h" +#include "modules/remote_bitrate_estimator/overuse_estimator.h" #include "rtc_base/rate_statistics.h" namespace webrtc { @@ -43,27 +46,30 @@ class RemoteBitrateEstimatorSingleStream : public RemoteBitrateEstimator { ~RemoteBitrateEstimatorSingleStream() override; - void IncomingPacket(int64_t arrival_time_ms, - size_t payload_size, - const RTPHeader& header) override; + void IncomingPacket(const RtpPacketReceived& rtp_packet) override; TimeDelta Process() override; void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override; void RemoveStream(uint32_t ssrc) override; DataRate LatestEstimate() const override; private: - struct Detector; + struct Detector { + Detector(); - typedef std::map SsrcOveruseEstimatorMap; + int64_t last_packet_time_ms; + InterArrival inter_arrival; + OveruseEstimator estimator; + OveruseDetector detector; + }; // Triggers a new estimate calculation. 
void UpdateEstimate(int64_t time_now); - void GetSsrcs(std::vector* ssrcs) const; + std::vector GetSsrcs() const; Clock* const clock_; const FieldTrialBasedConfig field_trials_; - SsrcOveruseEstimatorMap overuse_detectors_; + std::map overuse_detectors_; RateStatistics incoming_bitrate_; uint32_t last_valid_incoming_bitrate_; AimdRateControl remote_rate_; diff --git a/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc b/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc index 899037f5a73e..ee9644530ab5 100644 --- a/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc +++ b/third_party/libwebrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc @@ -13,6 +13,9 @@ #include #include +#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" +#include "modules/rtp_rtcp/source/rtp_header_extensions.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/checks.h" namespace webrtc { @@ -229,15 +232,17 @@ void RemoteBitrateEstimatorTest::IncomingPacket(uint32_t ssrc, int64_t arrival_time, uint32_t rtp_timestamp, uint32_t absolute_send_time) { - RTPHeader header; - memset(&header, 0, sizeof(header)); - header.ssrc = ssrc; - header.timestamp = rtp_timestamp; - header.extension.hasAbsoluteSendTime = true; - header.extension.absoluteSendTime = absolute_send_time; - RTC_CHECK_GE(arrival_time + arrival_time_offset_ms_, 0); - bitrate_estimator_->IncomingPacket(arrival_time + arrival_time_offset_ms_, - payload_size, header); + RtpHeaderExtensionMap extensions; + extensions.Register(1); + RtpPacketReceived rtp_packet(&extensions); + rtp_packet.SetSsrc(ssrc); + rtp_packet.SetTimestamp(rtp_timestamp); + rtp_packet.SetExtension(absolute_send_time); + rtp_packet.SetPayloadSize(payload_size); + rtp_packet.set_arrival_time( + Timestamp::Millis(arrival_time + arrival_time_offset_ms_)); + + bitrate_estimator_->IncomingPacket(rtp_packet); } // Generates a frame of packets belonging to a stream at a given bitrate and diff --git a/third_party/libwebrtc/modules/rtp_rtcp/BUILD.gn b/third_party/libwebrtc/modules/rtp_rtcp/BUILD.gn index 2d2cf53a94d3..4a627e15a1d8 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/BUILD.gn +++ b/third_party/libwebrtc/modules/rtp_rtcp/BUILD.gn @@ -288,6 +288,7 @@ rtc_library("rtp_rtcp") { "../../api/transport/rtp:dependency_descriptor", "../../api/transport/rtp:rtp_source", "../../api/units:data_rate", + "../../api/units:frequency", "../../api/units:time_delta", "../../api/units:timestamp", "../../api/video:encoded_frame", @@ -305,6 +306,7 @@ rtc_library("rtp_rtcp") { "../../logging:rtc_event_rtp_rtcp", "../../modules/audio_coding:audio_coding_module_typedefs", "../../rtc_base:bit_buffer", + "../../rtc_base:bitrate_tracker", "../../rtc_base:bitstream_reader", "../../rtc_base:buffer", "../../rtc_base:byte_buffer", @@ -312,6 +314,7 @@ rtc_library("rtp_rtcp") { "../../rtc_base:copy_on_write_buffer", "../../rtc_base:divide_round", "../../rtc_base:event_tracer", + "../../rtc_base:frequency_tracker", "../../rtc_base:gtest_prod", "../../rtc_base:logging", "../../rtc_base:macromagic", @@ -320,7 +323,6 @@ rtc_library("rtp_rtcp") { "../../rtc_base:race_checker", "../../rtc_base:random", "../../rtc_base:rate_limiter", - "../../rtc_base:rate_statistics", "../../rtc_base:rtc_numerics", "../../rtc_base:safe_conversions", "../../rtc_base:safe_minmax", @@ -365,11 +367,11 @@ rtc_source_set("rtp_rtcp_legacy") { 
"../../api/units:timestamp", "../../api/video:video_bitrate_allocation", "../../logging:rtc_event_rtp_rtcp", + "../../rtc_base:bitrate_tracker", "../../rtc_base:checks", "../../rtc_base:gtest_prod", "../../rtc_base:logging", "../../rtc_base:macromagic", - "../../rtc_base:rate_statistics", "../../rtc_base/synchronization:mutex", "../../system_wrappers", "../remote_bitrate_estimator", diff --git a/third_party/libwebrtc/modules/rtp_rtcp/include/flexfec_sender.h b/third_party/libwebrtc/modules/rtp_rtcp/include/flexfec_sender.h index b61781a480c2..8f21ab75173c 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/include/flexfec_sender.h +++ b/third_party/libwebrtc/modules/rtp_rtcp/include/flexfec_sender.h @@ -24,8 +24,8 @@ #include "modules/rtp_rtcp/source/rtp_header_extension_size.h" #include "modules/rtp_rtcp/source/ulpfec_generator.h" #include "modules/rtp_rtcp/source/video_fec_generator.h" +#include "rtc_base/bitrate_tracker.h" #include "rtc_base/random.h" -#include "rtc_base/rate_statistics.h" #include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -96,7 +96,7 @@ class FlexfecSender : public VideoFecGenerator { const size_t header_extensions_size_; mutable Mutex mutex_; - RateStatistics fec_bitrate_ RTC_GUARDED_BY(mutex_); + BitrateTracker fec_bitrate_ RTC_GUARDED_BY(mutex_); }; } // namespace webrtc diff --git a/third_party/libwebrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h b/third_party/libwebrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h index 3ccc0e0f5c12..982e5c57efcb 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h +++ b/third_party/libwebrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h @@ -429,9 +429,9 @@ class SendSideDelayObserver { // Remove SendSideDelayObserver once possible. class SendPacketObserver { public: - virtual ~SendPacketObserver() {} + virtual ~SendPacketObserver() = default; virtual void OnSendPacket(uint16_t packet_id, - int64_t capture_time_ms, + Timestamp capture_time, uint32_t ssrc) = 0; }; diff --git a/third_party/libwebrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h b/third_party/libwebrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h index 5af821e028b4..6872448f9809 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h +++ b/third_party/libwebrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h @@ -120,7 +120,7 @@ class MockRtpRtcpInterface : public RtpRtcpInterface { MOCK_METHOD(void, SetRTCPStatus, (RtcpMode method), (override)); MOCK_METHOD(int32_t, SetCNAME, (absl::string_view cname), (override)); MOCK_METHOD(absl::optional, LastRtt, (), (const, override)); - MOCK_METHOD(int64_t, ExpectedRetransmissionTimeMs, (), (const, override)); + MOCK_METHOD(TimeDelta, ExpectedRetransmissionTime, (), (const, override)); MOCK_METHOD(int32_t, SendRTCP, (RTCPPacketType packet_type), (override)); MOCK_METHOD(void, GetSendStreamDataCounters, diff --git a/third_party/libwebrtc/modules/rtp_rtcp/rtp_video_header_gn/moz.build b/third_party/libwebrtc/modules/rtp_rtcp/rtp_video_header_gn/moz.build index 02b919fc1648..0a8968c7d302 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/rtp_video_header_gn/moz.build +++ b/third_party/libwebrtc/modules/rtp_rtcp/rtp_video_header_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/byte_io.h b/third_party/libwebrtc/modules/rtp_rtcp/source/byte_io.h index a98eb3073bb4..ae70202c3005 100644 --- 
a/third_party/libwebrtc/modules/rtp_rtcp/source/byte_io.h +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/byte_io.h @@ -31,7 +31,7 @@ // Write an unsigned 40-bit (5 byte) integer in big endian format // ByteWriter::WriteBigEndian(buffer, val); // -// These classes are implemented as recursive templetizations, inteded to make +// These classes are implemented as recursive templetizations, intended to make // it easy for the compiler to completely inline the reading/writing. #include diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc index 880b77d493cd..e2531bb1b1b4 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc @@ -24,7 +24,7 @@ namespace webrtc { namespace { constexpr uint32_t kTimestampTicksPerMs = 90; constexpr int kSendSideDelayWindowMs = 1000; -constexpr int kBitrateStatisticsWindowMs = 1000; +constexpr TimeDelta kBitrateStatisticsWindow = TimeDelta::Seconds(1); constexpr size_t kRtpSequenceNumberMapMaxEntries = 1 << 13; } // namespace @@ -82,8 +82,7 @@ DEPRECATED_RtpSenderEgress::DEPRECATED_RtpSenderEgress( timestamp_offset_(0), max_delay_it_(send_delays_.end()), sum_delays_ms_(0), - send_rates_(kNumMediaTypes, - {kBitrateStatisticsWindowMs, RateStatistics::kBpsScale}), + send_rates_(kNumMediaTypes, BitrateTracker(kBitrateStatisticsWindow)), rtp_sequence_number_map_(need_rtp_packet_infos_ ? std::make_unique( kRtpSequenceNumberMapMaxEntries) @@ -221,12 +220,11 @@ RtpSendRates DEPRECATED_RtpSenderEgress::GetSendRates() const { } RtpSendRates DEPRECATED_RtpSenderEgress::GetSendRatesLocked() const { - const int64_t now_ms = clock_->TimeInMilliseconds(); + const Timestamp now = clock_->CurrentTime(); RtpSendRates current_rates; for (size_t i = 0; i < kNumMediaTypes; ++i) { RtpPacketMediaType type = static_cast(i); - current_rates[type] = - DataRate::BitsPerSec(send_rates_[i].Rate(now_ms).value_or(0)); + current_rates[type] = send_rates_[i].Rate(now).value_or(DataRate::Zero()); } return current_rates; } @@ -408,7 +406,8 @@ void DEPRECATED_RtpSenderEgress::UpdateOnSendPacket(int packet_id, return; } - send_packet_observer_->OnSendPacket(packet_id, capture_time_ms, ssrc); + send_packet_observer_->OnSendPacket(packet_id, + Timestamp::Millis(capture_time_ms), ssrc); } bool DEPRECATED_RtpSenderEgress::SendPacketToNetwork( @@ -417,7 +416,7 @@ bool DEPRECATED_RtpSenderEgress::SendPacketToNetwork( const PacedPacketInfo& pacing_info) { int bytes_sent = -1; if (transport_) { - bytes_sent = transport_->SendRtp(packet.data(), packet.size(), options) + bytes_sent = transport_->SendRtp(packet, options) ? 
static_cast(packet.size()) : -1; if (event_log_ && bytes_sent > 0) { @@ -452,7 +451,7 @@ void DEPRECATED_RtpSenderEgress::UpdateRtpStats(const RtpPacketToSend& packet) { RTC_DCHECK(packet.packet_type().has_value()); send_rates_[static_cast(*packet.packet_type())].Update(packet.size(), - now.ms()); + now); if (rtp_stats_callback_) { rtp_stats_callback_->DataCountersUpdated(*counters, packet.Ssrc()); diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h b/third_party/libwebrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h index 609a90d4fe29..e786d90c2f43 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h @@ -25,7 +25,7 @@ #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" -#include "rtc_base/rate_statistics.h" +#include "rtc_base/bitrate_tracker.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -136,7 +136,7 @@ class DEPRECATED_RtpSenderEgress { StreamDataCounters rtp_stats_ RTC_GUARDED_BY(lock_); StreamDataCounters rtx_rtp_stats_ RTC_GUARDED_BY(lock_); // One element per value in RtpPacketMediaType, with index matching value. - std::vector send_rates_ RTC_GUARDED_BY(lock_); + std::vector send_rates_ RTC_GUARDED_BY(lock_); // Maps sent packets' sequence numbers to a tuple consisting of: // 1. The timestamp, without the randomizing offset mandated by the RFC. diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/flexfec_03_header_reader_writer.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/flexfec_03_header_reader_writer.cc index cf4dc3e58e06..85f6d2c75489 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/flexfec_03_header_reader_writer.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/flexfec_03_header_reader_writer.cc @@ -179,9 +179,9 @@ bool Flexfec03HeaderReader::ReadFecHeader( } // At this point, K-bits 0 and 1 have been removed, and the front-most // part of the FlexFEC packet mask has been packed accordingly. We will - // now shift the remaning part of the packet mask three steps to the left. - // This corresponds to the (in total) three K-bits, which have been - // removed. + // now shift the remaining part of the packet mask three steps to + // the left. This corresponds to the (in total) three K-bits, which + // have been removed. uint8_t tail_bits = (packet_mask[6] >> 5) & 0x03; packet_mask[5] |= tail_bits; uint64_t mask_part2 = diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc index 79509f34c646..cfca7cb066c7 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc @@ -179,9 +179,9 @@ bool FlexfecHeaderReader::ReadFecHeader( kFlexfecPacketMaskSizes[2]; // At this point, K-bits 0 and 1 have been removed, and the front-most // part of the FlexFEC packet mask has been packed accordingly. We will - // now shift the remaning part of the packet mask two steps to the left. - // This corresponds to the (in total) two K-bits, which have been - // removed. + // now shift the remaining part of the packet mask two steps to + // the left. 
This corresponds to the (in total) two K-bits, which + // have been removed. uint8_t tail_bits = (data[byte_index] >> 6) & 0x03; data[byte_index - 1] |= tail_bits; uint64_t mask_part2 = diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/flexfec_sender.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/flexfec_sender.cc index c8bac0114d76..3a98778d1694 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/flexfec_sender.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/flexfec_sender.cc @@ -99,7 +99,7 @@ FlexfecSender::FlexfecSender( RegisterSupportedExtensions(rtp_header_extensions)), header_extensions_size_( RtpHeaderExtensionSize(extension_sizes, rtp_header_extension_map_)), - fec_bitrate_(/*max_window_size_ms=*/1000, RateStatistics::kBpsScale) { + fec_bitrate_(/*max_window_size=*/TimeDelta::Seconds(1)) { // This object should not have been instantiated if FlexFEC is disabled. RTC_DCHECK_GE(payload_type, 0); RTC_DCHECK_LE(payload_type, 127); @@ -179,7 +179,7 @@ std::vector> FlexfecSender::GetFecPackets() { } MutexLock lock(&mutex_); - fec_bitrate_.Update(total_fec_data_bytes, now.ms()); + fec_bitrate_.Update(total_fec_data_bytes, now); return fec_packets_to_send; } @@ -191,8 +191,7 @@ size_t FlexfecSender::MaxPacketOverhead() const { DataRate FlexfecSender::CurrentFecRate() const { MutexLock lock(&mutex_); - return DataRate::BitsPerSec( - fec_bitrate_.Rate(clock_->TimeInMilliseconds()).value_or(0)); + return fec_bitrate_.Rate(clock_->CurrentTime()).value_or(DataRate::Zero()); } absl::optional FlexfecSender::GetRtpState() { diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/forward_error_correction_internal.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/forward_error_correction_internal.cc index ac68162d2637..a10f2e6a21bd 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/forward_error_correction_internal.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/forward_error_correction_internal.cc @@ -163,7 +163,7 @@ rtc::ArrayView PacketMaskTable::LookUp(int num_media_packets, // Generate FEC code mask for {num_media_packets(M), num_fec_packets(N)} (use // N FEC packets to protect M media packets) In the mask, each FEC packet - // occupies one row, each bit / coloumn represent one media packet. E.g. Row + // occupies one row, each bit / column represent one media packet. E.g. Row // A, Col/Bit B is set to 1, means FEC packet A will have protection for media // packet B. diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc index 6697aff66ca0..2ec4b5366891 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc @@ -210,8 +210,8 @@ class RtpRtcpRtxNackTest : public ::testing::Test { video_header.frame_type = VideoFrameType::kVideoFrameDelta; EXPECT_TRUE(rtp_sender_video_->SendVideo( kPayloadType, VideoCodecType::kVideoCodecGeneric, timestamp, - timestamp / 90, payload_data, sizeof(payload_data), video_header, 0, - {})); + /*capture_time=*/Timestamp::Millis(timestamp / 90), payload_data, + sizeof(payload_data), video_header, TimeDelta::Zero(), {})); // Min required delay until retransmit = 5 + RTT ms (RTT = 0). 
fake_clock.AdvanceTimeMilliseconds(5); int length = BuildNackList(nack_list); @@ -261,8 +261,8 @@ TEST_F(RtpRtcpRtxNackTest, LongNackList) { video_header.frame_type = VideoFrameType::kVideoFrameDelta; EXPECT_TRUE(rtp_sender_video_->SendVideo( kPayloadType, VideoCodecType::kVideoCodecGeneric, timestamp, - timestamp / 90, payload_data, sizeof(payload_data), video_header, 0, - {})); + Timestamp::Millis(timestamp / 90), payload_data, sizeof(payload_data), + video_header, TimeDelta::Zero(), {})); // Prepare next frame. timestamp += 3000; fake_clock.AdvanceTimeMilliseconds(33); diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc index 6fe2000acd20..0e5e40f5022b 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc @@ -47,8 +47,7 @@ StreamStatisticianImpl::StreamStatisticianImpl(uint32_t ssrc, : ssrc_(ssrc), clock_(clock), delta_internal_unix_epoch_(UnixEpochDelta(*clock_)), - incoming_bitrate_(kStatisticsProcessInterval.ms(), - RateStatistics::kBpsScale), + incoming_bitrate_(/*max_window_size=*/kStatisticsProcessInterval), max_reordering_threshold_(max_reordering_threshold), enable_retransmit_detection_(false), cumulative_loss_is_capped_(false), @@ -117,7 +116,7 @@ void StreamStatisticianImpl::UpdateCounters(const RtpPacketReceived& packet) { RTC_DCHECK_EQ(ssrc_, packet.Ssrc()); Timestamp now = clock_->CurrentTime(); - incoming_bitrate_.Update(packet.size(), now.ms()); + incoming_bitrate_.Update(packet.size(), now); receive_counters_.transmitted.AddPacket(packet); --cumulative_loss_; @@ -260,7 +259,7 @@ void StreamStatisticianImpl::MaybeAppendReportBlockAndReset( int packets_lost = cumulative_loss_ + cumulative_loss_rtcp_offset_; if (packets_lost < 0) { - // Clamp to zero. Work around to accomodate for senders that misbehave with + // Clamp to zero. Work around to accommodate for senders that misbehave with // negative cumulative loss. packets_lost = 0; cumulative_loss_rtcp_offset_ = -cumulative_loss_; @@ -310,7 +309,9 @@ StreamDataCounters StreamStatisticianImpl::GetReceiveStreamDataCounters() } uint32_t StreamStatisticianImpl::BitrateReceived() const { - return incoming_bitrate_.Rate(clock_->TimeInMilliseconds()).value_or(0); + return incoming_bitrate_.Rate(clock_->CurrentTime()) + .value_or(DataRate::Zero()) + .bps(); } bool StreamStatisticianImpl::IsRetransmitOfOldPacket( @@ -318,7 +319,7 @@ bool StreamStatisticianImpl::IsRetransmitOfOldPacket( Timestamp now) const { int frequency_hz = packet.payload_type_frequency(); RTC_DCHECK(last_receive_time_.has_value()); - RTC_DCHECK_GT(frequency_hz, 0); + RTC_CHECK_GT(frequency_hz, 0); TimeDelta time_diff = now - *last_receive_time_; // Diff in time stamp since last received in order. 
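The hunks above replace RateStatistics with BitrateTracker, swapping raw millisecond counts and bits-per-second integers for TimeDelta, Timestamp and DataRate. A minimal sketch of the pattern the updated call sites follow, assuming only the constructor and accessors visible in those hunks; the window size and byte count are illustrative:

#include "api/units/data_rate.h"
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
#include "rtc_base/bitrate_tracker.h"

webrtc::DataRate MeasuredRate(webrtc::Timestamp now) {
  // The averaging window is a TimeDelta rather than a count of milliseconds.
  webrtc::BitrateTracker bitrate(
      /*max_window_size=*/webrtc::TimeDelta::Seconds(1));

  // Samples are stamped with a Timestamp instead of now.ms().
  bitrate.Update(/*bytes=*/1200u, now);

  // Rate() yields absl::optional<DataRate>; callers pick an explicit fallback.
  return bitrate.Rate(now).value_or(webrtc::DataRate::Zero());
}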
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/receive_statistics_impl.h b/third_party/libwebrtc/modules/rtp_rtcp/source/receive_statistics_impl.h index 8ec7c96c17d5..ccac2d55d69a 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/receive_statistics_impl.h +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/receive_statistics_impl.h @@ -22,9 +22,9 @@ #include "api/units/timestamp.h" #include "modules/rtp_rtcp/include/receive_statistics.h" #include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" +#include "rtc_base/bitrate_tracker.h" #include "rtc_base/containers/flat_map.h" #include "rtc_base/numerics/sequence_number_unwrapper.h" -#include "rtc_base/rate_statistics.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -80,7 +80,7 @@ class StreamStatisticianImpl : public StreamStatisticianImplInterface { Clock* const clock_; // Delta used to map internal timestamps to Unix epoch ones. const TimeDelta delta_internal_unix_epoch_; - RateStatistics incoming_bitrate_; + BitrateTracker incoming_bitrate_; // In number of packets or sequence numbers. int max_reordering_threshold_; bool enable_retransmit_detection_; diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtcp_receiver.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtcp_receiver.cc index 460fbdf5a9aa..bda6ad9a52dd 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtcp_receiver.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtcp_receiver.cc @@ -19,6 +19,8 @@ #include #include +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "api/video/video_bitrate_allocation.h" #include "api/video/video_bitrate_allocator.h" #include "modules/rtp_rtcp/source/rtcp_packet/bye.h" @@ -55,15 +57,14 @@ using rtcp::CommonHeader; using rtcp::ReportBlock; // The number of RTCP time intervals needed to trigger a timeout. -const int kRrTimeoutIntervals = 3; +constexpr int kRrTimeoutIntervals = 3; -const int64_t kTmmbrTimeoutIntervalMs = 5 * 5000; - -const int64_t kMaxWarningLogIntervalMs = 10000; -const int64_t kRtcpMinFrameLengthMs = 17; +constexpr TimeDelta kTmmbrTimeoutInterval = TimeDelta::Seconds(25); +constexpr TimeDelta kMaxWarningLogInterval = TimeDelta::Seconds(10); +constexpr TimeDelta kRtcpMinFrameLength = TimeDelta::Millis(17); // Maximum number of received RRTRs that will be stored. -const size_t kMaxNumberOfStoredRrtrs = 300; +constexpr size_t kMaxNumberOfStoredRrtrs = 300; constexpr TimeDelta kDefaultVideoReportInterval = TimeDelta::Seconds(1); constexpr TimeDelta kDefaultAudioReportInterval = TimeDelta::Seconds(5); @@ -155,12 +156,12 @@ RTCPReceiver::RTCPReceiver(const RtpRtcpInterface::Configuration& config, // TODO(bugs.webrtc.org/10774): Remove fallback. remote_ssrc_(0), xr_rrtr_status_(config.non_sender_rtt_measurement), - oldest_tmmbr_info_ms_(0), + oldest_tmmbr_info_(Timestamp::Zero()), cname_callback_(config.rtcp_cname_callback), report_block_data_observer_(config.report_block_data_observer), packet_type_counter_observer_(config.rtcp_packet_type_counter_observer), num_skipped_packets_(0), - last_skipped_packets_warning_ms_(clock_->TimeInMilliseconds()) { + last_skipped_packets_warning_(clock_->CurrentTime()) { RTC_DCHECK(owner); } @@ -183,12 +184,12 @@ RTCPReceiver::RTCPReceiver(const RtpRtcpInterface::Configuration& config, // TODO(bugs.webrtc.org/10774): Remove fallback. 
remote_ssrc_(0), xr_rrtr_status_(config.non_sender_rtt_measurement), - oldest_tmmbr_info_ms_(0), + oldest_tmmbr_info_(Timestamp::Zero()), cname_callback_(config.rtcp_cname_callback), report_block_data_observer_(config.report_block_data_observer), packet_type_counter_observer_(config.rtcp_packet_type_counter_observer), num_skipped_packets_(0), - last_skipped_packets_warning_ms_(clock_->TimeInMilliseconds()) { + last_skipped_packets_warning_(clock_->CurrentTime()) { RTC_DCHECK(owner); // Dear reader - if you're here because of this log statement and are // wondering what this is about, chances are that you are using an instance @@ -315,7 +316,7 @@ absl::optional RTCPReceiver::OnPeriodicRttUpdate( } // Check for expired timers and if so, log and reset. - auto now = clock_->CurrentTime(); + Timestamp now = clock_->CurrentTime(); if (RtcpRrTimeoutLocked(now)) { RTC_LOG_F(LS_WARNING) << "Timeout: No RTCP RR received."; } else if (RtcpRrSequenceNumberTimeoutLocked(now)) { @@ -479,14 +480,14 @@ bool RTCPReceiver::ParseCompoundPacket(rtc::ArrayView packet, } if (num_skipped_packets_ > 0) { - const int64_t now_ms = clock_->TimeInMilliseconds(); - if (now_ms - last_skipped_packets_warning_ms_ >= kMaxWarningLogIntervalMs) { - last_skipped_packets_warning_ms_ = now_ms; + const Timestamp now = clock_->CurrentTime(); + if (now - last_skipped_packets_warning_ >= kMaxWarningLogInterval) { + last_skipped_packets_warning_ = now; RTC_LOG(LS_WARNING) << num_skipped_packets_ << " RTCP blocks were skipped due to being malformed or of " "unrecognized/unsupported type, during the past " - << (kMaxWarningLogIntervalMs / 1000) << " second period."; + << kMaxWarningLogInterval << " period."; } } @@ -635,14 +636,14 @@ RTCPReceiver::TmmbrInformation* RTCPReceiver::FindOrCreateTmmbrInfo( // Create or find receive information. TmmbrInformation* tmmbr_info = &tmmbr_infos_[remote_ssrc]; // Update that this remote is alive. - tmmbr_info->last_time_received_ms = clock_->TimeInMilliseconds(); + tmmbr_info->last_time_received = clock_->CurrentTime(); return tmmbr_info; } void RTCPReceiver::UpdateTmmbrRemoteIsAlive(uint32_t remote_ssrc) { auto tmmbr_it = tmmbr_infos_.find(remote_ssrc); if (tmmbr_it != tmmbr_infos_.end()) - tmmbr_it->second.last_time_received_ms = clock_->TimeInMilliseconds(); + tmmbr_it->second.last_time_received = clock_->CurrentTime(); } RTCPReceiver::TmmbrInformation* RTCPReceiver::GetTmmbrInformation( @@ -670,31 +671,30 @@ bool RTCPReceiver::RtcpRrSequenceNumberTimeout() { bool RTCPReceiver::UpdateTmmbrTimers() { MutexLock lock(&rtcp_receiver_lock_); - int64_t now_ms = clock_->TimeInMilliseconds(); - int64_t timeout_ms = now_ms - kTmmbrTimeoutIntervalMs; + Timestamp timeout = clock_->CurrentTime() - kTmmbrTimeoutInterval; - if (oldest_tmmbr_info_ms_ >= timeout_ms) + if (oldest_tmmbr_info_ >= timeout) return false; bool update_bounding_set = false; - oldest_tmmbr_info_ms_ = -1; + oldest_tmmbr_info_ = Timestamp::MinusInfinity(); for (auto tmmbr_it = tmmbr_infos_.begin(); tmmbr_it != tmmbr_infos_.end();) { TmmbrInformation* tmmbr_info = &tmmbr_it->second; - if (tmmbr_info->last_time_received_ms > 0) { - if (tmmbr_info->last_time_received_ms < timeout_ms) { + if (tmmbr_info->last_time_received > Timestamp::Zero()) { + if (tmmbr_info->last_time_received < timeout) { // No rtcp packet for the last 5 regular intervals, reset limitations. tmmbr_info->tmmbr.clear(); // Prevent that we call this over and over again. 
- tmmbr_info->last_time_received_ms = 0; + tmmbr_info->last_time_received = Timestamp::Zero(); // Send new TMMBN to all channels using the default codec. update_bounding_set = true; - } else if (oldest_tmmbr_info_ms_ == -1 || - tmmbr_info->last_time_received_ms < oldest_tmmbr_info_ms_) { - oldest_tmmbr_info_ms_ = tmmbr_info->last_time_received_ms; + } else if (oldest_tmmbr_info_ == Timestamp::MinusInfinity() || + tmmbr_info->last_time_received < oldest_tmmbr_info_) { + oldest_tmmbr_info_ = tmmbr_info->last_time_received; } ++tmmbr_it; } else if (tmmbr_info->ready_for_delete) { - // When we dont have a last_time_received_ms and the object is marked + // When we dont have a `last_time_received` and the object is marked // ready_for_delete it's removed from the map. tmmbr_it = tmmbr_infos_.erase(tmmbr_it); } else { @@ -943,9 +943,9 @@ bool RTCPReceiver::HandleTmmbr(const CommonHeader& rtcp_block, auto* entry = &tmmbr_info->tmmbr[sender_ssrc]; entry->tmmbr_item = rtcp::TmmbItem(sender_ssrc, request.bitrate_bps(), request.packet_overhead()); - // FindOrCreateTmmbrInfo always sets `last_time_received_ms` to - // `clock_->TimeInMilliseconds()`. - entry->last_updated_ms = tmmbr_info->last_time_received_ms; + // FindOrCreateTmmbrInfo always sets `last_time_received` to + // `clock_->CurrentTime()`. + entry->last_updated = tmmbr_info->last_time_received; packet_information->packet_type_flags |= kRtcpTmmbr; break; @@ -1016,7 +1016,7 @@ bool RTCPReceiver::HandleFir(const CommonHeader& rtcp_block, if (fir.requests().empty()) return true; - const int64_t now_ms = clock_->TimeInMilliseconds(); + const Timestamp now = clock_->CurrentTime(); for (const rtcp::Fir::Request& fir_request : fir.requests()) { // Is it our sender that is requested to generate a new keyframe. if (local_media_ssrc() != fir_request.ssrc) @@ -1024,20 +1024,20 @@ bool RTCPReceiver::HandleFir(const CommonHeader& rtcp_block, ++packet_type_counter_.fir_packets; - auto inserted = last_fir_.insert(std::make_pair( - fir.sender_ssrc(), LastFirStatus(now_ms, fir_request.seq_nr))); - if (!inserted.second) { // There was already an entry. - LastFirStatus* last_fir = &inserted.first->second; + auto [it, inserted] = + last_fir_.try_emplace(fir.sender_ssrc(), now, fir_request.seq_nr); + if (!inserted) { // There was already an entry. + LastFirStatus* last_fir = &it->second; // Check if we have reported this FIRSequenceNumber before. if (fir_request.seq_nr == last_fir->sequence_number) continue; // Sanity: don't go crazy with the callbacks. - if (now_ms - last_fir->request_ms < kRtcpMinFrameLengthMs) + if (now - last_fir->request < kRtcpMinFrameLength) continue; - last_fir->request_ms = now_ms; + last_fir->request = now; last_fir->sequence_number = fir_request.seq_nr; } // Received signal that we need to send a new key frame. @@ -1190,12 +1190,11 @@ std::vector RTCPReceiver::TmmbrReceived() { MutexLock lock(&rtcp_receiver_lock_); std::vector candidates; - int64_t now_ms = clock_->TimeInMilliseconds(); - int64_t timeout_ms = now_ms - kTmmbrTimeoutIntervalMs; + Timestamp timeout = clock_->CurrentTime() - kTmmbrTimeoutInterval; for (auto& kv : tmmbr_infos_) { for (auto it = kv.second.tmmbr.begin(); it != kv.second.tmmbr.end();) { - if (it->second.last_updated_ms < timeout_ms) { + if (it->second.last_updated < timeout) { // Erase timeout entries. 
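With the receiver bookkeeping now held as Timestamps, the TMMBR timeout arithmetic above compares unit types directly instead of subtracting millisecond integers. A minimal sketch of that check, reusing the kTmmbrTimeoutInterval constant introduced earlier in this file's hunk:

#include "api/units/time_delta.h"
#include "api/units/timestamp.h"

constexpr webrtc::TimeDelta kTmmbrTimeoutInterval = webrtc::TimeDelta::Seconds(25);

bool TmmbrEntryTimedOut(webrtc::Timestamp last_time_received,
                        webrtc::Timestamp now) {
  // Entries already reset to Timestamp::Zero() are skipped; anything older
  // than `now - kTmmbrTimeoutInterval` is considered stale.
  webrtc::Timestamp timeout = now - kTmmbrTimeoutInterval;
  return last_time_received > webrtc::Timestamp::Zero() &&
         last_time_received < timeout;
}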
it = kv.second.tmmbr.erase(it); } else { diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtcp_receiver.h b/third_party/libwebrtc/modules/rtp_rtcp/source/rtcp_receiver.h index 6e6d82dcf40c..7fc541585cf7 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtcp_receiver.h +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtcp_receiver.h @@ -20,6 +20,7 @@ #include "api/array_view.h" #include "api/sequence_checker.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/include/rtcp_statistics.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" @@ -210,10 +211,10 @@ class RTCPReceiver final { struct TmmbrInformation { struct TimedTmmbrItem { rtcp::TmmbItem tmmbr_item; - int64_t last_updated_ms; + Timestamp last_updated = Timestamp::Zero(); }; - int64_t last_time_received_ms = 0; + Timestamp last_time_received = Timestamp::Zero(); bool ready_for_delete = false; @@ -238,9 +239,9 @@ class RTCPReceiver final { }; struct LastFirStatus { - LastFirStatus(int64_t now_ms, uint8_t sequence_number) - : request_ms(now_ms), sequence_number(sequence_number) {} - int64_t request_ms; + LastFirStatus(Timestamp now, uint8_t sequence_number) + : request(now), sequence_number(sequence_number) {} + Timestamp request; uint8_t sequence_number; }; @@ -387,7 +388,7 @@ class RTCPReceiver final { bool xr_rrtr_status_ RTC_GUARDED_BY(rtcp_receiver_lock_); absl::optional xr_rr_rtt_; - int64_t oldest_tmmbr_info_ms_ RTC_GUARDED_BY(rtcp_receiver_lock_); + Timestamp oldest_tmmbr_info_ RTC_GUARDED_BY(rtcp_receiver_lock_); // Mapped by remote ssrc. flat_map tmmbr_infos_ RTC_GUARDED_BY(rtcp_receiver_lock_); @@ -421,7 +422,7 @@ class RTCPReceiver final { RtcpNackStats nack_stats_; size_t num_skipped_packets_; - int64_t last_skipped_packets_warning_ms_; + Timestamp last_skipped_packets_warning_; }; } // namespace webrtc #endif // MODULES_RTP_RTCP_SOURCE_RTCP_RECEIVER_H_ diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtcp_sender.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtcp_sender.cc index c62c65506a76..c14332aca652 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtcp_sender.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtcp_sender.cc @@ -242,7 +242,7 @@ int32_t RTCPSender::SendLossNotification(const FeedbackState& feedback_state, bool buffering_allowed) { int32_t error_code = -1; auto callback = [&](rtc::ArrayView packet) { - transport_->SendRtcp(packet.data(), packet.size()); + transport_->SendRtcp(packet); error_code = 0; if (event_log_) { event_log_->Log(std::make_unique(packet)); @@ -282,7 +282,7 @@ void RTCPSender::SetRemb(int64_t bitrate_bps, std::vector ssrcs) { RTC_CHECK_GE(bitrate_bps, 0); MutexLock lock(&mutex_rtcp_sender_); if (method_ == RtcpMode::kOff) { - RTC_LOG(LS_WARNING) << "Can't send rtcp if it is disabled."; + RTC_LOG(LS_WARNING) << "Can't send RTCP if it is disabled."; return; } remb_bitrate_ = bitrate_bps; @@ -659,7 +659,7 @@ int32_t RTCPSender::SendRTCP(const FeedbackState& feedback_state, const uint16_t* nack_list) { int32_t error_code = -1; auto callback = [&](rtc::ArrayView packet) { - if (transport_->SendRtcp(packet.data(), packet.size())) { + if (transport_->SendRtcp(packet)) { error_code = 0; if (event_log_) { event_log_->Log(std::make_unique(packet)); @@ -688,7 +688,7 @@ absl::optional RTCPSender::ComputeCompoundRTCPPacket( const uint16_t* nack_list, PacketSender& sender) { if (method_ == RtcpMode::kOff) { - RTC_LOG(LS_WARNING) << "Can't 
send rtcp if it is disabled."; + RTC_LOG(LS_WARNING) << "Can't send RTCP if it is disabled."; return -1; } // Add the flag as volatile. Non volatile entries will not be overwritten. @@ -877,7 +877,7 @@ void RTCPSender::SetVideoBitrateAllocation( const VideoBitrateAllocation& bitrate) { MutexLock lock(&mutex_rtcp_sender_); if (method_ == RtcpMode::kOff) { - RTC_LOG(LS_WARNING) << "Can't send rtcp if it is disabled."; + RTC_LOG(LS_WARNING) << "Can't send RTCP if it is disabled."; return; } // Check if this allocation is first ever, or has a different set of @@ -929,7 +929,7 @@ void RTCPSender::SendCombinedRtcpPacket( { MutexLock lock(&mutex_rtcp_sender_); if (method_ == RtcpMode::kOff) { - RTC_LOG(LS_WARNING) << "Can't send rtcp if it is disabled."; + RTC_LOG(LS_WARNING) << "Can't send RTCP if it is disabled."; return; } @@ -938,7 +938,7 @@ void RTCPSender::SendCombinedRtcpPacket( } RTC_DCHECK_LE(max_packet_size, IP_PACKET_SIZE); auto callback = [&](rtc::ArrayView packet) { - if (transport_->SendRtcp(packet.data(), packet.size())) { + if (transport_->SendRtcp(packet)) { if (event_log_) event_log_->Log(std::make_unique(packet)); } diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtcp_transceiver_impl.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtcp_transceiver_impl.cc index 69ecddc35420..625cb7fefc6a 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtcp_transceiver_impl.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtcp_transceiver_impl.cc @@ -15,6 +15,7 @@ #include "absl/algorithm/container.h" #include "absl/memory/memory.h" +#include "absl/types/optional.h" #include "api/video/video_bitrate_allocation.h" #include "modules/rtp_rtcp/include/receive_statistics.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" @@ -365,7 +366,7 @@ void RtcpTransceiverImpl::HandleReportBlocks( Timestamp::Millis(now_ntp.ToMs() - rtc::kNtpJan1970Millisecs); for (const rtcp::ReportBlock& block : rtcp_report_blocks) { - std::optional rtt; + absl::optional rtt; if (block.last_sr() != 0) { rtt = CompactNtpRttToTimeDelta( receive_time_ntp - block.delay_since_last_sr() - block.last_sr()); diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc index de29fd2075f5..5f32cb7b39bb 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc @@ -505,7 +505,11 @@ bool VideoContentTypeExtension::Parse(rtc::ArrayView data, VideoContentType* content_type) { if (data.size() == 1 && videocontenttypehelpers::IsValidContentType(data[0])) { - *content_type = static_cast(data[0]); + // Only the lowest bit of ContentType has a defined meaning. + // Due to previous, now removed, usage of 5 more bits, values with + // those bits set are accepted as valid, but we mask them out before + // converting to a VideoContentType. 
+ *content_type = static_cast(data[0] & 0x1); return true; } return false; diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_packet.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_packet.cc index fd2f5c5ae83a..7181b303e16d 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_packet.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_packet.cc @@ -406,10 +406,6 @@ uint8_t* RtpPacket::AllocatePayload(size_t size_bytes) { uint8_t* RtpPacket::SetPayloadSize(size_t size_bytes) { RTC_DCHECK_EQ(padding_size_, 0); - if (payload_offset_ + size_bytes > capacity()) { - RTC_LOG(LS_WARNING) << "Cannot set payload, not enough space in buffer."; - return nullptr; - } payload_size_ = size_bytes; buffer_.SetSize(payload_offset_ + payload_size_); return WriteAt(payload_offset_); diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_packet.h b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_packet.h index 1db4a13017fa..e91ec6368bf1 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_packet.h +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_packet.h @@ -153,8 +153,11 @@ class RtpPacket { // Returns view of the raw extension or empty view on failure. rtc::ArrayView FindExtension(ExtensionType type) const; - // Reserve size_bytes for payload. Returns nullptr on failure. + // Returns pointer to the payload of size at least `size_bytes`. + // Keeps original payload, if any. If `size_bytes` is larger than current + // `payload_size()`, remaining bytes are uninitialized. uint8_t* SetPayloadSize(size_t size_bytes); + // Same as SetPayloadSize but doesn't guarantee to keep current payload. uint8_t* AllocatePayload(size_t size_bytes); diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc index 38e4a736f98e..bafa336e1537 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc @@ -39,7 +39,7 @@ namespace webrtc { namespace { const int64_t kRtpRtcpRttProcessTimeMs = 1000; const int64_t kRtpRtcpBitrateProcessTimeMs = 10; -const int64_t kDefaultExpectedRetransmissionTimeMs = 125; +constexpr TimeDelta kDefaultExpectedRetransmissionTime = TimeDelta::Millis(125); } // namespace ModuleRtpRtcpImpl::RtpSenderContext::RtpSenderContext( @@ -475,17 +475,17 @@ absl::optional ModuleRtpRtcpImpl::LastRtt() const { return rtt; } -int64_t ModuleRtpRtcpImpl::ExpectedRetransmissionTimeMs() const { +TimeDelta ModuleRtpRtcpImpl::ExpectedRetransmissionTime() const { int64_t expected_retransmission_time_ms = rtt_ms(); if (expected_retransmission_time_ms > 0) { - return expected_retransmission_time_ms; + return TimeDelta::Millis(expected_retransmission_time_ms); } // No rtt available (`kRtpRtcpRttProcessTimeMs` not yet passed?), so try to // poll avg_rtt_ms directly from rtcp receiver. if (absl::optional rtt = rtcp_receiver_.AverageRtt()) { - return rtt->ms(); + return *rtt; } - return kDefaultExpectedRetransmissionTimeMs; + return kDefaultExpectedRetransmissionTime; } // Force a send of an RTCP packet. diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h index e7bb1095271e..0b1266a2db89 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h @@ -172,7 +172,7 @@ class ABSL_DEPRECATED("") ModuleRtpRtcpImpl // Get RoundTripTime. 
absl::optional LastRtt() const override; - int64_t ExpectedRetransmissionTimeMs() const override; + TimeDelta ExpectedRetransmissionTime() const override; // Force a send of an RTCP packet. // Normal SR and RR are triggered via the process function. diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc index 80d388a4fda2..6bd172317dc0 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc @@ -40,8 +40,7 @@ namespace webrtc { namespace { -const int64_t kDefaultExpectedRetransmissionTimeMs = 125; - +constexpr TimeDelta kDefaultExpectedRetransmissionTime = TimeDelta::Millis(125); constexpr TimeDelta kRttUpdateInterval = TimeDelta::Millis(1000); RTCPSender::Configuration AddRtcpSendEvaluationCallback( @@ -480,17 +479,17 @@ absl::optional ModuleRtpRtcpImpl2::LastRtt() const { return rtt; } -int64_t ModuleRtpRtcpImpl2::ExpectedRetransmissionTimeMs() const { +TimeDelta ModuleRtpRtcpImpl2::ExpectedRetransmissionTime() const { int64_t expected_retransmission_time_ms = rtt_ms(); if (expected_retransmission_time_ms > 0) { - return expected_retransmission_time_ms; + return TimeDelta::Millis(expected_retransmission_time_ms); } // No rtt available (`kRttUpdateInterval` not yet passed?), so try to // poll avg_rtt_ms directly from rtcp receiver. if (absl::optional rtt = rtcp_receiver_.AverageRtt()) { - return rtt->ms(); + return *rtt; } - return kDefaultExpectedRetransmissionTimeMs; + return kDefaultExpectedRetransmissionTime; } // Force a send of an RTCP packet. diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.h b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.h index e9da4d4f2315..54ca61a70565 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.h +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2.h @@ -183,7 +183,7 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, // Get RoundTripTime. absl::optional LastRtt() const override; - int64_t ExpectedRetransmissionTimeMs() const override; + TimeDelta ExpectedRetransmissionTime() const override; // Force a send of an RTCP packet. 
// Normal SR and RR are triggered via the task queue that's current when this diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc index 6f14b4f3fc2b..7ae953784b19 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc @@ -162,10 +162,10 @@ class RtpRtcpModule : public RtcpPacketTypeCounterObserver, public SendPacketObserver { public: struct SentPacket { - SentPacket(uint16_t packet_id, int64_t capture_time_ms, uint32_t ssrc) - : packet_id(packet_id), capture_time_ms(capture_time_ms), ssrc(ssrc) {} + SentPacket(uint16_t packet_id, Timestamp capture_time, uint32_t ssrc) + : packet_id(packet_id), capture_time(capture_time), ssrc(ssrc) {} uint16_t packet_id; - int64_t capture_time_ms; + Timestamp capture_time; uint32_t ssrc; }; @@ -198,9 +198,9 @@ class RtpRtcpModule : public RtcpPacketTypeCounterObserver, } void OnSendPacket(uint16_t packet_id, - int64_t capture_time_ms, + Timestamp capture_time, uint32_t ssrc) override { - last_sent_packet_.emplace(packet_id, capture_time_ms, ssrc); + last_sent_packet_.emplace(packet_id, capture_time, ssrc); } absl::optional last_sent_packet() const { @@ -360,9 +360,10 @@ class RtpRtcpImpl2Test : public ::testing::Test { const uint8_t payload[100] = {0}; bool success = module->impl_->OnSendingRtpFrame(0, 0, kPayloadType, true); - success &= sender->SendVideo(kPayloadType, VideoCodecType::kVideoCodecVP8, - rtp_timestamp, capture_time_ms, payload, - sizeof(payload), rtp_video_header, 0, {}); + success &= sender->SendVideo( + kPayloadType, VideoCodecType::kVideoCodecVP8, rtp_timestamp, + Timestamp::Millis(capture_time_ms), payload, sizeof(payload), + rtp_video_header, TimeDelta::Zero(), {}); return success; } @@ -983,16 +984,15 @@ TEST_F(RtpRtcpImpl2Test, AssignsTransmissionTimeOffset) { TEST_F(RtpRtcpImpl2Test, PropagatesSentPacketInfo) { sender_.RegisterHeaderExtension(TransportSequenceNumber::Uri(), kTransportSequenceNumberExtensionId); - int64_t now_ms = time_controller_.GetClock()->TimeInMilliseconds(); + Timestamp now = time_controller_.GetClock()->CurrentTime(); EXPECT_TRUE(SendFrame(&sender_, sender_video_.get(), kBaseLayerTid)); - EXPECT_THAT( - sender_.last_sent_packet(), - Optional( - AllOf(Field(&RtpRtcpModule::SentPacket::packet_id, - Eq(sender_.last_packet() - .GetExtension())), - Field(&RtpRtcpModule::SentPacket::capture_time_ms, Eq(now_ms)), - Field(&RtpRtcpModule::SentPacket::ssrc, Eq(kSenderSsrc))))); + EXPECT_THAT(sender_.last_sent_packet(), + Optional(AllOf( + Field(&RtpRtcpModule::SentPacket::packet_id, + Eq(sender_.last_packet() + .GetExtension())), + Field(&RtpRtcpModule::SentPacket::capture_time, Eq(now)), + Field(&RtpRtcpModule::SentPacket::ssrc, Eq(kSenderSsrc))))); } TEST_F(RtpRtcpImpl2Test, GeneratesFlexfec) { diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc index 4875ee96e020..079b7c7aec1e 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc @@ -232,9 +232,9 @@ class RtpRtcpImplTest : public ::testing::Test { const uint8_t payload[100] = {0}; EXPECT_TRUE(module->impl_->OnSendingRtpFrame(0, 0, kPayloadType, true)); - EXPECT_TRUE(sender->SendVideo(kPayloadType, VideoCodecType::kVideoCodecVP8, - 0, 0, 
payload, sizeof(payload), - rtp_video_header, 0, {})); + EXPECT_TRUE(sender->SendVideo( + kPayloadType, VideoCodecType::kVideoCodecVP8, 0, clock_.CurrentTime(), + payload, sizeof(payload), rtp_video_header, TimeDelta::Zero(), {})); } void IncomingRtcpNack(const RtpRtcpModule* module, uint16_t sequence_number) { diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h index b0a85d00c68b..2614461b2e0c 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h @@ -20,6 +20,7 @@ #include "api/field_trials_view.h" #include "api/frame_transformer_interface.h" #include "api/scoped_refptr.h" +#include "api/units/time_delta.h" #include "api/video/video_bitrate_allocation.h" #include "modules/rtp_rtcp/include/receive_statistics.h" #include "modules/rtp_rtcp/include/report_block_data.h" @@ -378,7 +379,7 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { virtual absl::optional LastRtt() const = 0; // Returns the estimated RTT, with fallback to a default value. - virtual int64_t ExpectedRetransmissionTimeMs() const = 0; + virtual TimeDelta ExpectedRetransmissionTime() const = 0; // Forces a send of a RTCP packet. Periodic SR and RR are triggered via the // process function. diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender.cc index 13cb30ee7f54..bdb79fe8ccc9 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender.cc @@ -514,15 +514,8 @@ size_t RTPSender::ExpectedPerPacketOverhead() const { std::unique_ptr RTPSender::AllocatePacket() const { MutexLock lock(&send_mutex_); - // TODO(danilchap): Find better motivator and value for extra capacity. - // RtpPacketizer might slightly miscalulate needed size, - // SRTP may benefit from extra space in the buffer and do encryption in place - // saving reallocation. - // While sending slightly oversized packet increase chance of dropped packet, - // it is better than crash on drop packet without trying to send it. - static constexpr int kExtraCapacity = 16; - auto packet = std::make_unique( - &rtp_header_extension_map_, max_packet_size_ + kExtraCapacity); + auto packet = std::make_unique(&rtp_header_extension_map_, + max_packet_size_); packet->SetSsrc(ssrc_); packet->SetCsrcs(csrcs_); @@ -729,8 +722,7 @@ std::unique_ptr RTPSender::BuildRtxPacket( uint8_t* rtx_payload = rtx_packet->AllocatePayload(packet.payload_size() + kRtxHeaderSize); - if (rtx_payload == nullptr) - return nullptr; + RTC_CHECK(rtx_payload); // Add OSN (original sequence number). 
ByteWriter::WriteBigEndian(rtx_payload, packet.SequenceNumber()); diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender.h b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender.h index 1b053b48023d..875d289a08e3 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender.h +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender.h @@ -30,7 +30,6 @@ #include "modules/rtp_rtcp/source/rtp_rtcp_config.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "rtc_base/random.h" -#include "rtc_base/rate_statistics.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc index 47b7b7910bd6..a0f1af5243a7 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc @@ -288,8 +288,7 @@ bool RTPSenderAudio::SendAudio(AudioFrameType frame_type, } uint8_t* payload = packet->AllocatePayload(payload_size); - if (!payload) // Too large payload buffer. - return false; + RTC_CHECK(payload); memcpy(payload, payload_data, payload_size); { diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc index 0adb4362865e..f5509009bb54 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc @@ -24,10 +24,9 @@ namespace webrtc { namespace { constexpr uint32_t kTimestampTicksPerMs = 90; constexpr TimeDelta kSendSideDelayWindow = TimeDelta::Seconds(1); -constexpr int kBitrateStatisticsWindowMs = 1000; +constexpr TimeDelta kBitrateStatisticsWindow = TimeDelta::Seconds(1); constexpr size_t kRtpSequenceNumberMapMaxEntries = 1 << 13; -constexpr TimeDelta kUpdateInterval = - TimeDelta::Millis(kBitrateStatisticsWindowMs); +constexpr TimeDelta kUpdateInterval = kBitrateStatisticsWindow; } // namespace RtpSenderEgress::NonPacedPacketSender::NonPacedPacketSender( @@ -106,8 +105,7 @@ RtpSenderEgress::RtpSenderEgress(const RtpRtcpInterface::Configuration& config, timestamp_offset_(0), max_delay_it_(send_delays_.end()), sum_delays_(TimeDelta::Zero()), - send_rates_(kNumMediaTypes, - {kBitrateStatisticsWindowMs, RateStatistics::kBpsScale}), + send_rates_(kNumMediaTypes, BitrateTracker(kBitrateStatisticsWindow)), rtp_sequence_number_map_(need_rtp_packet_infos_ ? 
std::make_unique( kRtpSequenceNumberMapMaxEntries) @@ -304,8 +302,7 @@ RtpSendRates RtpSenderEgress::GetSendRates(Timestamp now) const { RtpSendRates current_rates; for (size_t i = 0; i < kNumMediaTypes; ++i) { RtpPacketMediaType type = static_cast(i); - current_rates[type] = - DataRate::BitsPerSec(send_rates_[i].Rate(now.ms()).value_or(0)); + current_rates[type] = send_rates_[i].Rate(now).value_or(DataRate::Zero()); } return current_rates; } @@ -516,7 +513,7 @@ void RtpSenderEgress::UpdateOnSendPacket(int packet_id, return; } - send_packet_observer_->OnSendPacket(packet_id, capture_time.ms(), ssrc); + send_packet_observer_->OnSendPacket(packet_id, capture_time, ssrc); } bool RtpSenderEgress::SendPacketToNetwork(const RtpPacketToSend& packet, @@ -525,7 +522,7 @@ bool RtpSenderEgress::SendPacketToNetwork(const RtpPacketToSend& packet, RTC_DCHECK_RUN_ON(worker_queue_); int bytes_sent = -1; if (transport_) { - bytes_sent = transport_->SendRtp(packet.data(), packet.size(), options) + bytes_sent = transport_->SendRtp(packet, options) ? static_cast(packet.size()) : -1; if (event_log_ && bytes_sent > 0) { @@ -564,7 +561,7 @@ void RtpSenderEgress::UpdateRtpStats(Timestamp now, } counters->transmitted.Add(counter); - send_rates_[static_cast(packet_type)].Update(packet_size, now.ms()); + send_rates_[static_cast(packet_type)].Update(packet_size, now); if (bitrate_callback_) { send_rates = GetSendRates(now); } diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_egress.h b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_egress.h index 7bb8f80efd4f..3e5b2b21c385 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_egress.h +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_egress.h @@ -32,7 +32,7 @@ #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" -#include "rtc_base/rate_statistics.h" +#include "rtc_base/bitrate_tracker.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/no_unique_address.h" #include "rtc_base/task_utils/repeating_task.h" @@ -170,7 +170,7 @@ class RtpSenderEgress { StreamDataCounters rtp_stats_ RTC_GUARDED_BY(worker_queue_); StreamDataCounters rtx_rtp_stats_ RTC_GUARDED_BY(worker_queue_); // One element per value in RtpPacketMediaType, with index matching value. 
- std::vector send_rates_ RTC_GUARDED_BY(worker_queue_); + std::vector send_rates_ RTC_GUARDED_BY(worker_queue_); absl::optional> pending_fec_params_ RTC_GUARDED_BY(worker_queue_); diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_egress_unittest.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_egress_unittest.cc index 9389dc77cf60..a793d5b011da 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_egress_unittest.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_egress_unittest.cc @@ -57,7 +57,7 @@ enum : int { class MockSendPacketObserver : public SendPacketObserver { public: - MOCK_METHOD(void, OnSendPacket, (uint16_t, int64_t, uint32_t), (override)); + MOCK_METHOD(void, OnSendPacket, (uint16_t, Timestamp, uint32_t), (override)); }; class MockTransportFeedbackObserver : public TransportFeedbackObserver { @@ -419,9 +419,9 @@ TEST_F(RtpSenderEgressTest, OnSendPacketUpdated) { TransportSequenceNumber::Uri()); const uint16_t kTransportSequenceNumber = 1; - EXPECT_CALL(send_packet_observer_, - OnSendPacket(kTransportSequenceNumber, - clock_->TimeInMilliseconds(), kSsrc)); + EXPECT_CALL( + send_packet_observer_, + OnSendPacket(kTransportSequenceNumber, clock_->CurrentTime(), kSsrc)); std::unique_ptr packet = BuildRtpPacket(); packet->SetExtension(kTransportSequenceNumber); sender->SendPacket(std::move(packet), PacedPacketInfo()); @@ -854,7 +854,7 @@ TEST_F(RtpSenderEgressTest, SendPacketUpdatesStats) { header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, TransportSequenceNumber::Uri()); - const int64_t capture_time_ms = clock_->TimeInMilliseconds(); + const Timestamp capture_time = clock_->CurrentTime(); std::unique_ptr video_packet = BuildRtpPacket(); video_packet->set_packet_type(RtpPacketMediaType::kVideo); @@ -882,7 +882,7 @@ TEST_F(RtpSenderEgressTest, SendPacketUpdatesStats) { EXPECT_CALL(send_side_delay_observer, SendSideDelayUpdated(kDiffMs, kDiffMs, kFlexFecSsrc)); - EXPECT_CALL(send_packet_observer_, OnSendPacket(1, capture_time_ms, kSsrc)); + EXPECT_CALL(send_packet_observer_, OnSendPacket(1, capture_time, kSsrc)); sender->SendPacket(std::move(video_packet), PacedPacketInfo()); @@ -891,7 +891,7 @@ TEST_F(RtpSenderEgressTest, SendPacketUpdatesStats) { sender->SendPacket(std::move(rtx_packet), PacedPacketInfo()); EXPECT_CALL(send_packet_observer_, - OnSendPacket(3, capture_time_ms, kFlexFecSsrc)); + OnSendPacket(3, capture_time, kFlexFecSsrc)); sender->SendPacket(std::move(fec_packet), PacedPacketInfo()); time_controller_.AdvanceTime(TimeDelta::Zero()); diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc index 61dde0fec6fd..8d57f39e98f7 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc @@ -66,8 +66,8 @@ const uint32_t kRtxSsrc = 12345; const uint32_t kFlexFecSsrc = 45678; const uint64_t kStartTime = 123456789; const uint8_t kPayloadData[] = {47, 11, 32, 93, 89}; -const int64_t kDefaultExpectedRetransmissionTimeMs = 125; -const uint32_t kTimestampTicksPerMs = 90; // 90kHz clock. +constexpr TimeDelta kDefaultExpectedRetransmissionTime = TimeDelta::Millis(125); +constexpr uint32_t kTimestampTicksPerMs = 90; // 90kHz clock. 
constexpr absl::string_view kMid = "mid"; constexpr absl::string_view kRid = "f"; constexpr bool kMarkerBit = true; @@ -1340,12 +1340,12 @@ TEST_F(RtpSenderTest, MarksPacketsWithKeyframeStatus) { .Times(AtLeast(1)); RTPVideoHeader video_header; video_header.frame_type = VideoFrameType::kVideoFrameKey; - int64_t capture_time_ms = clock_->TimeInMilliseconds(); + Timestamp capture_time = clock_->CurrentTime(); EXPECT_TRUE(rtp_sender_video.SendVideo( kPayloadType, kCodecType, - capture_time_ms * kCaptureTimeMsToRtpTimestamp, capture_time_ms, + capture_time.ms() * kCaptureTimeMsToRtpTimestamp, capture_time, kPayloadData, sizeof(kPayloadData), video_header, - kDefaultExpectedRetransmissionTimeMs, {})); + kDefaultExpectedRetransmissionTime, {})); time_controller_.AdvanceTime(TimeDelta::Millis(33)); } @@ -1357,12 +1357,12 @@ TEST_F(RtpSenderTest, MarksPacketsWithKeyframeStatus) { .Times(AtLeast(1)); RTPVideoHeader video_header; video_header.frame_type = VideoFrameType::kVideoFrameDelta; - int64_t capture_time_ms = clock_->TimeInMilliseconds(); + Timestamp capture_time = clock_->CurrentTime(); EXPECT_TRUE(rtp_sender_video.SendVideo( kPayloadType, kCodecType, - capture_time_ms * kCaptureTimeMsToRtpTimestamp, capture_time_ms, + capture_time.ms() * kCaptureTimeMsToRtpTimestamp, capture_time, kPayloadData, sizeof(kPayloadData), video_header, - kDefaultExpectedRetransmissionTimeMs, {})); + kDefaultExpectedRetransmissionTime, {})); time_controller_.AdvanceTime(TimeDelta::Millis(33)); } diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video.cc index 1cbf0b8dc326..6542235e3911 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video.cc @@ -24,6 +24,9 @@ #include "absl/strings/match.h" #include "api/crypto/frame_encryptor_interface.h" #include "api/transport/rtp/dependency_descriptor.h" +#include "api/units/frequency.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/absolute_capture_time_sender.h" @@ -45,7 +48,8 @@ namespace webrtc { namespace { constexpr size_t kRedForFecHeaderLength = 1; -constexpr int64_t kMaxUnretransmittableFrameIntervalMs = 33 * 4; +constexpr TimeDelta kMaxUnretransmittableFrameInterval = + TimeDelta::Millis(33 * 4); void BuildRedPayload(const RtpPacketToSend& media_packet, RtpPacketToSend* red_packet) { @@ -94,20 +98,6 @@ bool IsBaseLayer(const RTPVideoHeader& video_header) { return true; } -[[maybe_unused]] const char* FrameTypeToString(VideoFrameType frame_type) { - switch (frame_type) { - case VideoFrameType::kEmptyFrame: - return "empty"; - case VideoFrameType::kVideoFrameKey: - return "video_key"; - case VideoFrameType::kVideoFrameDelta: - return "video_delta"; - default: - RTC_DCHECK_NOTREACHED(); - return ""; - } -} - bool IsNoopDelay(const VideoPlayoutDelay& delay) { return delay.min_ms == -1 && delay.max_ms == -1; } @@ -155,7 +145,7 @@ RTPSenderVideo::RTPSenderVideo(const Config& config) red_payload_type_(config.red_payload_type), fec_type_(config.fec_type), fec_overhead_bytes_(config.fec_overhead_bytes), - post_encode_overhead_bitrate_(1000, RateStatistics::kBpsScale), + post_encode_overhead_bitrate_(/*max_window_size=*/TimeDelta::Seconds(1)), frame_encryptor_(config.frame_encryptor), 
require_frame_encryption_(config.require_frame_encryption), generic_descriptor_auth_experiment_(!absl::StartsWith( @@ -196,8 +186,7 @@ void RTPSenderVideo::LogAndSendToNetwork( // unpacketized. if (packetized_payload_size >= encoder_output_size) { post_encode_overhead_bitrate_.Update( - packetized_payload_size - encoder_output_size, - clock_->TimeInMilliseconds()); + packetized_payload_size - encoder_output_size, clock_->CurrentTime()); } } @@ -468,32 +457,18 @@ void RTPSenderVideo::AddRtpHeaderExtensions(const RTPVideoHeader& video_header, } } -bool RTPSenderVideo::SendVideo( - int payload_type, - absl::optional codec_type, - uint32_t rtp_timestamp, - int64_t capture_time_ms, - rtc::ArrayView payload, - RTPVideoHeader video_header, - absl::optional expected_retransmission_time_ms) { - return SendVideo(payload_type, codec_type, rtp_timestamp, capture_time_ms, - payload, payload.size(), video_header, - expected_retransmission_time_ms, - /*csrcs=*/{}); -} - -bool RTPSenderVideo::SendVideo( - int payload_type, - absl::optional codec_type, - uint32_t rtp_timestamp, - int64_t capture_time_ms, - rtc::ArrayView payload, - size_t encoder_output_size, - RTPVideoHeader video_header, - absl::optional expected_retransmission_time_ms, - std::vector csrcs) { - TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", capture_time_ms, "Send", "type", - FrameTypeToString(video_header.frame_type)); +bool RTPSenderVideo::SendVideo(int payload_type, + absl::optional codec_type, + uint32_t rtp_timestamp, + Timestamp capture_time, + rtc::ArrayView payload, + size_t encoder_output_size, + RTPVideoHeader video_header, + TimeDelta expected_retransmission_time, + std::vector csrcs) { + TRACE_EVENT_ASYNC_STEP1( + "webrtc", "Video", capture_time.ms_or(0), "Send", "type", + std::string(VideoFrameTypeToString(video_header.frame_type))); RTC_CHECK_RUNS_SERIALIZED(&send_checker_); if (video_header.frame_type == VideoFrameType::kEmptyFrame) @@ -513,11 +488,11 @@ bool RTPSenderVideo::SendVideo( } const uint8_t temporal_id = GetTemporalId(video_header); // TODO(bugs.webrtc.org/10714): retransmission_settings_ should generally be - // replaced by expected_retransmission_time_ms.has_value(). + // replaced by expected_retransmission_time.IsFinite(). const bool allow_retransmission = - expected_retransmission_time_ms.has_value() && + expected_retransmission_time.IsFinite() && AllowRetransmission(temporal_id, retransmission_settings, - *expected_retransmission_time_ms); + expected_retransmission_time); MaybeUpdateCurrentPlayoutDelay(video_header); if (video_header.frame_type == VideoFrameType::kVideoFrameKey) { @@ -553,11 +528,6 @@ bool RTPSenderVideo::SendVideo( packet_capacity -= rtp_sender_->RtxPacketOverhead(); } - absl::optional capture_time; - if (capture_time_ms > 0) { - capture_time = Timestamp::Millis(capture_time_ms); - } - rtp_sender_->SetCsrcs(std::move(csrcs)); std::unique_ptr single_packet = @@ -565,16 +535,16 @@ bool RTPSenderVideo::SendVideo( RTC_DCHECK_LE(packet_capacity, single_packet->capacity()); single_packet->SetPayloadType(payload_type); single_packet->SetTimestamp(rtp_timestamp); - if (capture_time) - single_packet->set_capture_time(*capture_time); + if (capture_time.IsFinite()) + single_packet->set_capture_time(capture_time); // Construct the absolute capture time extension if not provided. 
if (!video_header.absolute_capture_time.has_value() && - capture_time.has_value()) { + capture_time.IsFinite()) { video_header.absolute_capture_time.emplace(); video_header.absolute_capture_time->absolute_capture_timestamp = Int64MsToUQ32x32( - clock_->ConvertTimestampToNtpTime(*capture_time).ToMs()); + clock_->ConvertTimestampToNtpTime(capture_time).ToMs()); video_header.absolute_capture_time->estimated_capture_clock_offset = 0; } @@ -777,49 +747,46 @@ bool RTPSenderVideo::SendVideo( send_allocation_ = SendVideoLayersAllocation::kDontSend; } - TRACE_EVENT_ASYNC_END1("webrtc", "Video", capture_time_ms, "timestamp", + TRACE_EVENT_ASYNC_END1("webrtc", "Video", capture_time.ms_or(0), "timestamp", rtp_timestamp); return true; } -bool RTPSenderVideo::SendEncodedImage( - int payload_type, - absl::optional codec_type, - uint32_t rtp_timestamp, - const EncodedImage& encoded_image, - RTPVideoHeader video_header, - absl::optional expected_retransmission_time_ms) { +bool RTPSenderVideo::SendEncodedImage(int payload_type, + absl::optional codec_type, + uint32_t rtp_timestamp, + const EncodedImage& encoded_image, + RTPVideoHeader video_header, + TimeDelta expected_retransmission_time) { if (frame_transformer_delegate_) { // The frame will be sent async once transformed. return frame_transformer_delegate_->TransformFrame( payload_type, codec_type, rtp_timestamp, encoded_image, video_header, - expected_retransmission_time_ms); + expected_retransmission_time); } return SendVideo(payload_type, codec_type, rtp_timestamp, - encoded_image.capture_time_ms_, encoded_image, + encoded_image.CaptureTime(), encoded_image, encoded_image.size(), video_header, - expected_retransmission_time_ms, rtp_sender_->Csrcs()); + expected_retransmission_time, rtp_sender_->Csrcs()); } DataRate RTPSenderVideo::PostEncodeOverhead() const { MutexLock lock(&stats_mutex_); - return DataRate::BitsPerSec( - post_encode_overhead_bitrate_.Rate(clock_->TimeInMilliseconds()) - .value_or(0)); + return post_encode_overhead_bitrate_.Rate(clock_->CurrentTime()) + .value_or(DataRate::Zero()); } bool RTPSenderVideo::AllowRetransmission( uint8_t temporal_id, int32_t retransmission_settings, - int64_t expected_retransmission_time_ms) { + TimeDelta expected_retransmission_time) { if (retransmission_settings == kRetransmitOff) return false; MutexLock lock(&stats_mutex_); // Media packet storage. if ((retransmission_settings & kConditionallyRetransmitHigherLayers) && - UpdateConditionalRetransmit(temporal_id, - expected_retransmission_time_ms)) { + UpdateConditionalRetransmit(temporal_id, expected_retransmission_time)) { retransmission_settings |= kRetransmitHigherLayers; } @@ -852,39 +819,37 @@ uint8_t RTPSenderVideo::GetTemporalId(const RTPVideoHeader& header) { bool RTPSenderVideo::UpdateConditionalRetransmit( uint8_t temporal_id, - int64_t expected_retransmission_time_ms) { - int64_t now_ms = clock_->TimeInMilliseconds(); + TimeDelta expected_retransmission_time) { + Timestamp now = clock_->CurrentTime(); // Update stats for any temporal layer. TemporalLayerStats* current_layer_stats = &frame_stats_by_temporal_layer_[temporal_id]; - current_layer_stats->frame_rate_fp1000s.Update(1, now_ms); - int64_t tl_frame_interval = now_ms - current_layer_stats->last_frame_time_ms; - current_layer_stats->last_frame_time_ms = now_ms; + current_layer_stats->frame_rate.Update(now); + TimeDelta tl_frame_interval = now - current_layer_stats->last_frame_time; + current_layer_stats->last_frame_time = now; // Conditional retransmit only applies to upper layers. 
if (temporal_id != kNoTemporalIdx && temporal_id > 0) { - if (tl_frame_interval >= kMaxUnretransmittableFrameIntervalMs) { + if (tl_frame_interval >= kMaxUnretransmittableFrameInterval) { // Too long since a retransmittable frame in this layer, enable NACK // protection. return true; } else { // Estimate when the next frame of any lower layer will be sent. - const int64_t kUndefined = std::numeric_limits::max(); - int64_t expected_next_frame_time = kUndefined; + Timestamp expected_next_frame_time = Timestamp::PlusInfinity(); for (int i = temporal_id - 1; i >= 0; --i) { TemporalLayerStats* stats = &frame_stats_by_temporal_layer_[i]; - absl::optional rate = stats->frame_rate_fp1000s.Rate(now_ms); - if (rate) { - int64_t tl_next = stats->last_frame_time_ms + 1000000 / *rate; - if (tl_next - now_ms > -expected_retransmission_time_ms && + absl::optional rate = stats->frame_rate.Rate(now); + if (rate > Frequency::Zero()) { + Timestamp tl_next = stats->last_frame_time + 1 / *rate; + if (tl_next - now > -expected_retransmission_time && tl_next < expected_next_frame_time) { expected_next_frame_time = tl_next; } } } - if (expected_next_frame_time == kUndefined || - expected_next_frame_time - now_ms > expected_retransmission_time_ms) { + if (expected_next_frame_time - now > expected_retransmission_time) { // The next frame in a lower layer is expected at a later time (or // unable to tell due to lack of data) than a retransmission is // estimated to be able to arrive, so allow this packet to be nacked. diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video.h b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video.h index 4da92d10ac55..acf4d94e2992 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video.h +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video.h @@ -24,6 +24,8 @@ #include "api/task_queue/task_queue_base.h" #include "api/task_queue/task_queue_factory.h" #include "api/transport/rtp/dependency_descriptor.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "api/video/video_codec_type.h" #include "api/video/video_frame_type.h" #include "api/video/video_layers_allocation.h" @@ -35,9 +37,10 @@ #include "modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/rtp_rtcp/source/video_fec_generator.h" +#include "rtc_base/bitrate_tracker.h" +#include "rtc_base/frequency_tracker.h" #include "rtc_base/one_time_event.h" #include "rtc_base/race_checker.h" -#include "rtc_base/rate_statistics.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -61,7 +64,7 @@ enum RetransmissionMode : uint8_t { class RTPSenderVideo : public RTPVideoFrameSenderInterface { public: - static constexpr int64_t kTLRateWindowSizeMs = 2500; + static constexpr TimeDelta kTLRateWindowSize = TimeDelta::Millis(2'500); struct Config { Config() = default; @@ -89,35 +92,27 @@ class RTPSenderVideo : public RTPVideoFrameSenderInterface { virtual ~RTPSenderVideo(); - // expected_retransmission_time_ms.has_value() -> retransmission allowed. - // `capture_time_ms` and `clock::CurrentTime` should be using the same epoch. + // `capture_time` and `clock::CurrentTime` should be using the same epoch. + // `expected_retransmission_time.IsFinite()` -> retransmission allowed. + // `encoder_output_size` is the size of the video frame as it came out of the + // video encoder, excluding any additional overhead. 
// Calls to this method are assumed to be externally serialized. bool SendVideo(int payload_type, absl::optional codec_type, uint32_t rtp_timestamp, - int64_t capture_time_ms, - rtc::ArrayView payload, - RTPVideoHeader video_header, - absl::optional expected_retransmission_time_ms); - // `encoder_output_size` is the size of the video frame as it came out of the - // video encoder, excluding any additional overhead. - bool SendVideo(int payload_type, - absl::optional codec_type, - uint32_t rtp_timestamp, - int64_t capture_time_ms, + Timestamp capture_time, rtc::ArrayView payload, size_t encoder_output_size, RTPVideoHeader video_header, - absl::optional expected_retransmission_time_ms, + TimeDelta expected_retransmission_time, std::vector csrcs) override; - bool SendEncodedImage( - int payload_type, - absl::optional codec_type, - uint32_t rtp_timestamp, - const EncodedImage& encoded_image, - RTPVideoHeader video_header, - absl::optional expected_retransmission_time_ms); + bool SendEncodedImage(int payload_type, + absl::optional codec_type, + uint32_t rtp_timestamp, + const EncodedImage& encoded_image, + RTPVideoHeader video_header, + TimeDelta expected_retransmission_time); // Configures video structures produced by encoder to send using the // dependency descriptor rtp header extension. Next call to SendVideo should @@ -157,18 +152,12 @@ class RTPSenderVideo : public RTPVideoFrameSenderInterface { static uint8_t GetTemporalId(const RTPVideoHeader& header); bool AllowRetransmission(uint8_t temporal_id, int32_t retransmission_settings, - int64_t expected_retransmission_time_ms); + TimeDelta expected_retransmission_time); private: struct TemporalLayerStats { - TemporalLayerStats() - : frame_rate_fp1000s(kTLRateWindowSizeMs, 1000 * 1000), - last_frame_time_ms(0) {} - // Frame rate, in frames per 1000 seconds. This essentially turns the fps - // value into a fixed point value with three decimals. Improves precision at - // low frame rates. - RateStatistics frame_rate_fp1000s; - int64_t last_frame_time_ms; + FrequencyTracker frame_rate{kTLRateWindowSize}; + Timestamp last_frame_time = Timestamp::Zero(); }; enum class SendVideoLayersAllocation { @@ -196,7 +185,7 @@ class RTPSenderVideo : public RTPVideoFrameSenderInterface { bool red_enabled() const { return red_payload_type_.has_value(); } bool UpdateConditionalRetransmit(uint8_t temporal_id, - int64_t expected_retransmission_time_ms) + TimeDelta expected_retransmission_time) RTC_EXCLUSIVE_LOCKS_REQUIRED(stats_mutex_); void MaybeUpdateCurrentPlayoutDelay(const RTPVideoHeader& header) @@ -238,7 +227,7 @@ class RTPSenderVideo : public RTPVideoFrameSenderInterface { const size_t fec_overhead_bytes_; // Per packet max FEC overhead. 
mutable Mutex stats_mutex_; - RateStatistics post_encode_overhead_bitrate_ RTC_GUARDED_BY(stats_mutex_); + BitrateTracker post_encode_overhead_bitrate_ RTC_GUARDED_BY(stats_mutex_); std::map frame_stats_by_temporal_layer_ RTC_GUARDED_BY(stats_mutex_); diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc index fb133ff73a56..ded31d22834e 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc @@ -24,16 +24,15 @@ namespace { class TransformableVideoSenderFrame : public TransformableVideoFrameInterface { public: - TransformableVideoSenderFrame( - const EncodedImage& encoded_image, - const RTPVideoHeader& video_header, - int payload_type, - absl::optional codec_type, - uint32_t rtp_timestamp, - absl::optional expected_retransmission_time_ms, - uint32_t ssrc, - std::vector csrcs, - const std::string& rid) + TransformableVideoSenderFrame(const EncodedImage& encoded_image, + const RTPVideoHeader& video_header, + int payload_type, + absl::optional codec_type, + uint32_t rtp_timestamp, + TimeDelta expected_retransmission_time, + uint32_t ssrc, + std::vector csrcs, + const std::string& rid) : encoded_data_(encoded_image.GetEncodedData()), pre_transform_payload_size_(encoded_image.size()), header_(video_header), @@ -41,9 +40,9 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface { payload_type_(payload_type), codec_type_(codec_type), timestamp_(rtp_timestamp), - capture_time_ms_(encoded_image.capture_time_ms_), + capture_time_(encoded_image.CaptureTime()), capture_time_identifier_(encoded_image.CaptureTimeIdentifier()), - expected_retransmission_time_ms_(expected_retransmission_time_ms), + expected_retransmission_time_(expected_retransmission_time), ssrc_(ssrc), csrcs_(csrcs), rid_(rid) { @@ -67,6 +66,8 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface { } uint32_t GetTimestamp() const override { return timestamp_; } + void SetRTPTimestamp(uint32_t timestamp) override { timestamp_ = timestamp; } + uint32_t GetSsrc() const override { return ssrc_; } bool IsKeyFrame() const override { @@ -89,13 +90,13 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface { const RTPVideoHeader& GetHeader() const { return header_; } uint8_t GetPayloadType() const override { return payload_type_; } absl::optional GetCodecType() const { return codec_type_; } - int64_t GetCaptureTimeMs() const { return capture_time_ms_; } + Timestamp GetCaptureTime() const { return capture_time_; } absl::optional GetCaptureTimeIdentifier() const override { return capture_time_identifier_; } - const absl::optional& GetExpectedRetransmissionTimeMs() const { - return expected_retransmission_time_ms_; + TimeDelta GetExpectedRetransmissionTime() const { + return expected_retransmission_time_; } Direction GetDirection() const override { return Direction::kSender; } @@ -109,10 +110,10 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface { const VideoFrameType frame_type_; const uint8_t payload_type_; const absl::optional codec_type_ = absl::nullopt; - const uint32_t timestamp_; - const int64_t capture_time_ms_; + uint32_t timestamp_; + const Timestamp capture_time_; const absl::optional capture_time_identifier_; - const absl::optional 
expected_retransmission_time_ms_; + const TimeDelta expected_retransmission_time_; uint32_t ssrc_; std::vector csrcs_; @@ -146,10 +147,10 @@ bool RTPSenderVideoFrameTransformerDelegate::TransformFrame( uint32_t rtp_timestamp, const EncodedImage& encoded_image, RTPVideoHeader video_header, - absl::optional expected_retransmission_time_ms) { + TimeDelta expected_retransmission_time) { frame_transformer_->Transform(std::make_unique( encoded_image, video_header, payload_type, codec_type, rtp_timestamp, - expected_retransmission_time_ms, ssrc_, csrcs_, rid_)); + expected_retransmission_time, ssrc_, csrcs_, rid_)); return true; } @@ -178,16 +179,15 @@ void RTPSenderVideoFrameTransformerDelegate::SendVideo( TransformableFrameInterface::Direction::kSender) { auto* transformed_video_frame = static_cast(transformed_frame.get()); - sender_->SendVideo( - transformed_video_frame->GetPayloadType(), - transformed_video_frame->GetCodecType(), - transformed_video_frame->GetTimestamp(), - transformed_video_frame->GetCaptureTimeMs(), - transformed_video_frame->GetData(), - transformed_video_frame->GetPreTransformPayloadSize(), - transformed_video_frame->GetHeader(), - transformed_video_frame->GetExpectedRetransmissionTimeMs(), - transformed_video_frame->Metadata().GetCsrcs()); + sender_->SendVideo(transformed_video_frame->GetPayloadType(), + transformed_video_frame->GetCodecType(), + transformed_video_frame->GetTimestamp(), + transformed_video_frame->GetCaptureTime(), + transformed_video_frame->GetData(), + transformed_video_frame->GetPreTransformPayloadSize(), + transformed_video_frame->GetHeader(), + transformed_video_frame->GetExpectedRetransmissionTime(), + transformed_video_frame->Metadata().GetCsrcs()); } else { auto* transformed_video_frame = static_cast(transformed_frame.get()); @@ -195,10 +195,11 @@ void RTPSenderVideoFrameTransformerDelegate::SendVideo( sender_->SendVideo( transformed_video_frame->GetPayloadType(), metadata.GetCodec(), transformed_video_frame->GetTimestamp(), - /*capture_time_ms=*/0, transformed_video_frame->GetData(), + /*capture_time=*/Timestamp::MinusInfinity(), + transformed_video_frame->GetData(), transformed_video_frame->GetData().size(), RTPVideoHeader::FromMetadata(metadata), - /*expected_retransmission_time_ms_=*/absl::nullopt, + /*expected_retransmission_time=*/TimeDelta::PlusInfinity(), metadata.GetCsrcs()); } } @@ -250,7 +251,7 @@ std::unique_ptr CloneSenderVideoFrame( return std::make_unique( encoded_image, new_header, original->GetPayloadType(), new_header.codec, original->GetTimestamp(), - absl::nullopt, // expected_retransmission_time_ms + /*expected_retransmission_time=*/TimeDelta::PlusInfinity(), original->GetSsrc(), metadata.GetCsrcs(), original->GetRid()); } diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h index a5bf65d073cf..7547785ea057 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h @@ -19,6 +19,8 @@ #include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" #include "api/task_queue/task_queue_factory.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "api/video/video_layers_allocation.h" #include "rtc_base/synchronization/mutex.h" @@ -28,16 +30,15 @@ namespace webrtc { // have been applied. 
class RTPVideoFrameSenderInterface { public: - virtual bool SendVideo( - int payload_type, - absl::optional codec_type, - uint32_t rtp_timestamp, - int64_t capture_time_ms, - rtc::ArrayView payload, - size_t encoder_output_size, - RTPVideoHeader video_header, - absl::optional expected_retransmission_time_ms, - std::vector csrcs) = 0; + virtual bool SendVideo(int payload_type, + absl::optional codec_type, + uint32_t rtp_timestamp, + Timestamp capture_time, + rtc::ArrayView payload, + size_t encoder_output_size, + RTPVideoHeader video_header, + TimeDelta expected_retransmission_time, + std::vector csrcs) = 0; virtual void SetVideoStructureAfterTransformation( const FrameDependencyStructure* video_structure) = 0; @@ -69,7 +70,7 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback { uint32_t rtp_timestamp, const EncodedImage& encoded_image, RTPVideoHeader video_header, - absl::optional expected_retransmission_time_ms); + TimeDelta expected_retransmission_time); // Implements TransformedFrameCallback. Can be called on any thread. Posts // the transformed frame to be sent on the `encoder_queue_`. diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate_unittest.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate_unittest.cc index 51de035ea5f5..03fc922e3aaf 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate_unittest.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate_unittest.cc @@ -35,11 +35,11 @@ class MockRTPVideoFrameSenderInterface : public RTPVideoFrameSenderInterface { (int payload_type, absl::optional codec_type, uint32_t rtp_timestamp, - int64_t capture_time_ms, + Timestamp capture_time, rtc::ArrayView payload, size_t encoder_output_size, RTPVideoHeader video_header, - absl::optional expected_retransmission_time_ms, + TimeDelta expected_retransmission_time, std::vector csrcs), (override)); @@ -77,7 +77,7 @@ class RtpSenderVideoFrameTransformerDelegateTest : public ::testing::Test { delegate->TransformFrame( /*payload_type=*/1, VideoCodecType::kVideoCodecVP8, /*rtp_timestamp=*/2, encoded_image, RTPVideoHeader(), - /*expected_retransmission_time_ms=*/absl::nullopt); + /*expected_retransmission_time=*/TimeDelta::PlusInfinity()); return frame; } @@ -120,7 +120,7 @@ TEST_F(RtpSenderVideoFrameTransformerDelegateTest, delegate->TransformFrame( /*payload_type=*/1, VideoCodecType::kVideoCodecVP8, /*rtp_timestamp=*/2, encoded_image, RTPVideoHeader(), - /*expected_retransmission_time_ms=*/absl::nullopt); + /*expected_retransmission_time=*/TimeDelta::PlusInfinity()); } TEST_F(RtpSenderVideoFrameTransformerDelegateTest, @@ -255,11 +255,12 @@ TEST_F(RtpSenderVideoFrameTransformerDelegateTest, ASSERT_TRUE(callback); rtc::Event event; - EXPECT_CALL(test_sender_, - SendVideo(payload_type, absl::make_optional(kVideoCodecVP8), - timestamp, /*capture_time_ms=*/0, buffer, _, _, - /*expected_retransmission_time_ms_=*/ - (absl::optional)absl::nullopt, frame_csrcs)) + EXPECT_CALL( + test_sender_, + SendVideo(payload_type, absl::make_optional(kVideoCodecVP8), timestamp, + /*capture_time=*/Timestamp::MinusInfinity(), buffer, _, _, + /*expected_retransmission_time=*/TimeDelta::PlusInfinity(), + frame_csrcs)) .WillOnce(WithoutArgs([&] { event.Set(); return true; @@ -270,5 +271,23 @@ TEST_F(RtpSenderVideoFrameTransformerDelegateTest, event.Wait(TimeDelta::Seconds(1)); } 
+TEST_F(RtpSenderVideoFrameTransformerDelegateTest, SettingRTPTimestamp) { + auto delegate = rtc::make_ref_counted( + &test_sender_, frame_transformer_, + /*ssrc=*/1111, /*csrcs=*/std::vector(), + time_controller_.CreateTaskQueueFactory().get()); + + std::unique_ptr frame = + GetTransformableFrame(delegate); + ASSERT_TRUE(frame); + auto& video_frame = static_cast(*frame); + + uint32_t rtp_timestamp = 12345; + ASSERT_FALSE(video_frame.GetTimestamp() == rtp_timestamp); + + video_frame.SetRTPTimestamp(rtp_timestamp); + EXPECT_EQ(video_frame.GetTimestamp(), rtp_timestamp); +} + } // namespace } // namespace webrtc diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc index 932d87c579d1..7ce96e0a1234 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc @@ -87,7 +87,7 @@ constexpr uint32_t kSsrc = 725242; constexpr uint32_t kRtxSsrc = 912364; constexpr int kMaxPacketLength = 1500; constexpr Timestamp kStartTime = Timestamp::Millis(123456789); -constexpr int64_t kDefaultExpectedRetransmissionTimeMs = 125; +constexpr TimeDelta kDefaultExpectedRetransmissionTime = TimeDelta::Millis(125); class LoopbackTransportTest : public webrtc::Transport { public: @@ -148,10 +148,10 @@ class TestRtpSenderVideo : public RTPSenderVideo { bool AllowRetransmission(const RTPVideoHeader& header, int32_t retransmission_settings, - int64_t expected_retransmission_time_ms) { + TimeDelta expected_retransmission_time) { return RTPSenderVideo::AllowRetransmission(GetTemporalId(header), retransmission_settings, - expected_retransmission_time_ms); + expected_retransmission_time); } }; @@ -201,9 +201,9 @@ TEST_F(RtpSenderVideoTest, KeyFrameHasCVO) { RTPVideoHeader hdr; hdr.rotation = kVideoRotation_0; hdr.frame_type = VideoFrameType::kVideoFrameKey; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); VideoRotation rotation; EXPECT_TRUE( @@ -219,7 +219,7 @@ TEST_F(RtpSenderVideoTest, TimingFrameHasPacketizationTimstampSet) { rtp_module_->RegisterRtpHeaderExtension(VideoTimingExtension::Uri(), kVideoTimingExtensionId); - const int64_t kCaptureTimestamp = fake_clock_.TimeInMilliseconds(); + const Timestamp kCaptureTimestamp = fake_clock_.CurrentTime(); RTPVideoHeader hdr; hdr.video_timing.flags = VideoSendTiming::kTriggeredByTimer; @@ -230,7 +230,7 @@ TEST_F(RtpSenderVideoTest, TimingFrameHasPacketizationTimstampSet) { hdr.frame_type = VideoFrameType::kVideoFrameKey; rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, kCaptureTimestamp, kFrame, sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + kDefaultExpectedRetransmissionTime, {}); VideoSendTiming timing; EXPECT_TRUE(transport_.last_sent_packet().GetExtension( &timing)); @@ -248,14 +248,14 @@ TEST_F(RtpSenderVideoTest, DeltaFrameHasCVOWhenChanged) { hdr.rotation = kVideoRotation_90; hdr.frame_type = VideoFrameType::kVideoFrameKey; EXPECT_TRUE(rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, 0, kFrame, sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {})); + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, 
kDefaultExpectedRetransmissionTime, {})); hdr.rotation = kVideoRotation_0; hdr.frame_type = VideoFrameType::kVideoFrameDelta; EXPECT_TRUE(rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp + 1, 0, kFrame, sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {})); + kPayload, kType, kTimestamp + 1, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {})); VideoRotation rotation; EXPECT_TRUE( @@ -272,13 +272,13 @@ TEST_F(RtpSenderVideoTest, DeltaFrameHasCVOWhenNonZero) { hdr.rotation = kVideoRotation_90; hdr.frame_type = VideoFrameType::kVideoFrameKey; EXPECT_TRUE(rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, 0, kFrame, sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {})); + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {})); hdr.frame_type = VideoFrameType::kVideoFrameDelta; EXPECT_TRUE(rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp + 1, 0, kFrame, sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {})); + kPayload, kType, kTimestamp + 1, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {})); VideoRotation rotation; EXPECT_TRUE( @@ -312,14 +312,14 @@ TEST_F(RtpSenderVideoTest, RetransmissionTypesGeneric) { header.codec = kVideoCodecGeneric; EXPECT_FALSE(rtp_sender_video_->AllowRetransmission( - header, kRetransmitOff, kDefaultExpectedRetransmissionTimeMs)); + header, kRetransmitOff, kDefaultExpectedRetransmissionTime)); EXPECT_TRUE(rtp_sender_video_->AllowRetransmission( - header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTimeMs)); + header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTime)); EXPECT_TRUE(rtp_sender_video_->AllowRetransmission( - header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTimeMs)); + header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTime)); EXPECT_TRUE(rtp_sender_video_->AllowRetransmission( header, kConditionallyRetransmitHigherLayers, - kDefaultExpectedRetransmissionTimeMs)); + kDefaultExpectedRetransmissionTime)); } TEST_F(RtpSenderVideoTest, RetransmissionTypesH264) { @@ -329,14 +329,14 @@ TEST_F(RtpSenderVideoTest, RetransmissionTypesH264) { header.codec = kVideoCodecH264; EXPECT_FALSE(rtp_sender_video_->AllowRetransmission( - header, kRetransmitOff, kDefaultExpectedRetransmissionTimeMs)); + header, kRetransmitOff, kDefaultExpectedRetransmissionTime)); EXPECT_TRUE(rtp_sender_video_->AllowRetransmission( - header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTimeMs)); + header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTime)); EXPECT_TRUE(rtp_sender_video_->AllowRetransmission( - header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTimeMs)); + header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTime)); EXPECT_TRUE(rtp_sender_video_->AllowRetransmission( header, kConditionallyRetransmitHigherLayers, - kDefaultExpectedRetransmissionTimeMs)); + kDefaultExpectedRetransmissionTime)); } TEST_F(RtpSenderVideoTest, RetransmissionTypesVP8BaseLayer) { @@ -346,20 +346,20 @@ TEST_F(RtpSenderVideoTest, RetransmissionTypesVP8BaseLayer) { vp8_header.temporalIdx = 0; EXPECT_FALSE(rtp_sender_video_->AllowRetransmission( - header, kRetransmitOff, kDefaultExpectedRetransmissionTimeMs)); + header, kRetransmitOff, kDefaultExpectedRetransmissionTime)); EXPECT_TRUE(rtp_sender_video_->AllowRetransmission( - header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTimeMs)); + 
header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTime)); EXPECT_FALSE(rtp_sender_video_->AllowRetransmission( - header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTimeMs)); + header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTime)); EXPECT_TRUE(rtp_sender_video_->AllowRetransmission( header, kRetransmitHigherLayers | kRetransmitBaseLayer, - kDefaultExpectedRetransmissionTimeMs)); + kDefaultExpectedRetransmissionTime)); EXPECT_FALSE(rtp_sender_video_->AllowRetransmission( header, kConditionallyRetransmitHigherLayers, - kDefaultExpectedRetransmissionTimeMs)); + kDefaultExpectedRetransmissionTime)); EXPECT_TRUE(rtp_sender_video_->AllowRetransmission( header, kRetransmitBaseLayer | kConditionallyRetransmitHigherLayers, - kDefaultExpectedRetransmissionTimeMs)); + kDefaultExpectedRetransmissionTime)); } TEST_F(RtpSenderVideoTest, RetransmissionTypesVP8HigherLayers) { @@ -371,14 +371,14 @@ TEST_F(RtpSenderVideoTest, RetransmissionTypesVP8HigherLayers) { vp8_header.temporalIdx = tid; EXPECT_FALSE(rtp_sender_video_->AllowRetransmission( - header, kRetransmitOff, kDefaultExpectedRetransmissionTimeMs)); + header, kRetransmitOff, kDefaultExpectedRetransmissionTime)); EXPECT_FALSE(rtp_sender_video_->AllowRetransmission( - header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTimeMs)); + header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTime)); EXPECT_TRUE(rtp_sender_video_->AllowRetransmission( - header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTimeMs)); + header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTime)); EXPECT_TRUE(rtp_sender_video_->AllowRetransmission( header, kRetransmitHigherLayers | kRetransmitBaseLayer, - kDefaultExpectedRetransmissionTimeMs)); + kDefaultExpectedRetransmissionTime)); } } @@ -391,20 +391,20 @@ TEST_F(RtpSenderVideoTest, RetransmissionTypesVP9) { vp9_header.temporal_idx = tid; EXPECT_FALSE(rtp_sender_video_->AllowRetransmission( - header, kRetransmitOff, kDefaultExpectedRetransmissionTimeMs)); + header, kRetransmitOff, kDefaultExpectedRetransmissionTime)); EXPECT_FALSE(rtp_sender_video_->AllowRetransmission( - header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTimeMs)); + header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTime)); EXPECT_TRUE(rtp_sender_video_->AllowRetransmission( - header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTimeMs)); + header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTime)); EXPECT_TRUE(rtp_sender_video_->AllowRetransmission( header, kRetransmitHigherLayers | kRetransmitBaseLayer, - kDefaultExpectedRetransmissionTimeMs)); + kDefaultExpectedRetransmissionTime)); } } TEST_F(RtpSenderVideoTest, ConditionalRetransmit) { - const int64_t kFrameIntervalMs = 33; - const int64_t kRttMs = (kFrameIntervalMs * 3) / 2; + constexpr TimeDelta kFrameInterval = TimeDelta::Millis(33); + constexpr TimeDelta kRtt = (kFrameInterval * 3) / 2; const uint8_t kSettings = kRetransmitBaseLayer | kConditionallyRetransmitHigherLayers; @@ -414,14 +414,13 @@ TEST_F(RtpSenderVideoTest, ConditionalRetransmit) { // Fill averaging window to prevent rounding errors. 
constexpr int kNumRepetitions = - (RTPSenderVideo::kTLRateWindowSizeMs + (kFrameIntervalMs / 2)) / - kFrameIntervalMs; + RTPSenderVideo::kTLRateWindowSize / kFrameInterval; constexpr int kPattern[] = {0, 2, 1, 2}; auto& vp8_header = header.video_type_header.emplace(); for (size_t i = 0; i < arraysize(kPattern) * kNumRepetitions; ++i) { vp8_header.temporalIdx = kPattern[i % arraysize(kPattern)]; - rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs); - fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs); + rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt); + fake_clock_.AdvanceTime(kFrameInterval); } // Since we're at the start of the pattern, the next expected frame in TL0 is @@ -429,40 +428,34 @@ TEST_F(RtpSenderVideoTest, ConditionalRetransmit) { // acknowledging that it did not arrive, which means this frame and the next // will not be retransmitted. vp8_header.temporalIdx = 1; - EXPECT_FALSE( - rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs)); - fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs); - EXPECT_FALSE( - rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs)); - fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs); + EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt)); + fake_clock_.AdvanceTime(kFrameInterval); + EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt)); + fake_clock_.AdvanceTime(kFrameInterval); // The TL0 frame did not arrive. So allow retransmission. - EXPECT_TRUE( - rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs)); - fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs); + EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt)); + fake_clock_.AdvanceTime(kFrameInterval); // Insert a frame for TL2. We just had frame in TL1, so the next one there is // in three frames away. TL0 is still too far in the past. So, allow // retransmission. vp8_header.temporalIdx = 2; - EXPECT_TRUE( - rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs)); - fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs); + EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt)); + fake_clock_.AdvanceTime(kFrameInterval); // Another TL2, next in TL1 is two frames away. Allow again. - EXPECT_TRUE( - rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs)); - fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs); + EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt)); + fake_clock_.AdvanceTime(kFrameInterval); // Yet another TL2, next in TL1 is now only one frame away, so don't store // for retransmission. - EXPECT_FALSE( - rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs)); + EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt)); } TEST_F(RtpSenderVideoTest, ConditionalRetransmitLimit) { - const int64_t kFrameIntervalMs = 200; - const int64_t kRttMs = (kFrameIntervalMs * 3) / 2; + constexpr TimeDelta kFrameInterval = TimeDelta::Millis(200); + constexpr TimeDelta kRtt = (kFrameInterval * 3) / 2; const int32_t kSettings = kRetransmitBaseLayer | kConditionallyRetransmitHigherLayers; @@ -472,15 +465,14 @@ TEST_F(RtpSenderVideoTest, ConditionalRetransmitLimit) { // Fill averaging window to prevent rounding errors. 
constexpr int kNumRepetitions = - (RTPSenderVideo::kTLRateWindowSizeMs + (kFrameIntervalMs / 2)) / - kFrameIntervalMs; + RTPSenderVideo::kTLRateWindowSize / kFrameInterval; constexpr int kPattern[] = {0, 2, 2, 2}; auto& vp8_header = header.video_type_header.emplace(); for (size_t i = 0; i < arraysize(kPattern) * kNumRepetitions; ++i) { vp8_header.temporalIdx = kPattern[i % arraysize(kPattern)]; - rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs); - fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs); + rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt); + fake_clock_.AdvanceTime(kFrameInterval); } // Since we're at the start of the pattern, the next expected frame will be @@ -489,8 +481,7 @@ TEST_F(RtpSenderVideoTest, ConditionalRetransmitLimit) { // layer, but that last frame in TL1 was a long time ago in absolute terms, // so allow retransmission anyway. vp8_header.temporalIdx = 1; - EXPECT_TRUE( - rtp_sender_video_->AllowRetransmission(header, kSettings, kRttMs)); + EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt)); } TEST_F(RtpSenderVideoTest, @@ -518,8 +509,10 @@ TEST_F(RtpSenderVideoTest, uint8_t kPayload[kMaxPacketSize] = {}; EXPECT_TRUE(rtp_sender_video_->SendVideo( kMediaPayloadId, /*codec_type=*/kVideoCodecVP8, /*rtp_timestamp=*/0, - /*capture_time_ms=*/1'000, kPayload, sizeof(kPayload), header, - /*expected_retransmission_time_ms=*/absl::nullopt, /*csrcs=*/{})); + /*capture_time=*/Timestamp::Seconds(1), kPayload, sizeof(kPayload), + header, + /*expected_retransmission_time=*/TimeDelta::PlusInfinity(), + /*csrcs=*/{})); ASSERT_THAT(transport_.sent_packets(), Not(IsEmpty())); // Ack media ssrc, but not rtx ssrc. rtcp::ReceiverReport rr; @@ -537,8 +530,8 @@ TEST_F(RtpSenderVideoTest, EXPECT_TRUE(rtp_sender_video_->SendVideo( kMediaPayloadId, /*codec_type=*/kVideoCodecVP8, /*rtp_timestamp=*/0, - /*capture_time_ms=*/1'000, payload, frame_size, header, - /*expected_retransmission_time_ms=*/1'000, /*csrcs=*/{})); + /*capture_time=*/Timestamp::Seconds(1), payload, frame_size, header, + /*expected_retransmission_time=*/TimeDelta::Seconds(1), /*csrcs=*/{})); const RtpPacketReceived& media_packet = transport_.last_sent_packet(); EXPECT_EQ(media_packet.Ssrc(), kSsrc); @@ -576,9 +569,9 @@ TEST_F(RtpSenderVideoTest, SendsDependencyDescriptorWhenVideoStructureIsSet) { generic.decode_target_indications = {DecodeTargetIndication::kSwitch, DecodeTargetIndication::kSwitch}; hdr.frame_type = VideoFrameType::kVideoFrameKey; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); ASSERT_EQ(transport_.packets_sent(), 1); DependencyDescriptor descriptor_key; @@ -603,9 +596,9 @@ TEST_F(RtpSenderVideoTest, SendsDependencyDescriptorWhenVideoStructureIsSet) { generic.decode_target_indications = {DecodeTargetIndication::kNotPresent, DecodeTargetIndication::kRequired}; hdr.frame_type = VideoFrameType::kVideoFrameDelta; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); EXPECT_EQ(transport_.packets_sent(), 2); DependencyDescriptor descriptor_delta; @@ -653,9 +646,9 
@@ TEST_F(RtpSenderVideoTest, generic.decode_target_indications = {DecodeTargetIndication::kSwitch, DecodeTargetIndication::kSwitch}; hdr.frame_type = VideoFrameType::kVideoFrameKey; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); ASSERT_EQ(transport_.packets_sent(), 1); DependencyDescriptor descriptor_key; @@ -670,9 +663,9 @@ TEST_F(RtpSenderVideoTest, generic.decode_target_indications = {DecodeTargetIndication::kNotPresent, DecodeTargetIndication::kRequired}; hdr.frame_type = VideoFrameType::kVideoFrameDelta; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); EXPECT_EQ(transport_.packets_sent(), 2); EXPECT_FALSE(transport_.last_sent_packet() @@ -700,9 +693,9 @@ TEST_F(RtpSenderVideoTest, PropagatesChainDiffsIntoDependencyDescriptor) { DecodeTargetIndication::kSwitch}; generic.chain_diffs = {2}; hdr.frame_type = VideoFrameType::kVideoFrameKey; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); ASSERT_EQ(transport_.packets_sent(), 1); DependencyDescriptor descriptor_key; @@ -736,9 +729,9 @@ TEST_F(RtpSenderVideoTest, generic.active_decode_targets = 0b01; generic.chain_diffs = {1}; hdr.frame_type = VideoFrameType::kVideoFrameKey; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); ASSERT_EQ(transport_.packets_sent(), 1); DependencyDescriptor descriptor_key; @@ -775,9 +768,9 @@ TEST_F(RtpSenderVideoTest, DecodeTargetIndication::kSwitch}; hdr.frame_type = VideoFrameType::kVideoFrameKey; rtp_sender_video_->SetVideoStructure(&video_structure1); - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); // Parse 1st extension. 
ASSERT_EQ(transport_.packets_sent(), 1); DependencyDescriptor descriptor_key1; @@ -792,9 +785,9 @@ TEST_F(RtpSenderVideoTest, generic.decode_target_indications = {DecodeTargetIndication::kDiscardable, DecodeTargetIndication::kNotPresent}; hdr.frame_type = VideoFrameType::kVideoFrameDelta; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); ASSERT_EQ(transport_.packets_sent(), 2); RtpPacket delta_packet = transport_.last_sent_packet(); @@ -805,9 +798,9 @@ TEST_F(RtpSenderVideoTest, DecodeTargetIndication::kSwitch}; hdr.frame_type = VideoFrameType::kVideoFrameKey; rtp_sender_video_->SetVideoStructure(&video_structure2); - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); // Parse the 2nd key frame. ASSERT_EQ(transport_.packets_sent(), 3); DependencyDescriptor descriptor_key2; @@ -860,9 +853,9 @@ TEST_F(RtpSenderVideoTest, EXPECT_CALL(*encryptor, Encrypt(_, _, Not(IsEmpty()), ElementsAreArray(kFrame), _, _)); - rtp_sender_video.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video.SendVideo(kPayload, kType, kTimestamp, + fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), + hdr, kDefaultExpectedRetransmissionTime, {}); // Double check packet with the dependency descriptor is sent. ASSERT_EQ(transport_.packets_sent(), 1); EXPECT_TRUE(transport_.last_sent_packet() @@ -883,9 +876,9 @@ TEST_F(RtpSenderVideoTest, PopulateGenericFrameDescriptor) { generic.dependencies.push_back(kFrameId - 1); generic.dependencies.push_back(kFrameId - 500); hdr.frame_type = VideoFrameType::kVideoFrameDelta; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); RtpGenericFrameDescriptor descriptor_wire; EXPECT_EQ(1, transport_.packets_sent()); @@ -919,8 +912,9 @@ void RtpSenderVideoTest:: generic.frame_id = kFrameId; hdr.frame_type = VideoFrameType::kVideoFrameDelta; rtp_sender_video_->SendVideo(kPayload, VideoCodecType::kVideoCodecVP8, - kTimestamp, 0, kFrame, sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, + kDefaultExpectedRetransmissionTime, {}); ASSERT_EQ(transport_.packets_sent(), 1); // Expect only minimal 1-byte vp8 descriptor was generated. 
@@ -956,9 +950,9 @@ TEST_F(RtpSenderVideoTest, VideoLayersAllocationWithResolutionSentOnKeyFrames) { RTPVideoHeader hdr; hdr.frame_type = VideoFrameType::kVideoFrameKey; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); VideoLayersAllocation sent_allocation; EXPECT_TRUE( @@ -967,9 +961,9 @@ TEST_F(RtpSenderVideoTest, VideoLayersAllocationWithResolutionSentOnKeyFrames) { EXPECT_THAT(sent_allocation.active_spatial_layers, ElementsAre(layer)); // Next key frame also have the allocation. - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); EXPECT_TRUE( transport_.last_sent_packet() .GetExtension(&sent_allocation)); @@ -995,25 +989,25 @@ TEST_F(RtpSenderVideoTest, RTPVideoHeader hdr; hdr.frame_type = VideoFrameType::kVideoFrameKey; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); EXPECT_TRUE(transport_.last_sent_packet() .HasExtension()); // No allocation sent on delta frame unless it has been updated. hdr.frame_type = VideoFrameType::kVideoFrameDelta; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); EXPECT_FALSE(transport_.last_sent_packet() .HasExtension()); // Update the allocation. 
rtp_sender_video_->SetVideoLayersAllocation(allocation); - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); VideoLayersAllocation sent_allocation; EXPECT_TRUE( @@ -1047,9 +1041,9 @@ TEST_F(RtpSenderVideoTest, RTPVideoHeader hdr; hdr.frame_type = VideoFrameType::kVideoFrameKey; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); ASSERT_TRUE(transport_.last_sent_packet() .HasExtension()); @@ -1062,9 +1056,9 @@ TEST_F(RtpSenderVideoTest, allocation.active_spatial_layers.push_back(layer); rtp_sender_video_->SetVideoLayersAllocation(allocation); hdr.frame_type = VideoFrameType::kVideoFrameDelta; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); VideoLayersAllocation sent_allocation; EXPECT_TRUE( @@ -1096,9 +1090,9 @@ TEST_F(RtpSenderVideoTest, RTPVideoHeader hdr; hdr.frame_type = VideoFrameType::kVideoFrameKey; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); ASSERT_TRUE(transport_.last_sent_packet() .HasExtension()); @@ -1106,9 +1100,9 @@ TEST_F(RtpSenderVideoTest, allocation.active_spatial_layers[0].frame_rate_fps = 20; rtp_sender_video_->SetVideoLayersAllocation(allocation); hdr.frame_type = VideoFrameType::kVideoFrameDelta; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); VideoLayersAllocation sent_allocation; EXPECT_TRUE( @@ -1140,9 +1134,9 @@ TEST_F(RtpSenderVideoTest, RTPVideoHeader hdr; hdr.frame_type = VideoFrameType::kVideoFrameKey; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); ASSERT_TRUE(transport_.last_sent_packet() .HasExtension()); @@ -1150,9 +1144,9 @@ TEST_F(RtpSenderVideoTest, allocation.active_spatial_layers[0].frame_rate_fps = 9; rtp_sender_video_->SetVideoLayersAllocation(allocation); hdr.frame_type = VideoFrameType::kVideoFrameDelta; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); VideoLayersAllocation sent_allocation; EXPECT_TRUE( @@ -1179,9 
+1173,9 @@ TEST_F(RtpSenderVideoTest, VideoLayersAllocationSentOnDeltaFramesOnlyOnUpdate) { RTPVideoHeader hdr; hdr.frame_type = VideoFrameType::kVideoFrameDelta; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); VideoLayersAllocation sent_allocation; EXPECT_TRUE( @@ -1190,17 +1184,17 @@ TEST_F(RtpSenderVideoTest, VideoLayersAllocationSentOnDeltaFramesOnlyOnUpdate) { EXPECT_THAT(sent_allocation.active_spatial_layers, SizeIs(1)); // VideoLayersAllocation not sent on the next delta frame. - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); EXPECT_FALSE(transport_.last_sent_packet() .HasExtension()); // Update allocation. VideoLayesAllocation should be sent on the next frame. rtp_sender_video_->SetVideoLayersAllocation(allocation); - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); EXPECT_TRUE( transport_.last_sent_packet() .GetExtension(&sent_allocation)); @@ -1229,17 +1223,17 @@ TEST_F(RtpSenderVideoTest, VideoLayersAllocationNotSentOnHigherTemporalLayers) { auto& vp8_header = hdr.video_type_header.emplace(); vp8_header.temporalIdx = 1; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); EXPECT_FALSE(transport_.last_sent_packet() .HasExtension()); // Send a delta frame on tl0. vp8_header.temporalIdx = 0; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); EXPECT_TRUE(transport_.last_sent_packet() .HasExtension()); } @@ -1253,8 +1247,9 @@ TEST_F(RtpSenderVideoTest, RTPVideoHeader hdr; hdr.frame_type = VideoFrameType::kVideoFrameKey; rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, - /*capture_time_ms=*/0, kFrame, sizeof(kFrame), - hdr, kDefaultExpectedRetransmissionTimeMs, {}); + /*capture_time=*/Timestamp::MinusInfinity(), + kFrame, sizeof(kFrame), hdr, + kDefaultExpectedRetransmissionTime, {}); // No absolute capture time should be set as the capture_time_ms was the // default value. 
for (const RtpPacketReceived& packet : transport_.sent_packets()) { @@ -1266,7 +1261,7 @@ TEST_F(RtpSenderVideoTest, AbsoluteCaptureTime) { rtp_sender_video_ = std::make_unique( &fake_clock_, rtp_module_->RtpSender(), field_trials_); - constexpr int64_t kAbsoluteCaptureTimestampMs = 12345678; + constexpr Timestamp kAbsoluteCaptureTimestamp = Timestamp::Millis(12345678); uint8_t kFrame[kMaxPacketLength]; rtp_module_->RegisterRtpHeaderExtension(AbsoluteCaptureTimeExtension::Uri(), kAbsoluteCaptureTimeExtensionId); @@ -1274,8 +1269,8 @@ TEST_F(RtpSenderVideoTest, AbsoluteCaptureTime) { RTPVideoHeader hdr; hdr.frame_type = VideoFrameType::kVideoFrameKey; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, kAbsoluteCaptureTimestampMs, kFrame, - sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTimeMs, {}); + kPayload, kType, kTimestamp, kAbsoluteCaptureTimestamp, kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); absl::optional absolute_capture_time; @@ -1292,10 +1287,10 @@ TEST_F(RtpSenderVideoTest, AbsoluteCaptureTime) { // Verify the capture timestamp and that the clock offset is set to zero. ASSERT_TRUE(absolute_capture_time.has_value()); - EXPECT_EQ( - absolute_capture_time->absolute_capture_timestamp, - Int64MsToUQ32x32(fake_clock_.ConvertTimestampToNtpTimeInMilliseconds( - kAbsoluteCaptureTimestampMs))); + EXPECT_EQ(absolute_capture_time->absolute_capture_timestamp, + Int64MsToUQ32x32( + fake_clock_.ConvertTimestampToNtpTime(kAbsoluteCaptureTimestamp) + .ToMs())); EXPECT_EQ(absolute_capture_time->estimated_capture_clock_offset, 0); } @@ -1312,8 +1307,9 @@ TEST_F(RtpSenderVideoTest, AbsoluteCaptureTimeWithExtensionProvided) { hdr.frame_type = VideoFrameType::kVideoFrameKey; hdr.absolute_capture_time = kAbsoluteCaptureTime; rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, - /*capture_time_ms=*/789, kFrame, sizeof(kFrame), - hdr, kDefaultExpectedRetransmissionTimeMs, {}); + /*capture_time=*/Timestamp::Millis(789), kFrame, + sizeof(kFrame), hdr, + kDefaultExpectedRetransmissionTime, {}); absl::optional absolute_capture_time; @@ -1347,9 +1343,9 @@ TEST_F(RtpSenderVideoTest, PopulatesPlayoutDelay) { auto& vp8_header = hdr.video_type_header.emplace(); vp8_header.temporalIdx = 0; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); EXPECT_FALSE( transport_.last_sent_packet().HasExtension()); @@ -1357,9 +1353,9 @@ TEST_F(RtpSenderVideoTest, PopulatesPlayoutDelay) { hdr.playout_delay = kExpectedDelay; hdr.frame_type = VideoFrameType::kVideoFrameDelta; vp8_header.temporalIdx = 1; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); VideoPlayoutDelay received_delay = VideoPlayoutDelay(); ASSERT_TRUE(transport_.last_sent_packet().GetExtension( &received_delay)); @@ -1369,26 +1365,26 @@ TEST_F(RtpSenderVideoTest, PopulatesPlayoutDelay) { // be populated since dilvery wasn't guaranteed on the last one. hdr.playout_delay = VideoPlayoutDelay(); // Indicates "no change". 
vp8_header.temporalIdx = 0; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); ASSERT_TRUE(transport_.last_sent_packet().GetExtension( &received_delay)); EXPECT_EQ(received_delay, kExpectedDelay); // The next frame does not need the extensions since it's delivery has // already been guaranteed. - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); EXPECT_FALSE( transport_.last_sent_packet().HasExtension()); // Insert key-frame, we need to refresh the state here. hdr.frame_type = VideoFrameType::kVideoFrameKey; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, - sizeof(kFrame), hdr, - kDefaultExpectedRetransmissionTimeMs, {}); + rtp_sender_video_->SendVideo( + kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); ASSERT_TRUE(transport_.last_sent_packet().GetExtension( &received_delay)); EXPECT_EQ(received_delay, kExpectedDelay); @@ -1402,9 +1398,9 @@ TEST_F(RtpSenderVideoTest, SendGenericVideo) { // Send keyframe. RTPVideoHeader video_header; video_header.frame_type = VideoFrameType::kVideoFrameKey; - ASSERT_TRUE(rtp_sender_video_->SendVideo(kPayloadType, kCodecType, 1234, 4321, - kPayload, sizeof(kPayload), - video_header, absl::nullopt, {})); + ASSERT_TRUE(rtp_sender_video_->SendVideo( + kPayloadType, kCodecType, 1234, fake_clock_.CurrentTime(), kPayload, + sizeof(kPayload), video_header, TimeDelta::PlusInfinity(), {})); rtc::ArrayView sent_payload = transport_.last_sent_packet().payload(); @@ -1416,9 +1412,9 @@ TEST_F(RtpSenderVideoTest, SendGenericVideo) { // Send delta frame. const uint8_t kDeltaPayload[] = {13, 42, 32, 93, 13}; video_header.frame_type = VideoFrameType::kVideoFrameDelta; - ASSERT_TRUE(rtp_sender_video_->SendVideo(kPayloadType, kCodecType, 1234, 4321, - kDeltaPayload, sizeof(kDeltaPayload), - video_header, absl::nullopt, {})); + ASSERT_TRUE(rtp_sender_video_->SendVideo( + kPayloadType, kCodecType, 1234, fake_clock_.CurrentTime(), kDeltaPayload, + sizeof(kDeltaPayload), video_header, TimeDelta::PlusInfinity(), {})); sent_payload = sent_payload = transport_.last_sent_packet().payload(); generic_header = sent_payload[0]; @@ -1434,9 +1430,9 @@ TEST_F(RtpSenderVideoTest, SendRawVideo) { // Send a frame. 
RTPVideoHeader video_header; video_header.frame_type = VideoFrameType::kVideoFrameKey; - ASSERT_TRUE(rtp_sender_video_->SendVideo(kPayloadType, absl::nullopt, 1234, - 4321, kPayload, sizeof(kPayload), - video_header, absl::nullopt, {})); + ASSERT_TRUE(rtp_sender_video_->SendVideo( + kPayloadType, absl::nullopt, 1234, fake_clock_.CurrentTime(), kPayload, + sizeof(kPayload), video_header, TimeDelta::PlusInfinity(), {})); rtc::ArrayView sent_payload = transport_.last_sent_packet().payload(); @@ -1521,7 +1517,7 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, EXPECT_CALL(*mock_frame_transformer, Transform); rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp, *encoded_image, video_header, - kDefaultExpectedRetransmissionTimeMs); + kDefaultExpectedRetransmissionTime); } #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) @@ -1535,17 +1531,17 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, ValidPayloadTypes) { EXPECT_TRUE(rtp_sender_video->SendEncodedImage( 0, kType, kTimestamp, *encoded_image, video_header, - kDefaultExpectedRetransmissionTimeMs)); + kDefaultExpectedRetransmissionTime)); EXPECT_TRUE(rtp_sender_video->SendEncodedImage( 127, kType, kTimestamp, *encoded_image, video_header, - kDefaultExpectedRetransmissionTimeMs)); + kDefaultExpectedRetransmissionTime)); EXPECT_DEATH(rtp_sender_video->SendEncodedImage( -1, kType, kTimestamp, *encoded_image, video_header, - kDefaultExpectedRetransmissionTimeMs), + kDefaultExpectedRetransmissionTime), ""); EXPECT_DEATH(rtp_sender_video->SendEncodedImage( 128, kType, kTimestamp, *encoded_image, video_header, - kDefaultExpectedRetransmissionTimeMs), + kDefaultExpectedRetransmissionTime), ""); } #endif @@ -1573,14 +1569,14 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, OnTransformedFrameSendsVideo) { encoder_queue->PostTask([&] { rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp, *encoded_image, video_header, - kDefaultExpectedRetransmissionTimeMs); + kDefaultExpectedRetransmissionTime); }); time_controller_.AdvanceTime(TimeDelta::Zero()); EXPECT_EQ(transport_.packets_sent(), 1); encoder_queue->PostTask([&] { rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp, *encoded_image, video_header, - kDefaultExpectedRetransmissionTimeMs); + kDefaultExpectedRetransmissionTime); }); time_controller_.AdvanceTime(TimeDelta::Zero()); EXPECT_EQ(transport_.packets_sent(), 2); @@ -1615,7 +1611,7 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, encoder_queue->PostTask([&] { rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp, *encoded_image, video_header, - kDefaultExpectedRetransmissionTimeMs); + kDefaultExpectedRetransmissionTime); }); time_controller_.AdvanceTime(TimeDelta::Millis(1000 / kFramesPerSecond)); } @@ -1662,7 +1658,7 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, }); rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp, *encoded_image, video_header, - kDefaultExpectedRetransmissionTimeMs); + kDefaultExpectedRetransmissionTime); } TEST_F(RtpSenderVideoWithFrameTransformerTest, @@ -1686,7 +1682,7 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, }); rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp, *encoded_image, video_header, - kDefaultExpectedRetransmissionTimeMs); + kDefaultExpectedRetransmissionTime); } TEST_F(RtpSenderVideoWithFrameTransformerTest, @@ -1716,14 +1712,14 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, encoder_queue->PostTask([&] { rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp, *encoded_image, video_header, - 
kDefaultExpectedRetransmissionTimeMs); + kDefaultExpectedRetransmissionTime); }); time_controller_.AdvanceTime(TimeDelta::Zero()); EXPECT_EQ(transport_.packets_sent(), 1); encoder_queue->PostTask([&] { rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp, *encoded_image, video_header, - kDefaultExpectedRetransmissionTimeMs); + kDefaultExpectedRetransmissionTime); }); time_controller_.AdvanceTime(TimeDelta::Zero()); EXPECT_EQ(transport_.packets_sent(), 2); diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc index b920efadff4b..5c6b62d0688d 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc @@ -48,6 +48,9 @@ class TransformableVideoReceiverFrame uint8_t GetPayloadType() const override { return frame_->PayloadType(); } uint32_t GetSsrc() const override { return metadata_.GetSsrc(); } uint32_t GetTimestamp() const override { return frame_->Timestamp(); } + void SetRTPTimestamp(uint32_t timestamp) override { + frame_->SetTimestamp(timestamp); + } bool IsKeyFrame() const override { return frame_->FrameType() == VideoFrameType::kVideoFrameKey; diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/ulpfec_generator.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/ulpfec_generator.cc index 20402fc4d373..cae659cdd7f3 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/ulpfec_generator.cc +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/ulpfec_generator.cc @@ -77,7 +77,7 @@ UlpfecGenerator::UlpfecGenerator(int red_payload_type, num_protected_frames_(0), min_num_media_packets_(1), media_contains_keyframe_(false), - fec_bitrate_(/*max_window_size_ms=*/1000, RateStatistics::kBpsScale) {} + fec_bitrate_(/*max_window_size=*/TimeDelta::Seconds(1)) {} // Used by FlexFecSender, payload types are unused. 
UlpfecGenerator::UlpfecGenerator(std::unique_ptr fec, @@ -89,7 +89,7 @@ UlpfecGenerator::UlpfecGenerator(std::unique_ptr fec, num_protected_frames_(0), min_num_media_packets_(1), media_contains_keyframe_(false), - fec_bitrate_(/*max_window_size_ms=*/1000, RateStatistics::kBpsScale) {} + fec_bitrate_(/*max_window_size=*/TimeDelta::Seconds(1)) {} UlpfecGenerator::~UlpfecGenerator() = default; @@ -235,15 +235,14 @@ std::vector> UlpfecGenerator::GetFecPackets() { ResetState(); MutexLock lock(&mutex_); - fec_bitrate_.Update(total_fec_size_bytes, clock_->TimeInMilliseconds()); + fec_bitrate_.Update(total_fec_size_bytes, clock_->CurrentTime()); return fec_packets; } DataRate UlpfecGenerator::CurrentFecRate() const { MutexLock lock(&mutex_); - return DataRate::BitsPerSec( - fec_bitrate_.Rate(clock_->TimeInMilliseconds()).value_or(0)); + return fec_bitrate_.Rate(clock_->CurrentTime()).value_or(DataRate::Zero()); } int UlpfecGenerator::Overhead() const { diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/ulpfec_generator.h b/third_party/libwebrtc/modules/rtp_rtcp/source/ulpfec_generator.h index 88a8b459e601..0058847357a4 100644 --- a/third_party/libwebrtc/modules/rtp_rtcp/source/ulpfec_generator.h +++ b/third_party/libwebrtc/modules/rtp_rtcp/source/ulpfec_generator.h @@ -21,8 +21,8 @@ #include "modules/include/module_fec_types.h" #include "modules/rtp_rtcp/source/forward_error_correction.h" #include "modules/rtp_rtcp/source/video_fec_generator.h" +#include "rtc_base/bitrate_tracker.h" #include "rtc_base/race_checker.h" -#include "rtc_base/rate_statistics.h" #include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -115,7 +115,7 @@ class UlpfecGenerator : public VideoFecGenerator { mutable Mutex mutex_; absl::optional pending_params_ RTC_GUARDED_BY(mutex_); - RateStatistics fec_bitrate_ RTC_GUARDED_BY(mutex_); + BitrateTracker fec_bitrate_ RTC_GUARDED_BY(mutex_); }; } // namespace webrtc diff --git a/third_party/libwebrtc/modules/third_party/fft/README.chromium b/third_party/libwebrtc/modules/third_party/fft/README.chromium index 94d20d422f28..0c79ef8a6a2b 100644 --- a/third_party/libwebrtc/modules/third_party/fft/README.chromium +++ b/third_party/libwebrtc/modules/third_party/fft/README.chromium @@ -6,6 +6,7 @@ Date: 2018-07-26 License: Custom license License File: LICENSE Security Critical: yes +Shipped: yes Description: Multivariate complex Fourier transform, computed in place diff --git a/third_party/libwebrtc/modules/third_party/g711/README.chromium b/third_party/libwebrtc/modules/third_party/g711/README.chromium index 1baa2637a92d..675572d72054 100644 --- a/third_party/libwebrtc/modules/third_party/g711/README.chromium +++ b/third_party/libwebrtc/modules/third_party/g711/README.chromium @@ -6,6 +6,7 @@ Date: 2018-06-25 License: Custom license License File: LICENSE Security Critical: yes +Shipped: yes Description: In line A-law and u-law conversion routines diff --git a/third_party/libwebrtc/modules/third_party/g722/README.chromium b/third_party/libwebrtc/modules/third_party/g722/README.chromium index c427ed8cf25b..ba2234f4f15b 100644 --- a/third_party/libwebrtc/modules/third_party/g722/README.chromium +++ b/third_party/libwebrtc/modules/third_party/g722/README.chromium @@ -6,6 +6,7 @@ Date: 2018-06-25 License: Custom license License File: LICENSE Security Critical: yes +Shipped: yes Description: The ITU G.722 codec, encode and decode part. 
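Note on the unit-type migration applied throughout the hunks above: call sites that previously passed raw int64_t millisecond values (capture_time_ms, expected_retransmission_time_ms, RateStatistics window sizes) now take webrtc::Timestamp / webrtc::TimeDelta values read directly from the clock. The following minimal sketch illustrates that pattern only and is not part of the patch; SendFrameAt is a hypothetical helper, while Clock::CurrentTime, AdvanceTime, and the unit constructors are existing libwebrtc APIs.

    #include "api/units/time_delta.h"
    #include "api/units/timestamp.h"
    #include "system_wrappers/include/clock.h"

    // Hypothetical helper mirroring how the updated tests pass unit types
    // instead of raw milliseconds.
    void SendFrameAt(webrtc::SimulatedClock& clock) {
      // Before: int64_t capture_time_ms = clock.TimeInMilliseconds();
      webrtc::Timestamp capture_time = clock.CurrentTime();
      // Before: int64_t expected_retransmission_time_ms = 125;
      webrtc::TimeDelta retransmission_timeout = webrtc::TimeDelta::Millis(125);
      // ...forward capture_time / retransmission_timeout to SendVideo(...).
      clock.AdvanceTime(webrtc::TimeDelta::Millis(33));  // one ~30 fps frame interval
      (void)capture_time;
      (void)retransmission_timeout;
    }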
diff --git a/third_party/libwebrtc/modules/third_party/portaudio/README.chromium b/third_party/libwebrtc/modules/third_party/portaudio/README.chromium index 722dd943457d..3f7beef65588 100644 --- a/third_party/libwebrtc/modules/third_party/portaudio/README.chromium +++ b/third_party/libwebrtc/modules/third_party/portaudio/README.chromium @@ -6,6 +6,7 @@ Date: 2022-04-12 License: Custom license License File: LICENSE Security Critical: yes +Shipped: yes Description: Part of portaudio library to operate with memory barriers and ring buffer. diff --git a/third_party/libwebrtc/modules/utility/source/jvm_android.cc b/third_party/libwebrtc/modules/utility/source/jvm_android.cc index ee9930bcaa66..e0c66d5fe1b3 100644 --- a/third_party/libwebrtc/modules/utility/source/jvm_android.cc +++ b/third_party/libwebrtc/modules/utility/source/jvm_android.cc @@ -27,10 +27,6 @@ struct { const char* name; jclass clazz; } loaded_classes[] = { - {"org/webrtc/voiceengine/BuildInfo", nullptr}, - {"org/webrtc/voiceengine/WebRtcAudioManager", nullptr}, - {"org/webrtc/voiceengine/WebRtcAudioRecord", nullptr}, - {"org/webrtc/voiceengine/WebRtcAudioTrack", nullptr}, }; // Android's FindClass() is trickier than usual because the app-specific diff --git a/third_party/libwebrtc/modules/video_capture/test/video_capture_unittest.cc b/third_party/libwebrtc/modules/video_capture/test/video_capture_unittest.cc index f1142302a1fa..c8af222b5760 100644 --- a/third_party/libwebrtc/modules/video_capture/test/video_capture_unittest.cc +++ b/third_party/libwebrtc/modules/video_capture/test/video_capture_unittest.cc @@ -191,10 +191,9 @@ class VideoCaptureTest : public ::testing::Test { unsigned int number_of_devices_; }; -#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX) -// Currently fails on Mac 64-bit and Linux rel, see -// https://bugs.chromium.org/p/webrtc/issues/detail?id=5406 and -// https://bugs.chromium.org/p/webrtc/issues/detail?id=15229 +#ifdef WEBRTC_MAC +// Currently fails on Mac 64-bit, see +// https://bugs.chromium.org/p/webrtc/issues/detail?id=5406 #define MAYBE_CreateDelete DISABLED_CreateDelete #else #define MAYBE_CreateDelete CreateDelete @@ -234,10 +233,9 @@ TEST_F(VideoCaptureTest, MAYBE_CreateDelete) { } } -#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX) -// Currently fails on Mac 64-bit and Linux rel, see -// https://bugs.chromium.org/p/webrtc/issues/detail?id=5406 and -// https://bugs.chromium.org/p/webrtc/issues/detail?id=15229 +#ifdef WEBRTC_MAC +// Currently fails on Mac 64-bit, see +// https://bugs.chromium.org/p/webrtc/issues/detail?id=5406 #define MAYBE_Capabilities DISABLED_Capabilities #else #define MAYBE_Capabilities Capabilities @@ -344,10 +342,8 @@ TEST_F(VideoCaptureTest, DISABLED_TestTwoCameras) { EXPECT_EQ(0, module1->StopCapture()); } -#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX) +#ifdef WEBRTC_MAC // No VideoCaptureImpl on Mac. 
-// Currently fails on Mac Linux rel, see -// https://bugs.chromium.org/p/webrtc/issues/detail?id=15229 #define MAYBE_ConcurrentAccess DISABLED_ConcurrentAccess #else #define MAYBE_ConcurrentAccess ConcurrentAccess diff --git a/third_party/libwebrtc/modules/video_coding/BUILD.gn b/third_party/libwebrtc/modules/video_coding/BUILD.gn index fba49e8fed82..738d3d4edf53 100644 --- a/third_party/libwebrtc/modules/video_coding/BUILD.gn +++ b/third_party/libwebrtc/modules/video_coding/BUILD.gn @@ -245,6 +245,7 @@ rtc_library("video_coding") { "../../rtc_base:rtc_event", "../../rtc_base:rtc_numerics", "../../rtc_base:safe_conversions", + "../../rtc_base:stringutils", "../../rtc_base:threading", "../../rtc_base:timeutils", "../../rtc_base/experiments:alr_experiment", @@ -298,10 +299,7 @@ rtc_library("video_codec_interface") { "../../common_video/generic_frame_descriptor", "../../rtc_base/system:rtc_export", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/base:core_headers", - "//third_party/abseil-cpp/absl/types:optional", - ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("video_coding_legacy") { @@ -446,6 +444,7 @@ rtc_library("video_coding_utility") { "../../rtc_base/task_utils:repeating_task", "../../system_wrappers:field_trial", "../rtp_rtcp:rtp_rtcp_format", + "svc:scalability_mode_util", ] absl_deps = [ "//third_party/abseil-cpp/absl/numeric:bits", @@ -1069,7 +1068,6 @@ if (rtc_include_tests) { deps += [ ":android_codec_factory_helper", "../../sdk/android:hwcodecs_java", - "//modules/audio_device:audio_device_java", "//sdk/android:native_test_jni_onload", "//testing/android/native_test:native_test_support", ] diff --git a/third_party/libwebrtc/modules/video_coding/chain_diff_calculator_gn/moz.build b/third_party/libwebrtc/modules/video_coding/chain_diff_calculator_gn/moz.build index 7162249695f9..69220e6f990e 100644 --- a/third_party/libwebrtc/modules/video_coding/chain_diff_calculator_gn/moz.build +++ b/third_party/libwebrtc/modules/video_coding/chain_diff_calculator_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/modules/video_coding/codecs/av1/av1_svc_config_gn/moz.build b/third_party/libwebrtc/modules/video_coding/codecs/av1/av1_svc_config_gn/moz.build index 08853e440baf..6bc880ab1fc8 100644 --- a/third_party/libwebrtc/modules/video_coding/codecs/av1/av1_svc_config_gn/moz.build +++ b/third_party/libwebrtc/modules/video_coding/codecs/av1/av1_svc_config_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_test.cc b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_test.cc index 9be2d377eb38..826202489c60 100644 --- a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_test.cc +++ b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_test.cc @@ -224,7 +224,10 @@ class TestEncoder : public VideoCodecTester::Encoder, } void Encode(const VideoFrame& frame, EncodeCallback callback) override { - callbacks_[frame.timestamp()] = std::move(callback); + { + MutexLock lock(&mutex_); + callbacks_[frame.timestamp()] = std::move(callback); + } if (auto fs = frame_settings_.find(frame_num_); fs != frame_settings_.begin() && fs != frame_settings_.end()) { @@ -251,6 +254,7 @@ class TestEncoder : public 
VideoCodecTester::Encoder, protected: Result OnEncodedImage(const EncodedImage& encoded_image, const CodecSpecificInfo* codec_specific_info) override { + MutexLock lock(&mutex_); auto cb = callbacks_.find(encoded_image.Timestamp()); RTC_CHECK(cb != callbacks_.end()); cb->second(encoded_image); @@ -322,7 +326,8 @@ class TestEncoder : public VideoCodecTester::Encoder, const std::string codec_type_; const std::map& frame_settings_; int frame_num_; - std::map callbacks_; + std::map callbacks_ RTC_GUARDED_BY(mutex_); + Mutex mutex_; }; class TestDecoder : public VideoCodecTester::Decoder, @@ -345,7 +350,11 @@ class TestDecoder : public VideoCodecTester::Decoder, } void Decode(const EncodedImage& frame, DecodeCallback callback) override { - callbacks_[frame.Timestamp()] = std::move(callback); + { + MutexLock lock(&mutex_); + callbacks_[frame.Timestamp()] = std::move(callback); + } + decoder_->Decode(frame, /*missing_frames=*/false, /*render_time_ms=*/0); } @@ -361,6 +370,7 @@ class TestDecoder : public VideoCodecTester::Decoder, protected: int Decoded(VideoFrame& decoded_frame) override { + MutexLock lock(&mutex_); auto cb = callbacks_.find(decoded_frame.timestamp()); RTC_CHECK(cb != callbacks_.end()); cb->second(decoded_frame); @@ -371,7 +381,8 @@ class TestDecoder : public VideoCodecTester::Decoder, std::unique_ptr decoder_; const std::string codec_type_; - std::map callbacks_; + std::map callbacks_ RTC_GUARDED_BY(mutex_); + Mutex mutex_; }; std::unique_ptr CreateVideoSource( diff --git a/third_party/libwebrtc/modules/video_coding/frame_dependencies_calculator_gn/moz.build b/third_party/libwebrtc/modules/video_coding/frame_dependencies_calculator_gn/moz.build index 4b3ab25d4f74..8696640ed10f 100644 --- a/third_party/libwebrtc/modules/video_coding/frame_dependencies_calculator_gn/moz.build +++ b/third_party/libwebrtc/modules/video_coding/frame_dependencies_calculator_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/modules/video_coding/generic_decoder.cc b/third_party/libwebrtc/modules/video_coding/generic_decoder.cc index b660e02b7250..46b8c95f6177 100644 --- a/third_party/libwebrtc/modules/video_coding/generic_decoder.cc +++ b/third_party/libwebrtc/modules/video_coding/generic_decoder.cc @@ -25,6 +25,7 @@ #include "modules/video_coding/include/video_error_codes.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/string_encode.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/clock.h" @@ -299,8 +300,14 @@ int32_t VCMGenericDecoder::Decode(const VCMEncodedFrame& frame, Timestamp now) { _callback->OnDecoderInfoChanged(std::move(decoder_info)); } if (ret < WEBRTC_VIDEO_CODEC_OK) { + const absl::optional ssrc = + !frame_info.packet_infos.empty() + ? absl::make_optional(frame_info.packet_infos[0].ssrc()) + : absl::nullopt; RTC_LOG(LS_WARNING) << "Failed to decode frame with timestamp " - << frame.Timestamp() << ", error code: " << ret; + << frame.Timestamp() << ", ssrc " + << (ssrc ? rtc::ToString(*ssrc) : "") + << ", error code: " << ret; _callback->ClearTimestampMap(); } else if (ret == WEBRTC_VIDEO_CODEC_NO_OUTPUT) { // No output. 
diff --git a/third_party/libwebrtc/modules/video_coding/include/video_codec_interface.h b/third_party/libwebrtc/modules/video_coding/include/video_codec_interface.h index 46ae0d29e16f..c6522fcc6bb1 100644 --- a/third_party/libwebrtc/modules/video_coding/include/video_codec_interface.h +++ b/third_party/libwebrtc/modules/video_coding/include/video_codec_interface.h @@ -13,7 +13,6 @@ #include -#include "absl/base/attributes.h" #include "absl/types/optional.h" #include "api/video/video_frame.h" #include "api/video_codecs/scalability_mode.h" @@ -79,8 +78,6 @@ struct CodecSpecificInfoVP9 { // Frame reference data. uint8_t num_ref_pics; uint8_t p_diff[kMaxVp9RefPics]; - - ABSL_DEPRECATED("") bool end_of_picture; }; static_assert(std::is_pod::value, ""); diff --git a/third_party/libwebrtc/modules/video_coding/svc/scalability_mode_util_gn/moz.build b/third_party/libwebrtc/modules/video_coding/svc/scalability_mode_util_gn/moz.build index e7cdba8af34c..145273655740 100644 --- a/third_party/libwebrtc/modules/video_coding/svc/scalability_mode_util_gn/moz.build +++ b/third_party/libwebrtc/modules/video_coding/svc/scalability_mode_util_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/modules/video_coding/svc/scalability_structures_gn/moz.build b/third_party/libwebrtc/modules/video_coding/svc/scalability_structures_gn/moz.build index dc072aba46a2..a21e6b601371 100644 --- a/third_party/libwebrtc/modules/video_coding/svc/scalability_structures_gn/moz.build +++ b/third_party/libwebrtc/modules/video_coding/svc/scalability_structures_gn/moz.build @@ -133,6 +133,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/modules/video_coding/svc/svc_rate_allocator_gn/moz.build b/third_party/libwebrtc/modules/video_coding/svc/svc_rate_allocator_gn/moz.build index 0099063164b3..92c952ced8bf 100644 --- a/third_party/libwebrtc/modules/video_coding/svc/svc_rate_allocator_gn/moz.build +++ b/third_party/libwebrtc/modules/video_coding/svc/svc_rate_allocator_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/modules/video_coding/utility/simulcast_utility.cc b/third_party/libwebrtc/modules/video_coding/utility/simulcast_utility.cc index 65061ed32aa3..95e9488b01be 100644 --- a/third_party/libwebrtc/modules/video_coding/utility/simulcast_utility.cc +++ b/third_party/libwebrtc/modules/video_coding/utility/simulcast_utility.cc @@ -13,6 +13,7 @@ #include #include +#include "modules/video_coding/svc/scalability_mode_util.h" #include "rtc_base/checks.h" namespace webrtc { @@ -79,15 +80,33 @@ bool SimulcastUtility::IsConferenceModeScreenshare(const VideoCodec& codec) { int SimulcastUtility::NumberOfTemporalLayers(const VideoCodec& codec, int spatial_id) { - uint8_t num_temporal_layers = - std::max(1, codec.VP8().numberOfTemporalLayers); + int num_temporal_layers = 0; + if (auto scalability_mode = codec.GetScalabilityMode(); scalability_mode) { + num_temporal_layers = ScalabilityModeToNumTemporalLayers(*scalability_mode); + } else { + switch (codec.codecType) { + case kVideoCodecVP8: + num_temporal_layers = codec.VP8().numberOfTemporalLayers; + break; + case kVideoCodecVP9: + num_temporal_layers = codec.VP9().numberOfTemporalLayers; + break; + case 
kVideoCodecH264: + num_temporal_layers = codec.H264().numberOfTemporalLayers; + break; + default: + break; + } + } + if (codec.numberOfSimulcastStreams > 0) { RTC_DCHECK_LT(spatial_id, codec.numberOfSimulcastStreams); num_temporal_layers = std::max(num_temporal_layers, - codec.simulcastStream[spatial_id].numberOfTemporalLayers); + static_cast( + codec.simulcastStream[spatial_id].numberOfTemporalLayers)); } - return num_temporal_layers; + return std::max(1, num_temporal_layers); } } // namespace webrtc diff --git a/third_party/libwebrtc/modules/video_coding/video_codec_interface_gn/moz.build b/third_party/libwebrtc/modules/video_coding/video_codec_interface_gn/moz.build index 8fd8cbe26304..c2252edfbbe1 100644 --- a/third_party/libwebrtc/modules/video_coding/video_codec_interface_gn/moz.build +++ b/third_party/libwebrtc/modules/video_coding/video_codec_interface_gn/moz.build @@ -127,6 +127,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/moz-patch-stack/0001.patch b/third_party/libwebrtc/moz-patch-stack/0001.patch index 9c7fbcd8d27f..8098c507329e 100644 --- a/third_party/libwebrtc/moz-patch-stack/0001.patch +++ b/third_party/libwebrtc/moz-patch-stack/0001.patch @@ -413,7 +413,7 @@ index 54132bcdbb..cf8b3ad3dc 100644 bool RtpExtension::IsSupportedForVideo(absl::string_view uri) { diff --git a/call/BUILD.gn b/call/BUILD.gn -index c40cfcf28e..63a3a2d53d 100644 +index 39cbc0a9c1..2e8a997fc4 100644 --- a/call/BUILD.gn +++ b/call/BUILD.gn @@ -20,6 +20,7 @@ rtc_library("call_interfaces") { @@ -435,7 +435,7 @@ index c40cfcf28e..63a3a2d53d 100644 deps = [ ":audio_sender_interface", diff --git a/call/video_receive_stream.h b/call/video_receive_stream.h -index 48a1ad0758..372fe211d0 100644 +index 12f6bf60c8..542067d30c 100644 --- a/call/video_receive_stream.h +++ b/call/video_receive_stream.h @@ -206,6 +206,8 @@ class VideoReceiveStreamInterface : public MediaReceiveStreamInterface { @@ -497,7 +497,7 @@ index 9f78aca6e2..115bfcf97b 100644 } max_extrapolation_samples_ = 7 * fs_hz / 100; diff --git a/modules/desktop_capture/desktop_capturer.h b/modules/desktop_capture/desktop_capturer.h -index fd884f13ff..f39852b58e 100644 +index 9c7ecc78f4..2c9c419bcd 100644 --- a/modules/desktop_capture/desktop_capturer.h +++ b/modules/desktop_capture/desktop_capturer.h @@ -82,6 +82,7 @@ class RTC_EXPORT DesktopCapturer { @@ -774,7 +774,7 @@ index 3745e9cba5..f68cfb94c1 100644 device_names->push_back(rtc::ToUtf8(device.DeviceName)); } diff --git a/modules/rtp_rtcp/source/rtcp_sender.cc b/modules/rtp_rtcp/source/rtcp_sender.cc -index 75f403d6fa..c62c65506a 100644 +index e057005b16..c14332aca6 100644 --- a/modules/rtp_rtcp/source/rtcp_sender.cc +++ b/modules/rtp_rtcp/source/rtcp_sender.cc @@ -199,7 +199,7 @@ void RTCPSender::SetRTCPStatus(RtcpMode new_method) { @@ -787,7 +787,7 @@ index 75f403d6fa..c62c65506a 100644 method_ = new_method; } diff --git a/modules/rtp_rtcp/source/rtp_header_extensions.cc b/modules/rtp_rtcp/source/rtp_header_extensions.cc -index 81961c69aa..a57d9e7f62 100644 +index 75e3e3c412..88815b5601 100644 --- a/modules/rtp_rtcp/source/rtp_header_extensions.cc +++ b/modules/rtp_rtcp/source/rtp_header_extensions.cc @@ -446,6 +446,45 @@ bool PlayoutDelayLimits::Write(rtc::ArrayView data, @@ -863,7 +863,7 @@ index 04b2cd63a6..e085f89c03 100644 // Subclasses must defined kId and kUri static constexpr members. 
class BaseRtpStringExtension { diff --git a/modules/rtp_rtcp/source/rtp_packet.cc b/modules/rtp_rtcp/source/rtp_packet.cc -index 6c7dff322b..9495841984 100644 +index 2a95a3a816..b152cdbd9e 100644 --- a/modules/rtp_rtcp/source/rtp_packet.cc +++ b/modules/rtp_rtcp/source/rtp_packet.cc @@ -205,6 +205,10 @@ void RtpPacket::ZeroMutableExtensions() { @@ -939,7 +939,7 @@ index 3e6aa3baae..9ac7696ce9 100644 constexpr int RTCP_MAX_REPORT_BLOCKS = 31; // RFC 3550 page 37 } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_sender.cc b/modules/rtp_rtcp/source/rtp_sender.cc -index 8c0b2dda0a..a60d742dd4 100644 +index 967d38a8e0..f3d90e3c0b 100644 --- a/modules/rtp_rtcp/source/rtp_sender.cc +++ b/modules/rtp_rtcp/source/rtp_sender.cc @@ -128,6 +128,10 @@ bool IsNonVolatile(RTPExtensionType type) { @@ -1562,10 +1562,10 @@ index 0474e7bc17..1953923f81 100644 std::unique_ptr svc_controller_; absl::optional scalability_mode_; diff --git a/rtc_base/BUILD.gn b/rtc_base/BUILD.gn -index ac27ee8251..0005503341 100644 +index f3810fdd36..b8113bd4bc 100644 --- a/rtc_base/BUILD.gn +++ b/rtc_base/BUILD.gn -@@ -423,6 +423,12 @@ rtc_library("logging") { +@@ -458,6 +458,12 @@ rtc_library("logging") { "//third_party/abseil-cpp/absl/types:optional", ] @@ -1625,7 +1625,7 @@ index 61a3c667ba..ad2303735e 100644 // The list of logging streams currently configured. // Note: we explicitly do not clean this up, because of the uncertain ordering diff --git a/rtc_base/logging.h b/rtc_base/logging.h -index d59b9a0ef7..73a8bf7454 100644 +index b171cfe11e..df7f173f58 100644 --- a/rtc_base/logging.h +++ b/rtc_base/logging.h @@ -581,6 +581,12 @@ class LogMessage { @@ -1681,10 +1681,10 @@ index 0a9226ef6f..620c1c02f3 100644 vcm_ = nullptr; } diff --git a/webrtc.gni b/webrtc.gni -index 5e4a5d3c81..928a59104b 100644 +index 097ad4a377..9c67f61aaa 100644 --- a/webrtc.gni +++ b/webrtc.gni -@@ -122,7 +122,7 @@ declare_args() { +@@ -126,7 +126,7 @@ declare_args() { # Selects whether debug dumps for the audio processing module # should be generated. @@ -1694,5 +1694,5 @@ index 5e4a5d3c81..928a59104b 100644 # Selects whether the audio processing module should be excluded. 
rtc_exclude_audio_processing_module = false -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0002.patch b/third_party/libwebrtc/moz-patch-stack/0002.patch index f5a057494903..f482fb9c89ec 100644 --- a/third_party/libwebrtc/moz-patch-stack/0002.patch +++ b/third_party/libwebrtc/moz-patch-stack/0002.patch @@ -15,7 +15,7 @@ Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/539b69f0e21118a18 2 files changed, 5 insertions(+) diff --git a/modules/desktop_capture/desktop_capture_types.h b/modules/desktop_capture/desktop_capture_types.h -index 9627076eea..381d1021c4 100644 +index a4e3e897fd..e777a45f92 100644 --- a/modules/desktop_capture/desktop_capture_types.h +++ b/modules/desktop_capture/desktop_capture_types.h @@ -13,6 +13,10 @@ @@ -28,7 +28,7 @@ index 9627076eea..381d1021c4 100644 + namespace webrtc { - enum class CaptureType { kWindow, kScreen }; + enum class CaptureType { kWindow, kScreen, kAnyScreenContent }; diff --git a/modules/video_capture/video_capture.h b/modules/video_capture/video_capture.h index e207598d68..58485f28e9 100644 --- a/modules/video_capture/video_capture.h @@ -42,5 +42,5 @@ index e207598d68..58485f28e9 100644 #include "modules/video_capture/video_capture_defines.h" #include -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0003.patch b/third_party/libwebrtc/moz-patch-stack/0003.patch index 7ff8de58953a..6ebce7ff2646 100644 --- a/third_party/libwebrtc/moz-patch-stack/0003.patch +++ b/third_party/libwebrtc/moz-patch-stack/0003.patch @@ -33,5 +33,5 @@ index ad2303735e..9c2d3b0a39 100644 return false; return streams_empty_.load(std::memory_order_relaxed); -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0004.patch b/third_party/libwebrtc/moz-patch-stack/0004.patch index cebbcb123e58..eab72045c130 100644 --- a/third_party/libwebrtc/moz-patch-stack/0004.patch +++ b/third_party/libwebrtc/moz-patch-stack/0004.patch @@ -16,7 +16,7 @@ Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/8d832e832ffe51324 1 file changed, 13 insertions(+) diff --git a/rtc_base/logging.h b/rtc_base/logging.h -index 73a8bf7454..a980fb4ad3 100644 +index df7f173f58..a3733d7543 100644 --- a/rtc_base/logging.h +++ b/rtc_base/logging.h @@ -48,6 +48,14 @@ @@ -45,5 +45,5 @@ index 73a8bf7454..a980fb4ad3 100644 + #endif // RTC_BASE_LOGGING_H_ -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0005.patch b/third_party/libwebrtc/moz-patch-stack/0005.patch index f989089073ba..5b2c6f8c8ad2 100644 --- a/third_party/libwebrtc/moz-patch-stack/0005.patch +++ b/third_party/libwebrtc/moz-patch-stack/0005.patch @@ -48,5 +48,5 @@ index 0b74f2ac0a..5c53f48144 100644 // between streams. 
const auto ssrc_sink_it = sink_by_ssrc_.find(ssrc); -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0006.patch b/third_party/libwebrtc/moz-patch-stack/0006.patch index 5edf86b9a863..b984c1ee6cd0 100644 --- a/third_party/libwebrtc/moz-patch-stack/0006.patch +++ b/third_party/libwebrtc/moz-patch-stack/0006.patch @@ -16,7 +16,7 @@ Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/b55b0368d9f21849f 8 files changed, 34 insertions(+) diff --git a/call/video_receive_stream.h b/call/video_receive_stream.h -index 372fe211d0..918429d191 100644 +index 542067d30c..500ca1447f 100644 --- a/call/video_receive_stream.h +++ b/call/video_receive_stream.h @@ -151,6 +151,9 @@ class VideoReceiveStreamInterface : public MediaReceiveStreamInterface { @@ -30,10 +30,10 @@ index 372fe211d0..918429d191 100644 // single 'timing frame'. absl::optional timing_frame_info; diff --git a/modules/rtp_rtcp/source/rtcp_receiver.cc b/modules/rtp_rtcp/source/rtcp_receiver.cc -index dd9d2664ed..708ffbbd47 100644 +index ba5b951f4d..61b88c89b8 100644 --- a/modules/rtp_rtcp/source/rtcp_receiver.cc +++ b/modules/rtp_rtcp/source/rtcp_receiver.cc -@@ -360,6 +360,13 @@ RTCPReceiver::ConsumeReceivedXrReferenceTimeInfo() { +@@ -361,6 +361,13 @@ RTCPReceiver::ConsumeReceivedXrReferenceTimeInfo() { return last_xr_rtis; } @@ -48,10 +48,10 @@ index dd9d2664ed..708ffbbd47 100644 std::vector result; MutexLock lock(&rtcp_receiver_lock_); diff --git a/modules/rtp_rtcp/source/rtcp_receiver.h b/modules/rtp_rtcp/source/rtcp_receiver.h -index b0978690ab..938b641eb5 100644 +index a6175d0774..59e0258df5 100644 --- a/modules/rtp_rtcp/source/rtcp_receiver.h +++ b/modules/rtp_rtcp/source/rtcp_receiver.h -@@ -116,6 +116,10 @@ class RTCPReceiver final { +@@ -117,6 +117,10 @@ class RTCPReceiver final { std::vector ConsumeReceivedXrReferenceTimeInfo(); @@ -63,7 +63,7 @@ index b0978690ab..938b641eb5 100644 absl::optional LastRtt() const; diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc -index ab5d419648..1edff95826 100644 +index 5a7624f42b..9c628ecd51 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc +++ b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc @@ -501,6 +501,11 @@ void ModuleRtpRtcpImpl::GetSendStreamDataCounters( @@ -79,7 +79,7 @@ index ab5d419648..1edff95826 100644 const { return rtcp_receiver_.GetLatestReportBlockData(); diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/modules/rtp_rtcp/source/rtp_rtcp_impl.h -index 50937f574d..5da38a077b 100644 +index d298081432..dd706e569d 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_impl.h +++ b/modules/rtp_rtcp/source/rtp_rtcp_impl.h @@ -182,6 +182,9 @@ class ABSL_DEPRECATED("") ModuleRtpRtcpImpl @@ -93,10 +93,10 @@ index 50937f574d..5da38a077b 100644 // interest to statistics. Used to implement RTCRemoteInboundRtpStreamStats. // Within this list, the `ReportBlockData::source_ssrc()`, which is the SSRC diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc -index d6eddb21b1..3b392a1bfb 100644 +index 62c5ad38f5..c391b0f8de 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc +++ b/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc -@@ -506,6 +506,11 @@ void ModuleRtpRtcpImpl2::GetSendStreamDataCounters( +@@ -505,6 +505,11 @@ void ModuleRtpRtcpImpl2::GetSendStreamDataCounters( } // Received RTCP report. 
@@ -109,7 +109,7 @@ index d6eddb21b1..3b392a1bfb 100644 const { return rtcp_receiver_.GetLatestReportBlockData(); diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl2.h b/modules/rtp_rtcp/source/rtp_rtcp_impl2.h -index 06fe5869b6..1b79915025 100644 +index 36e6deffba..663b28af3e 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_impl2.h +++ b/modules/rtp_rtcp/source/rtp_rtcp_impl2.h @@ -194,6 +194,9 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, @@ -123,10 +123,10 @@ index 06fe5869b6..1b79915025 100644 // interest to statistics. Used to implement RTCRemoteInboundRtpStreamStats. // Within this list, the `ReportBlockData::source_ssrc()`, which is the SSRC diff --git a/modules/rtp_rtcp/source/rtp_rtcp_interface.h b/modules/rtp_rtcp/source/rtp_rtcp_interface.h -index 2184f33801..d60a855b53 100644 +index d366bb77a2..5ad7680d61 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_interface.h +++ b/modules/rtp_rtcp/source/rtp_rtcp_interface.h -@@ -387,6 +387,10 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { +@@ -388,6 +388,10 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { StreamDataCounters* rtp_counters, StreamDataCounters* rtx_counters) const = 0; @@ -138,5 +138,5 @@ index 2184f33801..d60a855b53 100644 // Within this list, the sender-source SSRC pair is unique and per-pair the // ReportBlockData represents the latest Report Block that was received for -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0007.patch b/third_party/libwebrtc/moz-patch-stack/0007.patch index 63d0ddd50023..e2faca39cfe6 100644 --- a/third_party/libwebrtc/moz-patch-stack/0007.patch +++ b/third_party/libwebrtc/moz-patch-stack/0007.patch @@ -21,7 +21,7 @@ Bug 1654112 - Replace custom upstream code for remote received audio stats with 8 files changed, 21 insertions(+), 10 deletions(-) diff --git a/call/video_receive_stream.h b/call/video_receive_stream.h -index 918429d191..2817c9bd9a 100644 +index 500ca1447f..95f1a47f4e 100644 --- a/call/video_receive_stream.h +++ b/call/video_receive_stream.h @@ -153,6 +153,7 @@ class VideoReceiveStreamInterface : public MediaReceiveStreamInterface { @@ -33,10 +33,10 @@ index 918429d191..2817c9bd9a 100644 // Timing frame info: all important timestamps for a full lifetime of a // single 'timing frame'. 
diff --git a/modules/rtp_rtcp/source/rtcp_receiver.cc b/modules/rtp_rtcp/source/rtcp_receiver.cc -index 708ffbbd47..c5b4afefc8 100644 +index 61b88c89b8..a98b200c05 100644 --- a/modules/rtp_rtcp/source/rtcp_receiver.cc +++ b/modules/rtp_rtcp/source/rtcp_receiver.cc -@@ -361,10 +361,12 @@ RTCPReceiver::ConsumeReceivedXrReferenceTimeInfo() { +@@ -362,10 +362,12 @@ RTCPReceiver::ConsumeReceivedXrReferenceTimeInfo() { } void RTCPReceiver::RemoteRTCPSenderInfo(uint32_t* packet_count, @@ -51,10 +51,10 @@ index 708ffbbd47..c5b4afefc8 100644 std::vector RTCPReceiver::GetLatestReportBlockData() const { diff --git a/modules/rtp_rtcp/source/rtcp_receiver.h b/modules/rtp_rtcp/source/rtcp_receiver.h -index 938b641eb5..4ed3efad82 100644 +index 59e0258df5..e748b257e8 100644 --- a/modules/rtp_rtcp/source/rtcp_receiver.h +++ b/modules/rtp_rtcp/source/rtcp_receiver.h -@@ -118,7 +118,8 @@ class RTCPReceiver final { +@@ -119,7 +119,8 @@ class RTCPReceiver final { // Get received sender packet and octet counts void RemoteRTCPSenderInfo(uint32_t* packet_count, @@ -65,7 +65,7 @@ index 938b641eb5..4ed3efad82 100644 absl::optional AverageRtt() const; absl::optional LastRtt() const; diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc -index 1edff95826..b360327f27 100644 +index 9c628ecd51..525497c489 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc +++ b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc @@ -502,8 +502,10 @@ void ModuleRtpRtcpImpl::GetSendStreamDataCounters( @@ -82,7 +82,7 @@ index 1edff95826..b360327f27 100644 std::vector ModuleRtpRtcpImpl::GetLatestReportBlockData() diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/modules/rtp_rtcp/source/rtp_rtcp_impl.h -index 5da38a077b..f78a0da41a 100644 +index dd706e569d..742a69cce3 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_impl.h +++ b/modules/rtp_rtcp/source/rtp_rtcp_impl.h @@ -183,7 +183,8 @@ class ABSL_DEPRECATED("") ModuleRtpRtcpImpl @@ -96,10 +96,10 @@ index 5da38a077b..f78a0da41a 100644 // A snapshot of the most recent Report Block with additional data of // interest to statistics. Used to implement RTCRemoteInboundRtpStreamStats. diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc -index 3b392a1bfb..241ed27a56 100644 +index c391b0f8de..c171cf47a4 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc +++ b/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc -@@ -507,8 +507,10 @@ void ModuleRtpRtcpImpl2::GetSendStreamDataCounters( +@@ -506,8 +506,10 @@ void ModuleRtpRtcpImpl2::GetSendStreamDataCounters( // Received RTCP report. void ModuleRtpRtcpImpl2::RemoteRTCPSenderInfo(uint32_t* packet_count, @@ -113,7 +113,7 @@ index 3b392a1bfb..241ed27a56 100644 std::vector ModuleRtpRtcpImpl2::GetLatestReportBlockData() diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl2.h b/modules/rtp_rtcp/source/rtp_rtcp_impl2.h -index 1b79915025..f9c77a57b0 100644 +index 663b28af3e..a9d18ec44a 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_impl2.h +++ b/modules/rtp_rtcp/source/rtp_rtcp_impl2.h @@ -195,7 +195,8 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, @@ -127,10 +127,10 @@ index 1b79915025..f9c77a57b0 100644 // A snapshot of the most recent Report Block with additional data of // interest to statistics. Used to implement RTCRemoteInboundRtpStreamStats. 
diff --git a/modules/rtp_rtcp/source/rtp_rtcp_interface.h b/modules/rtp_rtcp/source/rtp_rtcp_interface.h -index d60a855b53..7b5ec77f3d 100644 +index 5ad7680d61..dc86f92bf5 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_interface.h +++ b/modules/rtp_rtcp/source/rtp_rtcp_interface.h -@@ -388,9 +388,10 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { +@@ -389,9 +389,10 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { StreamDataCounters* rtx_counters) const = 0; @@ -144,5 +144,5 @@ index d60a855b53..7b5ec77f3d 100644 // Within this list, the sender-source SSRC pair is unique and per-pair the // ReportBlockData represents the latest Report Block that was received for -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0008.patch b/third_party/libwebrtc/moz-patch-stack/0008.patch index b2572f39963a..4c244e44ebfd 100644 --- a/third_party/libwebrtc/moz-patch-stack/0008.patch +++ b/third_party/libwebrtc/moz-patch-stack/0008.patch @@ -64,5 +64,5 @@ index 3db4332cd1..512103ab5e 100644 DesktopSize size(round(nssize.width * scale), -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0009.patch b/third_party/libwebrtc/moz-patch-stack/0009.patch index fefc561fb6eb..138f5c517f08 100644 --- a/third_party/libwebrtc/moz-patch-stack/0009.patch +++ b/third_party/libwebrtc/moz-patch-stack/0009.patch @@ -52,5 +52,5 @@ index 5d881662ea..989ec7ea54 100644 // windows are allowed for the same pid. if (itr != pid_itr_map.end() && (itr->second != sources.end())) { -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0010.patch b/third_party/libwebrtc/moz-patch-stack/0010.patch index 85ce450963a9..f1c49df481f7 100644 --- a/third_party/libwebrtc/moz-patch-stack/0010.patch +++ b/third_party/libwebrtc/moz-patch-stack/0010.patch @@ -22,5 +22,5 @@ index 51b0fcd492..17e0e33b1d 100644 } -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0011.patch b/third_party/libwebrtc/moz-patch-stack/0011.patch index e22acbc03a3f..1e70e57bde6c 100644 --- a/third_party/libwebrtc/moz-patch-stack/0011.patch +++ b/third_party/libwebrtc/moz-patch-stack/0011.patch @@ -41,5 +41,5 @@ index 66fd0a0ebe..7071776d0a 100644 if (conversionResult != 0) { RTC_LOG(LS_ERROR) << "Failed to convert capture frame from type " -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0012.patch b/third_party/libwebrtc/moz-patch-stack/0012.patch index a3dd386ca6bf..63993797f077 100644 --- a/third_party/libwebrtc/moz-patch-stack/0012.patch +++ b/third_party/libwebrtc/moz-patch-stack/0012.patch @@ -149,5 +149,5 @@ index eff720371a..94aed09c48 100644 if (number_of_cores <= 0) { RTC_LOG(LS_ERROR) << "Failed to get number of cores"; -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0013.patch b/third_party/libwebrtc/moz-patch-stack/0013.patch index 6abcfa67d8bf..83d293b9f03a 100644 --- a/third_party/libwebrtc/moz-patch-stack/0013.patch +++ b/third_party/libwebrtc/moz-patch-stack/0013.patch @@ -28,5 +28,5 @@ index 33ea941808..2dfdd9a5df 100644 fdmax = fd; -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0014.patch b/third_party/libwebrtc/moz-patch-stack/0014.patch index 37123c79a11f..1369ad01cd07 100644 --- a/third_party/libwebrtc/moz-patch-stack/0014.patch +++ b/third_party/libwebrtc/moz-patch-stack/0014.patch @@ -45,5 +45,5 @@ index 6d369d747e..556204ac89 100644 &attr, joinable ? 
PTHREAD_CREATE_JOINABLE : PTHREAD_CREATE_DETACHED); PlatformThread::Handle handle; -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0015.patch b/third_party/libwebrtc/moz-patch-stack/0015.patch index 41abd1a9ba86..546526c0df2c 100644 --- a/third_party/libwebrtc/moz-patch-stack/0015.patch +++ b/third_party/libwebrtc/moz-patch-stack/0015.patch @@ -33,5 +33,5 @@ index 7071776d0a..02404697ad 100644 // In Windows, the image starts bottom left, instead of top left. // Setting a negative source height, inverts the image (within LibYuv). -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0016.patch b/third_party/libwebrtc/moz-patch-stack/0016.patch index b231dd11cf2b..e8538290e8c5 100644 --- a/third_party/libwebrtc/moz-patch-stack/0016.patch +++ b/third_party/libwebrtc/moz-patch-stack/0016.patch @@ -43,5 +43,5 @@ index 7cccdb51a7..2a6afb3147 100644 const int32_t diffWidth = capability.width - requested.width; const int32_t diffHeight = capability.height - requested.height; -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0017.patch b/third_party/libwebrtc/moz-patch-stack/0017.patch index 270869ed50c7..2e4e4a82cc14 100644 --- a/third_party/libwebrtc/moz-patch-stack/0017.patch +++ b/third_party/libwebrtc/moz-patch-stack/0017.patch @@ -92,5 +92,5 @@ index 1b52645cde..dc7b9b1a24 100644 } // namespace videocapturemodule } // namespace webrtc -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0018.patch b/third_party/libwebrtc/moz-patch-stack/0018.patch index 0cb21038c7f6..5a55f4a1ee84 100644 --- a/third_party/libwebrtc/moz-patch-stack/0018.patch +++ b/third_party/libwebrtc/moz-patch-stack/0018.patch @@ -93,5 +93,5 @@ index dc7b9b1a24..ed2a726d6f 100644 } // namespace videocapturemodule } // namespace webrtc -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0019.patch b/third_party/libwebrtc/moz-patch-stack/0019.patch index 9377e9e72c6f..1616f3fd57aa 100644 --- a/third_party/libwebrtc/moz-patch-stack/0019.patch +++ b/third_party/libwebrtc/moz-patch-stack/0019.patch @@ -94,5 +94,5 @@ index 19b32f16b9..f77d791033 100644 } } -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0020.patch b/third_party/libwebrtc/moz-patch-stack/0020.patch index f91e70959166..ca116f68e110 100644 --- a/third_party/libwebrtc/moz-patch-stack/0020.patch +++ b/third_party/libwebrtc/moz-patch-stack/0020.patch @@ -52,5 +52,5 @@ index a7eca19bbf..9d22f42f4d 100644 _hdevnotify = RegisterDeviceNotification(_hwnd, &di, DEVICE_NOTIFY_WINDOW_HANDLE); -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0021.patch b/third_party/libwebrtc/moz-patch-stack/0021.patch index 3015b817a8a5..84055946a4f0 100644 --- a/third_party/libwebrtc/moz-patch-stack/0021.patch +++ b/third_party/libwebrtc/moz-patch-stack/0021.patch @@ -54,5 +54,5 @@ index 5457402542..bceb321951 100644 #else denoiser_state = kDenoiserOnAdaptive; -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0022.patch b/third_party/libwebrtc/moz-patch-stack/0022.patch index 9a1a5bd71541..d90bb77010d6 100644 --- a/third_party/libwebrtc/moz-patch-stack/0022.patch +++ b/third_party/libwebrtc/moz-patch-stack/0022.patch @@ -25,5 +25,5 @@ index 10a5bcfe7c..fedf9f5961 100644 wd2 = (s->band[band].sg[0] == s->band[band].sg[1]) ? 
-wd1 : wd1; if (wd2 > 32767) -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0023.patch b/third_party/libwebrtc/moz-patch-stack/0023.patch index 1650eca6765e..633388cd3e04 100644 --- a/third_party/libwebrtc/moz-patch-stack/0023.patch +++ b/third_party/libwebrtc/moz-patch-stack/0023.patch @@ -144,5 +144,5 @@ index 7a70c2ff88..00cede01cb 100644 return true; } -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0024.patch b/third_party/libwebrtc/moz-patch-stack/0024.patch index 497bddfbee7e..2558a1c609c0 100644 --- a/third_party/libwebrtc/moz-patch-stack/0024.patch +++ b/third_party/libwebrtc/moz-patch-stack/0024.patch @@ -24,5 +24,5 @@ index c08fc4c29b..8e0e40ef70 100644 /** -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0025.patch b/third_party/libwebrtc/moz-patch-stack/0025.patch index f60da1a8a433..d33bb501e740 100644 --- a/third_party/libwebrtc/moz-patch-stack/0025.patch +++ b/third_party/libwebrtc/moz-patch-stack/0025.patch @@ -39,5 +39,5 @@ index 877986134a..c1d2d61a71 100644 - private static native void nativeAddSample(long handle, int sample); } -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0026.patch b/third_party/libwebrtc/moz-patch-stack/0026.patch index b47a28d51451..407fcccce226 100644 --- a/third_party/libwebrtc/moz-patch-stack/0026.patch +++ b/third_party/libwebrtc/moz-patch-stack/0026.patch @@ -32,5 +32,5 @@ index dec97a2c25..d5ee80c73e 100644 checkIsOnCameraThread(); -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0027.patch b/third_party/libwebrtc/moz-patch-stack/0027.patch index 66803ae7cc05..ce511cc80319 100644 --- a/third_party/libwebrtc/moz-patch-stack/0027.patch +++ b/third_party/libwebrtc/moz-patch-stack/0027.patch @@ -86,5 +86,5 @@ index 95432a509d..e3c2395f49 100644 #ifdef WEBRTC_LINUX void HandleEvent(inotify_event* event, int fd); -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0028.patch b/third_party/libwebrtc/moz-patch-stack/0028.patch index 90ec99b6167e..88c0ca915125 100644 --- a/third_party/libwebrtc/moz-patch-stack/0028.patch +++ b/third_party/libwebrtc/moz-patch-stack/0028.patch @@ -1,66 +1,67 @@ -From: Randell Jesup -Date: Sat, 11 Jul 2020 12:31:00 +0000 -Subject: Bug 1112392 - Move webrtc Tab Sharing to work in e10s/fission - r=dminor +From: Paul Adenot +Date: Wed, 4 Nov 2020 13:03:00 +0000 +Subject: Bug 1675042 - Put IR camera last in the device selection list, so + that they are never the default. r=dminor -Also we drop support for an independent-of-scroll/viewport capture, which -the old Tab Sharing supported, for security reasons (and we don't need it). 
- -Differential Revision: https://phabricator.services.mozilla.com/D80974 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/d57a030e6e3ae9ff56f14e8cc732b0e1d3869858 +Differential Revision: https://phabricator.services.mozilla.com/D95764 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/754900ff2a2b1c32878baf3c76d7c0e8219419ff --- - modules/desktop_capture/desktop_capturer.cc | 11 +++++++++++ - modules/desktop_capture/desktop_capturer.h | 8 ++++++++ - 2 files changed, 19 insertions(+) + sdk/android/api/org/webrtc/Camera1Enumerator.java | 5 +++++ + sdk/android/api/org/webrtc/Camera2Enumerator.java | 12 ++++++++++++ + sdk/android/api/org/webrtc/CameraEnumerator.java | 1 + + 3 files changed, 18 insertions(+) -diff --git a/modules/desktop_capture/desktop_capturer.cc b/modules/desktop_capture/desktop_capturer.cc -index 5211f1acec..4baa93cab9 100644 ---- a/modules/desktop_capture/desktop_capturer.cc -+++ b/modules/desktop_capture/desktop_capturer.cc -@@ -101,6 +101,17 @@ std::unique_ptr DesktopCapturer::CreateScreenCapturer( - return capturer; - } +diff --git a/sdk/android/api/org/webrtc/Camera1Enumerator.java b/sdk/android/api/org/webrtc/Camera1Enumerator.java +index fb1a21f323..4a1aacdb05 100644 +--- a/sdk/android/api/org/webrtc/Camera1Enumerator.java ++++ b/sdk/android/api/org/webrtc/Camera1Enumerator.java +@@ -63,6 +63,11 @@ public class Camera1Enumerator implements CameraEnumerator { + return info != null && info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK; + } -+// static -+std::unique_ptr DesktopCapturer::CreateTabCapturer( -+ const DesktopCaptureOptions& options) { -+ std::unique_ptr capturer = CreateRawTabCapturer(options); -+ if (capturer && options.detect_updated_region()) { -+ capturer.reset(new DesktopCapturerDifferWrapper(std::move(capturer))); ++ @Override ++ public boolean isInfrared(String deviceName) { ++ return false; + } + -+ return capturer; -+} -+ - #if defined(WEBRTC_USE_PIPEWIRE) || defined(WEBRTC_USE_X11) - bool DesktopCapturer::IsRunningUnderWayland() { - const char* xdg_session_type = getenv("XDG_SESSION_TYPE"); -diff --git a/modules/desktop_capture/desktop_capturer.h b/modules/desktop_capture/desktop_capturer.h -index f39852b58e..9b667739a8 100644 ---- a/modules/desktop_capture/desktop_capturer.h -+++ b/modules/desktop_capture/desktop_capturer.h -@@ -187,6 +187,10 @@ class RTC_EXPORT DesktopCapturer { - static std::unique_ptr CreateScreenCapturer( - const DesktopCaptureOptions& options); + @Override + public List getSupportedFormats(String deviceName) { + return getSupportedFormats(getCameraIndex(deviceName)); +diff --git a/sdk/android/api/org/webrtc/Camera2Enumerator.java b/sdk/android/api/org/webrtc/Camera2Enumerator.java +index 456d8cd060..44e239ad8e 100644 +--- a/sdk/android/api/org/webrtc/Camera2Enumerator.java ++++ b/sdk/android/api/org/webrtc/Camera2Enumerator.java +@@ -74,6 +74,18 @@ public class Camera2Enumerator implements CameraEnumerator { + == CameraMetadata.LENS_FACING_BACK; + } -+ // Creates a DesktopCapturer instance which targets to capture tab. -+ static std::unique_ptr CreateTabCapturer( -+ const DesktopCaptureOptions& options); ++ @Override ++ public boolean isInfrared(String deviceName) { ++ CameraCharacteristics characteristics = getCameraCharacteristics(deviceName); + - #if defined(WEBRTC_USE_PIPEWIRE) || defined(WEBRTC_USE_X11) - static bool IsRunningUnderWayland(); - -@@ -212,6 +216,10 @@ class RTC_EXPORT DesktopCapturer { - // capture screens. 
- static std::unique_ptr CreateRawScreenCapturer( - const DesktopCaptureOptions& options); ++ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { ++ Integer colors = characteristics.get(CameraCharacteristics.SENSOR_INFO_COLOR_FILTER_ARRANGEMENT); ++ return colors != null && colors.equals(CameraCharacteristics.SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR); ++ } + -+ // Creates a DesktopCapturer instance which targets to capture tabs -+ static std::unique_ptr CreateRawTabCapturer( -+ const DesktopCaptureOptions& options); - }; ++ return false; ++ } ++ + @Nullable + @Override + public List getSupportedFormats(String deviceName) { +diff --git a/sdk/android/api/org/webrtc/CameraEnumerator.java b/sdk/android/api/org/webrtc/CameraEnumerator.java +index dc954b62e0..db34d542c8 100644 +--- a/sdk/android/api/org/webrtc/CameraEnumerator.java ++++ b/sdk/android/api/org/webrtc/CameraEnumerator.java +@@ -18,6 +18,7 @@ public interface CameraEnumerator { + public String[] getDeviceNames(); + public boolean isFrontFacing(String deviceName); + public boolean isBackFacing(String deviceName); ++ public boolean isInfrared(String deviceName); + public List getSupportedFormats(String deviceName); - } // namespace webrtc + public CameraVideoCapturer createCapturer( -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0029.patch b/third_party/libwebrtc/moz-patch-stack/0029.patch index 7c8893bc1aa2..6d3e3bc0b303 100644 --- a/third_party/libwebrtc/moz-patch-stack/0029.patch +++ b/third_party/libwebrtc/moz-patch-stack/0029.patch @@ -1,67 +1,25 @@ -From: Paul Adenot -Date: Wed, 4 Nov 2020 13:03:00 +0000 -Subject: Bug 1675042 - Put IR camera last in the device selection list, so - that they are never the default. r=dminor +From: Jan-Ivar Bruaroey +Date: Fri, 18 Aug 2023 13:26:37 -0500 +Subject: Bug 1701809: Close dev/video* driver file descriptors after camera + access on linux. 
r=ng -Differential Revision: https://phabricator.services.mozilla.com/D95764 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/754900ff2a2b1c32878baf3c76d7c0e8219419ff +Differential Revision: https://phabricator.services.mozilla.com/D110589 --- - sdk/android/api/org/webrtc/Camera1Enumerator.java | 5 +++++ - sdk/android/api/org/webrtc/Camera2Enumerator.java | 12 ++++++++++++ - sdk/android/api/org/webrtc/CameraEnumerator.java | 1 + - 3 files changed, 18 insertions(+) + modules/video_capture/linux/device_info_v4l2.cc | 1 + + 1 file changed, 1 insertion(+) -diff --git a/sdk/android/api/org/webrtc/Camera1Enumerator.java b/sdk/android/api/org/webrtc/Camera1Enumerator.java -index fb1a21f323..4a1aacdb05 100644 ---- a/sdk/android/api/org/webrtc/Camera1Enumerator.java -+++ b/sdk/android/api/org/webrtc/Camera1Enumerator.java -@@ -63,6 +63,11 @@ public class Camera1Enumerator implements CameraEnumerator { - return info != null && info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK; - } +diff --git a/modules/video_capture/linux/device_info_v4l2.cc b/modules/video_capture/linux/device_info_v4l2.cc +index cd0ba6e3df..c5d33ff9a4 100644 +--- a/modules/video_capture/linux/device_info_v4l2.cc ++++ b/modules/video_capture/linux/device_info_v4l2.cc +@@ -330,6 +330,7 @@ int32_t DeviceInfoV4l2::CreateCapabilityMap(const char* deviceUniqueIdUTF8) { + if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0) { + // skip devices without video capture capability + if (!IsVideoCaptureDevice(&cap)) { ++ close(fd); + continue; + } -+ @Override -+ public boolean isInfrared(String deviceName) { -+ return false; -+ } -+ - @Override - public List getSupportedFormats(String deviceName) { - return getSupportedFormats(getCameraIndex(deviceName)); -diff --git a/sdk/android/api/org/webrtc/Camera2Enumerator.java b/sdk/android/api/org/webrtc/Camera2Enumerator.java -index 456d8cd060..44e239ad8e 100644 ---- a/sdk/android/api/org/webrtc/Camera2Enumerator.java -+++ b/sdk/android/api/org/webrtc/Camera2Enumerator.java -@@ -74,6 +74,18 @@ public class Camera2Enumerator implements CameraEnumerator { - == CameraMetadata.LENS_FACING_BACK; - } - -+ @Override -+ public boolean isInfrared(String deviceName) { -+ CameraCharacteristics characteristics = getCameraCharacteristics(deviceName); -+ -+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { -+ Integer colors = characteristics.get(CameraCharacteristics.SENSOR_INFO_COLOR_FILTER_ARRANGEMENT); -+ return colors != null && colors.equals(CameraCharacteristics.SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR); -+ } -+ -+ return false; -+ } -+ - @Nullable - @Override - public List getSupportedFormats(String deviceName) { -diff --git a/sdk/android/api/org/webrtc/CameraEnumerator.java b/sdk/android/api/org/webrtc/CameraEnumerator.java -index dc954b62e0..db34d542c8 100644 ---- a/sdk/android/api/org/webrtc/CameraEnumerator.java -+++ b/sdk/android/api/org/webrtc/CameraEnumerator.java -@@ -18,6 +18,7 @@ public interface CameraEnumerator { - public String[] getDeviceNames(); - public boolean isFrontFacing(String deviceName); - public boolean isBackFacing(String deviceName); -+ public boolean isInfrared(String deviceName); - public List getSupportedFormats(String deviceName); - - public CameraVideoCapturer createCapturer( -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0030.patch b/third_party/libwebrtc/moz-patch-stack/0030.patch index e892062c71ef..bd029cc53f0b 100644 --- a/third_party/libwebrtc/moz-patch-stack/0030.patch +++ 
b/third_party/libwebrtc/moz-patch-stack/0030.patch @@ -1,25 +1,1420 @@ -From: Jan-Ivar Bruaroey -Date: Fri, 18 Aug 2023 13:26:37 -0500 -Subject: Bug 1701809: Close dev/video* driver file descriptors after camera - access on linux. r=ng +From: Dan Minor +Date: Thu, 5 Nov 2020 07:47:00 +0000 +Subject: Bug 1654112 - Tweak upstream gn files for Firefox build. r=ng -Differential Revision: https://phabricator.services.mozilla.com/D110589 +Differential Revision: https://phabricator.services.mozilla.com/D130075 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/127ace4d8887f11abb201d300a849772a2b519f8 + +Bug 1820869 - avoid building unreachable files. r=ng,webrtc-reviewers + +Differential Revision: https://phabricator.services.mozilla.com/D171922 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/88b3cc6bbece7c53d00e124713330f3d34d2789d + +Bug 1822194 - (fix-acabb3641b) Break the new SetParametersCallback stuff into stand-alone files. + +acabb3641b from upstream added a callback mechanism to allow failures to be +propagated back to RTCRtpSender.setParameters. Unfortunately, this callback +mechanism was (needlessly) tightly coupled to libwebrtc's implementation of +RTCRtpSender, and also their media channel code. This introduced a lot of +unnecessary dependencies throughout libwebrtc, that spilled into our code as +well. +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/59232687efa00e5f7b7bd3d6befca129149e2bf5 + +Bug 1828517 - (fix-794d599741) account for moved files in BUILD.gn that we don't want to build. + +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/4a969f6709183d4f55215adaffb8a52b790a8492 + +Bug 1839451 - (fix-186ebdc1b0) remove BUILD.gn refs to gone files delayable.h, media_channel.h + +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/d0f4d1733cb1a2d8189097af4b5537118ebc95a6 + +Bug 1839451 - (fix-f6eae959bf) s/rtc_encoder_simulcast_proxy/rtc_simulcast_encoder_adapter/ BUILD ref. + +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/876b3f5821cd5c30564a82c1da7d057d79d17b01 + +Bug 1828517 - (fix-a138c6c8a5) handle file moves in BUILD.gn + +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/cf7e333da17689b3c115a6ffd07fab042bc5f086 + +Bug 1817024 - (fix-0e2cf6cc01) Skip library create_peer_connection_quality_test_frame_generator. r?mjf! + +Differential Revision: https://phabricator.services.mozilla.com/D170887 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/fbbc1bf963fda30bca26ae6aac0c3459b8ebea6f + +Bug 1826428 - remove libwebrtc's jvm_android.cc from build r=ng,webrtc-reviewers + +Based on info from John Lin and previous try runs, we're almost +certainly not using this. Let's try removing it from the build +and landing it. If no problems emerge, we'll be able to remove +our custom changes to upstream code in jvm_android.cc. + +Differential Revision: https://phabricator.services.mozilla.com/D174793 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/dca1b97525487ae57d43ced1ebdb4a2d9c9dae89 + +Bug 1774628 - re-enable support for Windows.Graphics.Capture APIs in libwebrtc. 
r=pehrsons,webrtc-reviewers + +Differential Revision: https://phabricator.services.mozilla.com/D186862 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/08567f4539a12b54202aecbf554ec6540fb99ab2 --- - modules/video_capture/linux/device_info_v4l2.cc | 1 + - 1 file changed, 1 insertion(+) + .gn | 2 + + BUILD.gn | 45 ++++++++++++++++++- + api/BUILD.gn | 34 +++++++++++++- + api/rtp_sender_interface.h | 4 +- + api/rtp_sender_setparameters_callback.cc | 27 +++++++++++ + api/rtp_sender_setparameters_callback.h | 28 ++++++++++++ + api/task_queue/BUILD.gn | 2 + + api/transport/BUILD.gn | 2 + + call/BUILD.gn | 14 +++++- + call/audio_send_stream.h | 2 +- + call/video_send_stream.h | 2 +- + common_audio/BUILD.gn | 4 -- + common_audio/fir_filter_avx2.cc | 2 + + common_audio/intrin.h | 8 ++++ + media/BUILD.gn | 35 ++++++++++++++- + media/base/media_channel.h | 4 -- + media/base/media_channel_impl.cc | 13 ------ + modules/audio_coding/BUILD.gn | 2 +- + modules/audio_device/BUILD.gn | 17 +++++-- + modules/audio_processing/aec3/BUILD.gn | 13 +++--- + .../aec3/adaptive_fir_filter_avx2.cc | 2 +- + .../audio_processing/agc2/rnn_vad/BUILD.gn | 2 +- + modules/desktop_capture/BUILD.gn | 29 +----------- + modules/portal/BUILD.gn | 24 ++++++++++ + modules/utility/BUILD.gn | 4 ++ + modules/video_capture/BUILD.gn | 11 +---- + rtc_base/BUILD.gn | 30 ++++++++++++- + rtc_base/system/BUILD.gn | 2 +- + test/BUILD.gn | 10 +++++ + video/BUILD.gn | 4 +- + webrtc.gni | 32 ++++++++----- + 31 files changed, 311 insertions(+), 99 deletions(-) + create mode 100644 api/rtp_sender_setparameters_callback.cc + create mode 100644 api/rtp_sender_setparameters_callback.h + create mode 100644 common_audio/intrin.h -diff --git a/modules/video_capture/linux/device_info_v4l2.cc b/modules/video_capture/linux/device_info_v4l2.cc -index cd0ba6e3df..c5d33ff9a4 100644 ---- a/modules/video_capture/linux/device_info_v4l2.cc -+++ b/modules/video_capture/linux/device_info_v4l2.cc -@@ -330,6 +330,7 @@ int32_t DeviceInfoV4l2::CreateCapabilityMap(const char* deviceUniqueIdUTF8) { - if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0) { - // skip devices without video capture capability - if (!IsVideoCaptureDevice(&cap)) { -+ close(fd); - continue; - } +diff --git a/.gn b/.gn +index 02cfb6de64..3208b6bb4e 100644 +--- a/.gn ++++ b/.gn +@@ -69,6 +69,8 @@ default_args = { + # Prevent jsoncpp to pass -Wno-deprecated-declarations to users + jsoncpp_no_deprecated_declarations = false ++ use_custom_libcxx = false ++ + # Fixes the abi-revision issue. + # TODO(https://bugs.webrtc.org/14437): Remove this section if general + # Chromium fix resolves the problem. +diff --git a/BUILD.gn b/BUILD.gn +index 661e36f16a..155e3862af 100644 +--- a/BUILD.gn ++++ b/BUILD.gn +@@ -32,7 +32,7 @@ if (is_android) { + import("//build/config/android/rules.gni") + } + +-if (!build_with_chromium) { ++if (!build_with_chromium && !build_with_mozilla) { + # This target should (transitively) cause everything to be built; if you run + # 'ninja default' and then 'ninja all', the second build should do no work. 
+ group("default") { +@@ -157,6 +157,10 @@ config("common_inherited_config") { + defines += [ "WEBRTC_ENABLE_OBJC_SYMBOL_EXPORT" ] + } + ++ if (build_with_mozilla) { ++ defines += [ "WEBRTC_MOZILLA_BUILD" ] ++ } ++ + if (!rtc_builtin_ssl_root_certificates) { + defines += [ "WEBRTC_EXCLUDE_BUILT_IN_SSL_ROOT_CERTS" ] + } +@@ -471,9 +475,11 @@ config("common_config") { + } + } + ++if (is_mac) { + config("common_objc") { + frameworks = [ "Foundation.framework" ] + } ++} + + if (!build_with_chromium) { + # Target to build all the WebRTC production code. +@@ -523,6 +529,33 @@ if (!build_with_chromium) { + "sdk", + "video", + ] ++ if (build_with_mozilla) { ++ deps -= [ ++ "api:create_peerconnection_factory", ++ "api:rtc_error", ++ "api:transport_api", ++ "api/crypto", ++ "api/rtc_event_log:rtc_event_log_factory", ++ "api/task_queue", ++ "api/task_queue:default_task_queue_factory", ++ "api/test/metrics", ++ "api/video_codecs:video_decoder_factory_template", ++ "api/video_codecs:video_decoder_factory_template_dav1d_adapter", ++ "api/video_codecs:video_decoder_factory_template_libvpx_vp8_adapter", ++ "api/video_codecs:video_decoder_factory_template_libvpx_vp9_adapter", ++ "api/video_codecs:video_decoder_factory_template_open_h264_adapter", ++ "api/video_codecs:video_encoder_factory_template", ++ "api/video_codecs:video_encoder_factory_template_libaom_av1_adapter", ++ "api/video_codecs:video_encoder_factory_template_libvpx_vp8_adapter", ++ "api/video_codecs:video_encoder_factory_template_libvpx_vp9_adapter", ++ "api/video_codecs:video_encoder_factory_template_open_h264_adapter", ++ "logging:rtc_event_log_api", ++ "p2p:rtc_p2p", ++ "pc:libjingle_peerconnection", ++ "pc:rtc_pc", ++ "sdk", ++ ] ++ } + + if (rtc_include_builtin_audio_codecs) { + deps += [ +@@ -535,6 +568,16 @@ if (!build_with_chromium) { + deps += [ + "api/video:video_frame", + "api/video:video_rtp_headers", ++ "test:rtp_test_utils", ++ ] ++ # Added when we removed deps in other places to avoid building ++ # unreachable sources. See Bug 1820869. ++ deps += [ ++ "api/video_codecs:video_codecs_api", ++ "api/video_codecs:rtc_software_fallback_wrappers", ++ "media:rtc_simulcast_encoder_adapter", ++ "modules/video_coding:webrtc_vp8", ++ "modules/video_coding:webrtc_vp9", + ] + } else { + deps += [ +diff --git a/api/BUILD.gn b/api/BUILD.gn +index ca5a869a40..9a60e912ec 100644 +--- a/api/BUILD.gn ++++ b/api/BUILD.gn +@@ -35,7 +35,7 @@ rtc_source_set("callfactory_api") { + ] + } + +-if (!build_with_chromium) { ++if (!build_with_chromium && !build_with_mozilla) { + rtc_library("create_peerconnection_factory") { + visibility = [ "*" ] + allow_poison = [ "default_task_queue" ] +@@ -186,6 +186,10 @@ rtc_source_set("ice_transport_interface") { + } + + rtc_library("dtls_transport_interface") { ++# Previously, Mozilla has tried to limit including this dep, but as ++# upstream changes, it requires whack-a-mole. Making it an empty ++# definition has the same effect, but only requires one change. ++if (!build_with_mozilla) { + visibility = [ "*" ] + + sources = [ +@@ -202,6 +206,7 @@ rtc_library("dtls_transport_interface") { + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + } ++} + + rtc_library("dtmf_sender_interface") { + visibility = [ "*" ] +@@ -214,6 +219,10 @@ rtc_library("dtmf_sender_interface") { + } + + rtc_library("rtp_sender_interface") { ++# Previously, Mozilla has tried to limit including this dep, but as ++# upstream changes, it requires whack-a-mole. 
Making it an empty ++# definition has the same effect, but only requires one change. ++if (!build_with_mozilla) { + visibility = [ "*" ] + + sources = [ +@@ -225,8 +234,8 @@ rtc_library("rtp_sender_interface") { + ":dtmf_sender_interface", + ":frame_transformer_interface", + ":media_stream_interface", +- ":rtc_error", + ":rtp_parameters", ++ ":rtp_sender_setparameters_callback", + ":scoped_refptr", + "../rtc_base:checks", + "../rtc_base:refcount", +@@ -234,10 +243,24 @@ rtc_library("rtp_sender_interface") { + "crypto:frame_encryptor_interface", + "video_codecs:video_codecs_api", + ] ++} ++} ++ ++rtc_library("rtp_sender_setparameters_callback") { ++ visibility = [ "*" ] ++ ++ sources = [ ++ "rtp_sender_setparameters_callback.cc", ++ "rtp_sender_setparameters_callback.h", ++ ] ++ deps = [ ++ ":rtc_error", ++ ] + absl_deps = [ "//third_party/abseil-cpp/absl/functional:any_invocable" ] + } + + rtc_library("libjingle_peerconnection_api") { ++if (!build_with_mozilla) { + visibility = [ "*" ] + cflags = [] + sources = [ +@@ -354,6 +377,7 @@ rtc_library("libjingle_peerconnection_api") { + "//third_party/abseil-cpp/absl/types:optional", + ] + } ++} + + rtc_source_set("frame_transformer_interface") { + visibility = [ "*" ] +@@ -544,6 +568,7 @@ rtc_source_set("peer_network_dependencies") { + } + + rtc_source_set("peer_connection_quality_test_fixture_api") { ++if (!build_with_mozilla) { + visibility = [ "*" ] + testonly = true + sources = [ "test/peerconnection_quality_test_fixture.h" ] +@@ -594,6 +619,7 @@ rtc_source_set("peer_connection_quality_test_fixture_api") { + "//third_party/abseil-cpp/absl/types:optional", + ] + } ++} + + rtc_source_set("frame_generator_api") { + visibility = [ "*" ] +@@ -712,6 +738,7 @@ rtc_library("create_frame_generator") { + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + } + ++if (!build_with_mozilla) { + rtc_library("create_peer_connection_quality_test_frame_generator") { + visibility = [ "*" ] + testonly = true +@@ -728,6 +755,7 @@ rtc_library("create_peer_connection_quality_test_frame_generator") { + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + } ++} + + rtc_source_set("libjingle_logging_api") { + visibility = [ "*" ] +@@ -900,6 +928,7 @@ rtc_source_set("refcountedbase") { + ] + } + ++if (!build_with_mozilla) { + rtc_library("ice_transport_factory") { + visibility = [ "*" ] + sources = [ +@@ -918,6 +947,7 @@ rtc_library("ice_transport_factory") { + "rtc_event_log:rtc_event_log", + ] + } ++} + + rtc_library("neteq_simulator_api") { + visibility = [ "*" ] +diff --git a/api/rtp_sender_interface.h b/api/rtp_sender_interface.h +index 2786a2ac19..98ee91b1cc 100644 +--- a/api/rtp_sender_interface.h ++++ b/api/rtp_sender_interface.h +@@ -32,9 +32,9 @@ + #include "rtc_base/ref_count.h" + #include "rtc_base/system/rtc_export.h" + +-namespace webrtc { ++#include "api/rtp_sender_setparameters_callback.h" + +-using SetParametersCallback = absl::AnyInvocable; ++namespace webrtc { + + class RTC_EXPORT RtpSenderInterface : public rtc::RefCountInterface { + public: +diff --git a/api/rtp_sender_setparameters_callback.cc b/api/rtp_sender_setparameters_callback.cc +new file mode 100644 +index 0000000000..99728ef95e +--- /dev/null ++++ b/api/rtp_sender_setparameters_callback.cc +@@ -0,0 +1,27 @@ ++/* ++ * Copyright 2015 The WebRTC project authors. All Rights Reserved. ++ * ++ * Use of this source code is governed by a BSD-style license ++ * that can be found in the LICENSE file in the root of the source ++ * tree. 
An additional intellectual property rights grant can be found ++ * in the file PATENTS. All contributing project authors may ++ * be found in the AUTHORS file in the root of the source tree. ++ */ ++ ++// File added by mozilla, to decouple this from libwebrtc's implementation of ++// RTCRtpSender. ++ ++#include "api/rtp_sender_setparameters_callback.h" ++ ++namespace webrtc { ++ ++webrtc::RTCError InvokeSetParametersCallback(SetParametersCallback& callback, ++ RTCError error) { ++ if (callback) { ++ std::move(callback)(error); ++ callback = nullptr; ++ } ++ return error; ++} ++ ++} // namespace webrtc +diff --git a/api/rtp_sender_setparameters_callback.h b/api/rtp_sender_setparameters_callback.h +new file mode 100644 +index 0000000000..45194f5ace +--- /dev/null ++++ b/api/rtp_sender_setparameters_callback.h +@@ -0,0 +1,28 @@ ++/* ++ * Copyright 2015 The WebRTC project authors. All Rights Reserved. ++ * ++ * Use of this source code is governed by a BSD-style license ++ * that can be found in the LICENSE file in the root of the source ++ * tree. An additional intellectual property rights grant can be found ++ * in the file PATENTS. All contributing project authors may ++ * be found in the AUTHORS file in the root of the source tree. ++ */ ++ ++// File added by mozilla, to decouple this from libwebrtc's implementation of ++// RTCRtpSender. ++ ++#ifndef API_RTP_SENDER_SETPARAMETERS_CALLBACK_H_ ++#define API_RTP_SENDER_SETPARAMETERS_CALLBACK_H_ ++ ++#include "api/rtc_error.h" ++#include "absl/functional/any_invocable.h" ++ ++namespace webrtc { ++ ++using SetParametersCallback = absl::AnyInvocable; ++ ++webrtc::RTCError InvokeSetParametersCallback(SetParametersCallback& callback, ++ RTCError error); ++} // namespace webrtc ++ ++#endif // API_RTP_SENDER_SETPARAMETERS_CALLBACK_H_ +diff --git a/api/task_queue/BUILD.gn b/api/task_queue/BUILD.gn +index 65bb1ae21b..116a50762e 100644 +--- a/api/task_queue/BUILD.gn ++++ b/api/task_queue/BUILD.gn +@@ -31,6 +31,7 @@ rtc_library("task_queue") { + ] + } + ++if (rtc_include_tests) { + rtc_library("task_queue_test") { + visibility = [ "*" ] + testonly = true +@@ -79,6 +80,7 @@ rtc_library("task_queue_test") { + ] + } + } ++} + + rtc_library("default_task_queue_factory") { + visibility = [ "*" ] +diff --git a/api/transport/BUILD.gn b/api/transport/BUILD.gn +index 12a1f57066..d8f095c700 100644 +--- a/api/transport/BUILD.gn ++++ b/api/transport/BUILD.gn +@@ -90,6 +90,7 @@ rtc_source_set("sctp_transport_factory_interface") { + } + + rtc_source_set("stun_types") { ++if (!build_with_mozilla) { + visibility = [ "*" ] + sources = [ + "stun.cc", +@@ -110,6 +111,7 @@ rtc_source_set("stun_types") { + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] + } ++} + + if (rtc_include_tests) { + rtc_source_set("test_feedback_generator_interface") { +diff --git a/call/BUILD.gn b/call/BUILD.gn +index 2e8a997fc4..173c220b9b 100644 +--- a/call/BUILD.gn ++++ b/call/BUILD.gn +@@ -46,7 +46,7 @@ rtc_library("call_interfaces") { + "../api:rtc_error", + "../api:rtp_headers", + "../api:rtp_parameters", +- "../api:rtp_sender_interface", ++ "../api:rtp_sender_setparameters_callback", + "../api:scoped_refptr", + "../api:transport_api", + "../api/adaptation:resource_adaptation_api", +@@ -347,6 +347,16 @@ rtc_library("call") { + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] ++ if (build_with_mozilla) { # See Bug 1820869. 
++ sources -= [ ++ "call_factory.cc", ++ "degraded_call.cc", ++ ] ++ deps -= [ ++ ":fake_network", ++ ":simulated_network", ++ ] ++ } + } + + rtc_source_set("receive_stream_interface") { +@@ -374,7 +384,7 @@ rtc_library("video_stream_api") { + "../api:frame_transformer_interface", + "../api:rtp_headers", + "../api:rtp_parameters", +- "../api:rtp_sender_interface", ++ "../api:rtp_sender_setparameters_callback", + "../api:scoped_refptr", + "../api:transport_api", + "../api/adaptation:resource_adaptation_api", +diff --git a/call/audio_send_stream.h b/call/audio_send_stream.h +index 9c2fad652f..f9e49db574 100644 +--- a/call/audio_send_stream.h ++++ b/call/audio_send_stream.h +@@ -25,7 +25,7 @@ + #include "api/crypto/frame_encryptor_interface.h" + #include "api/frame_transformer_interface.h" + #include "api/rtp_parameters.h" +-#include "api/rtp_sender_interface.h" ++#include "api/rtp_sender_setparameters_callback.h" + #include "api/scoped_refptr.h" + #include "call/audio_sender.h" + #include "call/rtp_config.h" +diff --git a/call/video_send_stream.h b/call/video_send_stream.h +index 5fde44a719..1a0261be1b 100644 +--- a/call/video_send_stream.h ++++ b/call/video_send_stream.h +@@ -23,7 +23,7 @@ + #include "api/crypto/crypto_options.h" + #include "api/frame_transformer_interface.h" + #include "api/rtp_parameters.h" +-#include "api/rtp_sender_interface.h" ++#include "api/rtp_sender_setparameters_callback.h" + #include "api/scoped_refptr.h" + #include "api/video/video_content_type.h" + #include "api/video/video_frame.h" +diff --git a/common_audio/BUILD.gn b/common_audio/BUILD.gn +index 2ae6d32710..a45214f754 100644 +--- a/common_audio/BUILD.gn ++++ b/common_audio/BUILD.gn +@@ -267,14 +267,10 @@ if (current_cpu == "x86" || current_cpu == "x64") { + "resampler/sinc_resampler_avx2.cc", + ] + +- if (is_win) { +- cflags = [ "/arch:AVX2" ] +- } else { + cflags = [ + "-mavx2", + "-mfma", + ] +- } + + deps = [ + ":fir_filter", +diff --git a/common_audio/fir_filter_avx2.cc b/common_audio/fir_filter_avx2.cc +index 9cb0f770ca..0031392f8a 100644 +--- a/common_audio/fir_filter_avx2.cc ++++ b/common_audio/fir_filter_avx2.cc +@@ -15,6 +15,8 @@ + #include + #include + ++#include "common_audio/intrin.h" ++ + #include "rtc_base/checks.h" + #include "rtc_base/memory/aligned_malloc.h" + +diff --git a/common_audio/intrin.h b/common_audio/intrin.h +new file mode 100644 +index 0000000000..f6ff7f218f +--- /dev/null ++++ b/common_audio/intrin.h +@@ -0,0 +1,8 @@ ++#if defined (__SSE__) ++ #include ++ #if defined (__clang__) ++ #include ++ #include ++ #include ++ #endif ++#endif +diff --git a/media/BUILD.gn b/media/BUILD.gn +index 22eb02ab4f..9dd837c851 100644 +--- a/media/BUILD.gn ++++ b/media/BUILD.gn +@@ -64,7 +64,7 @@ rtc_library("rtc_media_base") { + "../api:media_stream_interface", + "../api:rtc_error", + "../api:rtp_parameters", +- "../api:rtp_sender_interface", ++ "../api:rtp_sender_setparameters_callback", + "../api:scoped_refptr", + "../api:sequence_checker", + "../api:transport_api", +@@ -139,9 +139,19 @@ rtc_library("rtc_media_base") { + "base/video_source_base.cc", + "base/video_source_base.h", + ] ++ if (build_with_mozilla) { ++ sources -= [ ++ "base/adapted_video_track_source.cc", ++ "base/adapted_video_track_source.h", ++ "base/audio_source.h", ++ "base/media_engine.cc", ++ "base/media_engine.h", ++ ] ++ } + } + + rtc_library("media_channel_impl") { ++if (!build_with_mozilla) { + sources = [ + "base/media_channel_impl.cc", + "base/media_channel_impl.h", +@@ -191,6 +201,7 @@ rtc_library("media_channel_impl") { 
+ "//third_party/abseil-cpp/absl/types:optional", + ] + } ++} + + rtc_source_set("media_channel") { + sources = [ "base/media_channel.h" ] +@@ -269,6 +280,7 @@ rtc_library("codec") { + } + + rtc_library("rtp_utils") { ++if (!build_with_mozilla) { + sources = [ + "base/rtp_utils.cc", + "base/rtp_utils.h", +@@ -285,8 +297,10 @@ rtc_library("rtp_utils") { + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] + } ++} + + rtc_library("stream_params") { ++if (!build_with_mozilla) { + sources = [ + "base/stream_params.cc", + "base/stream_params.h", +@@ -299,6 +313,7 @@ rtc_library("stream_params") { + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ] + } ++} + + rtc_library("media_constants") { + sources = [ +@@ -309,6 +324,7 @@ rtc_library("media_constants") { + } + + rtc_library("turn_utils") { ++if (!build_with_mozilla) { + sources = [ + "base/turn_utils.cc", + "base/turn_utils.h", +@@ -319,14 +335,17 @@ rtc_library("turn_utils") { + "../rtc_base/system:rtc_export", + ] + } ++} + + rtc_library("rid_description") { ++if (!build_with_mozilla) { + sources = [ + "base/rid_description.cc", + "base/rid_description.h", + ] + deps = [] + } ++} + + rtc_library("rtc_simulcast_encoder_adapter") { + visibility = [ "*" ] +@@ -398,6 +417,12 @@ rtc_library("rtc_internal_video_codecs") { + "../system_wrappers:field_trial", + "../test:fake_video_codecs", + ] ++ if (build_with_mozilla) { ++ deps -= [ ++ "../modules/video_coding:webrtc_multiplex", # See Bug 1820869. ++ "../test:fake_video_codecs", ++ ] ++ } + + if (enable_libaom) { + defines += [ "RTC_USE_LIBAOM_AV1_ENCODER" ] +@@ -423,6 +448,14 @@ rtc_library("rtc_internal_video_codecs") { + "engine/multiplex_codec_factory.cc", + "engine/multiplex_codec_factory.h", + ] ++ if (build_with_mozilla) { ++ sources -= [ ++ "engine/fake_video_codec_factory.cc", ++ "engine/fake_video_codec_factory.h", ++ "engine/internal_encoder_factory.cc", # See Bug 1820869. ++ "engine/multiplex_codec_factory.cc", # See Bug 1820869. 
++ ] ++ } + } + + rtc_library("rtc_audio_video") { +diff --git a/media/base/media_channel.h b/media/base/media_channel.h +index 01aa2b251c..0d09670239 100644 +--- a/media/base/media_channel.h ++++ b/media/base/media_channel.h +@@ -65,10 +65,6 @@ class Timing; + + namespace webrtc { + class VideoFrame; +- +-webrtc::RTCError InvokeSetParametersCallback(SetParametersCallback& callback, +- RTCError error); +- + } // namespace webrtc + + namespace cricket { +diff --git a/media/base/media_channel_impl.cc b/media/base/media_channel_impl.cc +index 88aa279470..5fee8a2fb0 100644 +--- a/media/base/media_channel_impl.cc ++++ b/media/base/media_channel_impl.cc +@@ -31,19 +31,6 @@ + #include "modules/rtp_rtcp/include/report_block_data.h" + #include "rtc_base/checks.h" + +-namespace webrtc { +- +-webrtc::RTCError InvokeSetParametersCallback(SetParametersCallback& callback, +- RTCError error) { +- if (callback) { +- std::move(callback)(error); +- callback = nullptr; +- } +- return error; +-} +- +-} // namespace webrtc +- + namespace cricket { + using webrtc::FrameDecryptorInterface; + using webrtc::FrameEncryptorInterface; +diff --git a/modules/audio_coding/BUILD.gn b/modules/audio_coding/BUILD.gn +index 1420aa6385..143a6eca41 100644 +--- a/modules/audio_coding/BUILD.gn ++++ b/modules/audio_coding/BUILD.gn +@@ -553,7 +553,7 @@ rtc_library("webrtc_opus_wrapper") { + deps += [ rtc_opus_dir ] + public_configs = [ "//third_party/opus:opus_config" ] + } else if (build_with_mozilla) { +- include_dirs = [ getenv("DIST") + "/include/opus" ] ++ public_configs = [ "//third_party/opus:opus_config" ] + } + } + +diff --git a/modules/audio_device/BUILD.gn b/modules/audio_device/BUILD.gn +index 162981c427..0bfad84206 100644 +--- a/modules/audio_device/BUILD.gn ++++ b/modules/audio_device/BUILD.gn +@@ -30,6 +30,7 @@ rtc_source_set("audio_device_default") { + } + + rtc_source_set("audio_device") { ++if (!build_with_mozilla) { # See Bug 1820869. + visibility = [ "*" ] + public_deps = [ + ":audio_device_api", +@@ -40,6 +41,7 @@ rtc_source_set("audio_device") { + ":audio_device_impl", + ] + } ++} + + rtc_source_set("audio_device_api") { + visibility = [ "*" ] +@@ -62,6 +64,7 @@ rtc_library("audio_device_config") { + } + + rtc_library("audio_device_buffer") { ++if (!build_with_mozilla) { # See Bug 1820869. + sources = [ + "audio_device_buffer.cc", + "audio_device_buffer.h", +@@ -88,6 +91,7 @@ rtc_library("audio_device_buffer") { + "../../system_wrappers:metrics", + ] + } ++} + + rtc_library("audio_device_generic") { + sources = [ +@@ -265,6 +269,7 @@ if (!build_with_chromium) { + # Contains default implementations of webrtc::AudioDeviceModule for Windows, + # Linux, Mac, iOS and Android. + rtc_library("audio_device_impl") { ++if (!build_with_mozilla) { # See Bug 1820869. + visibility = [ "*" ] + deps = [ + ":audio_device_api", +@@ -314,9 +319,9 @@ rtc_library("audio_device_impl") { + sources = [ "include/fake_audio_device.h" ] + + if (build_with_mozilla) { +- sources += [ +- "opensl/single_rw_fifo.cc", +- "opensl/single_rw_fifo.h", ++ sources -= [ ++ "include/test_audio_device.cc", ++ "include/test_audio_device.h", + ] + } + +@@ -421,6 +426,7 @@ rtc_library("audio_device_impl") { + sources += [ "dummy/file_audio_device_factory.h" ] + } + } ++} + + if (is_mac) { + rtc_source_set("audio_device_impl_frameworks") { +@@ -438,6 +444,7 @@ if (is_mac) { + } + } + ++if (!build_with_mozilla) { # See Bug 1820869. 
+ rtc_source_set("mock_audio_device") { + visibility = [ "*" ] + testonly = true +@@ -454,8 +461,10 @@ rtc_source_set("mock_audio_device") { + "../../test:test_support", + ] + } ++} + +-if (rtc_include_tests && !build_with_chromium) { ++# See Bug 1820869 for !build_with_mozilla. ++if (rtc_include_tests && !build_with_chromium && !build_with_mozilla) { + rtc_library("audio_device_unittests") { + testonly = true + +diff --git a/modules/audio_processing/aec3/BUILD.gn b/modules/audio_processing/aec3/BUILD.gn +index f5eb5d5951..3e11a245a1 100644 +--- a/modules/audio_processing/aec3/BUILD.gn ++++ b/modules/audio_processing/aec3/BUILD.gn +@@ -264,14 +264,11 @@ if (current_cpu == "x86" || current_cpu == "x64") { + "vector_math_avx2.cc", + ] + +- if (is_win) { +- cflags = [ "/arch:AVX2" ] +- } else { +- cflags = [ +- "-mavx2", +- "-mfma", +- ] +- } ++ cflags = [ ++ "-mavx", ++ "-mavx2", ++ "-mfma", ++ ] + + deps = [ + ":adaptive_fir_filter", +diff --git a/modules/audio_processing/aec3/adaptive_fir_filter_avx2.cc b/modules/audio_processing/aec3/adaptive_fir_filter_avx2.cc +index b6eda9f117..8d6e1cf3d7 100644 +--- a/modules/audio_processing/aec3/adaptive_fir_filter_avx2.cc ++++ b/modules/audio_processing/aec3/adaptive_fir_filter_avx2.cc +@@ -8,7 +8,7 @@ + * be found in the AUTHORS file in the root of the source tree. + */ + +-#include ++#include "common_audio/intrin.h" + + #include "modules/audio_processing/aec3/adaptive_fir_filter.h" + #include "rtc_base/checks.h" +diff --git a/modules/audio_processing/agc2/rnn_vad/BUILD.gn b/modules/audio_processing/agc2/rnn_vad/BUILD.gn +index 9093a68cf3..3003a585bd 100644 +--- a/modules/audio_processing/agc2/rnn_vad/BUILD.gn ++++ b/modules/audio_processing/agc2/rnn_vad/BUILD.gn +@@ -122,7 +122,7 @@ rtc_source_set("vector_math") { + if (current_cpu == "x86" || current_cpu == "x64") { + rtc_library("vector_math_avx2") { + sources = [ "vector_math_avx2.cc" ] +- if (is_win) { ++ if (is_win && !build_with_mozilla) { + cflags = [ "/arch:AVX2" ] + } else { + cflags = [ +diff --git a/modules/desktop_capture/BUILD.gn b/modules/desktop_capture/BUILD.gn +index 6f3e9d26c5..42d72865b9 100644 +--- a/modules/desktop_capture/BUILD.gn ++++ b/modules/desktop_capture/BUILD.gn +@@ -341,37 +341,12 @@ rtc_library("desktop_capture") { + ] + deps += [ ":desktop_capture_objc" ] + } +- +- if (build_with_mozilla) { +- sources += [ +- "desktop_device_info.cc", +- "desktop_device_info.h", +- ] +- if (is_win) { +- sources += [ +- "app_capturer_win.cc", +- "win/desktop_device_info_win.cc", +- "win/win_shared.cc", +- ] +- } +- } + if (rtc_use_x11_extensions || rtc_use_pipewire) { + sources += [ + "mouse_cursor_monitor_linux.cc", + "screen_capturer_linux.cc", + "window_capturer_linux.cc", + ] +- +- if (build_with_mozilla && (is_linux || is_chromeos)) { +- sources += [ +- "app_capturer_linux.cc", +- "linux/x11/app_capturer_x11.cc", +- "linux/x11/desktop_device_info_linux.cc", +- "linux/x11/desktop_device_info_linux.h", +- "linux/x11/shared_x_util.cc", +- "linux/x11/shared_x_util.h", +- ] +- } + } + + if (rtc_use_x11_extensions) { +@@ -537,9 +512,7 @@ rtc_library("desktop_capture") { + deps += [ "../../rtc_base:sanitizer" ] + } + +- if (!build_with_mozilla) { +- deps += [ "//third_party/libyuv" ] +- } ++ deps += [ "//third_party/libyuv" ] + + if (use_desktop_capture_differ_sse2) { + deps += [ ":desktop_capture_differ_sse2" ] +diff --git a/modules/portal/BUILD.gn b/modules/portal/BUILD.gn +index d0756f269b..d7768b2323 100644 +--- a/modules/portal/BUILD.gn ++++ b/modules/portal/BUILD.gn +@@ -11,6 
+11,7 @@ import("//tools/generate_stubs/rules.gni") + import("../../webrtc.gni") + + if ((is_linux || is_chromeos) && rtc_use_pipewire) { ++if (!build_with_mozilla) { + pkg_config("gio") { + packages = [ + "gio-2.0", +@@ -88,6 +89,12 @@ if ((is_linux || is_chromeos) && rtc_use_pipewire) { + defines += [ "WEBRTC_USE_GIO" ] + } + } ++} else { ++ config("pipewire_all") { ++ } ++ config("pipewire_config") { ++ } ++} + + rtc_library("portal") { + sources = [ +@@ -120,5 +127,22 @@ if ((is_linux || is_chromeos) && rtc_use_pipewire) { + + deps += [ ":pipewire_stubs" ] + } ++ ++ if (build_with_mozilla) { ++ configs -= [ ++ ":gio", ++ ":pipewire", ++ ":pipewire_config", ++ ] ++ deps -= [ ":pipewire_stubs" ] ++ defines -= [ "WEBRTC_DLOPEN_PIPEWIRE" ] ++ public_deps = [ ++ "//third_party/pipewire", ++ "//third_party/drm", ++ "//third_party/gbm", ++ "//third_party/libepoxy" ++ ] ++ } + } + } ++ +diff --git a/modules/utility/BUILD.gn b/modules/utility/BUILD.gn +index 8cefe5653c..b8d75865f7 100644 +--- a/modules/utility/BUILD.gn ++++ b/modules/utility/BUILD.gn +@@ -25,5 +25,9 @@ rtc_source_set("utility") { + "../../rtc_base:platform_thread", + "../../rtc_base/system:arch", + ] ++ ++ if (build_with_mozilla) { ++ sources -= [ "source/jvm_android.cc" ] ++ } + } + } +diff --git a/modules/video_capture/BUILD.gn b/modules/video_capture/BUILD.gn +index 730ec9bfdd..d473dbb74c 100644 +--- a/modules/video_capture/BUILD.gn ++++ b/modules/video_capture/BUILD.gn +@@ -125,21 +125,12 @@ if (!build_with_chromium || is_linux || is_chromeos) { + "strmiids.lib", + "user32.lib", + ] +- +- if (build_with_mozilla) { +- sources += [ +- "windows/BaseFilter.cpp", +- "windows/BaseInputPin.cpp", +- "windows/BasePin.cpp", +- "windows/MediaType.cpp", +- ] +- } + } + if (is_fuchsia) { + sources += [ "video_capture_factory_null.cc" ] + } + +- if (build_with_mozilla && is_android) { ++ if (!build_with_mozilla && is_android) { + include_dirs = [ + "/config/external/nspr", + "/nsprpub/lib/ds", +diff --git a/rtc_base/BUILD.gn b/rtc_base/BUILD.gn +index b8113bd4bc..6ad19ce618 100644 +--- a/rtc_base/BUILD.gn ++++ b/rtc_base/BUILD.gn +@@ -323,6 +323,7 @@ rtc_library("sample_counter") { + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + } + ++if (!build_with_mozilla) { # See Bug 1820869. 
+ rtc_library("timestamp_aligner") { + visibility = [ "*" ] + sources = [ +@@ -336,6 +337,7 @@ rtc_library("timestamp_aligner") { + "system:rtc_export", + ] + } ++} + + rtc_library("zero_memory") { + visibility = [ "*" ] +@@ -870,7 +872,9 @@ rtc_library("rtc_json") { + "strings/json.h", + ] + deps = [ ":stringutils" ] ++if (!build_with_mozilla) { + all_dependent_configs = [ "//third_party/jsoncpp:jsoncpp_config" ] ++} + if (rtc_build_json) { + deps += [ "//third_party/jsoncpp" ] + } else { +@@ -1203,6 +1207,7 @@ if (!build_with_chromium) { + } + + rtc_library("network") { ++if (!build_with_mozilla) { + visibility = [ "*" ] + sources = [ + "network.cc", +@@ -1241,16 +1246,20 @@ rtc_library("network") { + deps += [ ":win32" ] + } + } ++} + + rtc_library("socket_address_pair") { ++if (!build_with_mozilla) { + sources = [ + "socket_address_pair.cc", + "socket_address_pair.h", + ] + deps = [ ":socket_address" ] + } ++} + + rtc_library("net_helper") { ++if (!build_with_mozilla) { + visibility = [ "*" ] + sources = [ + "net_helper.cc", +@@ -1259,8 +1268,10 @@ rtc_library("net_helper") { + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] + deps = [ "system:rtc_export" ] + } ++} + + rtc_library("socket_adapters") { ++if (!build_with_mozilla) { + visibility = [ "*" ] + sources = [ + "socket_adapters.cc", +@@ -1280,6 +1291,7 @@ rtc_library("socket_adapters") { + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] + } ++} + + rtc_library("network_route") { + sources = [ +@@ -1294,6 +1306,7 @@ rtc_library("network_route") { + } + + rtc_library("async_tcp_socket") { ++if (!build_with_mozilla) { + sources = [ + "async_tcp_socket.cc", + "async_tcp_socket.h", +@@ -1312,8 +1325,10 @@ rtc_library("async_tcp_socket") { + "third_party/sigslot", + ] + } ++} + + rtc_library("async_udp_socket") { ++if (!build_with_mozilla) { + visibility = [ "*" ] + sources = [ + "async_udp_socket.cc", +@@ -1335,8 +1350,10 @@ rtc_library("async_udp_socket") { + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + } ++} + + rtc_library("async_packet_socket") { ++if (!build_with_mozilla) { + visibility = [ "*" ] + sources = [ + "async_packet_socket.cc", +@@ -1354,6 +1371,7 @@ rtc_library("async_packet_socket") { + "third_party/sigslot", + ] + } ++} + + rtc_library("mdns_responder_interface") { + sources = [ "mdns_responder_interface.h" ] +@@ -1366,6 +1384,7 @@ rtc_library("dscp") { + } + + rtc_library("proxy_info") { ++if (!build_with_mozilla) { + visibility = [ "*" ] + sources = [ + "proxy_info.cc", +@@ -1376,6 +1395,7 @@ rtc_library("proxy_info") { + ":socket_address", + ] + } ++} + + rtc_library("file_rotating_stream") { + sources = [ +@@ -1404,6 +1424,7 @@ rtc_library("data_rate_limiter") { + } + + rtc_library("unique_id_generator") { ++if (!build_with_mozilla) { + sources = [ + "unique_id_generator.cc", + "unique_id_generator.h", +@@ -1418,6 +1439,7 @@ rtc_library("unique_id_generator") { + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] + } ++} + + rtc_library("crc32") { + sources = [ +@@ -1445,6 +1467,7 @@ rtc_library("stream") { + } + + rtc_library("rtc_certificate_generator") { ++if (!build_with_mozilla) { + visibility = [ "*" ] + sources = [ + "rtc_certificate_generator.cc", +@@ -1462,8 +1485,10 @@ rtc_library("rtc_certificate_generator") { + "//third_party/abseil-cpp/absl/types:optional", + ] + } ++} + + rtc_library("ssl") { ++if (!build_with_mozilla) { + visibility = [ "*" ] + sources = [ + "helpers.cc", +@@ -1563,6 +1588,7 @@ rtc_library("ssl") { + deps += [ ":win32" ] + } + } 
++} + + rtc_library("crypt_string") { + sources = [ +@@ -1572,6 +1598,7 @@ rtc_library("crypt_string") { + } + + rtc_library("http_common") { ++if (!build_with_mozilla) { + sources = [ + "http_common.cc", + "http_common.h", +@@ -1588,6 +1615,7 @@ rtc_library("http_common") { + + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] + } ++} + + rtc_source_set("gtest_prod") { + sources = [ "gtest_prod_util.h" ] +@@ -2146,7 +2174,7 @@ if (rtc_include_tests) { + } + } + +-if (is_android) { ++if (is_android && !build_with_mozilla) { + rtc_android_library("base_java") { + visibility = [ "*" ] + sources = [ +diff --git a/rtc_base/system/BUILD.gn b/rtc_base/system/BUILD.gn +index 77f5139a2f..486b37590c 100644 +--- a/rtc_base/system/BUILD.gn ++++ b/rtc_base/system/BUILD.gn +@@ -101,7 +101,7 @@ if (is_mac || is_ios) { + rtc_source_set("warn_current_thread_is_deadlocked") { + sources = [ "warn_current_thread_is_deadlocked.h" ] + deps = [] +- if (is_android && !build_with_chromium) { ++ if (is_android && (!build_with_chromium && !build_with_mozilla)) { + sources += [ "warn_current_thread_is_deadlocked.cc" ] + deps += [ + "..:logging", +diff --git a/test/BUILD.gn b/test/BUILD.gn +index e60febb0a9..860ae3a089 100644 +--- a/test/BUILD.gn ++++ b/test/BUILD.gn +@@ -241,6 +241,7 @@ rtc_library("audio_test_common") { + absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] + } + ++if (!build_with_mozilla) { + if (!build_with_chromium) { + if (is_mac || is_ios) { + rtc_library("video_test_mac") { +@@ -294,8 +295,12 @@ if (!build_with_chromium) { + } + } + } ++} + + rtc_library("rtp_test_utils") { ++ if (build_with_mozilla) { ++ sources = [] ++ } else { + testonly = true + sources = [ + "rtcp_packet_parser.cc", +@@ -305,6 +310,7 @@ rtc_library("rtp_test_utils") { + "rtp_file_writer.cc", + "rtp_file_writer.h", + ] ++ } + + deps = [ + "../api:array_view", +@@ -561,7 +567,9 @@ rtc_library("video_test_support") { + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + + if (!is_ios) { ++ if (!build_with_mozilla) { + deps += [ "//third_party:jpeg" ] ++ } + sources += [ "testsupport/jpeg_frame_writer.cc" ] + } else { + sources += [ "testsupport/jpeg_frame_writer_ios.cc" ] +@@ -1317,6 +1325,7 @@ if (!build_with_chromium) { + } + } + ++if (!build_with_mozilla) { + if (!build_with_chromium && is_android) { + rtc_android_library("native_test_java") { + testonly = true +@@ -1330,6 +1339,7 @@ if (!build_with_chromium && is_android) { + ] + } + } ++} + + rtc_library("call_config_utils") { + # TODO(bugs.webrtc.org/10814): Remove rtc_json_suppressions as soon as it +diff --git a/video/BUILD.gn b/video/BUILD.gn +index 54291e46bb..7bcecad957 100644 +--- a/video/BUILD.gn ++++ b/video/BUILD.gn +@@ -17,7 +17,7 @@ rtc_library("video_stream_encoder_interface") { + "../api:fec_controller_api", + "../api:rtc_error", + "../api:rtp_parameters", +- "../api:rtp_sender_interface", ++ "../api:rtp_sender_setparameters_callback", + "../api:scoped_refptr", + "../api/adaptation:resource_adaptation_api", + "../api/units:data_rate", +@@ -374,7 +374,7 @@ rtc_library("video_stream_encoder_impl") { + ":video_stream_encoder_interface", + "../api:field_trials_view", + "../api:rtp_parameters", +- "../api:rtp_sender_interface", ++ "../api:rtp_sender_setparameters_callback", + "../api:sequence_checker", + "../api/adaptation:resource_adaptation_api", + "../api/task_queue:pending_task_safety_flag", +diff --git a/webrtc.gni b/webrtc.gni +index 9c67f61aaa..1861347fa1 100644 +--- a/webrtc.gni ++++ b/webrtc.gni +@@ -35,6 +35,11 @@ if (is_mac) { + 
import("//build/config/mac/rules.gni") + } + ++if (is_android) { ++ import("//build/config/android/config.gni") ++ import("//build/config/android/rules.gni") ++} ++ + if (is_fuchsia) { + import("//build/config/fuchsia/config.gni") + } +@@ -42,6 +47,11 @@ if (is_fuchsia) { + # This declare_args is separated from the next one because args declared + # in this one, can be read from the next one (args defined in the same + # declare_args cannot be referenced in that scope). ++declare_args() { ++ # Enable to use the Mozilla internal settings. ++ build_with_mozilla = true ++} ++ + declare_args() { + # Setting this to true will make RTC_EXPORT (see rtc_base/system/rtc_export.h) + # expand to code that will manage symbols visibility. +@@ -92,7 +102,7 @@ declare_args() { + # will tell the pre-processor to remove the default definition of the + # SystemTimeNanos() which is defined in rtc_base/system_time.cc. In + # that case a new implementation needs to be provided. +- rtc_exclude_system_time = build_with_chromium ++ rtc_exclude_system_time = build_with_chromium || build_with_mozilla + + # Setting this to false will require the API user to pass in their own + # SSLCertificateVerifier to verify the certificates presented from a +@@ -118,7 +128,7 @@ declare_args() { + + # Used to specify an external OpenSSL include path when not compiling the + # library that comes with WebRTC (i.e. rtc_build_ssl == 0). +- rtc_ssl_root = "" ++ rtc_ssl_root = "unused" + + # Enable when an external authentication mechanism is used for performing + # packet authentication for RTP packets instead of libsrtp. +@@ -135,13 +145,13 @@ declare_args() { + rtc_enable_bwe_test_logging = false + + # Set this to false to skip building examples. +- rtc_build_examples = true ++ rtc_build_examples = false + + # Set this to false to skip building tools. +- rtc_build_tools = true ++ rtc_build_tools = false + + # Set this to false to skip building code that requires X11. +- rtc_use_x11 = ozone_platform_x11 ++ rtc_use_x11 = use_x11 + + # Set this to use PipeWire on the Wayland display server. + # By default it's only enabled on desktop Linux (excludes ChromeOS) and +@@ -152,9 +162,6 @@ declare_args() { + # Set this to link PipeWire and required libraries directly instead of using the dlopen. + rtc_link_pipewire = false + +- # Enable to use the Mozilla internal settings. +- build_with_mozilla = false +- + # Experimental: enable use of Android AAudio which requires Android SDK 26 or above + # and NDK r16 or above. + rtc_enable_android_aaudio = false +@@ -268,7 +275,7 @@ declare_args() { + rtc_build_json = !build_with_mozilla + rtc_build_libsrtp = !build_with_mozilla + rtc_build_libvpx = !build_with_mozilla +- rtc_libvpx_build_vp9 = !build_with_mozilla ++ rtc_libvpx_build_vp9 = true + rtc_build_opus = !build_with_mozilla + rtc_build_ssl = !build_with_mozilla + +@@ -287,7 +294,7 @@ declare_args() { + + # Chromium uses its own IO handling, so the internal ADM is only built for + # standalone WebRTC. +- rtc_include_internal_audio_device = !build_with_chromium ++ rtc_include_internal_audio_device = !build_with_chromium && !build_with_mozilla + + # Set this to true to enable the avx2 support in webrtc. + # TODO: Make sure that AVX2 works also for non-clang compilers. +@@ -331,6 +338,9 @@ declare_args() { + rtc_enable_grpc = rtc_enable_protobuf && (is_linux || is_mac) + } + ++# Enable liboam only on non-mozilla builds. 
++enable_libaom = !build_with_mozilla ++ + # Make it possible to provide custom locations for some libraries (move these + # up into declare_args should we need to actually use them for the GN build). + rtc_libvpx_dir = "//third_party/libvpx" +@@ -1129,7 +1139,7 @@ if (is_mac || is_ios) { + } + } + +-if (is_android) { ++if (is_android && !build_with_mozilla) { + template("rtc_android_library") { + android_library(target_name) { + forward_variables_from(invoker, -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0031.patch b/third_party/libwebrtc/moz-patch-stack/0031.patch index c6016762ad30..ea258647e321 100644 --- a/third_party/libwebrtc/moz-patch-stack/0031.patch +++ b/third_party/libwebrtc/moz-patch-stack/0031.patch @@ -1,1433 +1,29 @@ From: Dan Minor -Date: Thu, 5 Nov 2020 07:47:00 +0000 -Subject: Bug 1654112 - Tweak upstream gn files for Firefox build. r=ng +Date: Fri, 13 Nov 2020 14:34:00 -0500 +Subject: Bug 1654112 - Fully quality AudioLevel::kUri in channel_send.cc. r=ng -Differential Revision: https://phabricator.services.mozilla.com/D130075 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/127ace4d8887f11abb201d300a849772a2b519f8 - -Bug 1820869 - avoid building unreachable files. r=ng,webrtc-reviewers - -Differential Revision: https://phabricator.services.mozilla.com/D171922 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/88b3cc6bbece7c53d00e124713330f3d34d2789d - -Bug 1822194 - (fix-acabb3641b) Break the new SetParametersCallback stuff into stand-alone files. - -acabb3641b from upstream added a callback mechanism to allow failures to be -propagated back to RTCRtpSender.setParameters. Unfortunately, this callback -mechanism was (needlessly) tightly coupled to libwebrtc's implementation of -RTCRtpSender, and also their media channel code. This introduced a lot of -unnecessary dependencies throughout libwebrtc, that spilled into our code as -well. -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/59232687efa00e5f7b7bd3d6befca129149e2bf5 - -Bug 1828517 - (fix-794d599741) account for moved files in BUILD.gn that we don't want to build. - -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/4a969f6709183d4f55215adaffb8a52b790a8492 - -Bug 1839451 - (fix-186ebdc1b0) remove BUILD.gn refs to gone files delayable.h, media_channel.h - -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/d0f4d1733cb1a2d8189097af4b5537118ebc95a6 - -Bug 1839451 - (fix-f6eae959bf) s/rtc_encoder_simulcast_proxy/rtc_simulcast_encoder_adapter/ BUILD ref. - -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/876b3f5821cd5c30564a82c1da7d057d79d17b01 - -Bug 1828517 - (fix-a138c6c8a5) handle file moves in BUILD.gn - -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/cf7e333da17689b3c115a6ffd07fab042bc5f086 - -Bug 1817024 - (fix-0e2cf6cc01) Skip library create_peer_connection_quality_test_frame_generator. r?mjf! - -Differential Revision: https://phabricator.services.mozilla.com/D170887 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/fbbc1bf963fda30bca26ae6aac0c3459b8ebea6f - -Bug 1826428 - remove libwebrtc's jvm_android.cc from build r=ng,webrtc-reviewers - -Based on info from John Lin and previous try runs, we're almost -certainly not using this. Let's try removing it from the build -and landing it. If no problems emerge, we'll be able to remove -our custom changes to upstream code in jvm_android.cc. 
- -Differential Revision: https://phabricator.services.mozilla.com/D174793 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/dca1b97525487ae57d43ced1ebdb4a2d9c9dae89 - -Bug 1774628 - re-enable support for Windows.Graphics.Capture APIs in libwebrtc. r=pehrsons,webrtc-reviewers - -Differential Revision: https://phabricator.services.mozilla.com/D186862 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/08567f4539a12b54202aecbf554ec6540fb99ab2 +Differential Revision: https://phabricator.services.mozilla.com/D130082 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/7163801a480d607005042292ed9e4fbb892f440d --- - .gn | 2 + - BUILD.gn | 45 ++++++++++++++++++- - api/BUILD.gn | 34 +++++++++++++- - api/rtp_sender_interface.h | 4 +- - api/rtp_sender_setparameters_callback.cc | 27 +++++++++++ - api/rtp_sender_setparameters_callback.h | 28 ++++++++++++ - api/task_queue/BUILD.gn | 2 + - api/transport/BUILD.gn | 2 + - call/BUILD.gn | 14 +++++- - call/audio_send_stream.h | 2 +- - call/video_send_stream.h | 2 +- - common_audio/BUILD.gn | 4 -- - common_audio/fir_filter_avx2.cc | 2 + - common_audio/intrin.h | 8 ++++ - media/BUILD.gn | 35 ++++++++++++++- - media/base/media_channel.h | 4 -- - media/base/media_channel_impl.cc | 13 ------ - modules/audio_coding/BUILD.gn | 2 +- - modules/audio_device/BUILD.gn | 19 +++++--- - modules/audio_processing/aec3/BUILD.gn | 13 +++--- - .../aec3/adaptive_fir_filter_avx2.cc | 2 +- - .../audio_processing/agc2/rnn_vad/BUILD.gn | 2 +- - modules/desktop_capture/BUILD.gn | 29 +----------- - modules/portal/BUILD.gn | 24 ++++++++++ - modules/utility/BUILD.gn | 4 ++ - modules/video_capture/BUILD.gn | 11 +---- - rtc_base/BUILD.gn | 30 ++++++++++++- - rtc_base/system/BUILD.gn | 2 +- - test/BUILD.gn | 10 +++++ - video/BUILD.gn | 4 +- - webrtc.gni | 32 ++++++++----- - 31 files changed, 312 insertions(+), 100 deletions(-) - create mode 100644 api/rtp_sender_setparameters_callback.cc - create mode 100644 api/rtp_sender_setparameters_callback.h - create mode 100644 common_audio/intrin.h + audio/channel_send.cc | 4 ++-- + 1 file changed, 2 insertions(+), 2 deletions(-) -diff --git a/.gn b/.gn -index ed92a1b296..d5cf5c031c 100644 ---- a/.gn -+++ b/.gn -@@ -69,6 +69,8 @@ default_args = { - # Prevent jsoncpp to pass -Wno-deprecated-declarations to users - jsoncpp_no_deprecated_declarations = false - -+ use_custom_libcxx = false -+ - # Fixes the abi-revision issue. - # TODO(https://bugs.webrtc.org/14437): Remove this section if general - # Chromium fix resolves the problem. -diff --git a/BUILD.gn b/BUILD.gn -index a13df86db3..427bde39c0 100644 ---- a/BUILD.gn -+++ b/BUILD.gn -@@ -32,7 +32,7 @@ if (is_android) { - import("//build/config/android/rules.gni") - } - --if (!build_with_chromium) { -+if (!build_with_chromium && !build_with_mozilla) { - # This target should (transitively) cause everything to be built; if you run - # 'ninja default' and then 'ninja all', the second build should do no work. - group("default") { -@@ -149,6 +149,10 @@ config("common_inherited_config") { - defines += [ "WEBRTC_ENABLE_OBJC_SYMBOL_EXPORT" ] - } - -+ if (build_with_mozilla) { -+ defines += [ "WEBRTC_MOZILLA_BUILD" ] -+ } -+ - if (!rtc_builtin_ssl_root_certificates) { - defines += [ "WEBRTC_EXCLUDE_BUILT_IN_SSL_ROOT_CERTS" ] - } -@@ -463,9 +467,11 @@ config("common_config") { - } - } - -+if (is_mac) { - config("common_objc") { - frameworks = [ "Foundation.framework" ] - } -+} - - if (!build_with_chromium) { - # Target to build all the WebRTC production code. 
-@@ -515,6 +521,33 @@ if (!build_with_chromium) { - "sdk", - "video", - ] -+ if (build_with_mozilla) { -+ deps -= [ -+ "api:create_peerconnection_factory", -+ "api:rtc_error", -+ "api:transport_api", -+ "api/crypto", -+ "api/rtc_event_log:rtc_event_log_factory", -+ "api/task_queue", -+ "api/task_queue:default_task_queue_factory", -+ "api/test/metrics", -+ "api/video_codecs:video_decoder_factory_template", -+ "api/video_codecs:video_decoder_factory_template_dav1d_adapter", -+ "api/video_codecs:video_decoder_factory_template_libvpx_vp8_adapter", -+ "api/video_codecs:video_decoder_factory_template_libvpx_vp9_adapter", -+ "api/video_codecs:video_decoder_factory_template_open_h264_adapter", -+ "api/video_codecs:video_encoder_factory_template", -+ "api/video_codecs:video_encoder_factory_template_libaom_av1_adapter", -+ "api/video_codecs:video_encoder_factory_template_libvpx_vp8_adapter", -+ "api/video_codecs:video_encoder_factory_template_libvpx_vp9_adapter", -+ "api/video_codecs:video_encoder_factory_template_open_h264_adapter", -+ "logging:rtc_event_log_api", -+ "p2p:rtc_p2p", -+ "pc:libjingle_peerconnection", -+ "pc:rtc_pc", -+ "sdk", -+ ] -+ } - - if (rtc_include_builtin_audio_codecs) { - deps += [ -@@ -527,6 +560,16 @@ if (!build_with_chromium) { - deps += [ - "api/video:video_frame", - "api/video:video_rtp_headers", -+ "test:rtp_test_utils", -+ ] -+ # Added when we removed deps in other places to avoid building -+ # unreachable sources. See Bug 1820869. -+ deps += [ -+ "api/video_codecs:video_codecs_api", -+ "api/video_codecs:rtc_software_fallback_wrappers", -+ "media:rtc_simulcast_encoder_adapter", -+ "modules/video_coding:webrtc_vp8", -+ "modules/video_coding:webrtc_vp9", - ] - } else { - deps += [ -diff --git a/api/BUILD.gn b/api/BUILD.gn -index a9217400fe..1c3084a074 100644 ---- a/api/BUILD.gn -+++ b/api/BUILD.gn -@@ -35,7 +35,7 @@ rtc_source_set("callfactory_api") { - ] - } - --if (!build_with_chromium) { -+if (!build_with_chromium && !build_with_mozilla) { - rtc_library("create_peerconnection_factory") { - visibility = [ "*" ] - allow_poison = [ "default_task_queue" ] -@@ -186,6 +186,10 @@ rtc_source_set("ice_transport_interface") { - } - - rtc_library("dtls_transport_interface") { -+# Previously, Mozilla has tried to limit including this dep, but as -+# upstream changes, it requires whack-a-mole. Making it an empty -+# definition has the same effect, but only requires one change. -+if (!build_with_mozilla) { - visibility = [ "*" ] - - sources = [ -@@ -202,6 +206,7 @@ rtc_library("dtls_transport_interface") { - ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - } -+} - - rtc_library("dtmf_sender_interface") { - visibility = [ "*" ] -@@ -214,6 +219,10 @@ rtc_library("dtmf_sender_interface") { - } - - rtc_library("rtp_sender_interface") { -+# Previously, Mozilla has tried to limit including this dep, but as -+# upstream changes, it requires whack-a-mole. Making it an empty -+# definition has the same effect, but only requires one change. 
-+if (!build_with_mozilla) { - visibility = [ "*" ] - - sources = [ -@@ -225,8 +234,8 @@ rtc_library("rtp_sender_interface") { - ":dtmf_sender_interface", - ":frame_transformer_interface", - ":media_stream_interface", -- ":rtc_error", - ":rtp_parameters", -+ ":rtp_sender_setparameters_callback", - ":scoped_refptr", - "../rtc_base:checks", - "../rtc_base:refcount", -@@ -234,10 +243,24 @@ rtc_library("rtp_sender_interface") { - "crypto:frame_encryptor_interface", - "video_codecs:video_codecs_api", - ] -+} -+} -+ -+rtc_library("rtp_sender_setparameters_callback") { -+ visibility = [ "*" ] -+ -+ sources = [ -+ "rtp_sender_setparameters_callback.cc", -+ "rtp_sender_setparameters_callback.h", -+ ] -+ deps = [ -+ ":rtc_error", -+ ] - absl_deps = [ "//third_party/abseil-cpp/absl/functional:any_invocable" ] - } - - rtc_library("libjingle_peerconnection_api") { -+if (!build_with_mozilla) { - visibility = [ "*" ] - cflags = [] - sources = [ -@@ -354,6 +377,7 @@ rtc_library("libjingle_peerconnection_api") { - "//third_party/abseil-cpp/absl/types:optional", - ] - } -+} - - rtc_source_set("frame_transformer_interface") { - visibility = [ "*" ] -@@ -544,6 +568,7 @@ rtc_source_set("peer_network_dependencies") { - } - - rtc_source_set("peer_connection_quality_test_fixture_api") { -+if (!build_with_mozilla) { - visibility = [ "*" ] - testonly = true - sources = [ "test/peerconnection_quality_test_fixture.h" ] -@@ -594,6 +619,7 @@ rtc_source_set("peer_connection_quality_test_fixture_api") { - "//third_party/abseil-cpp/absl/types:optional", - ] - } -+} - - rtc_source_set("frame_generator_api") { - visibility = [ "*" ] -@@ -712,6 +738,7 @@ rtc_library("create_frame_generator") { - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - } - -+if (!build_with_mozilla) { - rtc_library("create_peer_connection_quality_test_frame_generator") { - visibility = [ "*" ] - testonly = true -@@ -728,6 +755,7 @@ rtc_library("create_peer_connection_quality_test_frame_generator") { - ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - } -+} - - rtc_source_set("libjingle_logging_api") { - visibility = [ "*" ] -@@ -899,6 +927,7 @@ rtc_source_set("refcountedbase") { - ] - } - -+if (!build_with_mozilla) { - rtc_library("ice_transport_factory") { - visibility = [ "*" ] - sources = [ -@@ -917,6 +946,7 @@ rtc_library("ice_transport_factory") { - "rtc_event_log:rtc_event_log", - ] - } -+} - - rtc_library("neteq_simulator_api") { - visibility = [ "*" ] -diff --git a/api/rtp_sender_interface.h b/api/rtp_sender_interface.h -index 2786a2ac19..98ee91b1cc 100644 ---- a/api/rtp_sender_interface.h -+++ b/api/rtp_sender_interface.h -@@ -32,9 +32,9 @@ - #include "rtc_base/ref_count.h" - #include "rtc_base/system/rtc_export.h" - --namespace webrtc { -+#include "api/rtp_sender_setparameters_callback.h" - --using SetParametersCallback = absl::AnyInvocable; -+namespace webrtc { - - class RTC_EXPORT RtpSenderInterface : public rtc::RefCountInterface { - public: -diff --git a/api/rtp_sender_setparameters_callback.cc b/api/rtp_sender_setparameters_callback.cc -new file mode 100644 -index 0000000000..99728ef95e ---- /dev/null -+++ b/api/rtp_sender_setparameters_callback.cc -@@ -0,0 +1,27 @@ -+/* -+ * Copyright 2015 The WebRTC project authors. All Rights Reserved. -+ * -+ * Use of this source code is governed by a BSD-style license -+ * that can be found in the LICENSE file in the root of the source -+ * tree. An additional intellectual property rights grant can be found -+ * in the file PATENTS. 
All contributing project authors may -+ * be found in the AUTHORS file in the root of the source tree. -+ */ -+ -+// File added by mozilla, to decouple this from libwebrtc's implementation of -+// RTCRtpSender. -+ -+#include "api/rtp_sender_setparameters_callback.h" -+ -+namespace webrtc { -+ -+webrtc::RTCError InvokeSetParametersCallback(SetParametersCallback& callback, -+ RTCError error) { -+ if (callback) { -+ std::move(callback)(error); -+ callback = nullptr; -+ } -+ return error; -+} -+ -+} // namespace webrtc -diff --git a/api/rtp_sender_setparameters_callback.h b/api/rtp_sender_setparameters_callback.h -new file mode 100644 -index 0000000000..45194f5ace ---- /dev/null -+++ b/api/rtp_sender_setparameters_callback.h -@@ -0,0 +1,28 @@ -+/* -+ * Copyright 2015 The WebRTC project authors. All Rights Reserved. -+ * -+ * Use of this source code is governed by a BSD-style license -+ * that can be found in the LICENSE file in the root of the source -+ * tree. An additional intellectual property rights grant can be found -+ * in the file PATENTS. All contributing project authors may -+ * be found in the AUTHORS file in the root of the source tree. -+ */ -+ -+// File added by mozilla, to decouple this from libwebrtc's implementation of -+// RTCRtpSender. -+ -+#ifndef API_RTP_SENDER_SETPARAMETERS_CALLBACK_H_ -+#define API_RTP_SENDER_SETPARAMETERS_CALLBACK_H_ -+ -+#include "api/rtc_error.h" -+#include "absl/functional/any_invocable.h" -+ -+namespace webrtc { -+ -+using SetParametersCallback = absl::AnyInvocable; -+ -+webrtc::RTCError InvokeSetParametersCallback(SetParametersCallback& callback, -+ RTCError error); -+} // namespace webrtc -+ -+#endif // API_RTP_SENDER_SETPARAMETERS_CALLBACK_H_ -diff --git a/api/task_queue/BUILD.gn b/api/task_queue/BUILD.gn -index 65bb1ae21b..116a50762e 100644 ---- a/api/task_queue/BUILD.gn -+++ b/api/task_queue/BUILD.gn -@@ -31,6 +31,7 @@ rtc_library("task_queue") { - ] - } - -+if (rtc_include_tests) { - rtc_library("task_queue_test") { - visibility = [ "*" ] - testonly = true -@@ -79,6 +80,7 @@ rtc_library("task_queue_test") { - ] - } - } -+} - - rtc_library("default_task_queue_factory") { - visibility = [ "*" ] -diff --git a/api/transport/BUILD.gn b/api/transport/BUILD.gn -index 12a1f57066..d8f095c700 100644 ---- a/api/transport/BUILD.gn -+++ b/api/transport/BUILD.gn -@@ -90,6 +90,7 @@ rtc_source_set("sctp_transport_factory_interface") { - } - - rtc_source_set("stun_types") { -+if (!build_with_mozilla) { - visibility = [ "*" ] - sources = [ - "stun.cc", -@@ -110,6 +111,7 @@ rtc_source_set("stun_types") { - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] - } -+} - - if (rtc_include_tests) { - rtc_source_set("test_feedback_generator_interface") { -diff --git a/call/BUILD.gn b/call/BUILD.gn -index 63a3a2d53d..a85ad0c0d4 100644 ---- a/call/BUILD.gn -+++ b/call/BUILD.gn -@@ -46,7 +46,7 @@ rtc_library("call_interfaces") { - "../api:rtc_error", - "../api:rtp_headers", - "../api:rtp_parameters", -- "../api:rtp_sender_interface", -+ "../api:rtp_sender_setparameters_callback", - "../api:scoped_refptr", - "../api:transport_api", - "../api/adaptation:resource_adaptation_api", -@@ -346,6 +346,16 @@ rtc_library("call") { - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] -+ if (build_with_mozilla) { # See Bug 1820869. 
-+ sources -= [ -+ "call_factory.cc", -+ "degraded_call.cc", -+ ] -+ deps -= [ -+ ":fake_network", -+ ":simulated_network", -+ ] -+ } - } - - rtc_source_set("receive_stream_interface") { -@@ -373,7 +383,7 @@ rtc_library("video_stream_api") { - "../api:frame_transformer_interface", - "../api:rtp_headers", - "../api:rtp_parameters", -- "../api:rtp_sender_interface", -+ "../api:rtp_sender_setparameters_callback", - "../api:scoped_refptr", - "../api:transport_api", - "../api/adaptation:resource_adaptation_api", -diff --git a/call/audio_send_stream.h b/call/audio_send_stream.h -index 9c2fad652f..f9e49db574 100644 ---- a/call/audio_send_stream.h -+++ b/call/audio_send_stream.h -@@ -25,7 +25,7 @@ - #include "api/crypto/frame_encryptor_interface.h" - #include "api/frame_transformer_interface.h" - #include "api/rtp_parameters.h" --#include "api/rtp_sender_interface.h" -+#include "api/rtp_sender_setparameters_callback.h" - #include "api/scoped_refptr.h" - #include "call/audio_sender.h" - #include "call/rtp_config.h" -diff --git a/call/video_send_stream.h b/call/video_send_stream.h -index 431c267e1e..de18fc7b92 100644 ---- a/call/video_send_stream.h -+++ b/call/video_send_stream.h -@@ -23,7 +23,7 @@ - #include "api/crypto/crypto_options.h" - #include "api/frame_transformer_interface.h" - #include "api/rtp_parameters.h" --#include "api/rtp_sender_interface.h" -+#include "api/rtp_sender_setparameters_callback.h" - #include "api/scoped_refptr.h" - #include "api/video/video_content_type.h" - #include "api/video/video_frame.h" -diff --git a/common_audio/BUILD.gn b/common_audio/BUILD.gn -index 2ae6d32710..a45214f754 100644 ---- a/common_audio/BUILD.gn -+++ b/common_audio/BUILD.gn -@@ -267,14 +267,10 @@ if (current_cpu == "x86" || current_cpu == "x64") { - "resampler/sinc_resampler_avx2.cc", - ] - -- if (is_win) { -- cflags = [ "/arch:AVX2" ] -- } else { - cflags = [ - "-mavx2", - "-mfma", - ] -- } - - deps = [ - ":fir_filter", -diff --git a/common_audio/fir_filter_avx2.cc b/common_audio/fir_filter_avx2.cc -index 9cb0f770ca..0031392f8a 100644 ---- a/common_audio/fir_filter_avx2.cc -+++ b/common_audio/fir_filter_avx2.cc -@@ -15,6 +15,8 @@ - #include - #include - -+#include "common_audio/intrin.h" -+ - #include "rtc_base/checks.h" - #include "rtc_base/memory/aligned_malloc.h" - -diff --git a/common_audio/intrin.h b/common_audio/intrin.h -new file mode 100644 -index 0000000000..f6ff7f218f ---- /dev/null -+++ b/common_audio/intrin.h -@@ -0,0 +1,8 @@ -+#if defined (__SSE__) -+ #include -+ #if defined (__clang__) -+ #include -+ #include -+ #include -+ #endif -+#endif -diff --git a/media/BUILD.gn b/media/BUILD.gn -index 8508a7b5c5..a7d18b3410 100644 ---- a/media/BUILD.gn -+++ b/media/BUILD.gn -@@ -64,7 +64,7 @@ rtc_library("rtc_media_base") { - "../api:media_stream_interface", - "../api:rtc_error", - "../api:rtp_parameters", -- "../api:rtp_sender_interface", -+ "../api:rtp_sender_setparameters_callback", - "../api:scoped_refptr", - "../api:sequence_checker", - "../api:transport_api", -@@ -139,6 +139,15 @@ rtc_library("rtc_media_base") { - "base/video_source_base.cc", - "base/video_source_base.h", - ] -+ if (build_with_mozilla) { -+ sources -= [ -+ "base/adapted_video_track_source.cc", -+ "base/adapted_video_track_source.h", -+ "base/audio_source.h", -+ "base/media_engine.cc", -+ "base/media_engine.h", -+ ] -+ } - } - - rtc_library("media_channel_shim") { -@@ -176,6 +185,7 @@ rtc_library("media_channel_shim") { - } - - rtc_library("media_channel_impl") { -+if (!build_with_mozilla) { - sources = [ - 
"base/media_channel_impl.cc", - "base/media_channel_impl.h", -@@ -225,6 +235,7 @@ rtc_library("media_channel_impl") { - "//third_party/abseil-cpp/absl/types:optional", - ] - } -+} - - rtc_source_set("media_channel") { - sources = [ "base/media_channel.h" ] -@@ -308,6 +319,7 @@ rtc_library("codec") { - } - - rtc_library("rtp_utils") { -+if (!build_with_mozilla) { - sources = [ - "base/rtp_utils.cc", - "base/rtp_utils.h", -@@ -324,8 +336,10 @@ rtc_library("rtp_utils") { - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] - } -+} - - rtc_library("stream_params") { -+if (!build_with_mozilla) { - sources = [ - "base/stream_params.cc", - "base/stream_params.h", -@@ -338,6 +352,7 @@ rtc_library("stream_params") { - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ] - } -+} - - rtc_library("media_constants") { - sources = [ -@@ -348,6 +363,7 @@ rtc_library("media_constants") { - } - - rtc_library("turn_utils") { -+if (!build_with_mozilla) { - sources = [ - "base/turn_utils.cc", - "base/turn_utils.h", -@@ -358,14 +374,17 @@ rtc_library("turn_utils") { - "../rtc_base/system:rtc_export", - ] - } -+} - - rtc_library("rid_description") { -+if (!build_with_mozilla) { - sources = [ - "base/rid_description.cc", - "base/rid_description.h", - ] - deps = [] - } -+} - - rtc_library("rtc_simulcast_encoder_adapter") { - visibility = [ "*" ] -@@ -437,6 +456,12 @@ rtc_library("rtc_internal_video_codecs") { - "../system_wrappers:field_trial", - "../test:fake_video_codecs", - ] -+ if (build_with_mozilla) { -+ deps -= [ -+ "../modules/video_coding:webrtc_multiplex", # See Bug 1820869. -+ "../test:fake_video_codecs", -+ ] -+ } - - if (enable_libaom) { - defines += [ "RTC_USE_LIBAOM_AV1_ENCODER" ] -@@ -462,6 +487,14 @@ rtc_library("rtc_internal_video_codecs") { - "engine/multiplex_codec_factory.cc", - "engine/multiplex_codec_factory.h", - ] -+ if (build_with_mozilla) { -+ sources -= [ -+ "engine/fake_video_codec_factory.cc", -+ "engine/fake_video_codec_factory.h", -+ "engine/internal_encoder_factory.cc", # See Bug 1820869. -+ "engine/multiplex_codec_factory.cc", # See Bug 1820869. 
-+ ] -+ } - } - - rtc_library("rtc_audio_video") { -diff --git a/media/base/media_channel.h b/media/base/media_channel.h -index ea6384ab95..79ae2a3935 100644 ---- a/media/base/media_channel.h -+++ b/media/base/media_channel.h -@@ -66,10 +66,6 @@ class Timing; - - namespace webrtc { - class VideoFrame; -- --webrtc::RTCError InvokeSetParametersCallback(SetParametersCallback& callback, -- RTCError error); -- - } // namespace webrtc - - namespace cricket { -diff --git a/media/base/media_channel_impl.cc b/media/base/media_channel_impl.cc -index f2a5af59ab..16980af5c6 100644 ---- a/media/base/media_channel_impl.cc -+++ b/media/base/media_channel_impl.cc -@@ -31,19 +31,6 @@ - #include "modules/rtp_rtcp/include/report_block_data.h" - #include "rtc_base/checks.h" - --namespace webrtc { -- --webrtc::RTCError InvokeSetParametersCallback(SetParametersCallback& callback, -- RTCError error) { -- if (callback) { -- std::move(callback)(error); -- callback = nullptr; -- } -- return error; --} -- --} // namespace webrtc -- - namespace cricket { - using webrtc::FrameDecryptorInterface; - using webrtc::FrameEncryptorInterface; -diff --git a/modules/audio_coding/BUILD.gn b/modules/audio_coding/BUILD.gn -index 1420aa6385..143a6eca41 100644 ---- a/modules/audio_coding/BUILD.gn -+++ b/modules/audio_coding/BUILD.gn -@@ -553,7 +553,7 @@ rtc_library("webrtc_opus_wrapper") { - deps += [ rtc_opus_dir ] - public_configs = [ "//third_party/opus:opus_config" ] - } else if (build_with_mozilla) { -- include_dirs = [ getenv("DIST") + "/include/opus" ] -+ public_configs = [ "//third_party/opus:opus_config" ] - } - } - -diff --git a/modules/audio_device/BUILD.gn b/modules/audio_device/BUILD.gn -index a0a6f53e9a..6704328108 100644 ---- a/modules/audio_device/BUILD.gn -+++ b/modules/audio_device/BUILD.gn -@@ -30,6 +30,7 @@ rtc_source_set("audio_device_default") { - } - - rtc_source_set("audio_device") { -+if (!build_with_mozilla) { # See Bug 1820869. - visibility = [ "*" ] - public_deps = [ - ":audio_device_api", -@@ -40,6 +41,7 @@ rtc_source_set("audio_device") { - ":audio_device_impl", - ] - } -+} - - rtc_source_set("audio_device_api") { - visibility = [ "*" ] -@@ -58,6 +60,7 @@ rtc_source_set("audio_device_api") { - } - - rtc_library("audio_device_buffer") { -+if (!build_with_mozilla) { # See Bug 1820869. - sources = [ - "audio_device_buffer.cc", - "audio_device_buffer.h", -@@ -85,6 +88,7 @@ rtc_library("audio_device_buffer") { - "../../system_wrappers:metrics", - ] - } -+} - - rtc_library("audio_device_generic") { - sources = [ -@@ -212,6 +216,7 @@ if (!build_with_chromium) { - # Contains default implementations of webrtc::AudioDeviceModule for Windows, - # Linux, Mac, iOS and Android. - rtc_library("audio_device_impl") { -+if (!build_with_mozilla) { # See Bug 1820869. - visibility = [ "*" ] - deps = [ - ":audio_device_api", -@@ -263,9 +268,9 @@ rtc_library("audio_device_impl") { - ] - - if (build_with_mozilla) { -- sources += [ -- "opensl/single_rw_fifo.cc", -- "opensl/single_rw_fifo.h", -+ sources -= [ -+ "include/test_audio_device.cc", -+ "include/test_audio_device.h", - ] - } - -@@ -403,6 +408,7 @@ rtc_library("audio_device_impl") { - ] - } - } -+} - - if (is_mac) { - rtc_source_set("audio_device_impl_frameworks") { -@@ -420,6 +426,7 @@ if (is_mac) { - } - } - -+if (!build_with_mozilla) { # See Bug 1820869. 
- rtc_source_set("mock_audio_device") { - visibility = [ "*" ] - testonly = true -@@ -436,8 +443,10 @@ rtc_source_set("mock_audio_device") { - "../../test:test_support", - ] - } -+} - --if (rtc_include_tests && !build_with_chromium) { -+# See Bug 1820869 for !build_with_mozilla. -+if (rtc_include_tests && !build_with_chromium && !build_with_mozilla) { - rtc_library("audio_device_unittests") { - testonly = true - -@@ -508,7 +517,7 @@ if (rtc_include_tests && !build_with_chromium) { - } - } - --if (!build_with_chromium && is_android) { -+if ((!build_with_chromium && !build_with_mozilla) && is_android) { - rtc_android_library("audio_device_java") { - sources = [ - "android/java/src/org/webrtc/voiceengine/BuildInfo.java", -diff --git a/modules/audio_processing/aec3/BUILD.gn b/modules/audio_processing/aec3/BUILD.gn -index f5eb5d5951..3e11a245a1 100644 ---- a/modules/audio_processing/aec3/BUILD.gn -+++ b/modules/audio_processing/aec3/BUILD.gn -@@ -264,14 +264,11 @@ if (current_cpu == "x86" || current_cpu == "x64") { - "vector_math_avx2.cc", - ] - -- if (is_win) { -- cflags = [ "/arch:AVX2" ] -- } else { -- cflags = [ -- "-mavx2", -- "-mfma", -- ] -- } -+ cflags = [ -+ "-mavx", -+ "-mavx2", -+ "-mfma", -+ ] - - deps = [ - ":adaptive_fir_filter", -diff --git a/modules/audio_processing/aec3/adaptive_fir_filter_avx2.cc b/modules/audio_processing/aec3/adaptive_fir_filter_avx2.cc -index b6eda9f117..8d6e1cf3d7 100644 ---- a/modules/audio_processing/aec3/adaptive_fir_filter_avx2.cc -+++ b/modules/audio_processing/aec3/adaptive_fir_filter_avx2.cc -@@ -8,7 +8,7 @@ - * be found in the AUTHORS file in the root of the source tree. - */ - --#include -+#include "common_audio/intrin.h" - - #include "modules/audio_processing/aec3/adaptive_fir_filter.h" - #include "rtc_base/checks.h" -diff --git a/modules/audio_processing/agc2/rnn_vad/BUILD.gn b/modules/audio_processing/agc2/rnn_vad/BUILD.gn -index 9093a68cf3..3003a585bd 100644 ---- a/modules/audio_processing/agc2/rnn_vad/BUILD.gn -+++ b/modules/audio_processing/agc2/rnn_vad/BUILD.gn -@@ -122,7 +122,7 @@ rtc_source_set("vector_math") { - if (current_cpu == "x86" || current_cpu == "x64") { - rtc_library("vector_math_avx2") { - sources = [ "vector_math_avx2.cc" ] -- if (is_win) { -+ if (is_win && !build_with_mozilla) { - cflags = [ "/arch:AVX2" ] - } else { - cflags = [ -diff --git a/modules/desktop_capture/BUILD.gn b/modules/desktop_capture/BUILD.gn -index 6f3e9d26c5..42d72865b9 100644 ---- a/modules/desktop_capture/BUILD.gn -+++ b/modules/desktop_capture/BUILD.gn -@@ -341,37 +341,12 @@ rtc_library("desktop_capture") { - ] - deps += [ ":desktop_capture_objc" ] - } -- -- if (build_with_mozilla) { -- sources += [ -- "desktop_device_info.cc", -- "desktop_device_info.h", -- ] -- if (is_win) { -- sources += [ -- "app_capturer_win.cc", -- "win/desktop_device_info_win.cc", -- "win/win_shared.cc", -- ] -- } -- } - if (rtc_use_x11_extensions || rtc_use_pipewire) { - sources += [ - "mouse_cursor_monitor_linux.cc", - "screen_capturer_linux.cc", - "window_capturer_linux.cc", - ] -- -- if (build_with_mozilla && (is_linux || is_chromeos)) { -- sources += [ -- "app_capturer_linux.cc", -- "linux/x11/app_capturer_x11.cc", -- "linux/x11/desktop_device_info_linux.cc", -- "linux/x11/desktop_device_info_linux.h", -- "linux/x11/shared_x_util.cc", -- "linux/x11/shared_x_util.h", -- ] -- } - } - - if (rtc_use_x11_extensions) { -@@ -537,9 +512,7 @@ rtc_library("desktop_capture") { - deps += [ "../../rtc_base:sanitizer" ] - } - -- if (!build_with_mozilla) { -- deps += [ 
"//third_party/libyuv" ] -- } -+ deps += [ "//third_party/libyuv" ] - - if (use_desktop_capture_differ_sse2) { - deps += [ ":desktop_capture_differ_sse2" ] -diff --git a/modules/portal/BUILD.gn b/modules/portal/BUILD.gn -index d0756f269b..d7768b2323 100644 ---- a/modules/portal/BUILD.gn -+++ b/modules/portal/BUILD.gn -@@ -11,6 +11,7 @@ import("//tools/generate_stubs/rules.gni") - import("../../webrtc.gni") - - if ((is_linux || is_chromeos) && rtc_use_pipewire) { -+if (!build_with_mozilla) { - pkg_config("gio") { - packages = [ - "gio-2.0", -@@ -88,6 +89,12 @@ if ((is_linux || is_chromeos) && rtc_use_pipewire) { - defines += [ "WEBRTC_USE_GIO" ] - } - } -+} else { -+ config("pipewire_all") { -+ } -+ config("pipewire_config") { -+ } -+} - - rtc_library("portal") { - sources = [ -@@ -120,5 +127,22 @@ if ((is_linux || is_chromeos) && rtc_use_pipewire) { - - deps += [ ":pipewire_stubs" ] - } -+ -+ if (build_with_mozilla) { -+ configs -= [ -+ ":gio", -+ ":pipewire", -+ ":pipewire_config", -+ ] -+ deps -= [ ":pipewire_stubs" ] -+ defines -= [ "WEBRTC_DLOPEN_PIPEWIRE" ] -+ public_deps = [ -+ "//third_party/pipewire", -+ "//third_party/drm", -+ "//third_party/gbm", -+ "//third_party/libepoxy" -+ ] -+ } - } - } -+ -diff --git a/modules/utility/BUILD.gn b/modules/utility/BUILD.gn -index 8cefe5653c..b8d75865f7 100644 ---- a/modules/utility/BUILD.gn -+++ b/modules/utility/BUILD.gn -@@ -25,5 +25,9 @@ rtc_source_set("utility") { - "../../rtc_base:platform_thread", - "../../rtc_base/system:arch", - ] -+ -+ if (build_with_mozilla) { -+ sources -= [ "source/jvm_android.cc" ] -+ } - } - } -diff --git a/modules/video_capture/BUILD.gn b/modules/video_capture/BUILD.gn -index 730ec9bfdd..d473dbb74c 100644 ---- a/modules/video_capture/BUILD.gn -+++ b/modules/video_capture/BUILD.gn -@@ -125,21 +125,12 @@ if (!build_with_chromium || is_linux || is_chromeos) { - "strmiids.lib", - "user32.lib", - ] -- -- if (build_with_mozilla) { -- sources += [ -- "windows/BaseFilter.cpp", -- "windows/BaseInputPin.cpp", -- "windows/BasePin.cpp", -- "windows/MediaType.cpp", -- ] -- } - } - if (is_fuchsia) { - sources += [ "video_capture_factory_null.cc" ] - } - -- if (build_with_mozilla && is_android) { -+ if (!build_with_mozilla && is_android) { - include_dirs = [ - "/config/external/nspr", - "/nsprpub/lib/ds", -diff --git a/rtc_base/BUILD.gn b/rtc_base/BUILD.gn -index 0005503341..f2fedb3063 100644 ---- a/rtc_base/BUILD.gn -+++ b/rtc_base/BUILD.gn -@@ -288,6 +288,7 @@ rtc_library("sample_counter") { - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - } - -+if (!build_with_mozilla) { # See Bug 1820869. 
- rtc_library("timestamp_aligner") { - visibility = [ "*" ] - sources = [ -@@ -301,6 +302,7 @@ rtc_library("timestamp_aligner") { - "system:rtc_export", - ] - } -+} - - rtc_library("zero_memory") { - visibility = [ "*" ] -@@ -833,7 +835,9 @@ rtc_library("rtc_json") { - "strings/json.h", - ] - deps = [ ":stringutils" ] -+if (!build_with_mozilla) { - all_dependent_configs = [ "//third_party/jsoncpp:jsoncpp_config" ] -+} - if (rtc_build_json) { - deps += [ "//third_party/jsoncpp" ] +diff --git a/audio/channel_send.cc b/audio/channel_send.cc +index fccd58b76c..e1e44ef7e4 100644 +--- a/audio/channel_send.cc ++++ b/audio/channel_send.cc +@@ -620,9 +620,9 @@ void ChannelSend::SetSendAudioLevelIndicationStatus(bool enable, int id) { + RTC_DCHECK_RUN_ON(&worker_thread_checker_); + include_audio_level_indication_.store(enable); + if (enable) { +- rtp_rtcp_->RegisterRtpHeaderExtension(AudioLevel::Uri(), id); ++ rtp_rtcp_->RegisterRtpHeaderExtension(webrtc::AudioLevel::Uri(), id); } else { -@@ -1147,6 +1151,7 @@ if (!build_with_chromium) { - } - - rtc_library("network") { -+if (!build_with_mozilla) { - visibility = [ "*" ] - sources = [ - "network.cc", -@@ -1185,16 +1190,20 @@ rtc_library("network") { - deps += [ ":win32" ] - } - } -+} - - rtc_library("socket_address_pair") { -+if (!build_with_mozilla) { - sources = [ - "socket_address_pair.cc", - "socket_address_pair.h", - ] - deps = [ ":socket_address" ] - } -+} - - rtc_library("net_helper") { -+if (!build_with_mozilla) { - visibility = [ "*" ] - sources = [ - "net_helper.cc", -@@ -1203,8 +1212,10 @@ rtc_library("net_helper") { - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] - deps = [ "system:rtc_export" ] - } -+} - - rtc_library("socket_adapters") { -+if (!build_with_mozilla) { - visibility = [ "*" ] - sources = [ - "socket_adapters.cc", -@@ -1224,6 +1235,7 @@ rtc_library("socket_adapters") { - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] - } -+} - - rtc_library("network_route") { - sources = [ -@@ -1238,6 +1250,7 @@ rtc_library("network_route") { - } - - rtc_library("async_tcp_socket") { -+if (!build_with_mozilla) { - sources = [ - "async_tcp_socket.cc", - "async_tcp_socket.h", -@@ -1256,8 +1269,10 @@ rtc_library("async_tcp_socket") { - "third_party/sigslot", - ] - } -+} - - rtc_library("async_udp_socket") { -+if (!build_with_mozilla) { - visibility = [ "*" ] - sources = [ - "async_udp_socket.cc", -@@ -1279,8 +1294,10 @@ rtc_library("async_udp_socket") { - ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - } -+} - - rtc_library("async_packet_socket") { -+if (!build_with_mozilla) { - visibility = [ "*" ] - sources = [ - "async_packet_socket.cc", -@@ -1298,6 +1315,7 @@ rtc_library("async_packet_socket") { - "third_party/sigslot", - ] - } -+} - - rtc_library("mdns_responder_interface") { - sources = [ "mdns_responder_interface.h" ] -@@ -1310,6 +1328,7 @@ rtc_library("dscp") { - } - - rtc_library("proxy_info") { -+if (!build_with_mozilla) { - visibility = [ "*" ] - sources = [ - "proxy_info.cc", -@@ -1320,6 +1339,7 @@ rtc_library("proxy_info") { - ":socket_address", - ] - } -+} - - rtc_library("file_rotating_stream") { - sources = [ -@@ -1348,6 +1368,7 @@ rtc_library("data_rate_limiter") { - } - - rtc_library("unique_id_generator") { -+if (!build_with_mozilla) { - sources = [ - "unique_id_generator.cc", - "unique_id_generator.h", -@@ -1362,6 +1383,7 @@ rtc_library("unique_id_generator") { - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] - } -+} - - rtc_library("crc32") { - sources = [ -@@ 
-1389,6 +1411,7 @@ rtc_library("stream") { - } - - rtc_library("rtc_certificate_generator") { -+if (!build_with_mozilla) { - visibility = [ "*" ] - sources = [ - "rtc_certificate_generator.cc", -@@ -1406,8 +1429,10 @@ rtc_library("rtc_certificate_generator") { - "//third_party/abseil-cpp/absl/types:optional", - ] - } -+} - - rtc_library("ssl") { -+if (!build_with_mozilla) { - visibility = [ "*" ] - sources = [ - "helpers.cc", -@@ -1507,6 +1532,7 @@ rtc_library("ssl") { - deps += [ ":win32" ] - } - } -+} - - rtc_library("crypt_string") { - sources = [ -@@ -1516,6 +1542,7 @@ rtc_library("crypt_string") { - } - - rtc_library("http_common") { -+if (!build_with_mozilla) { - sources = [ - "http_common.cc", - "http_common.h", -@@ -1532,6 +1559,7 @@ rtc_library("http_common") { - - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] - } -+} - - rtc_source_set("gtest_prod") { - sources = [ "gtest_prod_util.h" ] -@@ -2080,7 +2108,7 @@ if (rtc_include_tests) { +- rtp_rtcp_->DeregisterSendRtpHeaderExtension(AudioLevel::Uri()); ++ rtp_rtcp_->DeregisterSendRtpHeaderExtension(webrtc::AudioLevel::Uri()); } } --if (is_android) { -+if (is_android && !build_with_mozilla) { - rtc_android_library("base_java") { - visibility = [ "*" ] - sources = [ -diff --git a/rtc_base/system/BUILD.gn b/rtc_base/system/BUILD.gn -index 77f5139a2f..486b37590c 100644 ---- a/rtc_base/system/BUILD.gn -+++ b/rtc_base/system/BUILD.gn -@@ -101,7 +101,7 @@ if (is_mac || is_ios) { - rtc_source_set("warn_current_thread_is_deadlocked") { - sources = [ "warn_current_thread_is_deadlocked.h" ] - deps = [] -- if (is_android && !build_with_chromium) { -+ if (is_android && (!build_with_chromium && !build_with_mozilla)) { - sources += [ "warn_current_thread_is_deadlocked.cc" ] - deps += [ - "..:logging", -diff --git a/test/BUILD.gn b/test/BUILD.gn -index cce1b61bd6..f687a9b212 100644 ---- a/test/BUILD.gn -+++ b/test/BUILD.gn -@@ -240,6 +240,7 @@ rtc_library("audio_test_common") { - absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] - } - -+if (!build_with_mozilla) { - if (!build_with_chromium) { - if (is_mac || is_ios) { - rtc_library("video_test_mac") { -@@ -292,8 +293,12 @@ if (!build_with_chromium) { - } - } - } -+} - - rtc_library("rtp_test_utils") { -+ if (build_with_mozilla) { -+ sources = [] -+ } else { - testonly = true - sources = [ - "rtcp_packet_parser.cc", -@@ -303,6 +308,7 @@ rtc_library("rtp_test_utils") { - "rtp_file_writer.cc", - "rtp_file_writer.h", - ] -+ } - - deps = [ - "../api:array_view", -@@ -555,7 +561,9 @@ rtc_library("video_test_support") { - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - - if (!is_ios) { -+ if (!build_with_mozilla) { - deps += [ "//third_party:jpeg" ] -+ } - sources += [ "testsupport/jpeg_frame_writer.cc" ] - } else { - sources += [ "testsupport/jpeg_frame_writer_ios.cc" ] -@@ -1310,6 +1318,7 @@ if (!build_with_chromium) { - } - } - -+if (!build_with_mozilla) { - if (!build_with_chromium && is_android) { - rtc_android_library("native_test_java") { - testonly = true -@@ -1323,6 +1332,7 @@ if (!build_with_chromium && is_android) { - ] - } - } -+} - - rtc_library("call_config_utils") { - # TODO(bugs.webrtc.org/10814): Remove rtc_json_suppressions as soon as it -diff --git a/video/BUILD.gn b/video/BUILD.gn -index 54291e46bb..7bcecad957 100644 ---- a/video/BUILD.gn -+++ b/video/BUILD.gn -@@ -17,7 +17,7 @@ rtc_library("video_stream_encoder_interface") { - "../api:fec_controller_api", - "../api:rtc_error", - "../api:rtp_parameters", -- "../api:rtp_sender_interface", -+ 
"../api:rtp_sender_setparameters_callback", - "../api:scoped_refptr", - "../api/adaptation:resource_adaptation_api", - "../api/units:data_rate", -@@ -374,7 +374,7 @@ rtc_library("video_stream_encoder_impl") { - ":video_stream_encoder_interface", - "../api:field_trials_view", - "../api:rtp_parameters", -- "../api:rtp_sender_interface", -+ "../api:rtp_sender_setparameters_callback", - "../api:sequence_checker", - "../api/adaptation:resource_adaptation_api", - "../api/task_queue:pending_task_safety_flag", -diff --git a/webrtc.gni b/webrtc.gni -index 928a59104b..c25caf85e0 100644 ---- a/webrtc.gni -+++ b/webrtc.gni -@@ -35,6 +35,11 @@ if (is_mac) { - import("//build/config/mac/rules.gni") - } - -+if (is_android) { -+ import("//build/config/android/config.gni") -+ import("//build/config/android/rules.gni") -+} -+ - if (is_fuchsia) { - import("//build/config/fuchsia/config.gni") - } -@@ -42,6 +47,11 @@ if (is_fuchsia) { - # This declare_args is separated from the next one because args declared - # in this one, can be read from the next one (args defined in the same - # declare_args cannot be referenced in that scope). -+declare_args() { -+ # Enable to use the Mozilla internal settings. -+ build_with_mozilla = true -+} -+ - declare_args() { - # Setting this to true will make RTC_EXPORT (see rtc_base/system/rtc_export.h) - # expand to code that will manage symbols visibility. -@@ -88,7 +98,7 @@ declare_args() { - # will tell the pre-processor to remove the default definition of the - # SystemTimeNanos() which is defined in rtc_base/system_time.cc. In - # that case a new implementation needs to be provided. -- rtc_exclude_system_time = build_with_chromium -+ rtc_exclude_system_time = build_with_chromium || build_with_mozilla - - # Setting this to false will require the API user to pass in their own - # SSLCertificateVerifier to verify the certificates presented from a -@@ -114,7 +124,7 @@ declare_args() { - - # Used to specify an external OpenSSL include path when not compiling the - # library that comes with WebRTC (i.e. rtc_build_ssl == 0). -- rtc_ssl_root = "" -+ rtc_ssl_root = "unused" - - # Enable when an external authentication mechanism is used for performing - # packet authentication for RTP packets instead of libsrtp. -@@ -131,13 +141,13 @@ declare_args() { - rtc_enable_bwe_test_logging = false - - # Set this to false to skip building examples. -- rtc_build_examples = true -+ rtc_build_examples = false - - # Set this to false to skip building tools. -- rtc_build_tools = true -+ rtc_build_tools = false - - # Set this to false to skip building code that requires X11. -- rtc_use_x11 = ozone_platform_x11 -+ rtc_use_x11 = use_x11 - - # Set this to use PipeWire on the Wayland display server. - # By default it's only enabled on desktop Linux (excludes ChromeOS) and -@@ -148,9 +158,6 @@ declare_args() { - # Set this to link PipeWire and required libraries directly instead of using the dlopen. - rtc_link_pipewire = false - -- # Enable to use the Mozilla internal settings. -- build_with_mozilla = false -- - # Experimental: enable use of Android AAudio which requires Android SDK 26 or above - # and NDK r16 or above. 
- rtc_enable_android_aaudio = false -@@ -260,7 +267,7 @@ declare_args() { - rtc_build_json = !build_with_mozilla - rtc_build_libsrtp = !build_with_mozilla - rtc_build_libvpx = !build_with_mozilla -- rtc_libvpx_build_vp9 = !build_with_mozilla -+ rtc_libvpx_build_vp9 = true - rtc_build_opus = !build_with_mozilla - rtc_build_ssl = !build_with_mozilla - -@@ -279,7 +286,7 @@ declare_args() { - - # Chromium uses its own IO handling, so the internal ADM is only built for - # standalone WebRTC. -- rtc_include_internal_audio_device = !build_with_chromium -+ rtc_include_internal_audio_device = !build_with_chromium && !build_with_mozilla - - # Set this to true to enable the avx2 support in webrtc. - # TODO: Make sure that AVX2 works also for non-clang compilers. -@@ -323,6 +330,9 @@ declare_args() { - rtc_enable_grpc = rtc_enable_protobuf && (is_linux || is_mac) - } - -+# Enable liboam only on non-mozilla builds. -+enable_libaom = !build_with_mozilla -+ - # Make it possible to provide custom locations for some libraries (move these - # up into declare_args should we need to actually use them for the GN build). - rtc_libvpx_dir = "//third_party/libvpx" -@@ -1118,7 +1128,7 @@ if (is_mac || is_ios) { - } - } - --if (is_android) { -+if (is_android && !build_with_mozilla) { - template("rtc_android_library") { - android_library(target_name) { - forward_variables_from(invoker, -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0032.patch b/third_party/libwebrtc/moz-patch-stack/0032.patch index 859e3f31f516..e7f9b54d3681 100644 --- a/third_party/libwebrtc/moz-patch-stack/0032.patch +++ b/third_party/libwebrtc/moz-patch-stack/0032.patch @@ -1,29 +1,32 @@ From: Dan Minor -Date: Fri, 13 Nov 2020 14:34:00 -0500 -Subject: Bug 1654112 - Fully quality AudioLevel::kUri in channel_send.cc. r=ng +Date: Tue, 1 Dec 2020 09:36:00 -0500 +Subject: Bug 1654112 - Disable creating av1 encoder and decoder. 
r=ng -Differential Revision: https://phabricator.services.mozilla.com/D130082 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/7163801a480d607005042292ed9e4fbb892f440d +Differential Revision: https://phabricator.services.mozilla.com/D130089 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/ef548d7758c7de6e78d38af299c2296bf9d20ec9 --- - audio/channel_send.cc | 4 ++-- - 1 file changed, 2 insertions(+), 2 deletions(-) + media/engine/internal_decoder_factory.cc | 2 ++ + 1 file changed, 2 insertions(+) -diff --git a/audio/channel_send.cc b/audio/channel_send.cc -index fccd58b76c..e1e44ef7e4 100644 ---- a/audio/channel_send.cc -+++ b/audio/channel_send.cc -@@ -620,9 +620,9 @@ void ChannelSend::SetSendAudioLevelIndicationStatus(bool enable, int id) { - RTC_DCHECK_RUN_ON(&worker_thread_checker_); - include_audio_level_indication_.store(enable); - if (enable) { -- rtp_rtcp_->RegisterRtpHeaderExtension(AudioLevel::Uri(), id); -+ rtp_rtcp_->RegisterRtpHeaderExtension(webrtc::AudioLevel::Uri(), id); - } else { -- rtp_rtcp_->DeregisterSendRtpHeaderExtension(AudioLevel::Uri()); -+ rtp_rtcp_->DeregisterSendRtpHeaderExtension(webrtc::AudioLevel::Uri()); - } - } +diff --git a/media/engine/internal_decoder_factory.cc b/media/engine/internal_decoder_factory.cc +index e761fd60c8..001c666313 100644 +--- a/media/engine/internal_decoder_factory.cc ++++ b/media/engine/internal_decoder_factory.cc +@@ -49,12 +49,14 @@ std::vector InternalDecoderFactory::GetSupportedFormats() + for (const SdpVideoFormat& h264_format : SupportedH264DecoderCodecs()) + formats.push_back(h264_format); ++#if !defined(WEBRTC_MOZILLA_BUILD) + if (kDav1dIsIncluded) { + formats.push_back(SdpVideoFormat(cricket::kAv1CodecName)); + formats.push_back(SdpVideoFormat( + cricket::kAv1CodecName, + {{kAV1FmtpProfile, AV1ProfileToString(AV1Profile::kProfile1).data()}})); + } ++#endif + + return formats; + } -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0033.patch b/third_party/libwebrtc/moz-patch-stack/0033.patch index b112a5b912f3..44004eff7f2e 100644 --- a/third_party/libwebrtc/moz-patch-stack/0033.patch +++ b/third_party/libwebrtc/moz-patch-stack/0033.patch @@ -1,32 +1,133 @@ -From: Dan Minor -Date: Tue, 1 Dec 2020 09:36:00 -0500 -Subject: Bug 1654112 - Disable creating av1 encoder and decoder. r=ng +From: Andreas Pehrson +Date: Mon, 18 Jan 2021 11:04:00 +0100 +Subject: Bug 1654112 - Include RtcpPacketTypeCounter in audio send stats, to + not regress nackCount. r=ng -Differential Revision: https://phabricator.services.mozilla.com/D130089 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/ef548d7758c7de6e78d38af299c2296bf9d20ec9 +This is similar to how it's already included for video send. 
+ +Differential Revision: https://phabricator.services.mozilla.com/D102273 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/d380a43d59f4f7cbc001f4eab9b63ee993b32cd8 --- - media/engine/internal_decoder_factory.cc | 2 ++ - 1 file changed, 2 insertions(+) + audio/audio_send_stream.cc | 1 + + audio/channel_send.cc | 31 +++++++++++++++++++++++++++++++ + audio/channel_send.h | 1 + + call/audio_send_stream.h | 2 ++ + 4 files changed, 35 insertions(+) -diff --git a/media/engine/internal_decoder_factory.cc b/media/engine/internal_decoder_factory.cc -index e761fd60c8..001c666313 100644 ---- a/media/engine/internal_decoder_factory.cc -+++ b/media/engine/internal_decoder_factory.cc -@@ -49,12 +49,14 @@ std::vector InternalDecoderFactory::GetSupportedFormats() - for (const SdpVideoFormat& h264_format : SupportedH264DecoderCodecs()) - formats.push_back(h264_format); +diff --git a/audio/audio_send_stream.cc b/audio/audio_send_stream.cc +index 0caf59a20e..bffb910832 100644 +--- a/audio/audio_send_stream.cc ++++ b/audio/audio_send_stream.cc +@@ -431,6 +431,7 @@ webrtc::AudioSendStream::Stats AudioSendStream::GetStats( + stats.target_bitrate_bps = channel_send_->GetTargetBitrate(); -+#if !defined(WEBRTC_MOZILLA_BUILD) - if (kDav1dIsIncluded) { - formats.push_back(SdpVideoFormat(cricket::kAv1CodecName)); - formats.push_back(SdpVideoFormat( - cricket::kAv1CodecName, - {{kAV1FmtpProfile, AV1ProfileToString(AV1Profile::kProfile1).data()}})); - } -+#endif + webrtc::CallSendStatistics call_stats = channel_send_->GetRTCPStatistics(); ++ stats.rtcp_packet_type_counts = call_stats.rtcp_packet_type_counts; + stats.payload_bytes_sent = call_stats.payload_bytes_sent; + stats.header_and_padding_bytes_sent = + call_stats.header_and_padding_bytes_sent; +diff --git a/audio/channel_send.cc b/audio/channel_send.cc +index e1e44ef7e4..4c1d056cc5 100644 +--- a/audio/channel_send.cc ++++ b/audio/channel_send.cc +@@ -55,6 +55,31 @@ constexpr int64_t kMinRetransmissionWindowMs = 30; + class RtpPacketSenderProxy; + class TransportSequenceNumberProxy; - return formats; - } ++class RtcpCounterObserver : public RtcpPacketTypeCounterObserver { ++ public: ++ explicit RtcpCounterObserver(uint32_t ssrc) : ssrc_(ssrc) {} ++ ++ void RtcpPacketTypesCounterUpdated( ++ uint32_t ssrc, const RtcpPacketTypeCounter& packet_counter) override { ++ if (ssrc_ != ssrc) { ++ return; ++ } ++ ++ MutexLock lock(&mutex_); ++ packet_counter_ = packet_counter; ++ } ++ ++ RtcpPacketTypeCounter GetCounts() { ++ MutexLock lock(&mutex_); ++ return packet_counter_; ++ } ++ ++ private: ++ Mutex mutex_; ++ const uint32_t ssrc_; ++ RtcpPacketTypeCounter packet_counter_; ++}; ++ + class ChannelSend : public ChannelSendInterface, + public AudioPacketizationCallback, // receive encoded + // packets from the ACM +@@ -207,6 +232,8 @@ class ChannelSend : public ChannelSendInterface, + bool input_mute_ RTC_GUARDED_BY(volume_settings_mutex_) = false; + bool previous_frame_muted_ RTC_GUARDED_BY(encoder_queue_) = false; + ++ const std::unique_ptr rtcp_counter_observer_; ++ + PacketRouter* packet_router_ RTC_GUARDED_BY(&worker_thread_checker_) = + nullptr; + const std::unique_ptr rtp_packet_pacer_proxy_; +@@ -384,6 +411,7 @@ ChannelSend::ChannelSend( + const FieldTrialsView& field_trials) + : ssrc_(ssrc), + event_log_(rtc_event_log), ++ rtcp_counter_observer_(new RtcpCounterObserver(ssrc)), + rtp_packet_pacer_proxy_(new RtpPacketSenderProxy()), + retransmission_rate_limiter_( + new RateLimiter(clock, kMaxRetransmissionWindowMs)), +@@ -408,6 +436,8 @@ 
ChannelSend::ChannelSend( + + configuration.event_log = event_log_; + configuration.rtt_stats = rtcp_rtt_stats; ++ configuration.rtcp_packet_type_counter_observer = ++ rtcp_counter_observer_.get(); + configuration.retransmission_rate_limiter = + retransmission_rate_limiter_.get(); + configuration.extmap_allow_mixed = extmap_allow_mixed; +@@ -668,6 +698,7 @@ CallSendStatistics ChannelSend::GetRTCPStatistics() const { + RTC_DCHECK_RUN_ON(&worker_thread_checker_); + CallSendStatistics stats = {0}; + stats.rttMs = GetRTT(); ++ stats.rtcp_packet_type_counts = rtcp_counter_observer_->GetCounts(); + + StreamDataCounters rtp_stats; + StreamDataCounters rtx_stats; +diff --git a/audio/channel_send.h b/audio/channel_send.h +index 00d954c952..f0c9232296 100644 +--- a/audio/channel_send.h ++++ b/audio/channel_send.h +@@ -43,6 +43,7 @@ struct CallSendStatistics { + TimeDelta total_packet_send_delay = TimeDelta::Zero(); + // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-retransmittedpacketssent + uint64_t retransmitted_packets_sent; ++ RtcpPacketTypeCounter rtcp_packet_type_counts; + // A snapshot of Report Blocks with additional data of interest to statistics. + // Within this list, the sender-source SSRC pair is unique and per-pair the + // ReportBlockData represents the latest Report Block that was received for +diff --git a/call/audio_send_stream.h b/call/audio_send_stream.h +index f9e49db574..5f4f871bf0 100644 +--- a/call/audio_send_stream.h ++++ b/call/audio_send_stream.h +@@ -31,6 +31,7 @@ + #include "call/rtp_config.h" + #include "modules/audio_processing/include/audio_processing_statistics.h" + #include "modules/rtp_rtcp/include/report_block_data.h" ++#include "modules/rtp_rtcp/include/rtcp_statistics.h" + + namespace webrtc { + +@@ -65,6 +66,7 @@ class AudioSendStream : public AudioSender { + + ANAStats ana_statistics; + AudioProcessingStats apm_statistics; ++ RtcpPacketTypeCounter rtcp_packet_type_counts; + + int64_t target_bitrate_bps = 0; + // A snapshot of Report Blocks with additional data of interest to -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0034.patch b/third_party/libwebrtc/moz-patch-stack/0034.patch index 419efa0a4313..8bff5ba33af9 100644 --- a/third_party/libwebrtc/moz-patch-stack/0034.patch +++ b/third_party/libwebrtc/moz-patch-stack/0034.patch @@ -1,133 +1,44 @@ From: Andreas Pehrson -Date: Mon, 18 Jan 2021 11:04:00 +0100 -Subject: Bug 1654112 - Include RtcpPacketTypeCounter in audio send stats, to - not regress nackCount. r=ng +Date: Fri, 19 Feb 2021 13:45:00 +0100 +Subject: Bug 1654112 - libwebrtc: Add a REMB on/off switch to + VideoReceiveStream. r=ng -This is similar to how it's already included for video send. 
- -Differential Revision: https://phabricator.services.mozilla.com/D102273 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/d380a43d59f4f7cbc001f4eab9b63ee993b32cd8 +Differential Revision: https://phabricator.services.mozilla.com/D105774 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/7330681cf4de6d8dd868cc661cbdd6679bbc07b3 --- - audio/audio_send_stream.cc | 1 + - audio/channel_send.cc | 31 +++++++++++++++++++++++++++++++ - audio/channel_send.h | 1 + - call/audio_send_stream.h | 2 ++ - 4 files changed, 35 insertions(+) + call/video_receive_stream.h | 3 +++ + video/rtp_video_stream_receiver2.cc | 3 +-- + 2 files changed, 4 insertions(+), 2 deletions(-) -diff --git a/audio/audio_send_stream.cc b/audio/audio_send_stream.cc -index 0caf59a20e..bffb910832 100644 ---- a/audio/audio_send_stream.cc -+++ b/audio/audio_send_stream.cc -@@ -431,6 +431,7 @@ webrtc::AudioSendStream::Stats AudioSendStream::GetStats( - stats.target_bitrate_bps = channel_send_->GetTargetBitrate(); +diff --git a/call/video_receive_stream.h b/call/video_receive_stream.h +index 95f1a47f4e..a1fc204e7c 100644 +--- a/call/video_receive_stream.h ++++ b/call/video_receive_stream.h +@@ -210,6 +210,9 @@ class VideoReceiveStreamInterface : public MediaReceiveStreamInterface { + // disabled. + KeyFrameReqMethod keyframe_method = KeyFrameReqMethod::kPliRtcp; - webrtc::CallSendStatistics call_stats = channel_send_->GetRTCPStatistics(); -+ stats.rtcp_packet_type_counts = call_stats.rtcp_packet_type_counts; - stats.payload_bytes_sent = call_stats.payload_bytes_sent; - stats.header_and_padding_bytes_sent = - call_stats.header_and_padding_bytes_sent; -diff --git a/audio/channel_send.cc b/audio/channel_send.cc -index e1e44ef7e4..4c1d056cc5 100644 ---- a/audio/channel_send.cc -+++ b/audio/channel_send.cc -@@ -55,6 +55,31 @@ constexpr int64_t kMinRetransmissionWindowMs = 30; - class RtpPacketSenderProxy; - class TransportSequenceNumberProxy; - -+class RtcpCounterObserver : public RtcpPacketTypeCounterObserver { -+ public: -+ explicit RtcpCounterObserver(uint32_t ssrc) : ssrc_(ssrc) {} ++ // See draft-alvestrand-rmcat-remb for information. 
++ bool remb = false; + -+ void RtcpPacketTypesCounterUpdated( -+ uint32_t ssrc, const RtcpPacketTypeCounter& packet_counter) override { -+ if (ssrc_ != ssrc) { -+ return; -+ } -+ -+ MutexLock lock(&mutex_); -+ packet_counter_ = packet_counter; -+ } -+ -+ RtcpPacketTypeCounter GetCounts() { -+ MutexLock lock(&mutex_); -+ return packet_counter_; -+ } -+ -+ private: -+ Mutex mutex_; -+ const uint32_t ssrc_; -+ RtcpPacketTypeCounter packet_counter_; -+}; -+ - class ChannelSend : public ChannelSendInterface, - public AudioPacketizationCallback, // receive encoded - // packets from the ACM -@@ -207,6 +232,8 @@ class ChannelSend : public ChannelSendInterface, - bool input_mute_ RTC_GUARDED_BY(volume_settings_mutex_) = false; - bool previous_frame_muted_ RTC_GUARDED_BY(encoder_queue_) = false; + bool tmmbr = false; -+ const std::unique_ptr rtcp_counter_observer_; -+ - PacketRouter* packet_router_ RTC_GUARDED_BY(&worker_thread_checker_) = - nullptr; - const std::unique_ptr rtp_packet_pacer_proxy_; -@@ -384,6 +411,7 @@ ChannelSend::ChannelSend( - const FieldTrialsView& field_trials) - : ssrc_(ssrc), - event_log_(rtc_event_log), -+ rtcp_counter_observer_(new RtcpCounterObserver(ssrc)), - rtp_packet_pacer_proxy_(new RtpPacketSenderProxy()), - retransmission_rate_limiter_( - new RateLimiter(clock, kMaxRetransmissionWindowMs)), -@@ -408,6 +436,8 @@ ChannelSend::ChannelSend( + // See LntfConfig for description. +diff --git a/video/rtp_video_stream_receiver2.cc b/video/rtp_video_stream_receiver2.cc +index 64141e671c..67a0ad5f69 100644 +--- a/video/rtp_video_stream_receiver2.cc ++++ b/video/rtp_video_stream_receiver2.cc +@@ -296,9 +296,8 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( + frames_decryptable_(false), + absolute_capture_time_interpolator_(clock) { + packet_sequence_checker_.Detach(); +- constexpr bool remb_candidate = true; + if (packet_router_) +- packet_router_->AddReceiveRtpModule(rtp_rtcp_.get(), remb_candidate); ++ packet_router_->AddReceiveRtpModule(rtp_rtcp_.get(), config_.rtp.remb); - configuration.event_log = event_log_; - configuration.rtt_stats = rtcp_rtt_stats; -+ configuration.rtcp_packet_type_counter_observer = -+ rtcp_counter_observer_.get(); - configuration.retransmission_rate_limiter = - retransmission_rate_limiter_.get(); - configuration.extmap_allow_mixed = extmap_allow_mixed; -@@ -668,6 +698,7 @@ CallSendStatistics ChannelSend::GetRTCPStatistics() const { - RTC_DCHECK_RUN_ON(&worker_thread_checker_); - CallSendStatistics stats = {0}; - stats.rttMs = GetRTT(); -+ stats.rtcp_packet_type_counts = rtcp_counter_observer_->GetCounts(); - - StreamDataCounters rtp_stats; - StreamDataCounters rtx_stats; -diff --git a/audio/channel_send.h b/audio/channel_send.h -index 00d954c952..f0c9232296 100644 ---- a/audio/channel_send.h -+++ b/audio/channel_send.h -@@ -43,6 +43,7 @@ struct CallSendStatistics { - TimeDelta total_packet_send_delay = TimeDelta::Zero(); - // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-retransmittedpacketssent - uint64_t retransmitted_packets_sent; -+ RtcpPacketTypeCounter rtcp_packet_type_counts; - // A snapshot of Report Blocks with additional data of interest to statistics. 
- // Within this list, the sender-source SSRC pair is unique and per-pair the - // ReportBlockData represents the latest Report Block that was received for -diff --git a/call/audio_send_stream.h b/call/audio_send_stream.h -index f9e49db574..5f4f871bf0 100644 ---- a/call/audio_send_stream.h -+++ b/call/audio_send_stream.h -@@ -31,6 +31,7 @@ - #include "call/rtp_config.h" - #include "modules/audio_processing/include/audio_processing_statistics.h" - #include "modules/rtp_rtcp/include/report_block_data.h" -+#include "modules/rtp_rtcp/include/rtcp_statistics.h" - - namespace webrtc { - -@@ -65,6 +66,7 @@ class AudioSendStream : public AudioSender { - - ANAStats ana_statistics; - AudioProcessingStats apm_statistics; -+ RtcpPacketTypeCounter rtcp_packet_type_counts; - - int64_t target_bitrate_bps = 0; - // A snapshot of Report Blocks with additional data of interest to + RTC_DCHECK(config_.rtp.rtcp_mode != RtcpMode::kOff) + << "A stream should not be configured with RTCP disabled. This value is " -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0035.patch b/third_party/libwebrtc/moz-patch-stack/0035.patch index caa960dca02a..8e1634c9bf55 100644 --- a/third_party/libwebrtc/moz-patch-stack/0035.patch +++ b/third_party/libwebrtc/moz-patch-stack/0035.patch @@ -1,44 +1,41 @@ -From: Andreas Pehrson -Date: Fri, 19 Feb 2021 13:45:00 +0100 -Subject: Bug 1654112 - libwebrtc: Add a REMB on/off switch to - VideoReceiveStream. r=ng +From: Nico Grunbaum +Date: Wed, 10 Feb 2021 12:24:00 -0800 +Subject: Bug 1654112 - Use newer thread run callback, and adapt + PlatformUIThread; r=pehrsons -Differential Revision: https://phabricator.services.mozilla.com/D105774 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/7330681cf4de6d8dd868cc661cbdd6679bbc07b3 +Differential Revision: https://phabricator.services.mozilla.com/D107879 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/bb6417a4cfac1416a8e2565bd68b66c40be4827b --- - call/video_receive_stream.h | 3 +++ - video/rtp_video_stream_receiver2.cc | 3 +-- - 2 files changed, 4 insertions(+), 2 deletions(-) + rtc_base/platform_thread.h | 7 +++++++ + 1 file changed, 7 insertions(+) -diff --git a/call/video_receive_stream.h b/call/video_receive_stream.h -index 2817c9bd9a..e7489a6cdf 100644 ---- a/call/video_receive_stream.h -+++ b/call/video_receive_stream.h -@@ -210,6 +210,9 @@ class VideoReceiveStreamInterface : public MediaReceiveStreamInterface { - // disabled. - KeyFrameReqMethod keyframe_method = KeyFrameReqMethod::kPliRtcp; +diff --git a/rtc_base/platform_thread.h b/rtc_base/platform_thread.h +index befd61849d..5b7c6884fe 100644 +--- a/rtc_base/platform_thread.h ++++ b/rtc_base/platform_thread.h +@@ -21,8 +21,13 @@ + #include "absl/types/optional.h" + #include "rtc_base/platform_thread_types.h" -+ // See draft-alvestrand-rmcat-remb for information. -+ bool remb = false; ++#include "rtc_base/deprecated/recursive_critical_section.h" + - bool tmmbr = false; + namespace rtc { - // See LntfConfig for description. 
-diff --git a/video/rtp_video_stream_receiver2.cc b/video/rtp_video_stream_receiver2.cc -index aceee90313..7d7021ee85 100644 ---- a/video/rtp_video_stream_receiver2.cc -+++ b/video/rtp_video_stream_receiver2.cc -@@ -296,9 +296,8 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( - frames_decryptable_(false), - absolute_capture_time_interpolator_(clock) { - packet_sequence_checker_.Detach(); -- constexpr bool remb_candidate = true; - if (packet_router_) -- packet_router_->AddReceiveRtpModule(rtp_rtcp_.get(), remb_candidate); -+ packet_router_->AddReceiveRtpModule(rtp_rtcp_.get(), config_.rtp.remb); ++// Bug 1691641 ++class PlatformUIThread; ++ + enum class ThreadPriority { + kLow = 1, + kNormal, +@@ -113,6 +118,8 @@ class PlatformThread final { - RTC_DCHECK(config_.rtp.rtcp_mode != RtcpMode::kOff) - << "A stream should not be configured with RTCP disabled. This value is " + absl::optional handle_; + bool joinable_ = false; ++ // Bug 1691641 ++ friend PlatformUIThread; + }; + + } // namespace rtc -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0036.patch b/third_party/libwebrtc/moz-patch-stack/0036.patch index 331a4c17903d..bd435c0eae6c 100644 --- a/third_party/libwebrtc/moz-patch-stack/0036.patch +++ b/third_party/libwebrtc/moz-patch-stack/0036.patch @@ -1,41 +1,57 @@ From: Nico Grunbaum -Date: Wed, 10 Feb 2021 12:24:00 -0800 -Subject: Bug 1654112 - Use newer thread run callback, and adapt - PlatformUIThread; r=pehrsons +Date: Thu, 18 Feb 2021 17:23:00 -0800 +Subject: Bug 1654112 - fix device_info_ds pid and Windows constants includes; + r=pehrsons -Differential Revision: https://phabricator.services.mozilla.com/D107879 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/bb6417a4cfac1416a8e2565bd68b66c40be4827b +Upstreaming bug 1697385 + +Differential Revision: https://phabricator.services.mozilla.com/D107899 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/24809d566449907edea49ea47528065ad0f76910 --- - rtc_base/platform_thread.h | 7 +++++++ - 1 file changed, 7 insertions(+) + modules/video_capture/windows/device_info_ds.cc | 3 ++- + modules/video_capture/windows/device_info_ds.h | 6 +++++- + 2 files changed, 7 insertions(+), 2 deletions(-) -diff --git a/rtc_base/platform_thread.h b/rtc_base/platform_thread.h -index befd61849d..5b7c6884fe 100644 ---- a/rtc_base/platform_thread.h -+++ b/rtc_base/platform_thread.h -@@ -21,8 +21,13 @@ - #include "absl/types/optional.h" - #include "rtc_base/platform_thread_types.h" +diff --git a/modules/video_capture/windows/device_info_ds.cc b/modules/video_capture/windows/device_info_ds.cc +index 9d22f42f4d..f6927281f3 100644 +--- a/modules/video_capture/windows/device_info_ds.cc ++++ b/modules/video_capture/windows/device_info_ds.cc +@@ -172,7 +172,8 @@ int32_t DeviceInfoDS::GetDeviceName(uint32_t deviceNumber, + char* deviceUniqueIdUTF8, + uint32_t deviceUniqueIdUTF8Length, + char* productUniqueIdUTF8, +- uint32_t productUniqueIdUTF8Length) { ++ uint32_t productUniqueIdUTF8Length, ++ pid_t* pid) { + MutexLock lock(&_apiLock); + const int32_t result = GetDeviceInfo( + deviceNumber, deviceNameUTF8, deviceNameLength, deviceUniqueIdUTF8, +diff --git a/modules/video_capture/windows/device_info_ds.h b/modules/video_capture/windows/device_info_ds.h +index ed2a726d6f..e6dfaed366 100644 +--- a/modules/video_capture/windows/device_info_ds.h ++++ b/modules/video_capture/windows/device_info_ds.h +@@ -12,8 +12,11 @@ + #define MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_DEVICE_INFO_DS_H_ -+#include 
"rtc_base/deprecated/recursive_critical_section.h" -+ - namespace rtc { + #include ++#include ++#include -+// Bug 1691641 -+class PlatformUIThread; -+ - enum class ThreadPriority { - kLow = 1, - kNormal, -@@ -113,6 +118,8 @@ class PlatformThread final { + #include "modules/video_capture/device_info_impl.h" ++#include "modules/video_capture/video_capture.h" + #include "modules/video_capture/video_capture_impl.h" - absl::optional handle_; - bool joinable_ = false; -+ // Bug 1691641 -+ friend PlatformUIThread; - }; + namespace webrtc { +@@ -47,7 +50,8 @@ class DeviceInfoDS : public DeviceInfoImpl { + char* deviceUniqueIdUTF8, + uint32_t deviceUniqueIdUTF8Length, + char* productUniqueIdUTF8, +- uint32_t productUniqueIdUTF8Length) override; ++ uint32_t productUniqueIdUTF8Length, ++ pid_t* pid) override; - } // namespace rtc + /* + * Display OS /capture device specific settings dialog -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0037.patch b/third_party/libwebrtc/moz-patch-stack/0037.patch index b1604406710e..81e50a23e713 100644 --- a/third_party/libwebrtc/moz-patch-stack/0037.patch +++ b/third_party/libwebrtc/moz-patch-stack/0037.patch @@ -1,57 +1,50 @@ -From: Nico Grunbaum -Date: Thu, 18 Feb 2021 17:23:00 -0800 -Subject: Bug 1654112 - fix device_info_ds pid and Windows constants includes; - r=pehrsons +From: "Byron Campen [:bwc]" +Date: Thu, 29 Apr 2021 18:25:00 +0000 +Subject: Bug 1654112 - Work around the old (<1.5) libxrandr headers on our + build machines. r=mjf,ng -Upstreaming bug 1697385 - -Differential Revision: https://phabricator.services.mozilla.com/D107899 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/24809d566449907edea49ea47528065ad0f76910 +Differential Revision: https://phabricator.services.mozilla.com/D113830 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/c91f12b557a1d23b468c75c4f2fc00eb0f8d541a --- - modules/video_capture/windows/device_info_ds.cc | 3 ++- - modules/video_capture/windows/device_info_ds.h | 6 +++++- - 2 files changed, 7 insertions(+), 2 deletions(-) + X11/extensions/Xrandr.h | 29 +++++++++++++++++++++++++++++ + 1 file changed, 29 insertions(+) + create mode 100644 X11/extensions/Xrandr.h -diff --git a/modules/video_capture/windows/device_info_ds.cc b/modules/video_capture/windows/device_info_ds.cc -index 9d22f42f4d..f6927281f3 100644 ---- a/modules/video_capture/windows/device_info_ds.cc -+++ b/modules/video_capture/windows/device_info_ds.cc -@@ -172,7 +172,8 @@ int32_t DeviceInfoDS::GetDeviceName(uint32_t deviceNumber, - char* deviceUniqueIdUTF8, - uint32_t deviceUniqueIdUTF8Length, - char* productUniqueIdUTF8, -- uint32_t productUniqueIdUTF8Length) { -+ uint32_t productUniqueIdUTF8Length, -+ pid_t* pid) { - MutexLock lock(&_apiLock); - const int32_t result = GetDeviceInfo( - deviceNumber, deviceNameUTF8, deviceNameLength, deviceUniqueIdUTF8, -diff --git a/modules/video_capture/windows/device_info_ds.h b/modules/video_capture/windows/device_info_ds.h -index ed2a726d6f..e6dfaed366 100644 ---- a/modules/video_capture/windows/device_info_ds.h -+++ b/modules/video_capture/windows/device_info_ds.h -@@ -12,8 +12,11 @@ - #define MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_DEVICE_INFO_DS_H_ - - #include -+#include -+#include - - #include "modules/video_capture/device_info_impl.h" -+#include "modules/video_capture/video_capture.h" - #include "modules/video_capture/video_capture_impl.h" - - namespace webrtc { -@@ -47,7 +50,8 @@ class DeviceInfoDS : public DeviceInfoImpl { - char* deviceUniqueIdUTF8, - uint32_t 
deviceUniqueIdUTF8Length, - char* productUniqueIdUTF8, -- uint32_t productUniqueIdUTF8Length) override; -+ uint32_t productUniqueIdUTF8Length, -+ pid_t* pid) override; - - /* - * Display OS /capture device specific settings dialog +diff --git a/X11/extensions/Xrandr.h b/X11/extensions/Xrandr.h +new file mode 100644 +index 0000000000..876e8b4c7f +--- /dev/null ++++ b/X11/extensions/Xrandr.h +@@ -0,0 +1,29 @@ ++/* This Source Code Form is subject to the terms of the Mozilla Public ++ * License, v. 2.0. If a copy of the MPL was not distributed with this file, ++ * You can obtain one at http://mozilla.org/MPL/2.0/. */ ++ ++// Hack to compensate for the old (<1.5) Xrandr development headers on ++// Mozilla's build boxes. ++ ++#ifndef _XRANDR_H_WRAPPER_HACK_ ++#define _XRANDR_H_WRAPPER_HACK_ ++ ++#include_next ++ ++#if RANDR_MAJOR == 1 && RANDR_MINOR < 5 // defined in randr.h ++typedef struct _XRRMonitorInfo { ++ Atom name; ++ Bool primary; ++ Bool automatic; ++ int noutput; ++ int x; ++ int y; ++ int width; ++ int height; ++ int mwidth; ++ int mheight; ++ RROutput *outputs; ++} XRRMonitorInfo; ++#endif ++ ++#endif // _XRANDR_H_WRAPPER_HACK_ -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0038.patch b/third_party/libwebrtc/moz-patch-stack/0038.patch index 3e6e56a8ad2a..06b809f05ab1 100644 --- a/third_party/libwebrtc/moz-patch-stack/0038.patch +++ b/third_party/libwebrtc/moz-patch-stack/0038.patch @@ -1,50 +1,65 @@ -From: "Byron Campen [:bwc]" -Date: Thu, 29 Apr 2021 18:25:00 +0000 -Subject: Bug 1654112 - Work around the old (<1.5) libxrandr headers on our - build machines. r=mjf,ng +From: Michael Froman +Date: Fri, 16 Apr 2021 17:35:00 -0500 +Subject: Bug 1654112 - General build fixes for paths and naming changes. r=ng -Differential Revision: https://phabricator.services.mozilla.com/D113830 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/c91f12b557a1d23b468c75c4f2fc00eb0f8d541a +Differential Revision: https://phabricator.services.mozilla.com/D113438 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/99b99cca6b7b2c2ebffb5472457a4f927bda11c4 --- - X11/extensions/Xrandr.h | 29 +++++++++++++++++++++++++++++ - 1 file changed, 29 insertions(+) - create mode 100644 X11/extensions/Xrandr.h + modules/video_capture/video_capture_impl.h | 6 ++++-- + rtc_base/system/warn_current_thread_is_deadlocked.h | 2 +- + sdk/android/api/org/webrtc/VideoCodecInfo.java | 2 +- + 3 files changed, 6 insertions(+), 4 deletions(-) -diff --git a/X11/extensions/Xrandr.h b/X11/extensions/Xrandr.h -new file mode 100644 -index 0000000000..876e8b4c7f ---- /dev/null -+++ b/X11/extensions/Xrandr.h -@@ -0,0 +1,29 @@ -+/* This Source Code Form is subject to the terms of the Mozilla Public -+ * License, v. 2.0. If a copy of the MPL was not distributed with this file, -+ * You can obtain one at http://mozilla.org/MPL/2.0/. */ +diff --git a/modules/video_capture/video_capture_impl.h b/modules/video_capture/video_capture_impl.h +index e46e050609..dcc93fd3a0 100644 +--- a/modules/video_capture/video_capture_impl.h ++++ b/modules/video_capture/video_capture_impl.h +@@ -97,6 +97,10 @@ class RTC_EXPORT VideoCaptureImpl : public VideoCaptureModule { + rtc::RaceChecker capture_checker_; + // current Device unique name; + char* _deviceUniqueId RTC_GUARDED_BY(api_checker_); + -+// Hack to compensate for the old (<1.5) Xrandr development headers on -+// Mozilla's build boxes. 
-+ -+#ifndef _XRANDR_H_WRAPPER_HACK_ -+#define _XRANDR_H_WRAPPER_HACK_ -+ -+#include_next -+ -+#if RANDR_MAJOR == 1 && RANDR_MINOR < 5 // defined in randr.h -+typedef struct _XRRMonitorInfo { -+ Atom name; -+ Bool primary; -+ Bool automatic; -+ int noutput; -+ int x; -+ int y; -+ int width; -+ int height; -+ int mwidth; -+ int mheight; -+ RROutput *outputs; -+} XRRMonitorInfo; -+#endif -+ -+#endif // _XRANDR_H_WRAPPER_HACK_ ++ // moved DeliverCapturedFrame to protected for VideoCaptureAndroid (mjf) ++ int32_t DeliverCapturedFrame(VideoFrame& captureFrame) ++ RTC_EXCLUSIVE_LOCKS_REQUIRED(api_lock_); + Mutex api_lock_; + // Should be set by platform dependent code in StartCapture. + VideoCaptureCapability _requestedCapability RTC_GUARDED_BY(api_checker_); +@@ -104,8 +108,6 @@ class RTC_EXPORT VideoCaptureImpl : public VideoCaptureModule { + private: + void UpdateFrameCount(); + uint32_t CalculateFrameRate(int64_t now_ns); +- int32_t DeliverCapturedFrame(VideoFrame& captureFrame) +- RTC_EXCLUSIVE_LOCKS_REQUIRED(api_lock_); + void DeliverRawFrame(uint8_t* videoFrame, + size_t videoFrameLength, + const VideoCaptureCapability& frameInfo, +diff --git a/rtc_base/system/warn_current_thread_is_deadlocked.h b/rtc_base/system/warn_current_thread_is_deadlocked.h +index 4a0ba9dc09..eac12022ed 100644 +--- a/rtc_base/system/warn_current_thread_is_deadlocked.h ++++ b/rtc_base/system/warn_current_thread_is_deadlocked.h +@@ -13,7 +13,7 @@ + + namespace webrtc { + +-#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD) ++#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD) && !defined(MOZ_WIDGET_ANDROID) + void WarnThatTheCurrentThreadIsProbablyDeadlocked(); + #else + inline void WarnThatTheCurrentThreadIsProbablyDeadlocked() {} +diff --git a/sdk/android/api/org/webrtc/VideoCodecInfo.java b/sdk/android/api/org/webrtc/VideoCodecInfo.java +index 4f97cf74cf..363be347b5 100644 +--- a/sdk/android/api/org/webrtc/VideoCodecInfo.java ++++ b/sdk/android/api/org/webrtc/VideoCodecInfo.java +@@ -80,7 +80,7 @@ public class VideoCodecInfo { + } + + @CalledByNative +- Map getParams() { ++ Map getParams() { + return params; + } + } -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0039.patch b/third_party/libwebrtc/moz-patch-stack/0039.patch index ca52db276153..53172283ccf8 100644 --- a/third_party/libwebrtc/moz-patch-stack/0039.patch +++ b/third_party/libwebrtc/moz-patch-stack/0039.patch @@ -1,65 +1,37 @@ From: Michael Froman -Date: Fri, 16 Apr 2021 17:35:00 -0500 -Subject: Bug 1654112 - General build fixes for paths and naming changes. r=ng +Date: Tue, 15 Jun 2021 12:18:00 -0500 +Subject: Bug 1654112 - suppress android lint warnings for WrongConstant in 2 + libwebrtc java files. 
r=ng -Differential Revision: https://phabricator.services.mozilla.com/D113438 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/99b99cca6b7b2c2ebffb5472457a4f927bda11c4 +Differential Revision: https://phabricator.services.mozilla.com/D118050 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/e60e2f295fb722f69a3a9fe9af34219880afe772 --- - modules/video_capture/video_capture_impl.h | 6 ++++-- - rtc_base/system/warn_current_thread_is_deadlocked.h | 2 +- - sdk/android/api/org/webrtc/VideoCodecInfo.java | 2 +- - 3 files changed, 6 insertions(+), 4 deletions(-) + sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java | 5 +++++ + 1 file changed, 5 insertions(+) -diff --git a/modules/video_capture/video_capture_impl.h b/modules/video_capture/video_capture_impl.h -index e46e050609..dcc93fd3a0 100644 ---- a/modules/video_capture/video_capture_impl.h -+++ b/modules/video_capture/video_capture_impl.h -@@ -97,6 +97,10 @@ class RTC_EXPORT VideoCaptureImpl : public VideoCaptureModule { - rtc::RaceChecker capture_checker_; - // current Device unique name; - char* _deviceUniqueId RTC_GUARDED_BY(api_checker_); -+ -+ // moved DeliverCapturedFrame to protected for VideoCaptureAndroid (mjf) -+ int32_t DeliverCapturedFrame(VideoFrame& captureFrame) -+ RTC_EXCLUSIVE_LOCKS_REQUIRED(api_lock_); - Mutex api_lock_; - // Should be set by platform dependent code in StartCapture. - VideoCaptureCapability _requestedCapability RTC_GUARDED_BY(api_checker_); -@@ -104,8 +108,6 @@ class RTC_EXPORT VideoCaptureImpl : public VideoCaptureModule { - private: - void UpdateFrameCount(); - uint32_t CalculateFrameRate(int64_t now_ns); -- int32_t DeliverCapturedFrame(VideoFrame& captureFrame) -- RTC_EXCLUSIVE_LOCKS_REQUIRED(api_lock_); - void DeliverRawFrame(uint8_t* videoFrame, - size_t videoFrameLength, - const VideoCaptureCapability& frameInfo, -diff --git a/rtc_base/system/warn_current_thread_is_deadlocked.h b/rtc_base/system/warn_current_thread_is_deadlocked.h -index 4a0ba9dc09..eac12022ed 100644 ---- a/rtc_base/system/warn_current_thread_is_deadlocked.h -+++ b/rtc_base/system/warn_current_thread_is_deadlocked.h -@@ -13,7 +13,7 @@ +diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java +index 7894659926..7b4b809ab1 100644 +--- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java ++++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java +@@ -15,6 +15,7 @@ import static android.media.AudioManager.MODE_IN_COMMUNICATION; + import static android.media.AudioManager.MODE_NORMAL; + import static android.media.AudioManager.MODE_RINGTONE; - namespace webrtc { - --#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD) -+#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD) && !defined(MOZ_WIDGET_ANDROID) - void WarnThatTheCurrentThreadIsProbablyDeadlocked(); - #else - inline void WarnThatTheCurrentThreadIsProbablyDeadlocked() {} -diff --git a/sdk/android/api/org/webrtc/VideoCodecInfo.java b/sdk/android/api/org/webrtc/VideoCodecInfo.java -index 4f97cf74cf..363be347b5 100644 ---- a/sdk/android/api/org/webrtc/VideoCodecInfo.java -+++ b/sdk/android/api/org/webrtc/VideoCodecInfo.java -@@ -80,7 +80,7 @@ public class VideoCodecInfo { ++import android.annotation.SuppressLint; + import android.annotation.TargetApi; + import android.content.Context; + import android.content.pm.PackageManager; +@@ -229,6 +230,10 @@ final class WebRtcAudioUtils { + } } - @CalledByNative -- Map getParams() { -+ Map 
getParams() { - return params; - } - } ++ // Moz linting complains even though AudioManager.GET_DEVICES_ALL is ++ // listed in the docs here: ++ // https://developer.android.com/reference/android/media/AudioManager#GET_DEVICES_ALL ++ @SuppressLint("WrongConstant") + private static void logAudioDeviceInfo(String tag, AudioManager audioManager) { + if (Build.VERSION.SDK_INT < 23) { + return; -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0040.patch b/third_party/libwebrtc/moz-patch-stack/0040.patch index 137511cffbf9..98fb22f17070 100644 --- a/third_party/libwebrtc/moz-patch-stack/0040.patch +++ b/third_party/libwebrtc/moz-patch-stack/0040.patch @@ -1,61 +1,38 @@ From: Michael Froman -Date: Tue, 15 Jun 2021 12:18:00 -0500 -Subject: Bug 1654112 - suppress android lint warnings for WrongConstant in 2 - libwebrtc java files. r=ng +Date: Fri, 25 Jun 2021 15:12:00 -0500 +Subject: Bug 1654112 - Mirror Bug 1714577 - Part 3 - Register WebRTC threads + with the Gecko Profiler. r=ng -Differential Revision: https://phabricator.services.mozilla.com/D118050 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/e60e2f295fb722f69a3a9fe9af34219880afe772 +Differential Revision: https://phabricator.services.mozilla.com/D119412 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/d881b16dd8a6813feb5ce1516c2a7ebe0270e72d --- - .../java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java | 5 +++++ - sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java | 5 +++++ - 2 files changed, 10 insertions(+) + rtc_base/platform_thread.cc | 6 ++++++ + 1 file changed, 6 insertions(+) -diff --git a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java -index 0472114297..afd3d429af 100644 ---- a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java -+++ b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java -@@ -15,6 +15,7 @@ import static android.media.AudioManager.MODE_IN_COMMUNICATION; - import static android.media.AudioManager.MODE_NORMAL; - import static android.media.AudioManager.MODE_RINGTONE; +diff --git a/rtc_base/platform_thread.cc b/rtc_base/platform_thread.cc +index 556204ac89..71a9f1b224 100644 +--- a/rtc_base/platform_thread.cc ++++ b/rtc_base/platform_thread.cc +@@ -19,6 +19,8 @@ -+import android.annotation.SuppressLint; - import android.content.Context; - import android.content.pm.PackageManager; - import android.media.AudioDeviceInfo; -@@ -247,6 +248,10 @@ public final class WebRtcAudioUtils { - } - } + #include "rtc_base/checks.h" -+ // Moz linting complains even though AudioManager.GET_DEVICES_ALL is -+ // listed in the docs here: -+ // https://developer.android.com/reference/android/media/AudioManager#GET_DEVICES_ALL -+ @SuppressLint("WrongConstant") - private static void logAudioDeviceInfo(String tag, AudioManager audioManager) { - if (Build.VERSION.SDK_INT < 23) { - return; -diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java -index 7894659926..7b4b809ab1 100644 ---- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java -+++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java -@@ -15,6 +15,7 @@ import static android.media.AudioManager.MODE_IN_COMMUNICATION; - import static android.media.AudioManager.MODE_NORMAL; - import static android.media.AudioManager.MODE_RINGTONE; ++#include 
"MicroGeckoProfiler.h" ++ + namespace rtc { + namespace { -+import android.annotation.SuppressLint; - import android.annotation.TargetApi; - import android.content.Context; - import android.content.pm.PackageManager; -@@ -229,6 +230,10 @@ final class WebRtcAudioUtils { - } - } - -+ // Moz linting complains even though AudioManager.GET_DEVICES_ALL is -+ // listed in the docs here: -+ // https://developer.android.com/reference/android/media/AudioManager#GET_DEVICES_ALL -+ @SuppressLint("WrongConstant") - private static void logAudioDeviceInfo(String tag, AudioManager audioManager) { - if (Build.VERSION.SDK_INT < 23) { - return; +@@ -181,6 +183,10 @@ PlatformThread PlatformThread::SpawnThread( + new std::function([thread_function = std::move(thread_function), + name = std::string(name), attributes] { + rtc::SetCurrentThreadName(name.c_str()); ++ ++ char stacktop; ++ AutoRegisterProfiler profiler(name.c_str(), &stacktop); ++ + SetPriority(attributes.priority); + thread_function(); + }); -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0041.patch b/third_party/libwebrtc/moz-patch-stack/0041.patch index eb708f4a20c1..ee4a5589abad 100644 --- a/third_party/libwebrtc/moz-patch-stack/0041.patch +++ b/third_party/libwebrtc/moz-patch-stack/0041.patch @@ -1,38 +1,300 @@ -From: Michael Froman -Date: Fri, 25 Jun 2021 15:12:00 -0500 -Subject: Bug 1654112 - Mirror Bug 1714577 - Part 3 - Register WebRTC threads - with the Gecko Profiler. r=ng +From: Nico Grunbaum +Date: Wed, 14 Jul 2021 22:26:00 +0000 +Subject: Bug 1654112 - deconflate the target and host architectures in + libwebrtc build files; r=mjf -Differential Revision: https://phabricator.services.mozilla.com/D119412 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/d881b16dd8a6813feb5ce1516c2a7ebe0270e72d +Differential Revision: https://phabricator.services.mozilla.com/D119707 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/58f47eacaf10d12e21dff7362743b6f4cdd1696b --- - rtc_base/platform_thread.cc | 6 ++++++ - 1 file changed, 6 insertions(+) + BUILD.gn | 6 +++--- + common_audio/BUILD.gn | 16 ++++++++-------- + common_audio/third_party/ooura/BUILD.gn | 6 +++--- + common_audio/third_party/spl_sqrt_floor/BUILD.gn | 4 ++-- + modules/audio_processing/aec3/BUILD.gn | 6 +++--- + modules/audio_processing/aecm/BUILD.gn | 4 ++-- + modules/audio_processing/agc/BUILD.gn | 2 +- + modules/audio_processing/agc2/rnn_vad/BUILD.gn | 2 +- + modules/audio_processing/ns/BUILD.gn | 2 +- + modules/desktop_capture/BUILD.gn | 2 +- + webrtc.gni | 4 ++-- + 11 files changed, 27 insertions(+), 27 deletions(-) -diff --git a/rtc_base/platform_thread.cc b/rtc_base/platform_thread.cc -index 556204ac89..71a9f1b224 100644 ---- a/rtc_base/platform_thread.cc -+++ b/rtc_base/platform_thread.cc -@@ -19,6 +19,8 @@ +diff --git a/BUILD.gn b/BUILD.gn +index 155e3862af..49b2e7a57f 100644 +--- a/BUILD.gn ++++ b/BUILD.gn +@@ -418,12 +418,12 @@ config("common_config") { + } + } - #include "rtc_base/checks.h" +- if (current_cpu == "arm64") { ++ if (target_cpu == "arm64") { + defines += [ "WEBRTC_ARCH_ARM64" ] + defines += [ "WEBRTC_HAS_NEON" ] + } -+#include "MicroGeckoProfiler.h" -+ - namespace rtc { - namespace { +- if (current_cpu == "arm") { ++ if (target_cpu == "arm") { + defines += [ "WEBRTC_ARCH_ARM" ] + if (arm_version >= 7) { + defines += [ "WEBRTC_ARCH_ARM_V7" ] +@@ -433,7 +433,7 @@ config("common_config") { + } + } -@@ -181,6 +183,10 @@ PlatformThread PlatformThread::SpawnThread( - new std::function([thread_function = 
std::move(thread_function), - name = std::string(name), attributes] { - rtc::SetCurrentThreadName(name.c_str()); -+ -+ char stacktop; -+ AutoRegisterProfiler profiler(name.c_str(), &stacktop); -+ - SetPriority(attributes.priority); - thread_function(); - }); +- if (current_cpu == "mipsel") { ++ if (target_cpu == "mipsel") { + defines += [ "MIPS32_LE" ] + if (mips_float_abi == "hard") { + defines += [ "MIPS_FPU_LE" ] +diff --git a/common_audio/BUILD.gn b/common_audio/BUILD.gn +index a45214f754..79d9321bbd 100644 +--- a/common_audio/BUILD.gn ++++ b/common_audio/BUILD.gn +@@ -66,7 +66,7 @@ rtc_library("common_audio") { + deps += [ ":common_audio_neon" ] + } + +- if (current_cpu == "x86" || current_cpu == "x64") { ++ if (target_cpu == "x86" || target_cpu == "x64") { + deps += [ ":common_audio_sse2" ] + deps += [ ":common_audio_avx2" ] + } +@@ -88,7 +88,7 @@ rtc_source_set("mock_common_audio") { + rtc_source_set("common_audio_c_arm_asm") { + sources = [] + deps = [] +- if (current_cpu == "arm") { ++ if (target_cpu == "arm") { + sources += [ "signal_processing/complex_bit_reverse_arm.S" ] + + if (arm_version >= 7) { +@@ -152,7 +152,7 @@ rtc_library("common_audio_c") { + "vad/webrtc_vad.c", + ] + +- if (current_cpu == "mipsel") { ++ if (target_cpu == "mipsel") { + sources += [ + "signal_processing/complex_bit_reverse_mips.c", + "signal_processing/complex_fft_mips.c", +@@ -170,7 +170,7 @@ rtc_library("common_audio_c") { + sources += [ "signal_processing/complex_fft.c" ] + } + +- if (current_cpu != "arm" && current_cpu != "mipsel") { ++ if (target_cpu != "arm" && target_cpu != "mipsel") { + sources += [ + "signal_processing/complex_bit_reverse.c", + "signal_processing/filter_ar_fast_q12.c", +@@ -231,7 +231,7 @@ rtc_library("fir_filter_factory") { + "../rtc_base/system:arch", + "../system_wrappers", + ] +- if (current_cpu == "x86" || current_cpu == "x64") { ++ if (target_cpu == "x86" || target_cpu == "x64") { + deps += [ ":common_audio_sse2" ] + deps += [ ":common_audio_avx2" ] + } +@@ -240,7 +240,7 @@ rtc_library("fir_filter_factory") { + } + } + +-if (current_cpu == "x86" || current_cpu == "x64") { ++if (target_cpu == "x86" || target_cpu == "x64") { + rtc_library("common_audio_sse2") { + sources = [ + "fir_filter_sse.cc", +@@ -289,7 +289,7 @@ if (rtc_build_with_neon) { + "resampler/sinc_resampler_neon.cc", + ] + +- if (current_cpu != "arm64") { ++ if (target_cpu != "arm64") { + # Enable compilation for the NEON instruction set. + suppressed_configs += [ "//build/config/compiler:compiler_arm_fpu" ] + cflags = [ "-mfpu=neon" ] +@@ -312,7 +312,7 @@ if (rtc_build_with_neon) { + "signal_processing/min_max_operations_neon.c", + ] + +- if (current_cpu != "arm64") { ++ if (target_cpu != "arm64") { + # Enable compilation for the NEON instruction set. 
+ suppressed_configs += [ "//build/config/compiler:compiler_arm_fpu" ] + cflags = [ "-mfpu=neon" ] +diff --git a/common_audio/third_party/ooura/BUILD.gn b/common_audio/third_party/ooura/BUILD.gn +index 0cdf98e591..a0ddf777db 100644 +--- a/common_audio/third_party/ooura/BUILD.gn ++++ b/common_audio/third_party/ooura/BUILD.gn +@@ -20,7 +20,7 @@ rtc_library("fft_size_128") { + ] + cflags = [] + +- if (current_cpu == "x86" || current_cpu == "x64") { ++ if (target_cpu == "x86" || target_cpu == "x64") { + sources += [ + "fft_size_128/ooura_fft_sse2.cc", + "fft_size_128/ooura_fft_tables_neon_sse2.h", +@@ -38,14 +38,14 @@ rtc_library("fft_size_128") { + + deps += [ "../../../common_audio" ] + +- if (current_cpu != "arm64") { ++ if (target_cpu != "arm64") { + # Enable compilation for the NEON instruction set. + suppressed_configs += [ "//build/config/compiler:compiler_arm_fpu" ] + cflags += [ "-mfpu=neon" ] + } + } + +- if (current_cpu == "mipsel" && mips_float_abi == "hard") { ++ if (target_cpu == "mipsel" && mips_float_abi == "hard") { + sources += [ "fft_size_128/ooura_fft_mips.cc" ] + } + } +diff --git a/common_audio/third_party/spl_sqrt_floor/BUILD.gn b/common_audio/third_party/spl_sqrt_floor/BUILD.gn +index ac862c65a8..e66ed2796e 100644 +--- a/common_audio/third_party/spl_sqrt_floor/BUILD.gn ++++ b/common_audio/third_party/spl_sqrt_floor/BUILD.gn +@@ -12,11 +12,11 @@ rtc_library("spl_sqrt_floor") { + visibility = [ "../..:common_audio_c" ] + sources = [ "spl_sqrt_floor.h" ] + deps = [] +- if (current_cpu == "arm") { ++ if (target_cpu == "arm") { + sources += [ "spl_sqrt_floor_arm.S" ] + + deps += [ "../../../rtc_base/system:asm_defines" ] +- } else if (current_cpu == "mipsel") { ++ } else if (target_cpu == "mipsel") { + sources += [ "spl_sqrt_floor_mips.c" ] + } else { + sources += [ "spl_sqrt_floor.c" ] +diff --git a/modules/audio_processing/aec3/BUILD.gn b/modules/audio_processing/aec3/BUILD.gn +index 3e11a245a1..c29b893b7d 100644 +--- a/modules/audio_processing/aec3/BUILD.gn ++++ b/modules/audio_processing/aec3/BUILD.gn +@@ -123,7 +123,7 @@ rtc_library("aec3") { + ] + + defines = [] +- if (rtc_build_with_neon && current_cpu != "arm64") { ++ if (rtc_build_with_neon && target_cpu != "arm64") { + suppressed_configs += [ "//build/config/compiler:compiler_arm_fpu" ] + cflags = [ "-mfpu=neon" ] + } +@@ -162,7 +162,7 @@ rtc_library("aec3") { + "//third_party/abseil-cpp/absl/types:optional", + ] + +- if (current_cpu == "x86" || current_cpu == "x64") { ++ if (target_cpu == "x86" || target_cpu == "x64") { + deps += [ ":aec3_avx2" ] + } + } +@@ -253,7 +253,7 @@ rtc_source_set("fft_data") { + ] + } + +-if (current_cpu == "x86" || current_cpu == "x64") { ++if (target_cpu == "x86" || target_cpu == "x64") { + rtc_library("aec3_avx2") { + configs += [ "..:apm_debug_dump" ] + sources = [ +diff --git a/modules/audio_processing/aecm/BUILD.gn b/modules/audio_processing/aecm/BUILD.gn +index 80f2901049..a77f04aba5 100644 +--- a/modules/audio_processing/aecm/BUILD.gn ++++ b/modules/audio_processing/aecm/BUILD.gn +@@ -29,14 +29,14 @@ rtc_library("aecm_core") { + if (rtc_build_with_neon) { + sources += [ "aecm_core_neon.cc" ] + +- if (current_cpu != "arm64") { ++ if (target_cpu != "arm64") { + # Enable compilation for the NEON instruction set. 
+ suppressed_configs += [ "//build/config/compiler:compiler_arm_fpu" ] + cflags += [ "-mfpu=neon" ] + } + } + +- if (current_cpu == "mipsel") { ++ if (target_cpu == "mipsel") { + sources += [ "aecm_core_mips.cc" ] + } else { + sources += [ "aecm_core_c.cc" ] +diff --git a/modules/audio_processing/agc/BUILD.gn b/modules/audio_processing/agc/BUILD.gn +index 508f901b08..75bef1450f 100644 +--- a/modules/audio_processing/agc/BUILD.gn ++++ b/modules/audio_processing/agc/BUILD.gn +@@ -83,7 +83,7 @@ rtc_library("legacy_agc") { + ] + + if (rtc_build_with_neon) { +- if (current_cpu != "arm64") { ++ if (target_cpu != "arm64") { + # Enable compilation for the NEON instruction set. + suppressed_configs += [ "//build/config/compiler:compiler_arm_fpu" ] + cflags = [ "-mfpu=neon" ] +diff --git a/modules/audio_processing/agc2/rnn_vad/BUILD.gn b/modules/audio_processing/agc2/rnn_vad/BUILD.gn +index 3003a585bd..d709eb3699 100644 +--- a/modules/audio_processing/agc2/rnn_vad/BUILD.gn ++++ b/modules/audio_processing/agc2/rnn_vad/BUILD.gn +@@ -18,7 +18,7 @@ rtc_library("rnn_vad") { + ] + + defines = [] +- if (rtc_build_with_neon && current_cpu != "arm64") { ++ if (rtc_build_with_neon && target_cpu != "arm64") { + suppressed_configs += [ "//build/config/compiler:compiler_arm_fpu" ] + cflags = [ "-mfpu=neon" ] + } +diff --git a/modules/audio_processing/ns/BUILD.gn b/modules/audio_processing/ns/BUILD.gn +index d818e23f3c..8c2e9dba84 100644 +--- a/modules/audio_processing/ns/BUILD.gn ++++ b/modules/audio_processing/ns/BUILD.gn +@@ -43,7 +43,7 @@ rtc_static_library("ns") { + ] + + defines = [] +- if (rtc_build_with_neon && current_cpu != "arm64") { ++ if (rtc_build_with_neon && target_cpu != "arm64") { + suppressed_configs += [ "//build/config/compiler:compiler_arm_fpu" ] + cflags = [ "-mfpu=neon" ] + } +diff --git a/modules/desktop_capture/BUILD.gn b/modules/desktop_capture/BUILD.gn +index 42d72865b9..c184e2fbbc 100644 +--- a/modules/desktop_capture/BUILD.gn ++++ b/modules/desktop_capture/BUILD.gn +@@ -10,7 +10,7 @@ import("//build/config/linux/gtk/gtk.gni") + import("//build/config/ui.gni") + import("../../webrtc.gni") + +-use_desktop_capture_differ_sse2 = current_cpu == "x86" || current_cpu == "x64" ++use_desktop_capture_differ_sse2 = target_cpu == "x86" || target_cpu == "x64" + + config("x11_config") { + if (rtc_use_x11_extensions) { +diff --git a/webrtc.gni b/webrtc.gni +index 1861347fa1..f47db9bec4 100644 +--- a/webrtc.gni ++++ b/webrtc.gni +@@ -175,13 +175,13 @@ declare_args() { + + # Selects fixed-point code where possible. + rtc_prefer_fixed_point = false +- if (current_cpu == "arm" || current_cpu == "arm64") { ++ if (target_cpu == "arm" || target_cpu == "arm64") { + rtc_prefer_fixed_point = true + } + + # Determines whether NEON code will be built. + rtc_build_with_neon = +- (current_cpu == "arm" && arm_use_neon) || current_cpu == "arm64" ++ (target_cpu == "arm" && arm_use_neon) || target_cpu == "arm64" + + # Enable this to build OpenH264 encoder/FFmpeg decoder. This is supported on + # all platforms except Android and iOS. 
Because FFmpeg can be built -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0042.patch b/third_party/libwebrtc/moz-patch-stack/0042.patch index 88a7a6f12517..c990e20cfcfb 100644 --- a/third_party/libwebrtc/moz-patch-stack/0042.patch +++ b/third_party/libwebrtc/moz-patch-stack/0042.patch @@ -1,300 +1,305 @@ -From: Nico Grunbaum -Date: Wed, 14 Jul 2021 22:26:00 +0000 -Subject: Bug 1654112 - deconflate the target and host architectures in - libwebrtc build files; r=mjf +From: "Byron Campen [:bwc]" +Date: Fri, 19 Feb 2021 15:56:00 -0600 +Subject: Bug 1654112 - Get RTCP BYE and RTP timeout handling working again + (from Bug 1595479) r=mjf,dminor -Differential Revision: https://phabricator.services.mozilla.com/D119707 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/58f47eacaf10d12e21dff7362743b6f4cdd1696b +Differential Revision: https://phabricator.services.mozilla.com/D106145 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/d0b311007c033e83824f5f6996a70ab9e870f31f --- - BUILD.gn | 6 +++--- - common_audio/BUILD.gn | 16 ++++++++-------- - common_audio/third_party/ooura/BUILD.gn | 6 +++--- - common_audio/third_party/spl_sqrt_floor/BUILD.gn | 4 ++-- - modules/audio_processing/aec3/BUILD.gn | 6 +++--- - modules/audio_processing/aecm/BUILD.gn | 4 ++-- - modules/audio_processing/agc/BUILD.gn | 2 +- - modules/audio_processing/agc2/rnn_vad/BUILD.gn | 2 +- - modules/audio_processing/ns/BUILD.gn | 2 +- - modules/desktop_capture/BUILD.gn | 2 +- - webrtc.gni | 4 ++-- - 11 files changed, 27 insertions(+), 27 deletions(-) + audio/audio_receive_stream.cc | 5 ++++- + audio/channel_receive.cc | 13 +++++++++---- + audio/channel_receive.h | 4 +++- + call/audio_receive_stream.h | 3 +++ + call/video_receive_stream.cc | 2 ++ + call/video_receive_stream.h | 3 +++ + modules/rtp_rtcp/include/rtp_rtcp_defines.h | 8 ++++++++ + modules/rtp_rtcp/source/rtcp_receiver.cc | 18 ++++++++++++++++-- + modules/rtp_rtcp/source/rtcp_receiver.h | 1 + + modules/rtp_rtcp/source/rtp_rtcp_interface.h | 3 +++ + video/rtp_video_stream_receiver2.cc | 7 +++++-- + 11 files changed, 57 insertions(+), 10 deletions(-) -diff --git a/BUILD.gn b/BUILD.gn -index 427bde39c0..ac93b47716 100644 ---- a/BUILD.gn -+++ b/BUILD.gn -@@ -410,12 +410,12 @@ config("common_config") { - } - } +diff --git a/audio/audio_receive_stream.cc b/audio/audio_receive_stream.cc +index 978bbb25b2..655b2761ac 100644 +--- a/audio/audio_receive_stream.cc ++++ b/audio/audio_receive_stream.cc +@@ -39,6 +39,8 @@ std::string AudioReceiveStreamInterface::Config::Rtp::ToString() const { + ss << "{remote_ssrc: " << remote_ssrc; + ss << ", local_ssrc: " << local_ssrc; + ss << ", nack: " << nack.ToString(); ++ ss << ", rtcp_event_observer: " ++ << (rtcp_event_observer ? 
"(rtcp_event_observer)" : "nullptr"); + ss << '}'; + return ss.str(); + } +@@ -73,7 +75,8 @@ std::unique_ptr CreateChannelReceive( + config.jitter_buffer_fast_accelerate, config.jitter_buffer_min_delay_ms, + config.enable_non_sender_rtt, config.decoder_factory, + config.codec_pair_id, std::move(config.frame_decryptor), +- config.crypto_options, std::move(config.frame_transformer)); ++ config.crypto_options, std::move(config.frame_transformer), ++ config.rtp.rtcp_event_observer); + } + } // namespace -- if (current_cpu == "arm64") { -+ if (target_cpu == "arm64") { - defines += [ "WEBRTC_ARCH_ARM64" ] - defines += [ "WEBRTC_HAS_NEON" ] - } +diff --git a/audio/channel_receive.cc b/audio/channel_receive.cc +index 270ba61ed5..b3aac7a1fb 100644 +--- a/audio/channel_receive.cc ++++ b/audio/channel_receive.cc +@@ -104,7 +104,8 @@ class ChannelReceive : public ChannelReceiveInterface, + absl::optional codec_pair_id, + rtc::scoped_refptr frame_decryptor, + const webrtc::CryptoOptions& crypto_options, +- rtc::scoped_refptr frame_transformer); ++ rtc::scoped_refptr frame_transformer, ++ RtcpEventObserver* rtcp_event_observer); + ~ChannelReceive() override; -- if (current_cpu == "arm") { -+ if (target_cpu == "arm") { - defines += [ "WEBRTC_ARCH_ARM" ] - if (arm_version >= 7) { - defines += [ "WEBRTC_ARCH_ARM_V7" ] -@@ -425,7 +425,7 @@ config("common_config") { - } - } + void SetSink(AudioSinkInterface* sink) override; +@@ -544,7 +545,8 @@ ChannelReceive::ChannelReceive( + absl::optional codec_pair_id, + rtc::scoped_refptr frame_decryptor, + const webrtc::CryptoOptions& crypto_options, +- rtc::scoped_refptr frame_transformer) ++ rtc::scoped_refptr frame_transformer, ++ RtcpEventObserver* rtcp_event_observer) + : worker_thread_(TaskQueueBase::Current()), + event_log_(rtc_event_log), + rtp_receive_statistics_(ReceiveStatistics::Create(clock)), +@@ -590,6 +592,7 @@ ChannelReceive::ChannelReceive( + configuration.local_media_ssrc = local_ssrc; + configuration.rtcp_packet_type_counter_observer = this; + configuration.non_sender_rtt_measurement = enable_non_sender_rtt; ++ configuration.rtcp_event_observer = rtcp_event_observer; -- if (current_cpu == "mipsel") { -+ if (target_cpu == "mipsel") { - defines += [ "MIPS32_LE" ] - if (mips_float_abi == "hard") { - defines += [ "MIPS_FPU_LE" ] -diff --git a/common_audio/BUILD.gn b/common_audio/BUILD.gn -index a45214f754..79d9321bbd 100644 ---- a/common_audio/BUILD.gn -+++ b/common_audio/BUILD.gn -@@ -66,7 +66,7 @@ rtc_library("common_audio") { - deps += [ ":common_audio_neon" ] - } - -- if (current_cpu == "x86" || current_cpu == "x64") { -+ if (target_cpu == "x86" || target_cpu == "x64") { - deps += [ ":common_audio_sse2" ] - deps += [ ":common_audio_avx2" ] - } -@@ -88,7 +88,7 @@ rtc_source_set("mock_common_audio") { - rtc_source_set("common_audio_c_arm_asm") { - sources = [] - deps = [] -- if (current_cpu == "arm") { -+ if (target_cpu == "arm") { - sources += [ "signal_processing/complex_bit_reverse_arm.S" ] - - if (arm_version >= 7) { -@@ -152,7 +152,7 @@ rtc_library("common_audio_c") { - "vad/webrtc_vad.c", - ] - -- if (current_cpu == "mipsel") { -+ if (target_cpu == "mipsel") { - sources += [ - "signal_processing/complex_bit_reverse_mips.c", - "signal_processing/complex_fft_mips.c", -@@ -170,7 +170,7 @@ rtc_library("common_audio_c") { - sources += [ "signal_processing/complex_fft.c" ] - } - -- if (current_cpu != "arm" && current_cpu != "mipsel") { -+ if (target_cpu != "arm" && target_cpu != "mipsel") { - sources += [ - "signal_processing/complex_bit_reverse.c", 
- "signal_processing/filter_ar_fast_q12.c", -@@ -231,7 +231,7 @@ rtc_library("fir_filter_factory") { - "../rtc_base/system:arch", - "../system_wrappers", - ] -- if (current_cpu == "x86" || current_cpu == "x64") { -+ if (target_cpu == "x86" || target_cpu == "x64") { - deps += [ ":common_audio_sse2" ] - deps += [ ":common_audio_avx2" ] - } -@@ -240,7 +240,7 @@ rtc_library("fir_filter_factory") { - } + if (frame_transformer) + InitFrameTransformerDelegate(std::move(frame_transformer)); +@@ -1117,13 +1120,15 @@ std::unique_ptr CreateChannelReceive( + absl::optional codec_pair_id, + rtc::scoped_refptr frame_decryptor, + const webrtc::CryptoOptions& crypto_options, +- rtc::scoped_refptr frame_transformer) { ++ rtc::scoped_refptr frame_transformer, ++ RtcpEventObserver* rtcp_event_observer) { + return std::make_unique( + clock, neteq_factory, audio_device_module, rtcp_send_transport, + rtc_event_log, local_ssrc, remote_ssrc, jitter_buffer_max_packets, + jitter_buffer_fast_playout, jitter_buffer_min_delay_ms, + enable_non_sender_rtt, decoder_factory, codec_pair_id, +- std::move(frame_decryptor), crypto_options, std::move(frame_transformer)); ++ std::move(frame_decryptor), crypto_options, std::move(frame_transformer), ++ rtcp_event_observer); } --if (current_cpu == "x86" || current_cpu == "x64") { -+if (target_cpu == "x86" || target_cpu == "x64") { - rtc_library("common_audio_sse2") { - sources = [ - "fir_filter_sse.cc", -@@ -289,7 +289,7 @@ if (rtc_build_with_neon) { - "resampler/sinc_resampler_neon.cc", - ] + } // namespace voe +diff --git a/audio/channel_receive.h b/audio/channel_receive.h +index ab69103269..5713d97aaa 100644 +--- a/audio/channel_receive.h ++++ b/audio/channel_receive.h +@@ -28,6 +28,7 @@ + #include "call/rtp_packet_sink_interface.h" + #include "call/syncable.h" + #include "modules/audio_coding/include/audio_coding_module_typedefs.h" ++#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" + #include "modules/rtp_rtcp/source/source_tracker.h" + #include "system_wrappers/include/clock.h" -- if (current_cpu != "arm64") { -+ if (target_cpu != "arm64") { - # Enable compilation for the NEON instruction set. - suppressed_configs += [ "//build/config/compiler:compiler_arm_fpu" ] - cflags = [ "-mfpu=neon" ] -@@ -312,7 +312,7 @@ if (rtc_build_with_neon) { - "signal_processing/min_max_operations_neon.c", - ] +@@ -186,7 +187,8 @@ std::unique_ptr CreateChannelReceive( + absl::optional codec_pair_id, + rtc::scoped_refptr frame_decryptor, + const webrtc::CryptoOptions& crypto_options, +- rtc::scoped_refptr frame_transformer); ++ rtc::scoped_refptr frame_transformer, ++ RtcpEventObserver* rtcp_event_observer); -- if (current_cpu != "arm64") { -+ if (target_cpu != "arm64") { - # Enable compilation for the NEON instruction set. 
- suppressed_configs += [ "//build/config/compiler:compiler_arm_fpu" ] - cflags = [ "-mfpu=neon" ] -diff --git a/common_audio/third_party/ooura/BUILD.gn b/common_audio/third_party/ooura/BUILD.gn -index 0cdf98e591..a0ddf777db 100644 ---- a/common_audio/third_party/ooura/BUILD.gn -+++ b/common_audio/third_party/ooura/BUILD.gn -@@ -20,7 +20,7 @@ rtc_library("fft_size_128") { - ] - cflags = [] + } // namespace voe + } // namespace webrtc +diff --git a/call/audio_receive_stream.h b/call/audio_receive_stream.h +index 4879311fdb..88b74b44ac 100644 +--- a/call/audio_receive_stream.h ++++ b/call/audio_receive_stream.h +@@ -19,6 +19,7 @@ + #include "absl/types/optional.h" + #include "api/audio_codecs/audio_decoder_factory.h" + #include "api/call/transport.h" ++#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" + #include "api/crypto/crypto_options.h" + #include "api/rtp_parameters.h" + #include "call/receive_stream.h" +@@ -117,6 +118,8 @@ class AudioReceiveStreamInterface : public MediaReceiveStreamInterface { -- if (current_cpu == "x86" || current_cpu == "x64") { -+ if (target_cpu == "x86" || target_cpu == "x64") { - sources += [ - "fft_size_128/ooura_fft_sse2.cc", - "fft_size_128/ooura_fft_tables_neon_sse2.h", -@@ -38,14 +38,14 @@ rtc_library("fft_size_128") { + // See NackConfig for description. + NackConfig nack; ++ ++ RtcpEventObserver* rtcp_event_observer = nullptr; + } rtp; - deps += [ "../../../common_audio" ] - -- if (current_cpu != "arm64") { -+ if (target_cpu != "arm64") { - # Enable compilation for the NEON instruction set. - suppressed_configs += [ "//build/config/compiler:compiler_arm_fpu" ] - cflags += [ "-mfpu=neon" ] - } - } - -- if (current_cpu == "mipsel" && mips_float_abi == "hard") { -+ if (target_cpu == "mipsel" && mips_float_abi == "hard") { - sources += [ "fft_size_128/ooura_fft_mips.cc" ] + // Receive-side RTT. +diff --git a/call/video_receive_stream.cc b/call/video_receive_stream.cc +index 3e2a51322f..46015cb0c7 100644 +--- a/call/video_receive_stream.cc ++++ b/call/video_receive_stream.cc +@@ -161,6 +161,8 @@ std::string VideoReceiveStreamInterface::Config::Rtp::ToString() const { + ss << pt << ", "; } + ss << '}'; ++ ss << ", rtcp_event_observer: " ++ << (rtcp_event_observer ? 
"(rtcp_event_observer)" : "nullptr"); + ss << '}'; + return ss.str(); } -diff --git a/common_audio/third_party/spl_sqrt_floor/BUILD.gn b/common_audio/third_party/spl_sqrt_floor/BUILD.gn -index ac862c65a8..e66ed2796e 100644 ---- a/common_audio/third_party/spl_sqrt_floor/BUILD.gn -+++ b/common_audio/third_party/spl_sqrt_floor/BUILD.gn -@@ -12,11 +12,11 @@ rtc_library("spl_sqrt_floor") { - visibility = [ "../..:common_audio_c" ] - sources = [ "spl_sqrt_floor.h" ] - deps = [] -- if (current_cpu == "arm") { -+ if (target_cpu == "arm") { - sources += [ "spl_sqrt_floor_arm.S" ] +diff --git a/call/video_receive_stream.h b/call/video_receive_stream.h +index a1fc204e7c..01ac7e0ba4 100644 +--- a/call/video_receive_stream.h ++++ b/call/video_receive_stream.h +@@ -20,6 +20,7 @@ + #include - deps += [ "../../../rtc_base/system:asm_defines" ] -- } else if (current_cpu == "mipsel") { -+ } else if (target_cpu == "mipsel") { - sources += [ "spl_sqrt_floor_mips.c" ] - } else { - sources += [ "spl_sqrt_floor.c" ] -diff --git a/modules/audio_processing/aec3/BUILD.gn b/modules/audio_processing/aec3/BUILD.gn -index 3e11a245a1..c29b893b7d 100644 ---- a/modules/audio_processing/aec3/BUILD.gn -+++ b/modules/audio_processing/aec3/BUILD.gn -@@ -123,7 +123,7 @@ rtc_library("aec3") { - ] + #include "api/call/transport.h" ++#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" + #include "api/crypto/crypto_options.h" + #include "api/rtp_headers.h" + #include "api/rtp_parameters.h" +@@ -241,6 +242,8 @@ class VideoReceiveStreamInterface : public MediaReceiveStreamInterface { + // meta data is expected to be present in generic frame descriptor + // RTP header extension). + std::set raw_payload_types; ++ ++ RtcpEventObserver* rtcp_event_observer = nullptr; + } rtp; - defines = [] -- if (rtc_build_with_neon && current_cpu != "arm64") { -+ if (rtc_build_with_neon && target_cpu != "arm64") { - suppressed_configs += [ "//build/config/compiler:compiler_arm_fpu" ] - cflags = [ "-mfpu=neon" ] + // Transport for outgoing packets (RTCP). +diff --git a/modules/rtp_rtcp/include/rtp_rtcp_defines.h b/modules/rtp_rtcp/include/rtp_rtcp_defines.h +index 698f284fa5..982e5c57ef 100644 +--- a/modules/rtp_rtcp/include/rtp_rtcp_defines.h ++++ b/modules/rtp_rtcp/include/rtp_rtcp_defines.h +@@ -173,6 +173,14 @@ class NetworkLinkRtcpObserver { + virtual void OnRttUpdate(Timestamp receive_time, TimeDelta rtt) {} + }; + ++class RtcpEventObserver { ++ public: ++ virtual void OnRtcpBye() = 0; ++ virtual void OnRtcpTimeout() = 0; ++ ++ virtual ~RtcpEventObserver() {} ++}; ++ + // NOTE! `kNumMediaTypes` must be kept in sync with RtpPacketMediaType! 
+ static constexpr size_t kNumMediaTypes = 5; + enum class RtpPacketMediaType : size_t { +diff --git a/modules/rtp_rtcp/source/rtcp_receiver.cc b/modules/rtp_rtcp/source/rtcp_receiver.cc +index a98b200c05..e2ad674012 100644 +--- a/modules/rtp_rtcp/source/rtcp_receiver.cc ++++ b/modules/rtp_rtcp/source/rtcp_receiver.cc +@@ -144,6 +144,7 @@ RTCPReceiver::RTCPReceiver(const RtpRtcpInterface::Configuration& config, + rtp_rtcp_(owner), + registered_ssrcs_(false, config), + network_link_rtcp_observer_(config.network_link_rtcp_observer), ++ rtcp_event_observer_(config.rtcp_event_observer), + rtcp_intra_frame_observer_(config.intra_frame_callback), + rtcp_loss_notification_observer_(config.rtcp_loss_notification_observer), + network_state_estimate_observer_(config.network_state_estimate_observer), +@@ -171,6 +172,7 @@ RTCPReceiver::RTCPReceiver(const RtpRtcpInterface::Configuration& config, + rtp_rtcp_(owner), + registered_ssrcs_(true, config), + network_link_rtcp_observer_(config.network_link_rtcp_observer), ++ rtcp_event_observer_(config.rtcp_event_observer), + rtcp_intra_frame_observer_(config.intra_frame_callback), + rtcp_loss_notification_observer_(config.rtcp_loss_notification_observer), + network_state_estimate_observer_(config.network_state_estimate_observer), +@@ -778,6 +780,10 @@ bool RTCPReceiver::HandleBye(const CommonHeader& rtcp_block) { + return false; } -@@ -162,7 +162,7 @@ rtc_library("aec3") { - "//third_party/abseil-cpp/absl/types:optional", - ] -- if (current_cpu == "x86" || current_cpu == "x64") { -+ if (target_cpu == "x86" || target_cpu == "x64") { - deps += [ ":aec3_avx2" ] - } - } -@@ -253,7 +253,7 @@ rtc_source_set("fft_data") { - ] ++ if (rtcp_event_observer_) { ++ rtcp_event_observer_->OnRtcpBye(); ++ } ++ + // Clear our lists. + rtts_.erase(bye.sender_ssrc()); + EraseIf(received_report_blocks_, [&](const auto& elem) { +@@ -1199,12 +1205,20 @@ std::vector RTCPReceiver::TmmbrReceived() { } --if (current_cpu == "x86" || current_cpu == "x64") { -+if (target_cpu == "x86" || target_cpu == "x64") { - rtc_library("aec3_avx2") { - configs += [ "..:apm_debug_dump" ] - sources = [ -diff --git a/modules/audio_processing/aecm/BUILD.gn b/modules/audio_processing/aecm/BUILD.gn -index 80f2901049..a77f04aba5 100644 ---- a/modules/audio_processing/aecm/BUILD.gn -+++ b/modules/audio_processing/aecm/BUILD.gn -@@ -29,14 +29,14 @@ rtc_library("aecm_core") { - if (rtc_build_with_neon) { - sources += [ "aecm_core_neon.cc" ] + bool RTCPReceiver::RtcpRrTimeoutLocked(Timestamp now) { +- return ResetTimestampIfExpired(now, last_received_rb_, report_interval_); ++ bool result = ResetTimestampIfExpired(now, last_received_rb_, report_interval_); ++ if (result && rtcp_event_observer_) { ++ rtcp_event_observer_->OnRtcpTimeout(); ++ } ++ return result; + } -- if (current_cpu != "arm64") { -+ if (target_cpu != "arm64") { - # Enable compilation for the NEON instruction set. 
- suppressed_configs += [ "//build/config/compiler:compiler_arm_fpu" ] - cflags += [ "-mfpu=neon" ] - } - } + bool RTCPReceiver::RtcpRrSequenceNumberTimeoutLocked(Timestamp now) { +- return ResetTimestampIfExpired(now, last_increased_sequence_number_, ++ bool result = ResetTimestampIfExpired(now, last_increased_sequence_number_, + report_interval_); ++ if (result && rtcp_event_observer_) { ++ rtcp_event_observer_->OnRtcpTimeout(); ++ } ++ return result; + } -- if (current_cpu == "mipsel") { -+ if (target_cpu == "mipsel") { - sources += [ "aecm_core_mips.cc" ] - } else { - sources += [ "aecm_core_c.cc" ] -diff --git a/modules/audio_processing/agc/BUILD.gn b/modules/audio_processing/agc/BUILD.gn -index 508f901b08..75bef1450f 100644 ---- a/modules/audio_processing/agc/BUILD.gn -+++ b/modules/audio_processing/agc/BUILD.gn -@@ -83,7 +83,7 @@ rtc_library("legacy_agc") { - ] + } // namespace webrtc +diff --git a/modules/rtp_rtcp/source/rtcp_receiver.h b/modules/rtp_rtcp/source/rtcp_receiver.h +index e748b257e8..36e117af55 100644 +--- a/modules/rtp_rtcp/source/rtcp_receiver.h ++++ b/modules/rtp_rtcp/source/rtcp_receiver.h +@@ -362,6 +362,7 @@ class RTCPReceiver final { + RegisteredSsrcs registered_ssrcs_; - if (rtc_build_with_neon) { -- if (current_cpu != "arm64") { -+ if (target_cpu != "arm64") { - # Enable compilation for the NEON instruction set. - suppressed_configs += [ "//build/config/compiler:compiler_arm_fpu" ] - cflags = [ "-mfpu=neon" ] -diff --git a/modules/audio_processing/agc2/rnn_vad/BUILD.gn b/modules/audio_processing/agc2/rnn_vad/BUILD.gn -index 3003a585bd..d709eb3699 100644 ---- a/modules/audio_processing/agc2/rnn_vad/BUILD.gn -+++ b/modules/audio_processing/agc2/rnn_vad/BUILD.gn -@@ -18,7 +18,7 @@ rtc_library("rnn_vad") { - ] + NetworkLinkRtcpObserver* const network_link_rtcp_observer_; ++ RtcpEventObserver* const rtcp_event_observer_; + RtcpIntraFrameObserver* const rtcp_intra_frame_observer_; + RtcpLossNotificationObserver* const rtcp_loss_notification_observer_; + NetworkStateEstimateObserver* const network_state_estimate_observer_; +diff --git a/modules/rtp_rtcp/source/rtp_rtcp_interface.h b/modules/rtp_rtcp/source/rtp_rtcp_interface.h +index dc86f92bf5..889b099abd 100644 +--- a/modules/rtp_rtcp/source/rtp_rtcp_interface.h ++++ b/modules/rtp_rtcp/source/rtp_rtcp_interface.h +@@ -74,6 +74,9 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { + // bandwidth estimation related message. 
+ NetworkLinkRtcpObserver* network_link_rtcp_observer = nullptr; - defines = [] -- if (rtc_build_with_neon && current_cpu != "arm64") { -+ if (rtc_build_with_neon && target_cpu != "arm64") { - suppressed_configs += [ "//build/config/compiler:compiler_arm_fpu" ] - cflags = [ "-mfpu=neon" ] - } -diff --git a/modules/audio_processing/ns/BUILD.gn b/modules/audio_processing/ns/BUILD.gn -index d818e23f3c..8c2e9dba84 100644 ---- a/modules/audio_processing/ns/BUILD.gn -+++ b/modules/audio_processing/ns/BUILD.gn -@@ -43,7 +43,7 @@ rtc_static_library("ns") { - ] ++ // Called when we receive a RTCP bye or timeout ++ RtcpEventObserver* rtcp_event_observer = nullptr; ++ + NetworkStateEstimateObserver* network_state_estimate_observer = nullptr; + TransportFeedbackObserver* transport_feedback_callback = nullptr; + VideoBitrateAllocationObserver* bitrate_allocation_observer = nullptr; +diff --git a/video/rtp_video_stream_receiver2.cc b/video/rtp_video_stream_receiver2.cc +index 67a0ad5f69..75eccd3002 100644 +--- a/video/rtp_video_stream_receiver2.cc ++++ b/video/rtp_video_stream_receiver2.cc +@@ -83,7 +83,8 @@ std::unique_ptr CreateRtpRtcpModule( + RtcpCnameCallback* rtcp_cname_callback, + bool non_sender_rtt_measurement, + uint32_t local_ssrc, +- RtcEventLog* rtc_event_log) { ++ RtcEventLog* rtc_event_log, ++ RtcpEventObserver* rtcp_event_observer) { + RtpRtcpInterface::Configuration configuration; + configuration.clock = clock; + configuration.audio = false; +@@ -95,6 +96,7 @@ std::unique_ptr CreateRtpRtcpModule( + rtcp_packet_type_counter_observer; + configuration.rtcp_cname_callback = rtcp_cname_callback; + configuration.local_media_ssrc = local_ssrc; ++ configuration.rtcp_event_observer = rtcp_event_observer; + configuration.non_sender_rtt_measurement = non_sender_rtt_measurement; + configuration.event_log = rtc_event_log; - defines = [] -- if (rtc_build_with_neon && current_cpu != "arm64") { -+ if (rtc_build_with_neon && target_cpu != "arm64") { - suppressed_configs += [ "//build/config/compiler:compiler_arm_fpu" ] - cflags = [ "-mfpu=neon" ] - } -diff --git a/modules/desktop_capture/BUILD.gn b/modules/desktop_capture/BUILD.gn -index 42d72865b9..c184e2fbbc 100644 ---- a/modules/desktop_capture/BUILD.gn -+++ b/modules/desktop_capture/BUILD.gn -@@ -10,7 +10,7 @@ import("//build/config/linux/gtk/gtk.gni") - import("//build/config/ui.gni") - import("../../webrtc.gni") - --use_desktop_capture_differ_sse2 = current_cpu == "x86" || current_cpu == "x64" -+use_desktop_capture_differ_sse2 = target_cpu == "x86" || target_cpu == "x64" - - config("x11_config") { - if (rtc_use_x11_extensions) { -diff --git a/webrtc.gni b/webrtc.gni -index c25caf85e0..9e49573c98 100644 ---- a/webrtc.gni -+++ b/webrtc.gni -@@ -171,13 +171,13 @@ declare_args() { - - # Selects fixed-point code where possible. - rtc_prefer_fixed_point = false -- if (current_cpu == "arm" || current_cpu == "arm64") { -+ if (target_cpu == "arm" || target_cpu == "arm64") { - rtc_prefer_fixed_point = true - } - - # Determines whether NEON code will be built. - rtc_build_with_neon = -- (current_cpu == "arm" && arm_use_neon) || current_cpu == "arm64" -+ (target_cpu == "arm" && arm_use_neon) || target_cpu == "arm64" - - # Enable this to build OpenH264 encoder/FFmpeg decoder. This is supported on - # all platforms except Android and iOS. 
Because FFmpeg can be built +@@ -275,7 +277,8 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( + rtcp_cname_callback, + config_.rtp.rtcp_xr.receiver_reference_time_report, + config_.rtp.local_ssrc, +- event_log)), ++ event_log, ++ config_.rtp.rtcp_event_observer)), + nack_periodic_processor_(nack_periodic_processor), + complete_frame_callback_(complete_frame_callback), + keyframe_request_method_(config_.rtp.keyframe_method), -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0043.patch b/third_party/libwebrtc/moz-patch-stack/0043.patch index 809e1616a7ad..ffb76543a7c4 100644 --- a/third_party/libwebrtc/moz-patch-stack/0043.patch +++ b/third_party/libwebrtc/moz-patch-stack/0043.patch @@ -1,305 +1,32 @@ From: "Byron Campen [:bwc]" -Date: Fri, 19 Feb 2021 15:56:00 -0600 -Subject: Bug 1654112 - Get RTCP BYE and RTP timeout handling working again - (from Bug 1595479) r=mjf,dminor +Date: Fri, 12 Mar 2021 08:53:00 -0600 +Subject: Bug 1654112 - libwebrtc modification: Init some stats that were being + passed to us uninitialized. r=ng -Differential Revision: https://phabricator.services.mozilla.com/D106145 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/d0b311007c033e83824f5f6996a70ab9e870f31f +Differential Revision: https://phabricator.services.mozilla.com/D108673 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/c385bb870413b925af48df97aa1f2b80a26e78d2 --- - audio/audio_receive_stream.cc | 5 ++++- - audio/channel_receive.cc | 13 +++++++++---- - audio/channel_receive.h | 4 +++- - call/audio_receive_stream.h | 3 +++ - call/video_receive_stream.cc | 2 ++ - call/video_receive_stream.h | 3 +++ - modules/rtp_rtcp/include/rtp_rtcp_defines.h | 8 ++++++++ - modules/rtp_rtcp/source/rtcp_receiver.cc | 18 ++++++++++++++++-- - modules/rtp_rtcp/source/rtcp_receiver.h | 1 + - modules/rtp_rtcp/source/rtp_rtcp_interface.h | 3 +++ - video/rtp_video_stream_receiver2.cc | 7 +++++-- - 11 files changed, 57 insertions(+), 10 deletions(-) + call/video_receive_stream.h | 7 ++++--- + 1 file changed, 4 insertions(+), 3 deletions(-) -diff --git a/audio/audio_receive_stream.cc b/audio/audio_receive_stream.cc -index 978bbb25b2..655b2761ac 100644 ---- a/audio/audio_receive_stream.cc -+++ b/audio/audio_receive_stream.cc -@@ -39,6 +39,8 @@ std::string AudioReceiveStreamInterface::Config::Rtp::ToString() const { - ss << "{remote_ssrc: " << remote_ssrc; - ss << ", local_ssrc: " << local_ssrc; - ss << ", nack: " << nack.ToString(); -+ ss << ", rtcp_event_observer: " -+ << (rtcp_event_observer ? 
"(rtcp_event_observer)" : "nullptr"); - ss << '}'; - return ss.str(); - } -@@ -73,7 +75,8 @@ std::unique_ptr CreateChannelReceive( - config.jitter_buffer_fast_accelerate, config.jitter_buffer_min_delay_ms, - config.enable_non_sender_rtt, config.decoder_factory, - config.codec_pair_id, std::move(config.frame_decryptor), -- config.crypto_options, std::move(config.frame_transformer)); -+ config.crypto_options, std::move(config.frame_transformer), -+ config.rtp.rtcp_event_observer); - } - } // namespace - -diff --git a/audio/channel_receive.cc b/audio/channel_receive.cc -index 32c46764d2..fc7f906e49 100644 ---- a/audio/channel_receive.cc -+++ b/audio/channel_receive.cc -@@ -102,7 +102,8 @@ class ChannelReceive : public ChannelReceiveInterface, - absl::optional codec_pair_id, - rtc::scoped_refptr frame_decryptor, - const webrtc::CryptoOptions& crypto_options, -- rtc::scoped_refptr frame_transformer); -+ rtc::scoped_refptr frame_transformer, -+ RtcpEventObserver* rtcp_event_observer); - ~ChannelReceive() override; - - void SetSink(AudioSinkInterface* sink) override; -@@ -540,7 +541,8 @@ ChannelReceive::ChannelReceive( - absl::optional codec_pair_id, - rtc::scoped_refptr frame_decryptor, - const webrtc::CryptoOptions& crypto_options, -- rtc::scoped_refptr frame_transformer) -+ rtc::scoped_refptr frame_transformer, -+ RtcpEventObserver* rtcp_event_observer) - : worker_thread_(TaskQueueBase::Current()), - event_log_(rtc_event_log), - rtp_receive_statistics_(ReceiveStatistics::Create(clock)), -@@ -585,6 +587,7 @@ ChannelReceive::ChannelReceive( - configuration.local_media_ssrc = local_ssrc; - configuration.rtcp_packet_type_counter_observer = this; - configuration.non_sender_rtt_measurement = enable_non_sender_rtt; -+ configuration.rtcp_event_observer = rtcp_event_observer; - - if (frame_transformer) - InitFrameTransformerDelegate(std::move(frame_transformer)); -@@ -1112,13 +1115,15 @@ std::unique_ptr CreateChannelReceive( - absl::optional codec_pair_id, - rtc::scoped_refptr frame_decryptor, - const webrtc::CryptoOptions& crypto_options, -- rtc::scoped_refptr frame_transformer) { -+ rtc::scoped_refptr frame_transformer, -+ RtcpEventObserver* rtcp_event_observer) { - return std::make_unique( - clock, neteq_factory, audio_device_module, rtcp_send_transport, - rtc_event_log, local_ssrc, remote_ssrc, jitter_buffer_max_packets, - jitter_buffer_fast_playout, jitter_buffer_min_delay_ms, - enable_non_sender_rtt, decoder_factory, codec_pair_id, -- std::move(frame_decryptor), crypto_options, std::move(frame_transformer)); -+ std::move(frame_decryptor), crypto_options, std::move(frame_transformer), -+ rtcp_event_observer); - } - - } // namespace voe -diff --git a/audio/channel_receive.h b/audio/channel_receive.h -index ab69103269..5713d97aaa 100644 ---- a/audio/channel_receive.h -+++ b/audio/channel_receive.h -@@ -28,6 +28,7 @@ - #include "call/rtp_packet_sink_interface.h" - #include "call/syncable.h" - #include "modules/audio_coding/include/audio_coding_module_typedefs.h" -+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" - #include "modules/rtp_rtcp/source/source_tracker.h" - #include "system_wrappers/include/clock.h" - -@@ -186,7 +187,8 @@ std::unique_ptr CreateChannelReceive( - absl::optional codec_pair_id, - rtc::scoped_refptr frame_decryptor, - const webrtc::CryptoOptions& crypto_options, -- rtc::scoped_refptr frame_transformer); -+ rtc::scoped_refptr frame_transformer, -+ RtcpEventObserver* rtcp_event_observer); - - } // namespace voe - } // namespace webrtc -diff --git 
a/call/audio_receive_stream.h b/call/audio_receive_stream.h -index 4879311fdb..88b74b44ac 100644 ---- a/call/audio_receive_stream.h -+++ b/call/audio_receive_stream.h -@@ -19,6 +19,7 @@ - #include "absl/types/optional.h" - #include "api/audio_codecs/audio_decoder_factory.h" - #include "api/call/transport.h" -+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" - #include "api/crypto/crypto_options.h" - #include "api/rtp_parameters.h" - #include "call/receive_stream.h" -@@ -117,6 +118,8 @@ class AudioReceiveStreamInterface : public MediaReceiveStreamInterface { - - // See NackConfig for description. - NackConfig nack; -+ -+ RtcpEventObserver* rtcp_event_observer = nullptr; - } rtp; - - // Receive-side RTT. -diff --git a/call/video_receive_stream.cc b/call/video_receive_stream.cc -index 3e2a51322f..46015cb0c7 100644 ---- a/call/video_receive_stream.cc -+++ b/call/video_receive_stream.cc -@@ -161,6 +161,8 @@ std::string VideoReceiveStreamInterface::Config::Rtp::ToString() const { - ss << pt << ", "; - } - ss << '}'; -+ ss << ", rtcp_event_observer: " -+ << (rtcp_event_observer ? "(rtcp_event_observer)" : "nullptr"); - ss << '}'; - return ss.str(); - } diff --git a/call/video_receive_stream.h b/call/video_receive_stream.h -index e7489a6cdf..af23f7942b 100644 +index 01ac7e0ba4..01fb08a009 100644 --- a/call/video_receive_stream.h +++ b/call/video_receive_stream.h -@@ -20,6 +20,7 @@ - #include +@@ -152,9 +152,10 @@ class VideoReceiveStreamInterface : public MediaReceiveStreamInterface { + RtcpPacketTypeCounter rtcp_packet_type_counts; + absl::optional rtx_rtp_stats; - #include "api/call/transport.h" -+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" - #include "api/crypto/crypto_options.h" - #include "api/rtp_headers.h" - #include "api/rtp_parameters.h" -@@ -241,6 +242,8 @@ class VideoReceiveStreamInterface : public MediaReceiveStreamInterface { - // meta data is expected to be present in generic frame descriptor - // RTP header extension). - std::set raw_payload_types; -+ -+ RtcpEventObserver* rtcp_event_observer = nullptr; - } rtp; +- uint32_t rtcp_sender_packets_sent; +- uint32_t rtcp_sender_octets_sent; +- int64_t rtcp_sender_ntp_timestamp_ms; ++ // Mozilla modification: Init these three. ++ uint32_t rtcp_sender_packets_sent = 0; ++ uint32_t rtcp_sender_octets_sent = 0; ++ int64_t rtcp_sender_ntp_timestamp_ms = 0; - // Transport for outgoing packets (RTCP). -diff --git a/modules/rtp_rtcp/include/rtp_rtcp_defines.h b/modules/rtp_rtcp/include/rtp_rtcp_defines.h -index 594b3334f4..3ccc0e0f5c 100644 ---- a/modules/rtp_rtcp/include/rtp_rtcp_defines.h -+++ b/modules/rtp_rtcp/include/rtp_rtcp_defines.h -@@ -173,6 +173,14 @@ class NetworkLinkRtcpObserver { - virtual void OnRttUpdate(Timestamp receive_time, TimeDelta rtt) {} - }; - -+class RtcpEventObserver { -+ public: -+ virtual void OnRtcpBye() = 0; -+ virtual void OnRtcpTimeout() = 0; -+ -+ virtual ~RtcpEventObserver() {} -+}; -+ - // NOTE! `kNumMediaTypes` must be kept in sync with RtpPacketMediaType! 
- static constexpr size_t kNumMediaTypes = 5; - enum class RtpPacketMediaType : size_t { -diff --git a/modules/rtp_rtcp/source/rtcp_receiver.cc b/modules/rtp_rtcp/source/rtcp_receiver.cc -index c5b4afefc8..fda09577d3 100644 ---- a/modules/rtp_rtcp/source/rtcp_receiver.cc -+++ b/modules/rtp_rtcp/source/rtcp_receiver.cc -@@ -143,6 +143,7 @@ RTCPReceiver::RTCPReceiver(const RtpRtcpInterface::Configuration& config, - rtp_rtcp_(owner), - registered_ssrcs_(false, config), - network_link_rtcp_observer_(config.network_link_rtcp_observer), -+ rtcp_event_observer_(config.rtcp_event_observer), - rtcp_intra_frame_observer_(config.intra_frame_callback), - rtcp_loss_notification_observer_(config.rtcp_loss_notification_observer), - network_state_estimate_observer_(config.network_state_estimate_observer), -@@ -170,6 +171,7 @@ RTCPReceiver::RTCPReceiver(const RtpRtcpInterface::Configuration& config, - rtp_rtcp_(owner), - registered_ssrcs_(true, config), - network_link_rtcp_observer_(config.network_link_rtcp_observer), -+ rtcp_event_observer_(config.rtcp_event_observer), - rtcp_intra_frame_observer_(config.intra_frame_callback), - rtcp_loss_notification_observer_(config.rtcp_loss_notification_observer), - network_state_estimate_observer_(config.network_state_estimate_observer), -@@ -778,6 +780,10 @@ bool RTCPReceiver::HandleBye(const CommonHeader& rtcp_block) { - return false; - } - -+ if (rtcp_event_observer_) { -+ rtcp_event_observer_->OnRtcpBye(); -+ } -+ - // Clear our lists. - rtts_.erase(bye.sender_ssrc()); - EraseIf(received_report_blocks_, [&](const auto& elem) { -@@ -1200,12 +1206,20 @@ std::vector RTCPReceiver::TmmbrReceived() { - } - - bool RTCPReceiver::RtcpRrTimeoutLocked(Timestamp now) { -- return ResetTimestampIfExpired(now, last_received_rb_, report_interval_); -+ bool result = ResetTimestampIfExpired(now, last_received_rb_, report_interval_); -+ if (result && rtcp_event_observer_) { -+ rtcp_event_observer_->OnRtcpTimeout(); -+ } -+ return result; - } - - bool RTCPReceiver::RtcpRrSequenceNumberTimeoutLocked(Timestamp now) { -- return ResetTimestampIfExpired(now, last_increased_sequence_number_, -+ bool result = ResetTimestampIfExpired(now, last_increased_sequence_number_, - report_interval_); -+ if (result && rtcp_event_observer_) { -+ rtcp_event_observer_->OnRtcpTimeout(); -+ } -+ return result; - } - - } // namespace webrtc -diff --git a/modules/rtp_rtcp/source/rtcp_receiver.h b/modules/rtp_rtcp/source/rtcp_receiver.h -index 4ed3efad82..b727934325 100644 ---- a/modules/rtp_rtcp/source/rtcp_receiver.h -+++ b/modules/rtp_rtcp/source/rtcp_receiver.h -@@ -361,6 +361,7 @@ class RTCPReceiver final { - RegisteredSsrcs registered_ssrcs_; - - NetworkLinkRtcpObserver* const network_link_rtcp_observer_; -+ RtcpEventObserver* const rtcp_event_observer_; - RtcpIntraFrameObserver* const rtcp_intra_frame_observer_; - RtcpLossNotificationObserver* const rtcp_loss_notification_observer_; - NetworkStateEstimateObserver* const network_state_estimate_observer_; -diff --git a/modules/rtp_rtcp/source/rtp_rtcp_interface.h b/modules/rtp_rtcp/source/rtp_rtcp_interface.h -index 7b5ec77f3d..b43832b5fb 100644 ---- a/modules/rtp_rtcp/source/rtp_rtcp_interface.h -+++ b/modules/rtp_rtcp/source/rtp_rtcp_interface.h -@@ -73,6 +73,9 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { - // bandwidth estimation related message. 
- NetworkLinkRtcpObserver* network_link_rtcp_observer = nullptr; - -+ // Called when we receive a RTCP bye or timeout -+ RtcpEventObserver* rtcp_event_observer = nullptr; -+ - NetworkStateEstimateObserver* network_state_estimate_observer = nullptr; - TransportFeedbackObserver* transport_feedback_callback = nullptr; - VideoBitrateAllocationObserver* bitrate_allocation_observer = nullptr; -diff --git a/video/rtp_video_stream_receiver2.cc b/video/rtp_video_stream_receiver2.cc -index 7d7021ee85..76395b568e 100644 ---- a/video/rtp_video_stream_receiver2.cc -+++ b/video/rtp_video_stream_receiver2.cc -@@ -83,7 +83,8 @@ std::unique_ptr CreateRtpRtcpModule( - RtcpCnameCallback* rtcp_cname_callback, - bool non_sender_rtt_measurement, - uint32_t local_ssrc, -- RtcEventLog* rtc_event_log) { -+ RtcEventLog* rtc_event_log, -+ RtcpEventObserver* rtcp_event_observer) { - RtpRtcpInterface::Configuration configuration; - configuration.clock = clock; - configuration.audio = false; -@@ -95,6 +96,7 @@ std::unique_ptr CreateRtpRtcpModule( - rtcp_packet_type_counter_observer; - configuration.rtcp_cname_callback = rtcp_cname_callback; - configuration.local_media_ssrc = local_ssrc; -+ configuration.rtcp_event_observer = rtcp_event_observer; - configuration.non_sender_rtt_measurement = non_sender_rtt_measurement; - configuration.event_log = rtc_event_log; - -@@ -275,7 +277,8 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( - rtcp_cname_callback, - config_.rtp.rtcp_xr.receiver_reference_time_report, - config_.rtp.local_ssrc, -- event_log)), -+ event_log, -+ config_.rtp.rtcp_event_observer)), - nack_periodic_processor_(nack_periodic_processor), - complete_frame_callback_(complete_frame_callback), - keyframe_request_method_(config_.rtp.keyframe_method), + // Timing frame info: all important timestamps for a full lifetime of a + // single 'timing frame'. -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0044.patch b/third_party/libwebrtc/moz-patch-stack/0044.patch index 36ed1bfb2099..9d705699e139 100644 --- a/third_party/libwebrtc/moz-patch-stack/0044.patch +++ b/third_party/libwebrtc/moz-patch-stack/0044.patch @@ -1,32 +1,76 @@ From: "Byron Campen [:bwc]" -Date: Fri, 12 Mar 2021 08:53:00 -0600 -Subject: Bug 1654112 - libwebrtc modification: Init some stats that were being - passed to us uninitialized. r=ng +Date: Fri, 12 Mar 2021 08:55:00 -0600 +Subject: Bug 1654112 - libwebrtc modification: Surface video RTCP SR stats + again. r=ng -Differential Revision: https://phabricator.services.mozilla.com/D108673 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/c385bb870413b925af48df97aa1f2b80a26e78d2 +libwebrtc has stopped surfacing these, and Chromium does not support +these stats at all. 
+ +Differential Revision: https://phabricator.services.mozilla.com/D108674 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/edac9d01a9ac7594f4b22708a4690753638ca32c --- - call/video_receive_stream.h | 7 ++++--- - 1 file changed, 4 insertions(+), 3 deletions(-) + video/rtp_video_stream_receiver2.cc | 10 ++++++++++ + video/rtp_video_stream_receiver2.h | 6 ++++++ + video/video_receive_stream2.cc | 8 ++++++++ + 3 files changed, 24 insertions(+) -diff --git a/call/video_receive_stream.h b/call/video_receive_stream.h -index af23f7942b..023bdbb2c4 100644 ---- a/call/video_receive_stream.h -+++ b/call/video_receive_stream.h -@@ -152,9 +152,10 @@ class VideoReceiveStreamInterface : public MediaReceiveStreamInterface { - RtcpPacketTypeCounter rtcp_packet_type_counts; - absl::optional rtx_rtp_stats; +diff --git a/video/rtp_video_stream_receiver2.cc b/video/rtp_video_stream_receiver2.cc +index 75eccd3002..0fa7fc5b9c 100644 +--- a/video/rtp_video_stream_receiver2.cc ++++ b/video/rtp_video_stream_receiver2.cc +@@ -1057,6 +1057,16 @@ absl::optional RtpVideoStreamReceiver2::LastReceivedKeyframePacketMs() + return absl::nullopt; + } -- uint32_t rtcp_sender_packets_sent; -- uint32_t rtcp_sender_octets_sent; -- int64_t rtcp_sender_ntp_timestamp_ms; -+ // Mozilla modification: Init these three. -+ uint32_t rtcp_sender_packets_sent = 0; -+ uint32_t rtcp_sender_octets_sent = 0; -+ int64_t rtcp_sender_ntp_timestamp_ms = 0; ++// Mozilla modification: VideoReceiveStream2 and friends do not surface RTCP ++// stats at all, and even on the most recent libwebrtc code there does not ++// seem to be any support for these stats right now. So, we hack this in. ++void RtpVideoStreamReceiver2::RemoteRTCPSenderInfo( ++ uint32_t* packet_count, uint32_t* octet_count, ++ int64_t* ntp_timestamp_ms) const { ++ RTC_DCHECK_RUN_ON(&worker_task_checker_); ++ rtp_rtcp_->RemoteRTCPSenderInfo(packet_count, octet_count, ntp_timestamp_ms); ++} ++ + void RtpVideoStreamReceiver2::ManageFrame( + std::unique_ptr frame) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); +diff --git a/video/rtp_video_stream_receiver2.h b/video/rtp_video_stream_receiver2.h +index 0178355262..be8bce770f 100644 +--- a/video/rtp_video_stream_receiver2.h ++++ b/video/rtp_video_stream_receiver2.h +@@ -207,6 +207,12 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, + absl::optional LastReceivedFrameRtpTimestamp() const; + absl::optional LastReceivedKeyframePacketMs() const; + ++ // Mozilla modification: VideoReceiveStream2 and friends do not surface RTCP ++ // stats at all, and even on the most recent libwebrtc code there does not ++ // seem to be any support for these stats right now. So, we hack this in. ++ void RemoteRTCPSenderInfo(uint32_t* packet_count, uint32_t* octet_count, ++ int64_t* ntp_timestamp_ms) const; ++ + private: + // Implements RtpVideoFrameReceiver. + void ManageFrame(std::unique_ptr frame) override; +diff --git a/video/video_receive_stream2.cc b/video/video_receive_stream2.cc +index 31a52aa1b7..707fb64f9b 100644 +--- a/video/video_receive_stream2.cc ++++ b/video/video_receive_stream2.cc +@@ -574,6 +574,14 @@ VideoReceiveStreamInterface::Stats VideoReceiveStream2::GetStats() const { + } + } + } ++ ++ // Mozilla modification: VideoReceiveStream2 and friends do not surface RTCP ++ // stats at all, and even on the most recent libwebrtc code there does not ++ // seem to be any support for these stats right now. So, we hack this in. 
++ rtp_video_stream_receiver_.RemoteRTCPSenderInfo( ++ &stats.rtcp_sender_packets_sent, &stats.rtcp_sender_octets_sent, ++ &stats.rtcp_sender_ntp_timestamp_ms); ++ + return stats; + } - // Timing frame info: all important timestamps for a full lifetime of a - // single 'timing frame'. -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0045.patch b/third_party/libwebrtc/moz-patch-stack/0045.patch index fb09a29e1184..82a9d1268217 100644 --- a/third_party/libwebrtc/moz-patch-stack/0045.patch +++ b/third_party/libwebrtc/moz-patch-stack/0045.patch @@ -1,76 +1,38 @@ -From: "Byron Campen [:bwc]" -Date: Fri, 12 Mar 2021 08:55:00 -0600 -Subject: Bug 1654112 - libwebrtc modification: Surface video RTCP SR stats - again. r=ng +From: Nico Grunbaum +Date: Mon, 26 Jul 2021 22:51:00 -0700 +Subject: Bug 1654112 - fix timestamp issues with RTP sources; r=mjf -libwebrtc has stopped surfacing these, and Chromium does not support -these stats at all. - -Differential Revision: https://phabricator.services.mozilla.com/D108674 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/edac9d01a9ac7594f4b22708a4690753638ca32c +Differential Revision: https://phabricator.services.mozilla.com/D120930 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/de8c14e4972f717bf937b6f2fffcd08c35e21ced --- - video/rtp_video_stream_receiver2.cc | 10 ++++++++++ - video/rtp_video_stream_receiver2.h | 6 ++++++ - video/video_receive_stream2.cc | 8 ++++++++ - 3 files changed, 24 insertions(+) + modules/rtp_rtcp/source/source_tracker.cc | 7 ++++++- + 1 file changed, 6 insertions(+), 1 deletion(-) -diff --git a/video/rtp_video_stream_receiver2.cc b/video/rtp_video_stream_receiver2.cc -index 76395b568e..be36dc8305 100644 ---- a/video/rtp_video_stream_receiver2.cc -+++ b/video/rtp_video_stream_receiver2.cc -@@ -1051,6 +1051,16 @@ absl::optional RtpVideoStreamReceiver2::LastReceivedKeyframePacketMs() - return absl::nullopt; - } +diff --git a/modules/rtp_rtcp/source/source_tracker.cc b/modules/rtp_rtcp/source/source_tracker.cc +index 13d848dce0..46a46ef51d 100644 +--- a/modules/rtp_rtcp/source/source_tracker.cc ++++ b/modules/rtp_rtcp/source/source_tracker.cc +@@ -47,7 +47,8 @@ void SourceTracker::OnFrameDeliveredInternal( + SourceKey key(RtpSourceType::CSRC, csrc); + SourceEntry& entry = UpdateEntry(key); -+// Mozilla modification: VideoReceiveStream2 and friends do not surface RTCP -+// stats at all, and even on the most recent libwebrtc code there does not -+// seem to be any support for these stats right now. So, we hack this in. 
-+void RtpVideoStreamReceiver2::RemoteRTCPSenderInfo( -+ uint32_t* packet_count, uint32_t* octet_count, -+ int64_t* ntp_timestamp_ms) const { -+ RTC_DCHECK_RUN_ON(&worker_task_checker_); -+ rtp_rtcp_->RemoteRTCPSenderInfo(packet_count, octet_count, ntp_timestamp_ms); -+} -+ - void RtpVideoStreamReceiver2::ManageFrame( - std::unique_ptr frame) { - RTC_DCHECK_RUN_ON(&packet_sequence_checker_); -diff --git a/video/rtp_video_stream_receiver2.h b/video/rtp_video_stream_receiver2.h -index f9ed339c0a..53378e9e9b 100644 ---- a/video/rtp_video_stream_receiver2.h -+++ b/video/rtp_video_stream_receiver2.h -@@ -206,6 +206,12 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, - absl::optional LastReceivedPacketMs() const; - absl::optional LastReceivedKeyframePacketMs() const; - -+ // Mozilla modification: VideoReceiveStream2 and friends do not surface RTCP -+ // stats at all, and even on the most recent libwebrtc code there does not -+ // seem to be any support for these stats right now. So, we hack this in. -+ void RemoteRTCPSenderInfo(uint32_t* packet_count, uint32_t* octet_count, -+ int64_t* ntp_timestamp_ms) const; -+ - private: - // Implements RtpVideoFrameReceiver. - void ManageFrame(std::unique_ptr frame) override; -diff --git a/video/video_receive_stream2.cc b/video/video_receive_stream2.cc -index 4a7e7ac577..ee784ef592 100644 ---- a/video/video_receive_stream2.cc -+++ b/video/video_receive_stream2.cc -@@ -574,6 +574,14 @@ VideoReceiveStreamInterface::Stats VideoReceiveStream2::GetStats() const { - } - } +- entry.timestamp = now; ++ const auto packet_time = packet_info.receive_time(); ++ entry.timestamp = packet_time.ms() ? packet_time : now; + entry.audio_level = packet_info.audio_level(); + entry.absolute_capture_time = packet_info.absolute_capture_time(); + entry.local_capture_clock_offset = +@@ -86,6 +87,10 @@ std::vector SourceTracker::GetSources() const { + .local_capture_clock_offset = entry.local_capture_clock_offset}); } + ++ std::sort(sources.begin(), sources.end(), [](const auto &a, const auto &b){ ++ return a.timestamp_ms() > b.timestamp_ms(); ++ }); + -+ // Mozilla modification: VideoReceiveStream2 and friends do not surface RTCP -+ // stats at all, and even on the most recent libwebrtc code there does not -+ // seem to be any support for these stats right now. So, we hack this in. -+ rtp_video_stream_receiver_.RemoteRTCPSenderInfo( -+ &stats.rtcp_sender_packets_sent, &stats.rtcp_sender_octets_sent, -+ &stats.rtcp_sender_ntp_timestamp_ms); -+ - return stats; + return sources; } -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0046.patch b/third_party/libwebrtc/moz-patch-stack/0046.patch index 9cda08653958..ccc4e8566923 100644 --- a/third_party/libwebrtc/moz-patch-stack/0046.patch +++ b/third_party/libwebrtc/moz-patch-stack/0046.patch @@ -1,38 +1,32 @@ -From: Nico Grunbaum -Date: Mon, 26 Jul 2021 22:51:00 -0700 -Subject: Bug 1654112 - fix timestamp issues with RTP sources; r=mjf +From: Andreas Pehrson +Date: Fri, 20 Aug 2021 13:52:00 +0200 +Subject: Bug 1654112 - Don't check the calling thread in + webrtc::AudioReceiveStream::GetSources. r=ng -Differential Revision: https://phabricator.services.mozilla.com/D120930 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/de8c14e4972f717bf937b6f2fffcd08c35e21ced +source_tracker_ is thread safe with its own internal mutex, so this call is safe +as long as the caller has a guarantee for the lifetime of the +AudioReceiveStream. This is similar to webrtc::VideoReceiveStream. + +Upliftable. 
+ +Differential Revision: https://phabricator.services.mozilla.com/D123226 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/c186df8a088e46285a15e40149182daa34cc6805 --- - modules/rtp_rtcp/source/source_tracker.cc | 7 ++++++- - 1 file changed, 6 insertions(+), 1 deletion(-) + audio/audio_receive_stream.cc | 1 - + 1 file changed, 1 deletion(-) -diff --git a/modules/rtp_rtcp/source/source_tracker.cc b/modules/rtp_rtcp/source/source_tracker.cc -index 13d848dce0..46a46ef51d 100644 ---- a/modules/rtp_rtcp/source/source_tracker.cc -+++ b/modules/rtp_rtcp/source/source_tracker.cc -@@ -47,7 +47,8 @@ void SourceTracker::OnFrameDeliveredInternal( - SourceKey key(RtpSourceType::CSRC, csrc); - SourceEntry& entry = UpdateEntry(key); +diff --git a/audio/audio_receive_stream.cc b/audio/audio_receive_stream.cc +index 655b2761ac..c49b83f95f 100644 +--- a/audio/audio_receive_stream.cc ++++ b/audio/audio_receive_stream.cc +@@ -366,7 +366,6 @@ int AudioReceiveStreamImpl::GetBaseMinimumPlayoutDelayMs() const { + } -- entry.timestamp = now; -+ const auto packet_time = packet_info.receive_time(); -+ entry.timestamp = packet_time.ms() ? packet_time : now; - entry.audio_level = packet_info.audio_level(); - entry.absolute_capture_time = packet_info.absolute_capture_time(); - entry.local_capture_clock_offset = -@@ -86,6 +87,10 @@ std::vector SourceTracker::GetSources() const { - .local_capture_clock_offset = entry.local_capture_clock_offset}); - } - -+ std::sort(sources.begin(), sources.end(), [](const auto &a, const auto &b){ -+ return a.timestamp_ms() > b.timestamp_ms(); -+ }); -+ - return sources; + std::vector AudioReceiveStreamImpl::GetSources() const { +- RTC_DCHECK_RUN_ON(&worker_thread_checker_); + return source_tracker_.GetSources(); } -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0047.patch b/third_party/libwebrtc/moz-patch-stack/0047.patch index 56c9fbde3066..550434d51aca 100644 --- a/third_party/libwebrtc/moz-patch-stack/0047.patch +++ b/third_party/libwebrtc/moz-patch-stack/0047.patch @@ -1,32 +1,1073 @@ -From: Andreas Pehrson -Date: Fri, 20 Aug 2021 13:52:00 +0200 -Subject: Bug 1654112 - Don't check the calling thread in - webrtc::AudioReceiveStream::GetSources. r=ng +From: Paul Adenot +Date: Fri, 18 Aug 2023 13:13:36 -0500 +Subject: Bug 1714577 - Part 6 - Copy WebRTC's trace_event.h to Gecko, + verbatim, and redirect the original trace_event.h to it. r=gerald -source_tracker_ is thread safe with its own internal mutex, so this call is safe -as long as the caller has a guarantee for the lifetime of the -AudioReceiveStream. This is similar to webrtc::VideoReceiveStream. - -Upliftable. 
- -Differential Revision: https://phabricator.services.mozilla.com/D123226 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/c186df8a088e46285a15e40149182daa34cc6805 +Differential Revision: https://phabricator.services.mozilla.com/D116843 --- - audio/audio_receive_stream.cc | 1 - - 1 file changed, 1 deletion(-) + rtc_base/trace_event.h | 1055 +--------------------------------------- + 1 file changed, 3 insertions(+), 1052 deletions(-) -diff --git a/audio/audio_receive_stream.cc b/audio/audio_receive_stream.cc -index 655b2761ac..c49b83f95f 100644 ---- a/audio/audio_receive_stream.cc -+++ b/audio/audio_receive_stream.cc -@@ -366,7 +366,6 @@ int AudioReceiveStreamImpl::GetBaseMinimumPlayoutDelayMs() const { - } - - std::vector AudioReceiveStreamImpl::GetSources() const { -- RTC_DCHECK_RUN_ON(&worker_thread_checker_); - return source_tracker_.GetSources(); - } - +diff --git a/rtc_base/trace_event.h b/rtc_base/trace_event.h +index 6689bc0c37..b34df0c93f 100644 +--- a/rtc_base/trace_event.h ++++ b/rtc_base/trace_event.h +@@ -1,1052 +1,3 @@ +-// Copyright (c) 2012 The Chromium Authors. All rights reserved. +-// Use of this source code is governed by a BSD-style license that can be +-// found in the LICENSE file under third_party_mods/chromium or at: +-// http://src.chromium.org/svn/trunk/src/LICENSE +- +-#ifndef RTC_BASE_TRACE_EVENT_H_ +-#define RTC_BASE_TRACE_EVENT_H_ +- +-#include +- +-#include "rtc_base/event_tracer.h" +- +-#if defined(TRACE_EVENT0) +-#error "Another copy of trace_event.h has already been included." +-#endif +- +-#if defined(RTC_DISABLE_TRACE_EVENTS) +-#define RTC_TRACE_EVENTS_ENABLED 0 +-#else +-#define RTC_TRACE_EVENTS_ENABLED 1 +-#endif +- +-// Type values for identifying types in the TraceValue union. +-#define TRACE_VALUE_TYPE_BOOL (static_cast(1)) +-#define TRACE_VALUE_TYPE_UINT (static_cast(2)) +-#define TRACE_VALUE_TYPE_INT (static_cast(3)) +-#define TRACE_VALUE_TYPE_DOUBLE (static_cast(4)) +-#define TRACE_VALUE_TYPE_POINTER (static_cast(5)) +-#define TRACE_VALUE_TYPE_STRING (static_cast(6)) +-#define TRACE_VALUE_TYPE_COPY_STRING (static_cast(7)) +- +-#if RTC_TRACE_EVENTS_ENABLED +- +-// Extracted from Chromium's src/base/debug/trace_event.h. +- +-// This header is designed to give you trace_event macros without specifying +-// how the events actually get collected and stored. If you need to expose trace +-// event to some other universe, you can copy-and-paste this file, +-// implement the TRACE_EVENT_API macros, and do any other necessary fixup for +-// the target platform. The end result is that multiple libraries can funnel +-// events through to a shared trace event collector. +- +-// Trace events are for tracking application performance and resource usage. +-// Macros are provided to track: +-// Begin and end of function calls +-// Counters +-// +-// Events are issued against categories. Whereas RTC_LOG's +-// categories are statically defined, TRACE categories are created +-// implicitly with a string. For example: +-// TRACE_EVENT_INSTANT0("MY_SUBSYSTEM", "SomeImportantEvent") +-// +-// Events can be INSTANT, or can be pairs of BEGIN and END in the same scope: +-// TRACE_EVENT_BEGIN0("MY_SUBSYSTEM", "SomethingCostly") +-// doSomethingCostly() +-// TRACE_EVENT_END0("MY_SUBSYSTEM", "SomethingCostly") +-// Note: our tools can't always determine the correct BEGIN/END pairs unless +-// these are used in the same scope. Use ASYNC_BEGIN/ASYNC_END macros if you +-// need them to be in separate scopes. 
+-// +-// A common use case is to trace entire function scopes. This +-// issues a trace BEGIN and END automatically: +-// void doSomethingCostly() { +-// TRACE_EVENT0("MY_SUBSYSTEM", "doSomethingCostly"); +-// ... +-// } +-// +-// Additional parameters can be associated with an event: +-// void doSomethingCostly2(int howMuch) { +-// TRACE_EVENT1("MY_SUBSYSTEM", "doSomethingCostly", +-// "howMuch", howMuch); +-// ... +-// } +-// +-// The trace system will automatically add to this information the +-// current process id, thread id, and a timestamp in microseconds. +-// +-// To trace an asynchronous procedure such as an IPC send/receive, use +-// ASYNC_BEGIN and ASYNC_END: +-// [single threaded sender code] +-// static int send_count = 0; +-// ++send_count; +-// TRACE_EVENT_ASYNC_BEGIN0("ipc", "message", send_count); +-// Send(new MyMessage(send_count)); +-// [receive code] +-// void OnMyMessage(send_count) { +-// TRACE_EVENT_ASYNC_END0("ipc", "message", send_count); +-// } +-// The third parameter is a unique ID to match ASYNC_BEGIN/ASYNC_END pairs. +-// ASYNC_BEGIN and ASYNC_END can occur on any thread of any traced process. +-// Pointers can be used for the ID parameter, and they will be mangled +-// internally so that the same pointer on two different processes will not +-// match. For example: +-// class MyTracedClass { +-// public: +-// MyTracedClass() { +-// TRACE_EVENT_ASYNC_BEGIN0("category", "MyTracedClass", this); +-// } +-// ~MyTracedClass() { +-// TRACE_EVENT_ASYNC_END0("category", "MyTracedClass", this); +-// } +-// } +-// +-// Trace event also supports counters, which is a way to track a quantity +-// as it varies over time. Counters are created with the following macro: +-// TRACE_COUNTER1("MY_SUBSYSTEM", "myCounter", g_myCounterValue); +-// +-// Counters are process-specific. The macro itself can be issued from any +-// thread, however. +-// +-// Sometimes, you want to track two counters at once. You can do this with two +-// counter macros: +-// TRACE_COUNTER1("MY_SUBSYSTEM", "myCounter0", g_myCounterValue[0]); +-// TRACE_COUNTER1("MY_SUBSYSTEM", "myCounter1", g_myCounterValue[1]); +-// Or you can do it with a combined macro: +-// TRACE_COUNTER2("MY_SUBSYSTEM", "myCounter", +-// "bytesPinned", g_myCounterValue[0], +-// "bytesAllocated", g_myCounterValue[1]); +-// This indicates to the tracing UI that these counters should be displayed +-// in a single graph, as a summed area chart. +-// +-// Since counters are in a global namespace, you may want to disembiguate with a +-// unique ID, by using the TRACE_COUNTER_ID* variations. +-// +-// By default, trace collection is compiled in, but turned off at runtime. +-// Collecting trace data is the responsibility of the embedding +-// application. In Chrome's case, navigating to about:tracing will turn on +-// tracing and display data collected across all active processes. +-// +-// +-// Memory scoping note: +-// Tracing copies the pointers, not the string content, of the strings passed +-// in for category, name, and arg_names. Thus, the following code will +-// cause problems: +-// char* str = strdup("impprtantName"); +-// TRACE_EVENT_INSTANT0("SUBSYSTEM", str); // BAD! +-// free(str); // Trace system now has dangling pointer +-// +-// To avoid this issue with the `name` and `arg_name` parameters, use the +-// TRACE_EVENT_COPY_XXX overloads of the macros at additional runtime overhead. +-// Notes: The category must always be in a long-lived char* (i.e. static const). 
+-// The `arg_values`, when used, are always deep copied with the _COPY +-// macros. +-// +-// When are string argument values copied: +-// const char* arg_values are only referenced by default: +-// TRACE_EVENT1("category", "name", +-// "arg1", "literal string is only referenced"); +-// Use TRACE_STR_COPY to force copying of a const char*: +-// TRACE_EVENT1("category", "name", +-// "arg1", TRACE_STR_COPY("string will be copied")); +-// std::string arg_values are always copied: +-// TRACE_EVENT1("category", "name", +-// "arg1", std::string("string will be copied")); +-// +-// +-// Thread Safety: +-// Thread safety is provided by methods defined in event_tracer.h. See the file +-// for details. +- +-// By default, const char* argument values are assumed to have long-lived scope +-// and will not be copied. Use this macro to force a const char* to be copied. +-#define TRACE_STR_COPY(str) \ +- webrtc::trace_event_internal::TraceStringWithCopy(str) +- +-// This will mark the trace event as disabled by default. The user will need +-// to explicitly enable the event. +-#define TRACE_DISABLED_BY_DEFAULT(name) "disabled-by-default-" name +- +-// By default, uint64 ID argument values are not mangled with the Process ID in +-// TRACE_EVENT_ASYNC macros. Use this macro to force Process ID mangling. +-#define TRACE_ID_MANGLE(id) \ +- webrtc::trace_event_internal::TraceID::ForceMangle(id) +- +-// Records a pair of begin and end events called "name" for the current +-// scope, with 0, 1 or 2 associated arguments. If the category is not +-// enabled, then this does nothing. +-// - category and name strings must have application lifetime (statics or +-// literals). They may not include " chars. +-#define TRACE_EVENT0(category, name) \ +- INTERNAL_TRACE_EVENT_ADD_SCOPED(category, name) +-#define TRACE_EVENT1(category, name, arg1_name, arg1_val) \ +- INTERNAL_TRACE_EVENT_ADD_SCOPED(category, name, arg1_name, arg1_val) +-#define TRACE_EVENT2(category, name, arg1_name, arg1_val, arg2_name, arg2_val) \ +- INTERNAL_TRACE_EVENT_ADD_SCOPED(category, name, arg1_name, arg1_val, \ +- arg2_name, arg2_val) +- +-// Records a single event called "name" immediately, with 0, 1 or 2 +-// associated arguments. If the category is not enabled, then this +-// does nothing. +-// - category and name strings must have application lifetime (statics or +-// literals). They may not include " chars. 
+-#define TRACE_EVENT_INSTANT0(category, name) \ +- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_INSTANT, category, name, \ +- TRACE_EVENT_FLAG_NONE) +-#define TRACE_EVENT_INSTANT1(category, name, arg1_name, arg1_val) \ +- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_INSTANT, category, name, \ +- TRACE_EVENT_FLAG_NONE, arg1_name, arg1_val) +-#define TRACE_EVENT_INSTANT2(category, name, arg1_name, arg1_val, arg2_name, \ +- arg2_val) \ +- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_INSTANT, category, name, \ +- TRACE_EVENT_FLAG_NONE, arg1_name, arg1_val, \ +- arg2_name, arg2_val) +-#define TRACE_EVENT_COPY_INSTANT0(category, name) \ +- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_INSTANT, category, name, \ +- TRACE_EVENT_FLAG_COPY) +-#define TRACE_EVENT_COPY_INSTANT1(category, name, arg1_name, arg1_val) \ +- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_INSTANT, category, name, \ +- TRACE_EVENT_FLAG_COPY, arg1_name, arg1_val) +-#define TRACE_EVENT_COPY_INSTANT2(category, name, arg1_name, arg1_val, \ +- arg2_name, arg2_val) \ +- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_INSTANT, category, name, \ +- TRACE_EVENT_FLAG_COPY, arg1_name, arg1_val, \ +- arg2_name, arg2_val) +- +-// Records a single BEGIN event called "name" immediately, with 0, 1 or 2 +-// associated arguments. If the category is not enabled, then this +-// does nothing. +-// - category and name strings must have application lifetime (statics or +-// literals). They may not include " chars. +-#define TRACE_EVENT_BEGIN0(category, name) \ +- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_BEGIN, category, name, \ +- TRACE_EVENT_FLAG_NONE) +-#define TRACE_EVENT_BEGIN1(category, name, arg1_name, arg1_val) \ +- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_BEGIN, category, name, \ +- TRACE_EVENT_FLAG_NONE, arg1_name, arg1_val) +-#define TRACE_EVENT_BEGIN2(category, name, arg1_name, arg1_val, arg2_name, \ +- arg2_val) \ +- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_BEGIN, category, name, \ +- TRACE_EVENT_FLAG_NONE, arg1_name, arg1_val, \ +- arg2_name, arg2_val) +-#define TRACE_EVENT_COPY_BEGIN0(category, name) \ +- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_BEGIN, category, name, \ +- TRACE_EVENT_FLAG_COPY) +-#define TRACE_EVENT_COPY_BEGIN1(category, name, arg1_name, arg1_val) \ +- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_BEGIN, category, name, \ +- TRACE_EVENT_FLAG_COPY, arg1_name, arg1_val) +-#define TRACE_EVENT_COPY_BEGIN2(category, name, arg1_name, arg1_val, \ +- arg2_name, arg2_val) \ +- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_BEGIN, category, name, \ +- TRACE_EVENT_FLAG_COPY, arg1_name, arg1_val, \ +- arg2_name, arg2_val) +- +-// Records a single END event for "name" immediately. If the category +-// is not enabled, then this does nothing. +-// - category and name strings must have application lifetime (statics or +-// literals). They may not include " chars. 
+-#define TRACE_EVENT_END0(category, name) \ +- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_END, category, name, \ +- TRACE_EVENT_FLAG_NONE) +-#define TRACE_EVENT_END1(category, name, arg1_name, arg1_val) \ +- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_END, category, name, \ +- TRACE_EVENT_FLAG_NONE, arg1_name, arg1_val) +-#define TRACE_EVENT_END2(category, name, arg1_name, arg1_val, arg2_name, \ +- arg2_val) \ +- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_END, category, name, \ +- TRACE_EVENT_FLAG_NONE, arg1_name, arg1_val, \ +- arg2_name, arg2_val) +-#define TRACE_EVENT_COPY_END0(category, name) \ +- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_END, category, name, \ +- TRACE_EVENT_FLAG_COPY) +-#define TRACE_EVENT_COPY_END1(category, name, arg1_name, arg1_val) \ +- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_END, category, name, \ +- TRACE_EVENT_FLAG_COPY, arg1_name, arg1_val) +-#define TRACE_EVENT_COPY_END2(category, name, arg1_name, arg1_val, arg2_name, \ +- arg2_val) \ +- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_END, category, name, \ +- TRACE_EVENT_FLAG_COPY, arg1_name, arg1_val, \ +- arg2_name, arg2_val) +- +-// Records the value of a counter called "name" immediately. Value +-// must be representable as a 32 bit integer. +-// - category and name strings must have application lifetime (statics or +-// literals). They may not include " chars. +-#define TRACE_COUNTER1(category, name, value) \ +- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_COUNTER, category, name, \ +- TRACE_EVENT_FLAG_NONE, "value", \ +- static_cast(value)) +-#define TRACE_COPY_COUNTER1(category, name, value) \ +- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_COUNTER, category, name, \ +- TRACE_EVENT_FLAG_COPY, "value", \ +- static_cast(value)) +- +-// Records the values of a multi-parted counter called "name" immediately. +-// The UI will treat value1 and value2 as parts of a whole, displaying their +-// values as a stacked-bar chart. +-// - category and name strings must have application lifetime (statics or +-// literals). They may not include " chars. +-#define TRACE_COUNTER2(category, name, value1_name, value1_val, value2_name, \ +- value2_val) \ +- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_COUNTER, category, name, \ +- TRACE_EVENT_FLAG_NONE, value1_name, \ +- static_cast(value1_val), value2_name, \ +- static_cast(value2_val)) +-#define TRACE_COPY_COUNTER2(category, name, value1_name, value1_val, \ +- value2_name, value2_val) \ +- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_COUNTER, category, name, \ +- TRACE_EVENT_FLAG_COPY, value1_name, \ +- static_cast(value1_val), value2_name, \ +- static_cast(value2_val)) +- +-// Records the value of a counter called "name" immediately. Value +-// must be representable as a 32 bit integer. +-// - category and name strings must have application lifetime (statics or +-// literals). They may not include " chars. +-// - `id` is used to disambiguate counters with the same name. It must either +-// be a pointer or an integer value up to 64 bits. If it's a pointer, the bits +-// will be xored with a hash of the process ID so that the same pointer on +-// two different processes will not collide. 
+-#define TRACE_COUNTER_ID1(category, name, id, value) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_COUNTER, category, name, \ +- id, TRACE_EVENT_FLAG_NONE, "value", \ +- static_cast(value)) +-#define TRACE_COPY_COUNTER_ID1(category, name, id, value) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_COUNTER, category, name, \ +- id, TRACE_EVENT_FLAG_COPY, "value", \ +- static_cast(value)) +- +-// Records the values of a multi-parted counter called "name" immediately. +-// The UI will treat value1 and value2 as parts of a whole, displaying their +-// values as a stacked-bar chart. +-// - category and name strings must have application lifetime (statics or +-// literals). They may not include " chars. +-// - `id` is used to disambiguate counters with the same name. It must either +-// be a pointer or an integer value up to 64 bits. If it's a pointer, the bits +-// will be xored with a hash of the process ID so that the same pointer on +-// two different processes will not collide. +-#define TRACE_COUNTER_ID2(category, name, id, value1_name, value1_val, \ +- value2_name, value2_val) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_COUNTER, category, name, \ +- id, TRACE_EVENT_FLAG_NONE, value1_name, \ +- static_cast(value1_val), value2_name, \ +- static_cast(value2_val)) +-#define TRACE_COPY_COUNTER_ID2(category, name, id, value1_name, value1_val, \ +- value2_name, value2_val) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_COUNTER, category, name, \ +- id, TRACE_EVENT_FLAG_COPY, value1_name, \ +- static_cast(value1_val), value2_name, \ +- static_cast(value2_val)) +- +-// Records a single ASYNC_BEGIN event called "name" immediately, with 0, 1 or 2 +-// associated arguments. If the category is not enabled, then this +-// does nothing. +-// - category and name strings must have application lifetime (statics or +-// literals). They may not include " chars. +-// - `id` is used to match the ASYNC_BEGIN event with the ASYNC_END event. ASYNC +-// events are considered to match if their category, name and id values all +-// match. `id` must either be a pointer or an integer value up to 64 bits. If +-// it's a pointer, the bits will be xored with a hash of the process ID so +-// that the same pointer on two different processes will not collide. +-// An asynchronous operation can consist of multiple phases. The first phase is +-// defined by the ASYNC_BEGIN calls. Additional phases can be defined using the +-// ASYNC_STEP macros. When the operation completes, call ASYNC_END. +-// An ASYNC trace typically occur on a single thread (if not, they will only be +-// drawn on the thread defined in the ASYNC_BEGIN event), but all events in that +-// operation must use the same `name` and `id`. Each event can have its own +-// args. 
+-#define TRACE_EVENT_ASYNC_BEGIN0(category, name, id) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_BEGIN, category, \ +- name, id, TRACE_EVENT_FLAG_NONE) +-#define TRACE_EVENT_ASYNC_BEGIN1(category, name, id, arg1_name, arg1_val) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_BEGIN, category, \ +- name, id, TRACE_EVENT_FLAG_NONE, arg1_name, \ +- arg1_val) +-#define TRACE_EVENT_ASYNC_BEGIN2(category, name, id, arg1_name, arg1_val, \ +- arg2_name, arg2_val) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_BEGIN, category, \ +- name, id, TRACE_EVENT_FLAG_NONE, arg1_name, \ +- arg1_val, arg2_name, arg2_val) +-#define TRACE_EVENT_COPY_ASYNC_BEGIN0(category, name, id) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_BEGIN, category, \ +- name, id, TRACE_EVENT_FLAG_COPY) +-#define TRACE_EVENT_COPY_ASYNC_BEGIN1(category, name, id, arg1_name, arg1_val) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_BEGIN, category, \ +- name, id, TRACE_EVENT_FLAG_COPY, arg1_name, \ +- arg1_val) +-#define TRACE_EVENT_COPY_ASYNC_BEGIN2(category, name, id, arg1_name, arg1_val, \ +- arg2_name, arg2_val) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_BEGIN, category, \ +- name, id, TRACE_EVENT_FLAG_COPY, arg1_name, \ +- arg1_val, arg2_name, arg2_val) +- +-// Records a single ASYNC_STEP event for `step` immediately. If the category +-// is not enabled, then this does nothing. The `name` and `id` must match the +-// ASYNC_BEGIN event above. The `step` param identifies this step within the +-// async event. This should be called at the beginning of the next phase of an +-// asynchronous operation. +-#define TRACE_EVENT_ASYNC_STEP0(category, name, id, step) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_STEP, category, \ +- name, id, TRACE_EVENT_FLAG_NONE, "step", \ +- step) +-#define TRACE_EVENT_ASYNC_STEP1(category, name, id, step, arg1_name, arg1_val) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_STEP, category, \ +- name, id, TRACE_EVENT_FLAG_NONE, "step", \ +- step, arg1_name, arg1_val) +-#define TRACE_EVENT_COPY_ASYNC_STEP0(category, name, id, step) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_STEP, category, \ +- name, id, TRACE_EVENT_FLAG_COPY, "step", \ +- step) +-#define TRACE_EVENT_COPY_ASYNC_STEP1(category, name, id, step, arg1_name, \ +- arg1_val) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_STEP, category, \ +- name, id, TRACE_EVENT_FLAG_COPY, "step", \ +- step, arg1_name, arg1_val) +- +-// Records a single ASYNC_END event for "name" immediately. If the category +-// is not enabled, then this does nothing. 
+-#define TRACE_EVENT_ASYNC_END0(category, name, id) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_END, category, \ +- name, id, TRACE_EVENT_FLAG_NONE) +-#define TRACE_EVENT_ASYNC_END1(category, name, id, arg1_name, arg1_val) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_END, category, \ +- name, id, TRACE_EVENT_FLAG_NONE, arg1_name, \ +- arg1_val) +-#define TRACE_EVENT_ASYNC_END2(category, name, id, arg1_name, arg1_val, \ +- arg2_name, arg2_val) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_END, category, \ +- name, id, TRACE_EVENT_FLAG_NONE, arg1_name, \ +- arg1_val, arg2_name, arg2_val) +-#define TRACE_EVENT_COPY_ASYNC_END0(category, name, id) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_END, category, \ +- name, id, TRACE_EVENT_FLAG_COPY) +-#define TRACE_EVENT_COPY_ASYNC_END1(category, name, id, arg1_name, arg1_val) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_END, category, \ +- name, id, TRACE_EVENT_FLAG_COPY, arg1_name, \ +- arg1_val) +-#define TRACE_EVENT_COPY_ASYNC_END2(category, name, id, arg1_name, arg1_val, \ +- arg2_name, arg2_val) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_END, category, \ +- name, id, TRACE_EVENT_FLAG_COPY, arg1_name, \ +- arg1_val, arg2_name, arg2_val) +- +-// Records a single FLOW_BEGIN event called "name" immediately, with 0, 1 or 2 +-// associated arguments. If the category is not enabled, then this +-// does nothing. +-// - category and name strings must have application lifetime (statics or +-// literals). They may not include " chars. +-// - `id` is used to match the FLOW_BEGIN event with the FLOW_END event. FLOW +-// events are considered to match if their category, name and id values all +-// match. `id` must either be a pointer or an integer value up to 64 bits. If +-// it's a pointer, the bits will be xored with a hash of the process ID so +-// that the same pointer on two different processes will not collide. +-// FLOW events are different from ASYNC events in how they are drawn by the +-// tracing UI. A FLOW defines asynchronous data flow, such as posting a task +-// (FLOW_BEGIN) and later executing that task (FLOW_END). Expect FLOWs to be +-// drawn as lines or arrows from FLOW_BEGIN scopes to FLOW_END scopes. Similar +-// to ASYNC, a FLOW can consist of multiple phases. The first phase is defined +-// by the FLOW_BEGIN calls. Additional phases can be defined using the FLOW_STEP +-// macros. When the operation completes, call FLOW_END. An async operation can +-// span threads and processes, but all events in that operation must use the +-// same `name` and `id`. Each event can have its own args. 
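A small sketch of the ASYNC macros defined above, pairing BEGIN and END across call sites with a matching id, along the lines of the IPC example in the header's own comments (function names are hypothetical):

#include "rtc_base/trace_event.h"

static int send_count = 0;  // id source, as in the header's IPC example

void SendMessage() {
  ++send_count;
  // Starts an async span; category, name and id must match at END time.
  TRACE_EVENT_ASYNC_BEGIN0("ipc", "message", send_count);
  // ... hand the message off ...
}

void OnMessageAcked(int id) {
  // Ends the span opened with the same ("ipc", "message", id) triple.
  TRACE_EVENT_ASYNC_END0("ipc", "message", id);
}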
+-#define TRACE_EVENT_FLOW_BEGIN0(category, name, id) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_BEGIN, category, \ +- name, id, TRACE_EVENT_FLAG_NONE) +-#define TRACE_EVENT_FLOW_BEGIN1(category, name, id, arg1_name, arg1_val) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_BEGIN, category, \ +- name, id, TRACE_EVENT_FLAG_NONE, arg1_name, \ +- arg1_val) +-#define TRACE_EVENT_FLOW_BEGIN2(category, name, id, arg1_name, arg1_val, \ +- arg2_name, arg2_val) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_BEGIN, category, \ +- name, id, TRACE_EVENT_FLAG_NONE, arg1_name, \ +- arg1_val, arg2_name, arg2_val) +-#define TRACE_EVENT_COPY_FLOW_BEGIN0(category, name, id) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_BEGIN, category, \ +- name, id, TRACE_EVENT_FLAG_COPY) +-#define TRACE_EVENT_COPY_FLOW_BEGIN1(category, name, id, arg1_name, arg1_val) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_BEGIN, category, \ +- name, id, TRACE_EVENT_FLAG_COPY, arg1_name, \ +- arg1_val) +-#define TRACE_EVENT_COPY_FLOW_BEGIN2(category, name, id, arg1_name, arg1_val, \ +- arg2_name, arg2_val) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_BEGIN, category, \ +- name, id, TRACE_EVENT_FLAG_COPY, arg1_name, \ +- arg1_val, arg2_name, arg2_val) +- +-// Records a single FLOW_STEP event for `step` immediately. If the category +-// is not enabled, then this does nothing. The `name` and `id` must match the +-// FLOW_BEGIN event above. The `step` param identifies this step within the +-// async event. This should be called at the beginning of the next phase of an +-// asynchronous operation. +-#define TRACE_EVENT_FLOW_STEP0(category, name, id, step) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_STEP, category, \ +- name, id, TRACE_EVENT_FLAG_NONE, "step", \ +- step) +-#define TRACE_EVENT_FLOW_STEP1(category, name, id, step, arg1_name, arg1_val) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_STEP, category, \ +- name, id, TRACE_EVENT_FLAG_NONE, "step", \ +- step, arg1_name, arg1_val) +-#define TRACE_EVENT_COPY_FLOW_STEP0(category, name, id, step) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_STEP, category, \ +- name, id, TRACE_EVENT_FLAG_COPY, "step", \ +- step) +-#define TRACE_EVENT_COPY_FLOW_STEP1(category, name, id, step, arg1_name, \ +- arg1_val) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_STEP, category, \ +- name, id, TRACE_EVENT_FLAG_COPY, "step", \ +- step, arg1_name, arg1_val) +- +-// Records a single FLOW_END event for "name" immediately. If the category +-// is not enabled, then this does nothing. 
+-#define TRACE_EVENT_FLOW_END0(category, name, id) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_END, category, name, \ +- id, TRACE_EVENT_FLAG_NONE) +-#define TRACE_EVENT_FLOW_END1(category, name, id, arg1_name, arg1_val) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_END, category, name, \ +- id, TRACE_EVENT_FLAG_NONE, arg1_name, \ +- arg1_val) +-#define TRACE_EVENT_FLOW_END2(category, name, id, arg1_name, arg1_val, \ +- arg2_name, arg2_val) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_END, category, name, \ +- id, TRACE_EVENT_FLAG_NONE, arg1_name, \ +- arg1_val, arg2_name, arg2_val) +-#define TRACE_EVENT_COPY_FLOW_END0(category, name, id) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_END, category, name, \ +- id, TRACE_EVENT_FLAG_COPY) +-#define TRACE_EVENT_COPY_FLOW_END1(category, name, id, arg1_name, arg1_val) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_END, category, name, \ +- id, TRACE_EVENT_FLAG_COPY, arg1_name, \ +- arg1_val) +-#define TRACE_EVENT_COPY_FLOW_END2(category, name, id, arg1_name, arg1_val, \ +- arg2_name, arg2_val) \ +- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_END, category, name, \ +- id, TRACE_EVENT_FLAG_COPY, arg1_name, \ +- arg1_val, arg2_name, arg2_val) +- +-//////////////////////////////////////////////////////////////////////////////// +-// Implementation specific tracing API definitions. +- +-// Get a pointer to the enabled state of the given trace category. Only +-// long-lived literal strings should be given as the category name. The returned +-// pointer can be held permanently in a local static for example. If the +-// unsigned char is non-zero, tracing is enabled. If tracing is enabled, +-// TRACE_EVENT_API_ADD_TRACE_EVENT can be called. It's OK if tracing is disabled +-// between the load of the tracing state and the call to +-// TRACE_EVENT_API_ADD_TRACE_EVENT, because this flag only provides an early out +-// for best performance when tracing is disabled. +-// const unsigned char* +-// TRACE_EVENT_API_GET_CATEGORY_ENABLED(const char* category_name) +-#define TRACE_EVENT_API_GET_CATEGORY_ENABLED \ +- webrtc::EventTracer::GetCategoryEnabled +- +-// Add a trace event to the platform tracing system. +-// void TRACE_EVENT_API_ADD_TRACE_EVENT( +-// char phase, +-// const unsigned char* category_enabled, +-// const char* name, +-// unsigned long long id, +-// int num_args, +-// const char** arg_names, +-// const unsigned char* arg_types, +-// const unsigned long long* arg_values, +-// unsigned char flags) +-#define TRACE_EVENT_API_ADD_TRACE_EVENT webrtc::EventTracer::AddTraceEvent +- +-//////////////////////////////////////////////////////////////////////////////// +- +-// Implementation detail: trace event macros create temporary variables +-// to keep instrumentation overhead low. These macros give each temporary +-// variable a unique name based on the line number to prevent name collissions. +-#define INTERNAL_TRACE_EVENT_UID3(a, b) trace_event_unique_##a##b +-#define INTERNAL_TRACE_EVENT_UID2(a, b) INTERNAL_TRACE_EVENT_UID3(a, b) +-#define INTERNAL_TRACE_EVENT_UID(name_prefix) \ +- INTERNAL_TRACE_EVENT_UID2(name_prefix, __LINE__) +- +-#if WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS +-#define INTERNAL_TRACE_EVENT_INFO_TYPE const unsigned char* +-#else +-#define INTERNAL_TRACE_EVENT_INFO_TYPE static const unsigned char* +-#endif // WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS +- +-// Implementation detail: internal macro to create static category. 
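A hedged sketch of the pattern the implementation macros build on, per the comment above: cache the category-enabled pointer in a static, and only call TRACE_EVENT_API_ADD_TRACE_EVENT when it is non-zero (the event name is hypothetical; assumes the tracing-enabled build):

#include "rtc_base/trace_event.h"

void MaybeEmitInstantEvent() {
  // The enabled flag can be held permanently, e.g. in a local static.
  static const unsigned char* enabled =
      TRACE_EVENT_API_GET_CATEGORY_ENABLED("webrtc");
  if (*enabled) {
    // Phase 'I' (instant), no id, no arguments.
    TRACE_EVENT_API_ADD_TRACE_EVENT(TRACE_EVENT_PHASE_INSTANT, enabled,
                                    "MyInstantEvent", /*id=*/0,
                                    /*num_args=*/0, nullptr, nullptr, nullptr,
                                    TRACE_EVENT_FLAG_NONE);
  }
}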
+-#define INTERNAL_TRACE_EVENT_GET_CATEGORY_INFO(category) \ +- INTERNAL_TRACE_EVENT_INFO_TYPE INTERNAL_TRACE_EVENT_UID(catstatic) = \ +- TRACE_EVENT_API_GET_CATEGORY_ENABLED(category); +- +-// Implementation detail: internal macro to create static category and add +-// event if the category is enabled. +-#define INTERNAL_TRACE_EVENT_ADD(phase, category, name, flags, ...) \ +- do { \ +- INTERNAL_TRACE_EVENT_GET_CATEGORY_INFO(category); \ +- if (*INTERNAL_TRACE_EVENT_UID(catstatic)) { \ +- webrtc::trace_event_internal::AddTraceEvent( \ +- phase, INTERNAL_TRACE_EVENT_UID(catstatic), name, \ +- webrtc::trace_event_internal::kNoEventId, flags, ##__VA_ARGS__); \ +- } \ +- } while (0) +- +-// Implementation detail: internal macro to create static category and add begin +-// event if the category is enabled. Also adds the end event when the scope +-// ends. +-#define INTERNAL_TRACE_EVENT_ADD_SCOPED(category, name, ...) \ +- INTERNAL_TRACE_EVENT_GET_CATEGORY_INFO(category); \ +- webrtc::trace_event_internal::TraceEndOnScopeClose INTERNAL_TRACE_EVENT_UID( \ +- profileScope); \ +- if (*INTERNAL_TRACE_EVENT_UID(catstatic)) { \ +- webrtc::trace_event_internal::AddTraceEvent( \ +- TRACE_EVENT_PHASE_BEGIN, INTERNAL_TRACE_EVENT_UID(catstatic), name, \ +- webrtc::trace_event_internal::kNoEventId, TRACE_EVENT_FLAG_NONE, \ +- ##__VA_ARGS__); \ +- INTERNAL_TRACE_EVENT_UID(profileScope) \ +- .Initialize(INTERNAL_TRACE_EVENT_UID(catstatic), name); \ +- } +- +-// Implementation detail: internal macro to create static category and add +-// event if the category is enabled. +-#define INTERNAL_TRACE_EVENT_ADD_WITH_ID(phase, category, name, id, flags, \ +- ...) \ +- do { \ +- INTERNAL_TRACE_EVENT_GET_CATEGORY_INFO(category); \ +- if (*INTERNAL_TRACE_EVENT_UID(catstatic)) { \ +- unsigned char trace_event_flags = flags | TRACE_EVENT_FLAG_HAS_ID; \ +- webrtc::trace_event_internal::TraceID trace_event_trace_id( \ +- id, &trace_event_flags); \ +- webrtc::trace_event_internal::AddTraceEvent( \ +- phase, INTERNAL_TRACE_EVENT_UID(catstatic), name, \ +- trace_event_trace_id.data(), trace_event_flags, ##__VA_ARGS__); \ +- } \ +- } while (0) +- +-// Notes regarding the following definitions: +-// New values can be added and propagated to third party libraries, but existing +-// definitions must never be changed, because third party libraries may use old +-// definitions. +- +-// Phase indicates the nature of an event entry. E.g. part of a begin/end pair. +-#define TRACE_EVENT_PHASE_BEGIN ('B') +-#define TRACE_EVENT_PHASE_END ('E') +-#define TRACE_EVENT_PHASE_INSTANT ('I') +-#define TRACE_EVENT_PHASE_ASYNC_BEGIN ('S') +-#define TRACE_EVENT_PHASE_ASYNC_STEP ('T') +-#define TRACE_EVENT_PHASE_ASYNC_END ('F') +-#define TRACE_EVENT_PHASE_FLOW_BEGIN ('s') +-#define TRACE_EVENT_PHASE_FLOW_STEP ('t') +-#define TRACE_EVENT_PHASE_FLOW_END ('f') +-#define TRACE_EVENT_PHASE_METADATA ('M') +-#define TRACE_EVENT_PHASE_COUNTER ('C') +- +-// Flags for changing the behavior of TRACE_EVENT_API_ADD_TRACE_EVENT. +-#define TRACE_EVENT_FLAG_NONE (static_cast(0)) +-#define TRACE_EVENT_FLAG_COPY (static_cast(1 << 0)) +-#define TRACE_EVENT_FLAG_HAS_ID (static_cast(1 << 1)) +-#define TRACE_EVENT_FLAG_MANGLE_ID (static_cast(1 << 2)) +- +-namespace webrtc { +-namespace trace_event_internal { +- +-// Specify these values when the corresponding argument of AddTraceEvent is not +-// used. +-const int kZeroNumArgs = 0; +-const unsigned long long kNoEventId = 0; +- +-// TraceID encapsulates an ID that can either be an integer or pointer. 
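Put together, the scoped helper above is what makes TRACE_EVENT0 emit a begin/end pair for a whole function body, as in this minimal sketch (function and category names are hypothetical):

#include "rtc_base/trace_event.h"

void DoSomethingCostly() {
  // Emits a 'B' event here; the scope object created by
  // INTERNAL_TRACE_EVENT_ADD_SCOPED emits the matching 'E' event on return.
  TRACE_EVENT0("webrtc", "DoSomethingCostly");
  // ... expensive work ...
}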
Pointers +-// are mangled with the Process ID so that they are unlikely to collide when the +-// same pointer is used on different processes. +-class TraceID { +- public: +- class ForceMangle { +- public: +- explicit ForceMangle(unsigned long long id) : data_(id) {} +- explicit ForceMangle(unsigned long id) : data_(id) {} +- explicit ForceMangle(unsigned int id) : data_(id) {} +- explicit ForceMangle(unsigned short id) : data_(id) {} +- explicit ForceMangle(unsigned char id) : data_(id) {} +- explicit ForceMangle(long long id) +- : data_(static_cast(id)) {} +- explicit ForceMangle(long id) +- : data_(static_cast(id)) {} +- explicit ForceMangle(int id) : data_(static_cast(id)) {} +- explicit ForceMangle(short id) +- : data_(static_cast(id)) {} +- explicit ForceMangle(signed char id) +- : data_(static_cast(id)) {} +- +- unsigned long long data() const { return data_; } +- +- private: +- unsigned long long data_; +- }; +- +- explicit TraceID(const void* id, unsigned char* flags) +- : data_( +- static_cast(reinterpret_cast(id))) { +- *flags |= TRACE_EVENT_FLAG_MANGLE_ID; +- } +- explicit TraceID(ForceMangle id, unsigned char* flags) : data_(id.data()) { +- *flags |= TRACE_EVENT_FLAG_MANGLE_ID; +- } +- explicit TraceID(unsigned long long id, unsigned char* flags) : data_(id) { +- (void)flags; +- } +- explicit TraceID(unsigned long id, unsigned char* flags) : data_(id) { +- (void)flags; +- } +- explicit TraceID(unsigned int id, unsigned char* flags) : data_(id) { +- (void)flags; +- } +- explicit TraceID(unsigned short id, unsigned char* flags) : data_(id) { +- (void)flags; +- } +- explicit TraceID(unsigned char id, unsigned char* flags) : data_(id) { +- (void)flags; +- } +- explicit TraceID(long long id, unsigned char* flags) +- : data_(static_cast(id)) { +- (void)flags; +- } +- explicit TraceID(long id, unsigned char* flags) +- : data_(static_cast(id)) { +- (void)flags; +- } +- explicit TraceID(int id, unsigned char* flags) +- : data_(static_cast(id)) { +- (void)flags; +- } +- explicit TraceID(short id, unsigned char* flags) +- : data_(static_cast(id)) { +- (void)flags; +- } +- explicit TraceID(signed char id, unsigned char* flags) +- : data_(static_cast(id)) { +- (void)flags; +- } +- +- unsigned long long data() const { return data_; } +- +- private: +- unsigned long long data_; +-}; +- +-// Simple union to store various types as unsigned long long. +-union TraceValueUnion { +- bool as_bool; +- unsigned long long as_uint; +- long long as_int; +- double as_double; +- const void* as_pointer; +- const char* as_string; +-}; +- +-// Simple container for const char* that should be copied instead of retained. +-class TraceStringWithCopy { +- public: +- explicit TraceStringWithCopy(const char* str) : str_(str) {} +- operator const char*() const { return str_; } +- +- private: +- const char* str_; +-}; +- +-// Define SetTraceValue for each allowed type. It stores the type and +-// value in the return arguments. This allows this API to avoid declaring any +-// structures so that it is portable to third_party libraries. +-#define INTERNAL_DECLARE_SET_TRACE_VALUE(actual_type, union_member, \ +- value_type_id) \ +- static inline void SetTraceValue(actual_type arg, unsigned char* type, \ +- unsigned long long* value) { \ +- TraceValueUnion type_value; \ +- type_value.union_member = arg; \ +- *type = value_type_id; \ +- *value = type_value.as_uint; \ +- } +-// Simpler form for int types that can be safely casted. 
+-#define INTERNAL_DECLARE_SET_TRACE_VALUE_INT(actual_type, value_type_id) \
+- static inline void SetTraceValue(actual_type arg, unsigned char* type, \
+- unsigned long long* value) { \
+- *type = value_type_id; \
+- *value = static_cast<unsigned long long>(arg); \
+- }
+-
+-INTERNAL_DECLARE_SET_TRACE_VALUE_INT(unsigned long long, TRACE_VALUE_TYPE_UINT)
+-INTERNAL_DECLARE_SET_TRACE_VALUE_INT(unsigned long, TRACE_VALUE_TYPE_UINT)
+-INTERNAL_DECLARE_SET_TRACE_VALUE_INT(unsigned int, TRACE_VALUE_TYPE_UINT)
+-INTERNAL_DECLARE_SET_TRACE_VALUE_INT(unsigned short, TRACE_VALUE_TYPE_UINT)
+-INTERNAL_DECLARE_SET_TRACE_VALUE_INT(unsigned char, TRACE_VALUE_TYPE_UINT)
+-INTERNAL_DECLARE_SET_TRACE_VALUE_INT(long long, TRACE_VALUE_TYPE_INT)
+-INTERNAL_DECLARE_SET_TRACE_VALUE_INT(long, TRACE_VALUE_TYPE_INT)
+-INTERNAL_DECLARE_SET_TRACE_VALUE_INT(int, TRACE_VALUE_TYPE_INT)
+-INTERNAL_DECLARE_SET_TRACE_VALUE_INT(short, TRACE_VALUE_TYPE_INT)
+-INTERNAL_DECLARE_SET_TRACE_VALUE_INT(signed char, TRACE_VALUE_TYPE_INT)
+-INTERNAL_DECLARE_SET_TRACE_VALUE(bool, as_bool, TRACE_VALUE_TYPE_BOOL)
+-INTERNAL_DECLARE_SET_TRACE_VALUE(double, as_double, TRACE_VALUE_TYPE_DOUBLE)
+-INTERNAL_DECLARE_SET_TRACE_VALUE(const void*,
+- as_pointer,
+- TRACE_VALUE_TYPE_POINTER)
+-INTERNAL_DECLARE_SET_TRACE_VALUE(const char*,
+- as_string,
+- TRACE_VALUE_TYPE_STRING)
+-INTERNAL_DECLARE_SET_TRACE_VALUE(const TraceStringWithCopy&,
+- as_string,
+- TRACE_VALUE_TYPE_COPY_STRING)
+-
+-#undef INTERNAL_DECLARE_SET_TRACE_VALUE
+-#undef INTERNAL_DECLARE_SET_TRACE_VALUE_INT
+-
+-// std::string version of SetTraceValue so that trace arguments can be strings.
+-static inline void SetTraceValue(const std::string& arg,
+- unsigned char* type,
+- unsigned long long* value) {
+- TraceValueUnion type_value;
+- type_value.as_string = arg.c_str();
+- *type = TRACE_VALUE_TYPE_COPY_STRING;
+- *value = type_value.as_uint;
+-}
+-
+-// These AddTraceEvent template functions are defined here instead of in the
+-// macro, because the arg_values could be temporary objects, such as
+-// std::string. In order to store pointers to the internal c_str and pass
+-// through to the tracing API, the arg_values must live throughout
+-// these procedures.
+- +-static inline void AddTraceEvent(char phase, +- const unsigned char* category_enabled, +- const char* name, +- unsigned long long id, +- unsigned char flags) { +- TRACE_EVENT_API_ADD_TRACE_EVENT(phase, category_enabled, name, id, +- kZeroNumArgs, nullptr, nullptr, nullptr, +- flags); +-} +- +-template +-static inline void AddTraceEvent(char phase, +- const unsigned char* category_enabled, +- const char* name, +- unsigned long long id, +- unsigned char flags, +- const char* arg1_name, +- const ARG1_TYPE& arg1_val) { +- const int num_args = 1; +- unsigned char arg_types[1]; +- unsigned long long arg_values[1]; +- SetTraceValue(arg1_val, &arg_types[0], &arg_values[0]); +- TRACE_EVENT_API_ADD_TRACE_EVENT(phase, category_enabled, name, id, num_args, +- &arg1_name, arg_types, arg_values, flags); +-} +- +-template +-static inline void AddTraceEvent(char phase, +- const unsigned char* category_enabled, +- const char* name, +- unsigned long long id, +- unsigned char flags, +- const char* arg1_name, +- const ARG1_TYPE& arg1_val, +- const char* arg2_name, +- const ARG2_TYPE& arg2_val) { +- const int num_args = 2; +- const char* arg_names[2] = {arg1_name, arg2_name}; +- unsigned char arg_types[2]; +- unsigned long long arg_values[2]; +- SetTraceValue(arg1_val, &arg_types[0], &arg_values[0]); +- SetTraceValue(arg2_val, &arg_types[1], &arg_values[1]); +- TRACE_EVENT_API_ADD_TRACE_EVENT(phase, category_enabled, name, id, num_args, +- arg_names, arg_types, arg_values, flags); +-} +- +-// Used by TRACE_EVENTx macro. Do not use directly. +-class TraceEndOnScopeClose { +- public: +- // Note: members of data_ intentionally left uninitialized. See Initialize. +- TraceEndOnScopeClose() : p_data_(nullptr) {} +- ~TraceEndOnScopeClose() { +- if (p_data_) +- AddEventIfEnabled(); +- } +- +- void Initialize(const unsigned char* category_enabled, const char* name) { +- data_.category_enabled = category_enabled; +- data_.name = name; +- p_data_ = &data_; +- } +- +- private: +- // Add the end event if the category is still enabled. +- void AddEventIfEnabled() { +- // Only called when p_data_ is non-null. +- if (*p_data_->category_enabled) { +- TRACE_EVENT_API_ADD_TRACE_EVENT(TRACE_EVENT_PHASE_END, +- p_data_->category_enabled, p_data_->name, +- kNoEventId, kZeroNumArgs, nullptr, +- nullptr, nullptr, TRACE_EVENT_FLAG_NONE); +- } +- } +- +- // This Data struct workaround is to avoid initializing all the members +- // in Data during construction of this object, since this object is always +- // constructed, even when tracing is disabled. If the members of Data were +- // members of this class instead, compiler warnings occur about potential +- // uninitialized accesses. +- struct Data { +- const unsigned char* category_enabled; +- const char* name; +- }; +- Data* p_data_; +- Data data_; +-}; +- +-} // namespace trace_event_internal +-} // namespace webrtc +-#else +- +-//////////////////////////////////////////////////////////////////////////////// +-// This section defines no-op alternatives to the tracing macros when +-// RTC_DISABLE_TRACE_EVENTS is defined. 
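Given the note above about temporary std::string arguments, a brief sketch of the two ways string arguments are passed (deep-copied std::string vs. TRACE_STR_COPY for a const char*); the category, event names, and function are hypothetical:

#include <string>

#include "rtc_base/trace_event.h"

void LogCodecChoice(const std::string& codec) {
  // std::string argument values are always deep-copied by the trace system.
  TRACE_EVENT1("webrtc", "LogCodecChoice", "codec", codec);
  // A const char* is only referenced by default; TRACE_STR_COPY forces a copy
  // when the pointed-to buffer may not outlive the event.
  TRACE_EVENT1("webrtc", "LogCodecChoiceCopy", "codec",
               TRACE_STR_COPY(codec.c_str()));
}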
+- +-#define RTC_NOOP() \ +- do { \ +- } while (0) +- +-#define TRACE_STR_COPY(str) RTC_NOOP() +- +-#define TRACE_DISABLED_BY_DEFAULT(name) "disabled-by-default-" name +- +-#define TRACE_ID_MANGLE(id) 0 +- +-#define TRACE_EVENT0(category, name) RTC_NOOP() +-#define TRACE_EVENT1(category, name, arg1_name, arg1_val) RTC_NOOP() +-#define TRACE_EVENT2(category, name, arg1_name, arg1_val, arg2_name, arg2_val) \ +- RTC_NOOP() +- +-#define TRACE_EVENT_INSTANT0(category, name) RTC_NOOP() +-#define TRACE_EVENT_INSTANT1(category, name, arg1_name, arg1_val) RTC_NOOP() +- +-#define TRACE_EVENT_INSTANT2(category, name, arg1_name, arg1_val, arg2_name, \ +- arg2_val) \ +- RTC_NOOP() +- +-#define TRACE_EVENT_COPY_INSTANT0(category, name) RTC_NOOP() +-#define TRACE_EVENT_COPY_INSTANT1(category, name, arg1_name, arg1_val) \ +- RTC_NOOP() +-#define TRACE_EVENT_COPY_INSTANT2(category, name, arg1_name, arg1_val, \ +- arg2_name, arg2_val) \ +- RTC_NOOP() +- +-#define TRACE_EVENT_BEGIN0(category, name) RTC_NOOP() +-#define TRACE_EVENT_BEGIN1(category, name, arg1_name, arg1_val) RTC_NOOP() +-#define TRACE_EVENT_BEGIN2(category, name, arg1_name, arg1_val, arg2_name, \ +- arg2_val) \ +- RTC_NOOP() +-#define TRACE_EVENT_COPY_BEGIN0(category, name) RTC_NOOP() +-#define TRACE_EVENT_COPY_BEGIN1(category, name, arg1_name, arg1_val) RTC_NOOP() +-#define TRACE_EVENT_COPY_BEGIN2(category, name, arg1_name, arg1_val, \ +- arg2_name, arg2_val) \ +- RTC_NOOP() +- +-#define TRACE_EVENT_END0(category, name) RTC_NOOP() +-#define TRACE_EVENT_END1(category, name, arg1_name, arg1_val) RTC_NOOP() +-#define TRACE_EVENT_END2(category, name, arg1_name, arg1_val, arg2_name, \ +- arg2_val) \ +- RTC_NOOP() +-#define TRACE_EVENT_COPY_END0(category, name) RTC_NOOP() +-#define TRACE_EVENT_COPY_END1(category, name, arg1_name, arg1_val) RTC_NOOP() +-#define TRACE_EVENT_COPY_END2(category, name, arg1_name, arg1_val, arg2_name, \ +- arg2_val) \ +- RTC_NOOP() +- +-#define TRACE_COUNTER1(category, name, value) RTC_NOOP() +-#define TRACE_COPY_COUNTER1(category, name, value) RTC_NOOP() +- +-#define TRACE_COUNTER2(category, name, value1_name, value1_val, value2_name, \ +- value2_val) \ +- RTC_NOOP() +-#define TRACE_COPY_COUNTER2(category, name, value1_name, value1_val, \ +- value2_name, value2_val) \ +- RTC_NOOP() +- +-#define TRACE_COUNTER_ID1(category, name, id, value) RTC_NOOP() +-#define TRACE_COPY_COUNTER_ID1(category, name, id, value) RTC_NOOP() +- +-#define TRACE_COUNTER_ID2(category, name, id, value1_name, value1_val, \ +- value2_name, value2_val) \ +- RTC_NOOP() +-#define TRACE_COPY_COUNTER_ID2(category, name, id, value1_name, value1_val, \ +- value2_name, value2_val) \ +- RTC_NOOP() +- +-#define TRACE_EVENT_ASYNC_BEGIN0(category, name, id) RTC_NOOP() +-#define TRACE_EVENT_ASYNC_BEGIN1(category, name, id, arg1_name, arg1_val) \ +- RTC_NOOP() +-#define TRACE_EVENT_ASYNC_BEGIN2(category, name, id, arg1_name, arg1_val, \ +- arg2_name, arg2_val) \ +- RTC_NOOP() +-#define TRACE_EVENT_COPY_ASYNC_BEGIN0(category, name, id) RTC_NOOP() +-#define TRACE_EVENT_COPY_ASYNC_BEGIN1(category, name, id, arg1_name, arg1_val) \ +- RTC_NOOP() +-#define TRACE_EVENT_COPY_ASYNC_BEGIN2(category, name, id, arg1_name, arg1_val, \ +- arg2_name, arg2_val) \ +- RTC_NOOP() +- +-#define TRACE_EVENT_ASYNC_STEP0(category, name, id, step) RTC_NOOP() +-#define TRACE_EVENT_ASYNC_STEP1(category, name, id, step, arg1_name, arg1_val) \ +- RTC_NOOP() +-#define TRACE_EVENT_COPY_ASYNC_STEP0(category, name, id, step) RTC_NOOP() +-#define TRACE_EVENT_COPY_ASYNC_STEP1(category, name, id, 
step, arg1_name, \ +- arg1_val) \ +- RTC_NOOP() +- +-#define TRACE_EVENT_ASYNC_END0(category, name, id) RTC_NOOP() +-#define TRACE_EVENT_ASYNC_END1(category, name, id, arg1_name, arg1_val) \ +- RTC_NOOP() +-#define TRACE_EVENT_ASYNC_END2(category, name, id, arg1_name, arg1_val, \ +- arg2_name, arg2_val) \ +- RTC_NOOP() +-#define TRACE_EVENT_COPY_ASYNC_END0(category, name, id) RTC_NOOP() +-#define TRACE_EVENT_COPY_ASYNC_END1(category, name, id, arg1_name, arg1_val) \ +- RTC_NOOP() +-#define TRACE_EVENT_COPY_ASYNC_END2(category, name, id, arg1_name, arg1_val, \ +- arg2_name, arg2_val) \ +- RTC_NOOP() +- +-#define TRACE_EVENT_FLOW_BEGIN0(category, name, id) RTC_NOOP() +-#define TRACE_EVENT_FLOW_BEGIN1(category, name, id, arg1_name, arg1_val) \ +- RTC_NOOP() +-#define TRACE_EVENT_FLOW_BEGIN2(category, name, id, arg1_name, arg1_val, \ +- arg2_name, arg2_val) \ +- RTC_NOOP() +-#define TRACE_EVENT_COPY_FLOW_BEGIN0(category, name, id) RTC_NOOP() +-#define TRACE_EVENT_COPY_FLOW_BEGIN1(category, name, id, arg1_name, arg1_val) \ +- RTC_NOOP() +-#define TRACE_EVENT_COPY_FLOW_BEGIN2(category, name, id, arg1_name, arg1_val, \ +- arg2_name, arg2_val) \ +- RTC_NOOP() +- +-#define TRACE_EVENT_FLOW_STEP0(category, name, id, step) RTC_NOOP() +-#define TRACE_EVENT_FLOW_STEP1(category, name, id, step, arg1_name, arg1_val) \ +- RTC_NOOP() +-#define TRACE_EVENT_COPY_FLOW_STEP0(category, name, id, step) RTC_NOOP() +-#define TRACE_EVENT_COPY_FLOW_STEP1(category, name, id, step, arg1_name, \ +- arg1_val) \ +- RTC_NOOP() +- +-#define TRACE_EVENT_FLOW_END0(category, name, id) RTC_NOOP() +-#define TRACE_EVENT_FLOW_END1(category, name, id, arg1_name, arg1_val) \ +- RTC_NOOP() +-#define TRACE_EVENT_FLOW_END2(category, name, id, arg1_name, arg1_val, \ +- arg2_name, arg2_val) \ +- RTC_NOOP() +-#define TRACE_EVENT_COPY_FLOW_END0(category, name, id) RTC_NOOP() +-#define TRACE_EVENT_COPY_FLOW_END1(category, name, id, arg1_name, arg1_val) \ +- RTC_NOOP() +-#define TRACE_EVENT_COPY_FLOW_END2(category, name, id, arg1_name, arg1_val, \ +- arg2_name, arg2_val) \ +- RTC_NOOP() +- +-#define TRACE_EVENT_API_GET_CATEGORY_ENABLED "" +- +-#define TRACE_EVENT_API_ADD_TRACE_EVENT RTC_NOOP() +- +-#endif // RTC_TRACE_EVENTS_ENABLED +- +-#endif // RTC_BASE_TRACE_EVENT_H_ ++// This header is diverted to a similar header in Gecko, that is defining the ++// same macros, modified to talk to the Gecko Profiler. ++#include "GeckoTraceEvent.h" -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0048.patch b/third_party/libwebrtc/moz-patch-stack/0048.patch index 0214c1e7998b..85bdc9b64523 100644 --- a/third_party/libwebrtc/moz-patch-stack/0048.patch +++ b/third_party/libwebrtc/moz-patch-stack/0048.patch @@ -1,1073 +1,30 @@ -From: Paul Adenot -Date: Fri, 18 Aug 2023 13:13:36 -0500 -Subject: Bug 1714577 - Part 6 - Copy WebRTC's trace_event.h to Gecko, - verbatim, and redirect the original trace_event.h to it. r=gerald +From: Nico Grunbaum +Date: Thu, 28 Oct 2021 18:13:00 +0000 +Subject: Bug 1729367 - P6 - Restore PID recording post cherry-pick;r=mjf -Differential Revision: https://phabricator.services.mozilla.com/D116843 +This restores the code from P0, which was removed to make cherry-picking 439ffe462a66ad9fa9a251b265e4ab28c2647d25 and 449a78b1e20ea85b11f967cf3a184ee610ce21c3 easier. 
+ +Differential Revision: https://phabricator.services.mozilla.com/D129714 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/72a83cb2e571023cd4150bbdef5be5455ce851f4 --- - rtc_base/trace_event.h | 1055 +--------------------------------------- - 1 file changed, 3 insertions(+), 1052 deletions(-) + modules/desktop_capture/win/window_capture_utils.cc | 4 ++++ + 1 file changed, 4 insertions(+) -diff --git a/rtc_base/trace_event.h b/rtc_base/trace_event.h -index 6689bc0c37..b34df0c93f 100644 ---- a/rtc_base/trace_event.h -+++ b/rtc_base/trace_event.h -@@ -1,1052 +1,3 @@ --// Copyright (c) 2012 The Chromium Authors. All rights reserved. --// Use of this source code is governed by a BSD-style license that can be --// found in the LICENSE file under third_party_mods/chromium or at: --// http://src.chromium.org/svn/trunk/src/LICENSE -- --#ifndef RTC_BASE_TRACE_EVENT_H_ --#define RTC_BASE_TRACE_EVENT_H_ -- --#include -- --#include "rtc_base/event_tracer.h" -- --#if defined(TRACE_EVENT0) --#error "Another copy of trace_event.h has already been included." --#endif -- --#if defined(RTC_DISABLE_TRACE_EVENTS) --#define RTC_TRACE_EVENTS_ENABLED 0 --#else --#define RTC_TRACE_EVENTS_ENABLED 1 --#endif -- --// Type values for identifying types in the TraceValue union. --#define TRACE_VALUE_TYPE_BOOL (static_cast(1)) --#define TRACE_VALUE_TYPE_UINT (static_cast(2)) --#define TRACE_VALUE_TYPE_INT (static_cast(3)) --#define TRACE_VALUE_TYPE_DOUBLE (static_cast(4)) --#define TRACE_VALUE_TYPE_POINTER (static_cast(5)) --#define TRACE_VALUE_TYPE_STRING (static_cast(6)) --#define TRACE_VALUE_TYPE_COPY_STRING (static_cast(7)) -- --#if RTC_TRACE_EVENTS_ENABLED -- --// Extracted from Chromium's src/base/debug/trace_event.h. -- --// This header is designed to give you trace_event macros without specifying --// how the events actually get collected and stored. If you need to expose trace --// event to some other universe, you can copy-and-paste this file, --// implement the TRACE_EVENT_API macros, and do any other necessary fixup for --// the target platform. The end result is that multiple libraries can funnel --// events through to a shared trace event collector. -- --// Trace events are for tracking application performance and resource usage. --// Macros are provided to track: --// Begin and end of function calls --// Counters --// --// Events are issued against categories. Whereas RTC_LOG's --// categories are statically defined, TRACE categories are created --// implicitly with a string. For example: --// TRACE_EVENT_INSTANT0("MY_SUBSYSTEM", "SomeImportantEvent") --// --// Events can be INSTANT, or can be pairs of BEGIN and END in the same scope: --// TRACE_EVENT_BEGIN0("MY_SUBSYSTEM", "SomethingCostly") --// doSomethingCostly() --// TRACE_EVENT_END0("MY_SUBSYSTEM", "SomethingCostly") --// Note: our tools can't always determine the correct BEGIN/END pairs unless --// these are used in the same scope. Use ASYNC_BEGIN/ASYNC_END macros if you --// need them to be in separate scopes. --// --// A common use case is to trace entire function scopes. This --// issues a trace BEGIN and END automatically: --// void doSomethingCostly() { --// TRACE_EVENT0("MY_SUBSYSTEM", "doSomethingCostly"); --// ... --// } --// --// Additional parameters can be associated with an event: --// void doSomethingCostly2(int howMuch) { --// TRACE_EVENT1("MY_SUBSYSTEM", "doSomethingCostly", --// "howMuch", howMuch); --// ... 
--// } --// --// The trace system will automatically add to this information the --// current process id, thread id, and a timestamp in microseconds. --// --// To trace an asynchronous procedure such as an IPC send/receive, use --// ASYNC_BEGIN and ASYNC_END: --// [single threaded sender code] --// static int send_count = 0; --// ++send_count; --// TRACE_EVENT_ASYNC_BEGIN0("ipc", "message", send_count); --// Send(new MyMessage(send_count)); --// [receive code] --// void OnMyMessage(send_count) { --// TRACE_EVENT_ASYNC_END0("ipc", "message", send_count); --// } --// The third parameter is a unique ID to match ASYNC_BEGIN/ASYNC_END pairs. --// ASYNC_BEGIN and ASYNC_END can occur on any thread of any traced process. --// Pointers can be used for the ID parameter, and they will be mangled --// internally so that the same pointer on two different processes will not --// match. For example: --// class MyTracedClass { --// public: --// MyTracedClass() { --// TRACE_EVENT_ASYNC_BEGIN0("category", "MyTracedClass", this); --// } --// ~MyTracedClass() { --// TRACE_EVENT_ASYNC_END0("category", "MyTracedClass", this); --// } --// } --// --// Trace event also supports counters, which is a way to track a quantity --// as it varies over time. Counters are created with the following macro: --// TRACE_COUNTER1("MY_SUBSYSTEM", "myCounter", g_myCounterValue); --// --// Counters are process-specific. The macro itself can be issued from any --// thread, however. --// --// Sometimes, you want to track two counters at once. You can do this with two --// counter macros: --// TRACE_COUNTER1("MY_SUBSYSTEM", "myCounter0", g_myCounterValue[0]); --// TRACE_COUNTER1("MY_SUBSYSTEM", "myCounter1", g_myCounterValue[1]); --// Or you can do it with a combined macro: --// TRACE_COUNTER2("MY_SUBSYSTEM", "myCounter", --// "bytesPinned", g_myCounterValue[0], --// "bytesAllocated", g_myCounterValue[1]); --// This indicates to the tracing UI that these counters should be displayed --// in a single graph, as a summed area chart. --// --// Since counters are in a global namespace, you may want to disembiguate with a --// unique ID, by using the TRACE_COUNTER_ID* variations. --// --// By default, trace collection is compiled in, but turned off at runtime. --// Collecting trace data is the responsibility of the embedding --// application. In Chrome's case, navigating to about:tracing will turn on --// tracing and display data collected across all active processes. --// --// --// Memory scoping note: --// Tracing copies the pointers, not the string content, of the strings passed --// in for category, name, and arg_names. Thus, the following code will --// cause problems: --// char* str = strdup("impprtantName"); --// TRACE_EVENT_INSTANT0("SUBSYSTEM", str); // BAD! --// free(str); // Trace system now has dangling pointer --// --// To avoid this issue with the `name` and `arg_name` parameters, use the --// TRACE_EVENT_COPY_XXX overloads of the macros at additional runtime overhead. --// Notes: The category must always be in a long-lived char* (i.e. static const). --// The `arg_values`, when used, are always deep copied with the _COPY --// macros. 
--// --// When are string argument values copied: --// const char* arg_values are only referenced by default: --// TRACE_EVENT1("category", "name", --// "arg1", "literal string is only referenced"); --// Use TRACE_STR_COPY to force copying of a const char*: --// TRACE_EVENT1("category", "name", --// "arg1", TRACE_STR_COPY("string will be copied")); --// std::string arg_values are always copied: --// TRACE_EVENT1("category", "name", --// "arg1", std::string("string will be copied")); --// --// --// Thread Safety: --// Thread safety is provided by methods defined in event_tracer.h. See the file --// for details. -- --// By default, const char* argument values are assumed to have long-lived scope --// and will not be copied. Use this macro to force a const char* to be copied. --#define TRACE_STR_COPY(str) \ -- webrtc::trace_event_internal::TraceStringWithCopy(str) -- --// This will mark the trace event as disabled by default. The user will need --// to explicitly enable the event. --#define TRACE_DISABLED_BY_DEFAULT(name) "disabled-by-default-" name -- --// By default, uint64 ID argument values are not mangled with the Process ID in --// TRACE_EVENT_ASYNC macros. Use this macro to force Process ID mangling. --#define TRACE_ID_MANGLE(id) \ -- webrtc::trace_event_internal::TraceID::ForceMangle(id) -- --// Records a pair of begin and end events called "name" for the current --// scope, with 0, 1 or 2 associated arguments. If the category is not --// enabled, then this does nothing. --// - category and name strings must have application lifetime (statics or --// literals). They may not include " chars. --#define TRACE_EVENT0(category, name) \ -- INTERNAL_TRACE_EVENT_ADD_SCOPED(category, name) --#define TRACE_EVENT1(category, name, arg1_name, arg1_val) \ -- INTERNAL_TRACE_EVENT_ADD_SCOPED(category, name, arg1_name, arg1_val) --#define TRACE_EVENT2(category, name, arg1_name, arg1_val, arg2_name, arg2_val) \ -- INTERNAL_TRACE_EVENT_ADD_SCOPED(category, name, arg1_name, arg1_val, \ -- arg2_name, arg2_val) -- --// Records a single event called "name" immediately, with 0, 1 or 2 --// associated arguments. If the category is not enabled, then this --// does nothing. --// - category and name strings must have application lifetime (statics or --// literals). They may not include " chars. 
--#define TRACE_EVENT_INSTANT0(category, name) \ -- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_INSTANT, category, name, \ -- TRACE_EVENT_FLAG_NONE) --#define TRACE_EVENT_INSTANT1(category, name, arg1_name, arg1_val) \ -- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_INSTANT, category, name, \ -- TRACE_EVENT_FLAG_NONE, arg1_name, arg1_val) --#define TRACE_EVENT_INSTANT2(category, name, arg1_name, arg1_val, arg2_name, \ -- arg2_val) \ -- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_INSTANT, category, name, \ -- TRACE_EVENT_FLAG_NONE, arg1_name, arg1_val, \ -- arg2_name, arg2_val) --#define TRACE_EVENT_COPY_INSTANT0(category, name) \ -- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_INSTANT, category, name, \ -- TRACE_EVENT_FLAG_COPY) --#define TRACE_EVENT_COPY_INSTANT1(category, name, arg1_name, arg1_val) \ -- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_INSTANT, category, name, \ -- TRACE_EVENT_FLAG_COPY, arg1_name, arg1_val) --#define TRACE_EVENT_COPY_INSTANT2(category, name, arg1_name, arg1_val, \ -- arg2_name, arg2_val) \ -- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_INSTANT, category, name, \ -- TRACE_EVENT_FLAG_COPY, arg1_name, arg1_val, \ -- arg2_name, arg2_val) -- --// Records a single BEGIN event called "name" immediately, with 0, 1 or 2 --// associated arguments. If the category is not enabled, then this --// does nothing. --// - category and name strings must have application lifetime (statics or --// literals). They may not include " chars. --#define TRACE_EVENT_BEGIN0(category, name) \ -- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_BEGIN, category, name, \ -- TRACE_EVENT_FLAG_NONE) --#define TRACE_EVENT_BEGIN1(category, name, arg1_name, arg1_val) \ -- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_BEGIN, category, name, \ -- TRACE_EVENT_FLAG_NONE, arg1_name, arg1_val) --#define TRACE_EVENT_BEGIN2(category, name, arg1_name, arg1_val, arg2_name, \ -- arg2_val) \ -- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_BEGIN, category, name, \ -- TRACE_EVENT_FLAG_NONE, arg1_name, arg1_val, \ -- arg2_name, arg2_val) --#define TRACE_EVENT_COPY_BEGIN0(category, name) \ -- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_BEGIN, category, name, \ -- TRACE_EVENT_FLAG_COPY) --#define TRACE_EVENT_COPY_BEGIN1(category, name, arg1_name, arg1_val) \ -- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_BEGIN, category, name, \ -- TRACE_EVENT_FLAG_COPY, arg1_name, arg1_val) --#define TRACE_EVENT_COPY_BEGIN2(category, name, arg1_name, arg1_val, \ -- arg2_name, arg2_val) \ -- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_BEGIN, category, name, \ -- TRACE_EVENT_FLAG_COPY, arg1_name, arg1_val, \ -- arg2_name, arg2_val) -- --// Records a single END event for "name" immediately. If the category --// is not enabled, then this does nothing. --// - category and name strings must have application lifetime (statics or --// literals). They may not include " chars. 
--#define TRACE_EVENT_END0(category, name) \ -- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_END, category, name, \ -- TRACE_EVENT_FLAG_NONE) --#define TRACE_EVENT_END1(category, name, arg1_name, arg1_val) \ -- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_END, category, name, \ -- TRACE_EVENT_FLAG_NONE, arg1_name, arg1_val) --#define TRACE_EVENT_END2(category, name, arg1_name, arg1_val, arg2_name, \ -- arg2_val) \ -- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_END, category, name, \ -- TRACE_EVENT_FLAG_NONE, arg1_name, arg1_val, \ -- arg2_name, arg2_val) --#define TRACE_EVENT_COPY_END0(category, name) \ -- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_END, category, name, \ -- TRACE_EVENT_FLAG_COPY) --#define TRACE_EVENT_COPY_END1(category, name, arg1_name, arg1_val) \ -- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_END, category, name, \ -- TRACE_EVENT_FLAG_COPY, arg1_name, arg1_val) --#define TRACE_EVENT_COPY_END2(category, name, arg1_name, arg1_val, arg2_name, \ -- arg2_val) \ -- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_END, category, name, \ -- TRACE_EVENT_FLAG_COPY, arg1_name, arg1_val, \ -- arg2_name, arg2_val) -- --// Records the value of a counter called "name" immediately. Value --// must be representable as a 32 bit integer. --// - category and name strings must have application lifetime (statics or --// literals). They may not include " chars. --#define TRACE_COUNTER1(category, name, value) \ -- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_COUNTER, category, name, \ -- TRACE_EVENT_FLAG_NONE, "value", \ -- static_cast(value)) --#define TRACE_COPY_COUNTER1(category, name, value) \ -- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_COUNTER, category, name, \ -- TRACE_EVENT_FLAG_COPY, "value", \ -- static_cast(value)) -- --// Records the values of a multi-parted counter called "name" immediately. --// The UI will treat value1 and value2 as parts of a whole, displaying their --// values as a stacked-bar chart. --// - category and name strings must have application lifetime (statics or --// literals). They may not include " chars. --#define TRACE_COUNTER2(category, name, value1_name, value1_val, value2_name, \ -- value2_val) \ -- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_COUNTER, category, name, \ -- TRACE_EVENT_FLAG_NONE, value1_name, \ -- static_cast(value1_val), value2_name, \ -- static_cast(value2_val)) --#define TRACE_COPY_COUNTER2(category, name, value1_name, value1_val, \ -- value2_name, value2_val) \ -- INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_COUNTER, category, name, \ -- TRACE_EVENT_FLAG_COPY, value1_name, \ -- static_cast(value1_val), value2_name, \ -- static_cast(value2_val)) -- --// Records the value of a counter called "name" immediately. Value --// must be representable as a 32 bit integer. --// - category and name strings must have application lifetime (statics or --// literals). They may not include " chars. --// - `id` is used to disambiguate counters with the same name. It must either --// be a pointer or an integer value up to 64 bits. If it's a pointer, the bits --// will be xored with a hash of the process ID so that the same pointer on --// two different processes will not collide. 
--#define TRACE_COUNTER_ID1(category, name, id, value) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_COUNTER, category, name, \ -- id, TRACE_EVENT_FLAG_NONE, "value", \ -- static_cast(value)) --#define TRACE_COPY_COUNTER_ID1(category, name, id, value) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_COUNTER, category, name, \ -- id, TRACE_EVENT_FLAG_COPY, "value", \ -- static_cast(value)) -- --// Records the values of a multi-parted counter called "name" immediately. --// The UI will treat value1 and value2 as parts of a whole, displaying their --// values as a stacked-bar chart. --// - category and name strings must have application lifetime (statics or --// literals). They may not include " chars. --// - `id` is used to disambiguate counters with the same name. It must either --// be a pointer or an integer value up to 64 bits. If it's a pointer, the bits --// will be xored with a hash of the process ID so that the same pointer on --// two different processes will not collide. --#define TRACE_COUNTER_ID2(category, name, id, value1_name, value1_val, \ -- value2_name, value2_val) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_COUNTER, category, name, \ -- id, TRACE_EVENT_FLAG_NONE, value1_name, \ -- static_cast(value1_val), value2_name, \ -- static_cast(value2_val)) --#define TRACE_COPY_COUNTER_ID2(category, name, id, value1_name, value1_val, \ -- value2_name, value2_val) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_COUNTER, category, name, \ -- id, TRACE_EVENT_FLAG_COPY, value1_name, \ -- static_cast(value1_val), value2_name, \ -- static_cast(value2_val)) -- --// Records a single ASYNC_BEGIN event called "name" immediately, with 0, 1 or 2 --// associated arguments. If the category is not enabled, then this --// does nothing. --// - category and name strings must have application lifetime (statics or --// literals). They may not include " chars. --// - `id` is used to match the ASYNC_BEGIN event with the ASYNC_END event. ASYNC --// events are considered to match if their category, name and id values all --// match. `id` must either be a pointer or an integer value up to 64 bits. If --// it's a pointer, the bits will be xored with a hash of the process ID so --// that the same pointer on two different processes will not collide. --// An asynchronous operation can consist of multiple phases. The first phase is --// defined by the ASYNC_BEGIN calls. Additional phases can be defined using the --// ASYNC_STEP macros. When the operation completes, call ASYNC_END. --// An ASYNC trace typically occur on a single thread (if not, they will only be --// drawn on the thread defined in the ASYNC_BEGIN event), but all events in that --// operation must use the same `name` and `id`. Each event can have its own --// args. 
--#define TRACE_EVENT_ASYNC_BEGIN0(category, name, id) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_BEGIN, category, \ -- name, id, TRACE_EVENT_FLAG_NONE) --#define TRACE_EVENT_ASYNC_BEGIN1(category, name, id, arg1_name, arg1_val) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_BEGIN, category, \ -- name, id, TRACE_EVENT_FLAG_NONE, arg1_name, \ -- arg1_val) --#define TRACE_EVENT_ASYNC_BEGIN2(category, name, id, arg1_name, arg1_val, \ -- arg2_name, arg2_val) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_BEGIN, category, \ -- name, id, TRACE_EVENT_FLAG_NONE, arg1_name, \ -- arg1_val, arg2_name, arg2_val) --#define TRACE_EVENT_COPY_ASYNC_BEGIN0(category, name, id) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_BEGIN, category, \ -- name, id, TRACE_EVENT_FLAG_COPY) --#define TRACE_EVENT_COPY_ASYNC_BEGIN1(category, name, id, arg1_name, arg1_val) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_BEGIN, category, \ -- name, id, TRACE_EVENT_FLAG_COPY, arg1_name, \ -- arg1_val) --#define TRACE_EVENT_COPY_ASYNC_BEGIN2(category, name, id, arg1_name, arg1_val, \ -- arg2_name, arg2_val) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_BEGIN, category, \ -- name, id, TRACE_EVENT_FLAG_COPY, arg1_name, \ -- arg1_val, arg2_name, arg2_val) -- --// Records a single ASYNC_STEP event for `step` immediately. If the category --// is not enabled, then this does nothing. The `name` and `id` must match the --// ASYNC_BEGIN event above. The `step` param identifies this step within the --// async event. This should be called at the beginning of the next phase of an --// asynchronous operation. --#define TRACE_EVENT_ASYNC_STEP0(category, name, id, step) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_STEP, category, \ -- name, id, TRACE_EVENT_FLAG_NONE, "step", \ -- step) --#define TRACE_EVENT_ASYNC_STEP1(category, name, id, step, arg1_name, arg1_val) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_STEP, category, \ -- name, id, TRACE_EVENT_FLAG_NONE, "step", \ -- step, arg1_name, arg1_val) --#define TRACE_EVENT_COPY_ASYNC_STEP0(category, name, id, step) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_STEP, category, \ -- name, id, TRACE_EVENT_FLAG_COPY, "step", \ -- step) --#define TRACE_EVENT_COPY_ASYNC_STEP1(category, name, id, step, arg1_name, \ -- arg1_val) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_STEP, category, \ -- name, id, TRACE_EVENT_FLAG_COPY, "step", \ -- step, arg1_name, arg1_val) -- --// Records a single ASYNC_END event for "name" immediately. If the category --// is not enabled, then this does nothing. 
--#define TRACE_EVENT_ASYNC_END0(category, name, id) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_END, category, \ -- name, id, TRACE_EVENT_FLAG_NONE) --#define TRACE_EVENT_ASYNC_END1(category, name, id, arg1_name, arg1_val) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_END, category, \ -- name, id, TRACE_EVENT_FLAG_NONE, arg1_name, \ -- arg1_val) --#define TRACE_EVENT_ASYNC_END2(category, name, id, arg1_name, arg1_val, \ -- arg2_name, arg2_val) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_END, category, \ -- name, id, TRACE_EVENT_FLAG_NONE, arg1_name, \ -- arg1_val, arg2_name, arg2_val) --#define TRACE_EVENT_COPY_ASYNC_END0(category, name, id) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_END, category, \ -- name, id, TRACE_EVENT_FLAG_COPY) --#define TRACE_EVENT_COPY_ASYNC_END1(category, name, id, arg1_name, arg1_val) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_END, category, \ -- name, id, TRACE_EVENT_FLAG_COPY, arg1_name, \ -- arg1_val) --#define TRACE_EVENT_COPY_ASYNC_END2(category, name, id, arg1_name, arg1_val, \ -- arg2_name, arg2_val) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_END, category, \ -- name, id, TRACE_EVENT_FLAG_COPY, arg1_name, \ -- arg1_val, arg2_name, arg2_val) -- --// Records a single FLOW_BEGIN event called "name" immediately, with 0, 1 or 2 --// associated arguments. If the category is not enabled, then this --// does nothing. --// - category and name strings must have application lifetime (statics or --// literals). They may not include " chars. --// - `id` is used to match the FLOW_BEGIN event with the FLOW_END event. FLOW --// events are considered to match if their category, name and id values all --// match. `id` must either be a pointer or an integer value up to 64 bits. If --// it's a pointer, the bits will be xored with a hash of the process ID so --// that the same pointer on two different processes will not collide. --// FLOW events are different from ASYNC events in how they are drawn by the --// tracing UI. A FLOW defines asynchronous data flow, such as posting a task --// (FLOW_BEGIN) and later executing that task (FLOW_END). Expect FLOWs to be --// drawn as lines or arrows from FLOW_BEGIN scopes to FLOW_END scopes. Similar --// to ASYNC, a FLOW can consist of multiple phases. The first phase is defined --// by the FLOW_BEGIN calls. Additional phases can be defined using the FLOW_STEP --// macros. When the operation completes, call FLOW_END. An async operation can --// span threads and processes, but all events in that operation must use the --// same `name` and `id`. Each event can have its own args. 
--#define TRACE_EVENT_FLOW_BEGIN0(category, name, id) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_BEGIN, category, \ -- name, id, TRACE_EVENT_FLAG_NONE) --#define TRACE_EVENT_FLOW_BEGIN1(category, name, id, arg1_name, arg1_val) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_BEGIN, category, \ -- name, id, TRACE_EVENT_FLAG_NONE, arg1_name, \ -- arg1_val) --#define TRACE_EVENT_FLOW_BEGIN2(category, name, id, arg1_name, arg1_val, \ -- arg2_name, arg2_val) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_BEGIN, category, \ -- name, id, TRACE_EVENT_FLAG_NONE, arg1_name, \ -- arg1_val, arg2_name, arg2_val) --#define TRACE_EVENT_COPY_FLOW_BEGIN0(category, name, id) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_BEGIN, category, \ -- name, id, TRACE_EVENT_FLAG_COPY) --#define TRACE_EVENT_COPY_FLOW_BEGIN1(category, name, id, arg1_name, arg1_val) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_BEGIN, category, \ -- name, id, TRACE_EVENT_FLAG_COPY, arg1_name, \ -- arg1_val) --#define TRACE_EVENT_COPY_FLOW_BEGIN2(category, name, id, arg1_name, arg1_val, \ -- arg2_name, arg2_val) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_BEGIN, category, \ -- name, id, TRACE_EVENT_FLAG_COPY, arg1_name, \ -- arg1_val, arg2_name, arg2_val) -- --// Records a single FLOW_STEP event for `step` immediately. If the category --// is not enabled, then this does nothing. The `name` and `id` must match the --// FLOW_BEGIN event above. The `step` param identifies this step within the --// async event. This should be called at the beginning of the next phase of an --// asynchronous operation. --#define TRACE_EVENT_FLOW_STEP0(category, name, id, step) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_STEP, category, \ -- name, id, TRACE_EVENT_FLAG_NONE, "step", \ -- step) --#define TRACE_EVENT_FLOW_STEP1(category, name, id, step, arg1_name, arg1_val) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_STEP, category, \ -- name, id, TRACE_EVENT_FLAG_NONE, "step", \ -- step, arg1_name, arg1_val) --#define TRACE_EVENT_COPY_FLOW_STEP0(category, name, id, step) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_STEP, category, \ -- name, id, TRACE_EVENT_FLAG_COPY, "step", \ -- step) --#define TRACE_EVENT_COPY_FLOW_STEP1(category, name, id, step, arg1_name, \ -- arg1_val) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_STEP, category, \ -- name, id, TRACE_EVENT_FLAG_COPY, "step", \ -- step, arg1_name, arg1_val) -- --// Records a single FLOW_END event for "name" immediately. If the category --// is not enabled, then this does nothing. 
--#define TRACE_EVENT_FLOW_END0(category, name, id) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_END, category, name, \ -- id, TRACE_EVENT_FLAG_NONE) --#define TRACE_EVENT_FLOW_END1(category, name, id, arg1_name, arg1_val) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_END, category, name, \ -- id, TRACE_EVENT_FLAG_NONE, arg1_name, \ -- arg1_val) --#define TRACE_EVENT_FLOW_END2(category, name, id, arg1_name, arg1_val, \ -- arg2_name, arg2_val) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_END, category, name, \ -- id, TRACE_EVENT_FLAG_NONE, arg1_name, \ -- arg1_val, arg2_name, arg2_val) --#define TRACE_EVENT_COPY_FLOW_END0(category, name, id) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_END, category, name, \ -- id, TRACE_EVENT_FLAG_COPY) --#define TRACE_EVENT_COPY_FLOW_END1(category, name, id, arg1_name, arg1_val) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_END, category, name, \ -- id, TRACE_EVENT_FLAG_COPY, arg1_name, \ -- arg1_val) --#define TRACE_EVENT_COPY_FLOW_END2(category, name, id, arg1_name, arg1_val, \ -- arg2_name, arg2_val) \ -- INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_END, category, name, \ -- id, TRACE_EVENT_FLAG_COPY, arg1_name, \ -- arg1_val, arg2_name, arg2_val) -- --//////////////////////////////////////////////////////////////////////////////// --// Implementation specific tracing API definitions. -- --// Get a pointer to the enabled state of the given trace category. Only --// long-lived literal strings should be given as the category name. The returned --// pointer can be held permanently in a local static for example. If the --// unsigned char is non-zero, tracing is enabled. If tracing is enabled, --// TRACE_EVENT_API_ADD_TRACE_EVENT can be called. It's OK if tracing is disabled --// between the load of the tracing state and the call to --// TRACE_EVENT_API_ADD_TRACE_EVENT, because this flag only provides an early out --// for best performance when tracing is disabled. --// const unsigned char* --// TRACE_EVENT_API_GET_CATEGORY_ENABLED(const char* category_name) --#define TRACE_EVENT_API_GET_CATEGORY_ENABLED \ -- webrtc::EventTracer::GetCategoryEnabled -- --// Add a trace event to the platform tracing system. --// void TRACE_EVENT_API_ADD_TRACE_EVENT( --// char phase, --// const unsigned char* category_enabled, --// const char* name, --// unsigned long long id, --// int num_args, --// const char** arg_names, --// const unsigned char* arg_types, --// const unsigned long long* arg_values, --// unsigned char flags) --#define TRACE_EVENT_API_ADD_TRACE_EVENT webrtc::EventTracer::AddTraceEvent -- --//////////////////////////////////////////////////////////////////////////////// -- --// Implementation detail: trace event macros create temporary variables --// to keep instrumentation overhead low. These macros give each temporary --// variable a unique name based on the line number to prevent name collissions. --#define INTERNAL_TRACE_EVENT_UID3(a, b) trace_event_unique_##a##b --#define INTERNAL_TRACE_EVENT_UID2(a, b) INTERNAL_TRACE_EVENT_UID3(a, b) --#define INTERNAL_TRACE_EVENT_UID(name_prefix) \ -- INTERNAL_TRACE_EVENT_UID2(name_prefix, __LINE__) -- --#if WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS --#define INTERNAL_TRACE_EVENT_INFO_TYPE const unsigned char* --#else --#define INTERNAL_TRACE_EVENT_INFO_TYPE static const unsigned char* --#endif // WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS -- --// Implementation detail: internal macro to create static category. 
--#define INTERNAL_TRACE_EVENT_GET_CATEGORY_INFO(category) \ -- INTERNAL_TRACE_EVENT_INFO_TYPE INTERNAL_TRACE_EVENT_UID(catstatic) = \ -- TRACE_EVENT_API_GET_CATEGORY_ENABLED(category); -- --// Implementation detail: internal macro to create static category and add --// event if the category is enabled. --#define INTERNAL_TRACE_EVENT_ADD(phase, category, name, flags, ...) \ -- do { \ -- INTERNAL_TRACE_EVENT_GET_CATEGORY_INFO(category); \ -- if (*INTERNAL_TRACE_EVENT_UID(catstatic)) { \ -- webrtc::trace_event_internal::AddTraceEvent( \ -- phase, INTERNAL_TRACE_EVENT_UID(catstatic), name, \ -- webrtc::trace_event_internal::kNoEventId, flags, ##__VA_ARGS__); \ -- } \ -- } while (0) -- --// Implementation detail: internal macro to create static category and add begin --// event if the category is enabled. Also adds the end event when the scope --// ends. --#define INTERNAL_TRACE_EVENT_ADD_SCOPED(category, name, ...) \ -- INTERNAL_TRACE_EVENT_GET_CATEGORY_INFO(category); \ -- webrtc::trace_event_internal::TraceEndOnScopeClose INTERNAL_TRACE_EVENT_UID( \ -- profileScope); \ -- if (*INTERNAL_TRACE_EVENT_UID(catstatic)) { \ -- webrtc::trace_event_internal::AddTraceEvent( \ -- TRACE_EVENT_PHASE_BEGIN, INTERNAL_TRACE_EVENT_UID(catstatic), name, \ -- webrtc::trace_event_internal::kNoEventId, TRACE_EVENT_FLAG_NONE, \ -- ##__VA_ARGS__); \ -- INTERNAL_TRACE_EVENT_UID(profileScope) \ -- .Initialize(INTERNAL_TRACE_EVENT_UID(catstatic), name); \ -- } -- --// Implementation detail: internal macro to create static category and add --// event if the category is enabled. --#define INTERNAL_TRACE_EVENT_ADD_WITH_ID(phase, category, name, id, flags, \ -- ...) \ -- do { \ -- INTERNAL_TRACE_EVENT_GET_CATEGORY_INFO(category); \ -- if (*INTERNAL_TRACE_EVENT_UID(catstatic)) { \ -- unsigned char trace_event_flags = flags | TRACE_EVENT_FLAG_HAS_ID; \ -- webrtc::trace_event_internal::TraceID trace_event_trace_id( \ -- id, &trace_event_flags); \ -- webrtc::trace_event_internal::AddTraceEvent( \ -- phase, INTERNAL_TRACE_EVENT_UID(catstatic), name, \ -- trace_event_trace_id.data(), trace_event_flags, ##__VA_ARGS__); \ -- } \ -- } while (0) -- --// Notes regarding the following definitions: --// New values can be added and propagated to third party libraries, but existing --// definitions must never be changed, because third party libraries may use old --// definitions. -- --// Phase indicates the nature of an event entry. E.g. part of a begin/end pair. --#define TRACE_EVENT_PHASE_BEGIN ('B') --#define TRACE_EVENT_PHASE_END ('E') --#define TRACE_EVENT_PHASE_INSTANT ('I') --#define TRACE_EVENT_PHASE_ASYNC_BEGIN ('S') --#define TRACE_EVENT_PHASE_ASYNC_STEP ('T') --#define TRACE_EVENT_PHASE_ASYNC_END ('F') --#define TRACE_EVENT_PHASE_FLOW_BEGIN ('s') --#define TRACE_EVENT_PHASE_FLOW_STEP ('t') --#define TRACE_EVENT_PHASE_FLOW_END ('f') --#define TRACE_EVENT_PHASE_METADATA ('M') --#define TRACE_EVENT_PHASE_COUNTER ('C') -- --// Flags for changing the behavior of TRACE_EVENT_API_ADD_TRACE_EVENT. --#define TRACE_EVENT_FLAG_NONE (static_cast(0)) --#define TRACE_EVENT_FLAG_COPY (static_cast(1 << 0)) --#define TRACE_EVENT_FLAG_HAS_ID (static_cast(1 << 1)) --#define TRACE_EVENT_FLAG_MANGLE_ID (static_cast(1 << 2)) -- --namespace webrtc { --namespace trace_event_internal { -- --// Specify these values when the corresponding argument of AddTraceEvent is not --// used. --const int kZeroNumArgs = 0; --const unsigned long long kNoEventId = 0; -- --// TraceID encapsulates an ID that can either be an integer or pointer. 
Pointers --// are mangled with the Process ID so that they are unlikely to collide when the --// same pointer is used on different processes. --class TraceID { -- public: -- class ForceMangle { -- public: -- explicit ForceMangle(unsigned long long id) : data_(id) {} -- explicit ForceMangle(unsigned long id) : data_(id) {} -- explicit ForceMangle(unsigned int id) : data_(id) {} -- explicit ForceMangle(unsigned short id) : data_(id) {} -- explicit ForceMangle(unsigned char id) : data_(id) {} -- explicit ForceMangle(long long id) -- : data_(static_cast(id)) {} -- explicit ForceMangle(long id) -- : data_(static_cast(id)) {} -- explicit ForceMangle(int id) : data_(static_cast(id)) {} -- explicit ForceMangle(short id) -- : data_(static_cast(id)) {} -- explicit ForceMangle(signed char id) -- : data_(static_cast(id)) {} -- -- unsigned long long data() const { return data_; } -- -- private: -- unsigned long long data_; -- }; -- -- explicit TraceID(const void* id, unsigned char* flags) -- : data_( -- static_cast(reinterpret_cast(id))) { -- *flags |= TRACE_EVENT_FLAG_MANGLE_ID; -- } -- explicit TraceID(ForceMangle id, unsigned char* flags) : data_(id.data()) { -- *flags |= TRACE_EVENT_FLAG_MANGLE_ID; -- } -- explicit TraceID(unsigned long long id, unsigned char* flags) : data_(id) { -- (void)flags; -- } -- explicit TraceID(unsigned long id, unsigned char* flags) : data_(id) { -- (void)flags; -- } -- explicit TraceID(unsigned int id, unsigned char* flags) : data_(id) { -- (void)flags; -- } -- explicit TraceID(unsigned short id, unsigned char* flags) : data_(id) { -- (void)flags; -- } -- explicit TraceID(unsigned char id, unsigned char* flags) : data_(id) { -- (void)flags; -- } -- explicit TraceID(long long id, unsigned char* flags) -- : data_(static_cast(id)) { -- (void)flags; -- } -- explicit TraceID(long id, unsigned char* flags) -- : data_(static_cast(id)) { -- (void)flags; -- } -- explicit TraceID(int id, unsigned char* flags) -- : data_(static_cast(id)) { -- (void)flags; -- } -- explicit TraceID(short id, unsigned char* flags) -- : data_(static_cast(id)) { -- (void)flags; -- } -- explicit TraceID(signed char id, unsigned char* flags) -- : data_(static_cast(id)) { -- (void)flags; -- } -- -- unsigned long long data() const { return data_; } -- -- private: -- unsigned long long data_; --}; -- --// Simple union to store various types as unsigned long long. --union TraceValueUnion { -- bool as_bool; -- unsigned long long as_uint; -- long long as_int; -- double as_double; -- const void* as_pointer; -- const char* as_string; --}; -- --// Simple container for const char* that should be copied instead of retained. --class TraceStringWithCopy { -- public: -- explicit TraceStringWithCopy(const char* str) : str_(str) {} -- operator const char*() const { return str_; } -- -- private: -- const char* str_; --}; -- --// Define SetTraceValue for each allowed type. It stores the type and --// value in the return arguments. This allows this API to avoid declaring any --// structures so that it is portable to third_party libraries. --#define INTERNAL_DECLARE_SET_TRACE_VALUE(actual_type, union_member, \ -- value_type_id) \ -- static inline void SetTraceValue(actual_type arg, unsigned char* type, \ -- unsigned long long* value) { \ -- TraceValueUnion type_value; \ -- type_value.union_member = arg; \ -- *type = value_type_id; \ -- *value = type_value.as_uint; \ -- } --// Simpler form for int types that can be safely casted. 
--#define INTERNAL_DECLARE_SET_TRACE_VALUE_INT(actual_type, value_type_id) \ -- static inline void SetTraceValue(actual_type arg, unsigned char* type, \ -- unsigned long long* value) { \ -- *type = value_type_id; \ -- *value = static_cast(arg); \ -- } -- --INTERNAL_DECLARE_SET_TRACE_VALUE_INT(unsigned long long, TRACE_VALUE_TYPE_UINT) --INTERNAL_DECLARE_SET_TRACE_VALUE_INT(unsigned long, TRACE_VALUE_TYPE_UINT) --INTERNAL_DECLARE_SET_TRACE_VALUE_INT(unsigned int, TRACE_VALUE_TYPE_UINT) --INTERNAL_DECLARE_SET_TRACE_VALUE_INT(unsigned short, TRACE_VALUE_TYPE_UINT) --INTERNAL_DECLARE_SET_TRACE_VALUE_INT(unsigned char, TRACE_VALUE_TYPE_UINT) --INTERNAL_DECLARE_SET_TRACE_VALUE_INT(long long, TRACE_VALUE_TYPE_INT) --INTERNAL_DECLARE_SET_TRACE_VALUE_INT(long, TRACE_VALUE_TYPE_INT) --INTERNAL_DECLARE_SET_TRACE_VALUE_INT(int, TRACE_VALUE_TYPE_INT) --INTERNAL_DECLARE_SET_TRACE_VALUE_INT(short, TRACE_VALUE_TYPE_INT) --INTERNAL_DECLARE_SET_TRACE_VALUE_INT(signed char, TRACE_VALUE_TYPE_INT) --INTERNAL_DECLARE_SET_TRACE_VALUE(bool, as_bool, TRACE_VALUE_TYPE_BOOL) --INTERNAL_DECLARE_SET_TRACE_VALUE(double, as_double, TRACE_VALUE_TYPE_DOUBLE) --INTERNAL_DECLARE_SET_TRACE_VALUE(const void*, -- as_pointer, -- TRACE_VALUE_TYPE_POINTER) --INTERNAL_DECLARE_SET_TRACE_VALUE(const char*, -- as_string, -- TRACE_VALUE_TYPE_STRING) --INTERNAL_DECLARE_SET_TRACE_VALUE(const TraceStringWithCopy&, -- as_string, -- TRACE_VALUE_TYPE_COPY_STRING) -- --#undef INTERNAL_DECLARE_SET_TRACE_VALUE --#undef INTERNAL_DECLARE_SET_TRACE_VALUE_INT -- --// std::string version of SetTraceValue so that trace arguments can be strings. --static inline void SetTraceValue(const std::string& arg, -- unsigned char* type, -- unsigned long long* value) { -- TraceValueUnion type_value; -- type_value.as_string = arg.c_str(); -- *type = TRACE_VALUE_TYPE_COPY_STRING; -- *value = type_value.as_uint; --} -- --// These AddTraceEvent template functions are defined here instead of in the --// macro, because the arg_values could be temporary objects, such as --// std::string. In order to store pointers to the internal c_str and pass --// through to the tracing API, the arg_values must live throughout --// these procedures. 
-- --static inline void AddTraceEvent(char phase, -- const unsigned char* category_enabled, -- const char* name, -- unsigned long long id, -- unsigned char flags) { -- TRACE_EVENT_API_ADD_TRACE_EVENT(phase, category_enabled, name, id, -- kZeroNumArgs, nullptr, nullptr, nullptr, -- flags); --} -- --template --static inline void AddTraceEvent(char phase, -- const unsigned char* category_enabled, -- const char* name, -- unsigned long long id, -- unsigned char flags, -- const char* arg1_name, -- const ARG1_TYPE& arg1_val) { -- const int num_args = 1; -- unsigned char arg_types[1]; -- unsigned long long arg_values[1]; -- SetTraceValue(arg1_val, &arg_types[0], &arg_values[0]); -- TRACE_EVENT_API_ADD_TRACE_EVENT(phase, category_enabled, name, id, num_args, -- &arg1_name, arg_types, arg_values, flags); --} -- --template --static inline void AddTraceEvent(char phase, -- const unsigned char* category_enabled, -- const char* name, -- unsigned long long id, -- unsigned char flags, -- const char* arg1_name, -- const ARG1_TYPE& arg1_val, -- const char* arg2_name, -- const ARG2_TYPE& arg2_val) { -- const int num_args = 2; -- const char* arg_names[2] = {arg1_name, arg2_name}; -- unsigned char arg_types[2]; -- unsigned long long arg_values[2]; -- SetTraceValue(arg1_val, &arg_types[0], &arg_values[0]); -- SetTraceValue(arg2_val, &arg_types[1], &arg_values[1]); -- TRACE_EVENT_API_ADD_TRACE_EVENT(phase, category_enabled, name, id, num_args, -- arg_names, arg_types, arg_values, flags); --} -- --// Used by TRACE_EVENTx macro. Do not use directly. --class TraceEndOnScopeClose { -- public: -- // Note: members of data_ intentionally left uninitialized. See Initialize. -- TraceEndOnScopeClose() : p_data_(nullptr) {} -- ~TraceEndOnScopeClose() { -- if (p_data_) -- AddEventIfEnabled(); -- } -- -- void Initialize(const unsigned char* category_enabled, const char* name) { -- data_.category_enabled = category_enabled; -- data_.name = name; -- p_data_ = &data_; -- } -- -- private: -- // Add the end event if the category is still enabled. -- void AddEventIfEnabled() { -- // Only called when p_data_ is non-null. -- if (*p_data_->category_enabled) { -- TRACE_EVENT_API_ADD_TRACE_EVENT(TRACE_EVENT_PHASE_END, -- p_data_->category_enabled, p_data_->name, -- kNoEventId, kZeroNumArgs, nullptr, -- nullptr, nullptr, TRACE_EVENT_FLAG_NONE); -- } -- } -- -- // This Data struct workaround is to avoid initializing all the members -- // in Data during construction of this object, since this object is always -- // constructed, even when tracing is disabled. If the members of Data were -- // members of this class instead, compiler warnings occur about potential -- // uninitialized accesses. -- struct Data { -- const unsigned char* category_enabled; -- const char* name; -- }; -- Data* p_data_; -- Data data_; --}; -- --} // namespace trace_event_internal --} // namespace webrtc --#else -- --//////////////////////////////////////////////////////////////////////////////// --// This section defines no-op alternatives to the tracing macros when --// RTC_DISABLE_TRACE_EVENTS is defined. 
-- --#define RTC_NOOP() \ -- do { \ -- } while (0) -- --#define TRACE_STR_COPY(str) RTC_NOOP() -- --#define TRACE_DISABLED_BY_DEFAULT(name) "disabled-by-default-" name -- --#define TRACE_ID_MANGLE(id) 0 -- --#define TRACE_EVENT0(category, name) RTC_NOOP() --#define TRACE_EVENT1(category, name, arg1_name, arg1_val) RTC_NOOP() --#define TRACE_EVENT2(category, name, arg1_name, arg1_val, arg2_name, arg2_val) \ -- RTC_NOOP() -- --#define TRACE_EVENT_INSTANT0(category, name) RTC_NOOP() --#define TRACE_EVENT_INSTANT1(category, name, arg1_name, arg1_val) RTC_NOOP() -- --#define TRACE_EVENT_INSTANT2(category, name, arg1_name, arg1_val, arg2_name, \ -- arg2_val) \ -- RTC_NOOP() -- --#define TRACE_EVENT_COPY_INSTANT0(category, name) RTC_NOOP() --#define TRACE_EVENT_COPY_INSTANT1(category, name, arg1_name, arg1_val) \ -- RTC_NOOP() --#define TRACE_EVENT_COPY_INSTANT2(category, name, arg1_name, arg1_val, \ -- arg2_name, arg2_val) \ -- RTC_NOOP() -- --#define TRACE_EVENT_BEGIN0(category, name) RTC_NOOP() --#define TRACE_EVENT_BEGIN1(category, name, arg1_name, arg1_val) RTC_NOOP() --#define TRACE_EVENT_BEGIN2(category, name, arg1_name, arg1_val, arg2_name, \ -- arg2_val) \ -- RTC_NOOP() --#define TRACE_EVENT_COPY_BEGIN0(category, name) RTC_NOOP() --#define TRACE_EVENT_COPY_BEGIN1(category, name, arg1_name, arg1_val) RTC_NOOP() --#define TRACE_EVENT_COPY_BEGIN2(category, name, arg1_name, arg1_val, \ -- arg2_name, arg2_val) \ -- RTC_NOOP() -- --#define TRACE_EVENT_END0(category, name) RTC_NOOP() --#define TRACE_EVENT_END1(category, name, arg1_name, arg1_val) RTC_NOOP() --#define TRACE_EVENT_END2(category, name, arg1_name, arg1_val, arg2_name, \ -- arg2_val) \ -- RTC_NOOP() --#define TRACE_EVENT_COPY_END0(category, name) RTC_NOOP() --#define TRACE_EVENT_COPY_END1(category, name, arg1_name, arg1_val) RTC_NOOP() --#define TRACE_EVENT_COPY_END2(category, name, arg1_name, arg1_val, arg2_name, \ -- arg2_val) \ -- RTC_NOOP() -- --#define TRACE_COUNTER1(category, name, value) RTC_NOOP() --#define TRACE_COPY_COUNTER1(category, name, value) RTC_NOOP() -- --#define TRACE_COUNTER2(category, name, value1_name, value1_val, value2_name, \ -- value2_val) \ -- RTC_NOOP() --#define TRACE_COPY_COUNTER2(category, name, value1_name, value1_val, \ -- value2_name, value2_val) \ -- RTC_NOOP() -- --#define TRACE_COUNTER_ID1(category, name, id, value) RTC_NOOP() --#define TRACE_COPY_COUNTER_ID1(category, name, id, value) RTC_NOOP() -- --#define TRACE_COUNTER_ID2(category, name, id, value1_name, value1_val, \ -- value2_name, value2_val) \ -- RTC_NOOP() --#define TRACE_COPY_COUNTER_ID2(category, name, id, value1_name, value1_val, \ -- value2_name, value2_val) \ -- RTC_NOOP() -- --#define TRACE_EVENT_ASYNC_BEGIN0(category, name, id) RTC_NOOP() --#define TRACE_EVENT_ASYNC_BEGIN1(category, name, id, arg1_name, arg1_val) \ -- RTC_NOOP() --#define TRACE_EVENT_ASYNC_BEGIN2(category, name, id, arg1_name, arg1_val, \ -- arg2_name, arg2_val) \ -- RTC_NOOP() --#define TRACE_EVENT_COPY_ASYNC_BEGIN0(category, name, id) RTC_NOOP() --#define TRACE_EVENT_COPY_ASYNC_BEGIN1(category, name, id, arg1_name, arg1_val) \ -- RTC_NOOP() --#define TRACE_EVENT_COPY_ASYNC_BEGIN2(category, name, id, arg1_name, arg1_val, \ -- arg2_name, arg2_val) \ -- RTC_NOOP() -- --#define TRACE_EVENT_ASYNC_STEP0(category, name, id, step) RTC_NOOP() --#define TRACE_EVENT_ASYNC_STEP1(category, name, id, step, arg1_name, arg1_val) \ -- RTC_NOOP() --#define TRACE_EVENT_COPY_ASYNC_STEP0(category, name, id, step) RTC_NOOP() --#define TRACE_EVENT_COPY_ASYNC_STEP1(category, name, id, 
step, arg1_name, \ -- arg1_val) \ -- RTC_NOOP() -- --#define TRACE_EVENT_ASYNC_END0(category, name, id) RTC_NOOP() --#define TRACE_EVENT_ASYNC_END1(category, name, id, arg1_name, arg1_val) \ -- RTC_NOOP() --#define TRACE_EVENT_ASYNC_END2(category, name, id, arg1_name, arg1_val, \ -- arg2_name, arg2_val) \ -- RTC_NOOP() --#define TRACE_EVENT_COPY_ASYNC_END0(category, name, id) RTC_NOOP() --#define TRACE_EVENT_COPY_ASYNC_END1(category, name, id, arg1_name, arg1_val) \ -- RTC_NOOP() --#define TRACE_EVENT_COPY_ASYNC_END2(category, name, id, arg1_name, arg1_val, \ -- arg2_name, arg2_val) \ -- RTC_NOOP() -- --#define TRACE_EVENT_FLOW_BEGIN0(category, name, id) RTC_NOOP() --#define TRACE_EVENT_FLOW_BEGIN1(category, name, id, arg1_name, arg1_val) \ -- RTC_NOOP() --#define TRACE_EVENT_FLOW_BEGIN2(category, name, id, arg1_name, arg1_val, \ -- arg2_name, arg2_val) \ -- RTC_NOOP() --#define TRACE_EVENT_COPY_FLOW_BEGIN0(category, name, id) RTC_NOOP() --#define TRACE_EVENT_COPY_FLOW_BEGIN1(category, name, id, arg1_name, arg1_val) \ -- RTC_NOOP() --#define TRACE_EVENT_COPY_FLOW_BEGIN2(category, name, id, arg1_name, arg1_val, \ -- arg2_name, arg2_val) \ -- RTC_NOOP() -- --#define TRACE_EVENT_FLOW_STEP0(category, name, id, step) RTC_NOOP() --#define TRACE_EVENT_FLOW_STEP1(category, name, id, step, arg1_name, arg1_val) \ -- RTC_NOOP() --#define TRACE_EVENT_COPY_FLOW_STEP0(category, name, id, step) RTC_NOOP() --#define TRACE_EVENT_COPY_FLOW_STEP1(category, name, id, step, arg1_name, \ -- arg1_val) \ -- RTC_NOOP() -- --#define TRACE_EVENT_FLOW_END0(category, name, id) RTC_NOOP() --#define TRACE_EVENT_FLOW_END1(category, name, id, arg1_name, arg1_val) \ -- RTC_NOOP() --#define TRACE_EVENT_FLOW_END2(category, name, id, arg1_name, arg1_val, \ -- arg2_name, arg2_val) \ -- RTC_NOOP() --#define TRACE_EVENT_COPY_FLOW_END0(category, name, id) RTC_NOOP() --#define TRACE_EVENT_COPY_FLOW_END1(category, name, id, arg1_name, arg1_val) \ -- RTC_NOOP() --#define TRACE_EVENT_COPY_FLOW_END2(category, name, id, arg1_name, arg1_val, \ -- arg2_name, arg2_val) \ -- RTC_NOOP() -- --#define TRACE_EVENT_API_GET_CATEGORY_ENABLED "" -- --#define TRACE_EVENT_API_ADD_TRACE_EVENT RTC_NOOP() -- --#endif // RTC_TRACE_EVENTS_ENABLED -- --#endif // RTC_BASE_TRACE_EVENT_H_ -+// This header is diverted to a similar header in Gecko, that is defining the -+// same macros, modified to talk to the Gecko Profiler. -+#include "GeckoTraceEvent.h" +diff --git a/modules/desktop_capture/win/window_capture_utils.cc b/modules/desktop_capture/win/window_capture_utils.cc +index ccfef49bc5..d58c02e17c 100644 +--- a/modules/desktop_capture/win/window_capture_utils.cc ++++ b/modules/desktop_capture/win/window_capture_utils.cc +@@ -79,6 +79,10 @@ BOOL CALLBACK GetWindowListHandler(HWND hwnd, LPARAM param) { + DesktopCapturer::Source window; + window.id = reinterpret_cast(hwnd); + ++ DWORD pid; ++ GetWindowThreadProcessId(hwnd, &pid); ++ window.pid = static_cast(pid); ++ + // GetWindowText* are potentially blocking operations if `hwnd` is + // owned by the current process. 
The APIs will send messages to the window's + // message loop, and if the message loop is waiting on this operation we will -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0049.patch b/third_party/libwebrtc/moz-patch-stack/0049.patch index 03ff69c4a839..ee530423b1cf 100644 --- a/third_party/libwebrtc/moz-patch-stack/0049.patch +++ b/third_party/libwebrtc/moz-patch-stack/0049.patch @@ -1,30 +1,29 @@ From: Nico Grunbaum Date: Thu, 28 Oct 2021 18:13:00 +0000 -Subject: Bug 1729367 - P6 - Restore PID recording post cherry-pick;r=mjf +Subject: Bug 1729367 - P7 - restore mac PID tracking using new API;r=mjf + a=webrtc-update -This restores the code from P0, which was removed to make cherry-picking 439ffe462a66ad9fa9a251b265e4ab28c2647d25 and 449a78b1e20ea85b11f967cf3a184ee610ce21c3 easier. - -Differential Revision: https://phabricator.services.mozilla.com/D129714 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/72a83cb2e571023cd4150bbdef5be5455ce851f4 +Differential Revision: https://phabricator.services.mozilla.com/D129721 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/1495ca5ef535f8ad692a3a579ca42eddc14f39a8 --- - modules/desktop_capture/win/window_capture_utils.cc | 4 ++++ - 1 file changed, 4 insertions(+) + modules/desktop_capture/window_capturer_mac.mm | 3 ++- + 1 file changed, 2 insertions(+), 1 deletion(-) -diff --git a/modules/desktop_capture/win/window_capture_utils.cc b/modules/desktop_capture/win/window_capture_utils.cc -index ccfef49bc5..d58c02e17c 100644 ---- a/modules/desktop_capture/win/window_capture_utils.cc -+++ b/modules/desktop_capture/win/window_capture_utils.cc -@@ -79,6 +79,10 @@ BOOL CALLBACK GetWindowListHandler(HWND hwnd, LPARAM param) { - DesktopCapturer::Source window; - window.id = reinterpret_cast(hwnd); - -+ DWORD pid; -+ GetWindowThreadProcessId(hwnd, &pid); -+ window.pid = static_cast(pid); -+ - // GetWindowText* are potentially blocking operations if `hwnd` is - // owned by the current process. The APIs will send messages to the window's - // message loop, and if the message loop is waiting on this operation we will +diff --git a/modules/desktop_capture/window_capturer_mac.mm b/modules/desktop_capture/window_capturer_mac.mm +index f99b4a74d1..10f6a74650 100644 +--- a/modules/desktop_capture/window_capturer_mac.mm ++++ b/modules/desktop_capture/window_capturer_mac.mm +@@ -170,8 +170,9 @@ void WindowCapturerMac::CaptureFrame() { + return webrtc::GetWindowList( + [sources](CFDictionaryRef window) { + WindowId window_id = GetWindowId(window); ++ int pid = GetWindowOwnerPid(window); + if (window_id != kNullWindowId) { +- sources->push_back(DesktopCapturer::Source{window_id, GetWindowTitle(window)}); ++ sources->push_back(DesktopCapturer::Source{window_id, pid, GetWindowTitle(window)}); + } + return true; + }, -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0050.patch b/third_party/libwebrtc/moz-patch-stack/0050.patch index 58fb9a280c8d..5faa458f5bd4 100644 --- a/third_party/libwebrtc/moz-patch-stack/0050.patch +++ b/third_party/libwebrtc/moz-patch-stack/0050.patch @@ -1,29 +1,208 @@ -From: Nico Grunbaum -Date: Thu, 28 Oct 2021 18:13:00 +0000 -Subject: Bug 1729367 - P7 - restore mac PID tracking using new API;r=mjf - a=webrtc-update +From: Andreas Pehrson +Date: Tue, 2 Nov 2021 14:35:00 +0000 +Subject: Bug 1729455 - Add to stats the local receive time for receiving video + Sender Reports. 
r=ng -Differential Revision: https://phabricator.services.mozilla.com/D129721 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/1495ca5ef535f8ad692a3a579ca42eddc14f39a8 +Differential Revision: https://phabricator.services.mozilla.com/D125712 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/99267b6d193fbcb3e4c845c5e80770424d6d06e2 --- - modules/desktop_capture/window_capturer_mac.mm | 3 ++- - 1 file changed, 2 insertions(+), 1 deletion(-) + call/video_receive_stream.h | 3 ++- + modules/rtp_rtcp/source/rtcp_receiver.cc | 6 ++++-- + modules/rtp_rtcp/source/rtcp_receiver.h | 3 ++- + modules/rtp_rtcp/source/rtp_rtcp_impl.cc | 10 +++++----- + modules/rtp_rtcp/source/rtp_rtcp_impl.h | 3 ++- + modules/rtp_rtcp/source/rtp_rtcp_impl2.cc | 10 +++++----- + modules/rtp_rtcp/source/rtp_rtcp_impl2.h | 3 ++- + modules/rtp_rtcp/source/rtp_rtcp_interface.h | 5 +++-- + video/rtp_video_stream_receiver2.cc | 5 +++-- + video/rtp_video_stream_receiver2.h | 3 ++- + video/video_receive_stream2.cc | 3 ++- + 11 files changed, 32 insertions(+), 22 deletions(-) -diff --git a/modules/desktop_capture/window_capturer_mac.mm b/modules/desktop_capture/window_capturer_mac.mm -index f99b4a74d1..10f6a74650 100644 ---- a/modules/desktop_capture/window_capturer_mac.mm -+++ b/modules/desktop_capture/window_capturer_mac.mm -@@ -170,8 +170,9 @@ void WindowCapturerMac::CaptureFrame() { - return webrtc::GetWindowList( - [sources](CFDictionaryRef window) { - WindowId window_id = GetWindowId(window); -+ int pid = GetWindowOwnerPid(window); - if (window_id != kNullWindowId) { -- sources->push_back(DesktopCapturer::Source{window_id, GetWindowTitle(window)}); -+ sources->push_back(DesktopCapturer::Source{window_id, pid, GetWindowTitle(window)}); - } - return true; - }, +diff --git a/call/video_receive_stream.h b/call/video_receive_stream.h +index 01fb08a009..87ee39e142 100644 +--- a/call/video_receive_stream.h ++++ b/call/video_receive_stream.h +@@ -152,10 +152,11 @@ class VideoReceiveStreamInterface : public MediaReceiveStreamInterface { + RtcpPacketTypeCounter rtcp_packet_type_counts; + absl::optional rtx_rtp_stats; + +- // Mozilla modification: Init these three. ++ // Mozilla modification: Init these. + uint32_t rtcp_sender_packets_sent = 0; + uint32_t rtcp_sender_octets_sent = 0; + int64_t rtcp_sender_ntp_timestamp_ms = 0; ++ int64_t rtcp_sender_remote_ntp_timestamp_ms = 0; + + // Timing frame info: all important timestamps for a full lifetime of a + // single 'timing frame'. 
+diff --git a/modules/rtp_rtcp/source/rtcp_receiver.cc b/modules/rtp_rtcp/source/rtcp_receiver.cc +index e2ad674012..94de316421 100644 +--- a/modules/rtp_rtcp/source/rtcp_receiver.cc ++++ b/modules/rtp_rtcp/source/rtcp_receiver.cc +@@ -365,11 +365,13 @@ RTCPReceiver::ConsumeReceivedXrReferenceTimeInfo() { + + void RTCPReceiver::RemoteRTCPSenderInfo(uint32_t* packet_count, + uint32_t* octet_count, +- int64_t* ntp_timestamp_ms) const { ++ int64_t* ntp_timestamp_ms, ++ int64_t* remote_ntp_timestamp_ms) const { + MutexLock lock(&rtcp_receiver_lock_); + *packet_count = remote_sender_packet_count_; + *octet_count = remote_sender_octet_count_; +- *ntp_timestamp_ms = remote_sender_ntp_time_.ToMs(); ++ *ntp_timestamp_ms = last_received_sr_ntp_.ToMs(); ++ *remote_ntp_timestamp_ms = remote_sender_ntp_time_.ToMs(); + } + + std::vector RTCPReceiver::GetLatestReportBlockData() const { +diff --git a/modules/rtp_rtcp/source/rtcp_receiver.h b/modules/rtp_rtcp/source/rtcp_receiver.h +index 36e117af55..7fc541585c 100644 +--- a/modules/rtp_rtcp/source/rtcp_receiver.h ++++ b/modules/rtp_rtcp/source/rtcp_receiver.h +@@ -120,7 +120,8 @@ class RTCPReceiver final { + // Get received sender packet and octet counts + void RemoteRTCPSenderInfo(uint32_t* packet_count, + uint32_t* octet_count, +- int64_t* ntp_timestamp_ms) const; ++ int64_t* ntp_timestamp_ms, ++ int64_t* remote_ntp_timestamp_ms) const; + + absl::optional AverageRtt() const; + absl::optional LastRtt() const; +diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc +index 525497c489..bafa336e15 100644 +--- a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc ++++ b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc +@@ -501,11 +501,11 @@ void ModuleRtpRtcpImpl::GetSendStreamDataCounters( + } + + // Received RTCP report. +-void ModuleRtpRtcpImpl::RemoteRTCPSenderInfo(uint32_t* packet_count, +- uint32_t* octet_count, +- int64_t* ntp_timestamp_ms) const { +- return rtcp_receiver_.RemoteRTCPSenderInfo(packet_count, octet_count, +- ntp_timestamp_ms); ++void ModuleRtpRtcpImpl::RemoteRTCPSenderInfo( ++ uint32_t* packet_count, uint32_t* octet_count, int64_t* ntp_timestamp_ms, ++ int64_t* remote_ntp_timestamp_ms) const { ++ return rtcp_receiver_.RemoteRTCPSenderInfo( ++ packet_count, octet_count, ntp_timestamp_ms, remote_ntp_timestamp_ms); + } + + std::vector ModuleRtpRtcpImpl::GetLatestReportBlockData() +diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/modules/rtp_rtcp/source/rtp_rtcp_impl.h +index 742a69cce3..0b1266a2db 100644 +--- a/modules/rtp_rtcp/source/rtp_rtcp_impl.h ++++ b/modules/rtp_rtcp/source/rtp_rtcp_impl.h +@@ -184,7 +184,8 @@ class ABSL_DEPRECATED("") ModuleRtpRtcpImpl + + void RemoteRTCPSenderInfo(uint32_t* packet_count, + uint32_t* octet_count, +- int64_t* ntp_timestamp_ms) const override; ++ int64_t* ntp_timestamp_ms, ++ int64_t* remote_ntp_timestamp_ms) const override; + + // A snapshot of the most recent Report Block with additional data of + // interest to statistics. Used to implement RTCRemoteInboundRtpStreamStats. +diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc +index c171cf47a4..6bd172317d 100644 +--- a/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc ++++ b/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc +@@ -505,11 +505,11 @@ void ModuleRtpRtcpImpl2::GetSendStreamDataCounters( + } + + // Received RTCP report. 
+-void ModuleRtpRtcpImpl2::RemoteRTCPSenderInfo(uint32_t* packet_count, +- uint32_t* octet_count, +- int64_t* ntp_timestamp_ms) const { +- return rtcp_receiver_.RemoteRTCPSenderInfo(packet_count, octet_count, +- ntp_timestamp_ms); ++void ModuleRtpRtcpImpl2::RemoteRTCPSenderInfo( ++ uint32_t* packet_count, uint32_t* octet_count, int64_t* ntp_timestamp_ms, ++ int64_t* remote_ntp_timestamp_ms) const { ++ return rtcp_receiver_.RemoteRTCPSenderInfo( ++ packet_count, octet_count, ntp_timestamp_ms, remote_ntp_timestamp_ms); + } + + std::vector ModuleRtpRtcpImpl2::GetLatestReportBlockData() +diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl2.h b/modules/rtp_rtcp/source/rtp_rtcp_impl2.h +index a9d18ec44a..54ca61a705 100644 +--- a/modules/rtp_rtcp/source/rtp_rtcp_impl2.h ++++ b/modules/rtp_rtcp/source/rtp_rtcp_impl2.h +@@ -196,7 +196,8 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, + + void RemoteRTCPSenderInfo(uint32_t* packet_count, + uint32_t* octet_count, +- int64_t* ntp_timestamp_ms) const override; ++ int64_t* ntp_timestamp_ms, ++ int64_t* remote_ntp_timestamp_ms) const override; + + // A snapshot of the most recent Report Block with additional data of + // interest to statistics. Used to implement RTCRemoteInboundRtpStreamStats. +diff --git a/modules/rtp_rtcp/source/rtp_rtcp_interface.h b/modules/rtp_rtcp/source/rtp_rtcp_interface.h +index 889b099abd..2614461b2e 100644 +--- a/modules/rtp_rtcp/source/rtp_rtcp_interface.h ++++ b/modules/rtp_rtcp/source/rtp_rtcp_interface.h +@@ -392,10 +392,11 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { + StreamDataCounters* rtx_counters) const = 0; + + +- // Returns packet count, octet count, and timestamp from RTCP sender report. ++ // Returns packet count, octet count, and timestamps from RTCP sender report. + virtual void RemoteRTCPSenderInfo(uint32_t* packet_count, + uint32_t* octet_count, +- int64_t* ntp_timestamp_ms) const = 0; ++ int64_t* ntp_timestamp_ms, ++ int64_t* remote_ntp_timestamp_ms) const = 0; + // A snapshot of Report Blocks with additional data of interest to statistics. + // Within this list, the sender-source SSRC pair is unique and per-pair the + // ReportBlockData represents the latest Report Block that was received for +diff --git a/video/rtp_video_stream_receiver2.cc b/video/rtp_video_stream_receiver2.cc +index 0fa7fc5b9c..2670b7fe58 100644 +--- a/video/rtp_video_stream_receiver2.cc ++++ b/video/rtp_video_stream_receiver2.cc +@@ -1062,9 +1062,10 @@ absl::optional RtpVideoStreamReceiver2::LastReceivedKeyframePacketMs() + // seem to be any support for these stats right now. So, we hack this in. + void RtpVideoStreamReceiver2::RemoteRTCPSenderInfo( + uint32_t* packet_count, uint32_t* octet_count, +- int64_t* ntp_timestamp_ms) const { ++ int64_t* ntp_timestamp_ms, int64_t* remote_ntp_timestamp_ms) const { + RTC_DCHECK_RUN_ON(&worker_task_checker_); +- rtp_rtcp_->RemoteRTCPSenderInfo(packet_count, octet_count, ntp_timestamp_ms); ++ rtp_rtcp_->RemoteRTCPSenderInfo(packet_count, octet_count, ntp_timestamp_ms, ++ remote_ntp_timestamp_ms); + } + + void RtpVideoStreamReceiver2::ManageFrame( +diff --git a/video/rtp_video_stream_receiver2.h b/video/rtp_video_stream_receiver2.h +index be8bce770f..0e96d7f2cd 100644 +--- a/video/rtp_video_stream_receiver2.h ++++ b/video/rtp_video_stream_receiver2.h +@@ -211,7 +211,8 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, + // stats at all, and even on the most recent libwebrtc code there does not + // seem to be any support for these stats right now. 
So, we hack this in. + void RemoteRTCPSenderInfo(uint32_t* packet_count, uint32_t* octet_count, +- int64_t* ntp_timestamp_ms) const; ++ int64_t* ntp_timestamp_ms, ++ int64_t* remote_ntp_timestamp_ms) const; + + private: + // Implements RtpVideoFrameReceiver. +diff --git a/video/video_receive_stream2.cc b/video/video_receive_stream2.cc +index 707fb64f9b..0f42fa50b7 100644 +--- a/video/video_receive_stream2.cc ++++ b/video/video_receive_stream2.cc +@@ -580,7 +580,8 @@ VideoReceiveStreamInterface::Stats VideoReceiveStream2::GetStats() const { + // seem to be any support for these stats right now. So, we hack this in. + rtp_video_stream_receiver_.RemoteRTCPSenderInfo( + &stats.rtcp_sender_packets_sent, &stats.rtcp_sender_octets_sent, +- &stats.rtcp_sender_ntp_timestamp_ms); ++ &stats.rtcp_sender_ntp_timestamp_ms, ++ &stats.rtcp_sender_remote_ntp_timestamp_ms); + + return stats; + } -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0051.patch b/third_party/libwebrtc/moz-patch-stack/0051.patch index 592072901ac4..cdb32e990af2 100644 --- a/third_party/libwebrtc/moz-patch-stack/0051.patch +++ b/third_party/libwebrtc/moz-patch-stack/0051.patch @@ -1,208 +1,26 @@ From: Andreas Pehrson Date: Tue, 2 Nov 2021 14:35:00 +0000 -Subject: Bug 1729455 - Add to stats the local receive time for receiving video - Sender Reports. r=ng +Subject: Bug 1729455 - Ensure the libwebrtc system clock is not used. r=bwc -Differential Revision: https://phabricator.services.mozilla.com/D125712 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/99267b6d193fbcb3e4c845c5e80770424d6d06e2 +Differential Revision: https://phabricator.services.mozilla.com/D128244 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/3e8ac168ee3db089dd892bf140df53e15d6f0918 --- - call/video_receive_stream.h | 3 ++- - modules/rtp_rtcp/source/rtcp_receiver.cc | 6 ++++-- - modules/rtp_rtcp/source/rtcp_receiver.h | 3 ++- - modules/rtp_rtcp/source/rtp_rtcp_impl.cc | 10 +++++----- - modules/rtp_rtcp/source/rtp_rtcp_impl.h | 3 ++- - modules/rtp_rtcp/source/rtp_rtcp_impl2.cc | 10 +++++----- - modules/rtp_rtcp/source/rtp_rtcp_impl2.h | 3 ++- - modules/rtp_rtcp/source/rtp_rtcp_interface.h | 5 +++-- - video/rtp_video_stream_receiver2.cc | 5 +++-- - video/rtp_video_stream_receiver2.h | 3 ++- - video/video_receive_stream2.cc | 3 ++- - 11 files changed, 32 insertions(+), 22 deletions(-) + rtc_base/system_time.cc | 2 ++ + 1 file changed, 2 insertions(+) -diff --git a/call/video_receive_stream.h b/call/video_receive_stream.h -index 023bdbb2c4..8587d2b5db 100644 ---- a/call/video_receive_stream.h -+++ b/call/video_receive_stream.h -@@ -152,10 +152,11 @@ class VideoReceiveStreamInterface : public MediaReceiveStreamInterface { - RtcpPacketTypeCounter rtcp_packet_type_counts; - absl::optional rtx_rtp_stats; +diff --git a/rtc_base/system_time.cc b/rtc_base/system_time.cc +index 058e6c2990..1a5e447916 100644 +--- a/rtc_base/system_time.cc ++++ b/rtc_base/system_time.cc +@@ -12,6 +12,8 @@ + // rtc::SystemTimeNanos() must be provided externally. + #ifndef WEBRTC_EXCLUDE_SYSTEM_TIME -- // Mozilla modification: Init these three. -+ // Mozilla modification: Init these. - uint32_t rtcp_sender_packets_sent = 0; - uint32_t rtcp_sender_octets_sent = 0; - int64_t rtcp_sender_ntp_timestamp_ms = 0; -+ int64_t rtcp_sender_remote_ntp_timestamp_ms = 0; ++#error Mozilla: Must not use the built-in libwebrtc clock ++ + #include - // Timing frame info: all important timestamps for a full lifetime of a - // single 'timing frame'. 
-diff --git a/modules/rtp_rtcp/source/rtcp_receiver.cc b/modules/rtp_rtcp/source/rtcp_receiver.cc -index fda09577d3..eb7a7f8390 100644 ---- a/modules/rtp_rtcp/source/rtcp_receiver.cc -+++ b/modules/rtp_rtcp/source/rtcp_receiver.cc -@@ -364,11 +364,13 @@ RTCPReceiver::ConsumeReceivedXrReferenceTimeInfo() { - - void RTCPReceiver::RemoteRTCPSenderInfo(uint32_t* packet_count, - uint32_t* octet_count, -- int64_t* ntp_timestamp_ms) const { -+ int64_t* ntp_timestamp_ms, -+ int64_t* remote_ntp_timestamp_ms) const { - MutexLock lock(&rtcp_receiver_lock_); - *packet_count = remote_sender_packet_count_; - *octet_count = remote_sender_octet_count_; -- *ntp_timestamp_ms = remote_sender_ntp_time_.ToMs(); -+ *ntp_timestamp_ms = last_received_sr_ntp_.ToMs(); -+ *remote_ntp_timestamp_ms = remote_sender_ntp_time_.ToMs(); - } - - std::vector RTCPReceiver::GetLatestReportBlockData() const { -diff --git a/modules/rtp_rtcp/source/rtcp_receiver.h b/modules/rtp_rtcp/source/rtcp_receiver.h -index b727934325..6e6d82dcf4 100644 ---- a/modules/rtp_rtcp/source/rtcp_receiver.h -+++ b/modules/rtp_rtcp/source/rtcp_receiver.h -@@ -119,7 +119,8 @@ class RTCPReceiver final { - // Get received sender packet and octet counts - void RemoteRTCPSenderInfo(uint32_t* packet_count, - uint32_t* octet_count, -- int64_t* ntp_timestamp_ms) const; -+ int64_t* ntp_timestamp_ms, -+ int64_t* remote_ntp_timestamp_ms) const; - - absl::optional AverageRtt() const; - absl::optional LastRtt() const; -diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc -index b360327f27..38e4a736f9 100644 ---- a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc -+++ b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc -@@ -501,11 +501,11 @@ void ModuleRtpRtcpImpl::GetSendStreamDataCounters( - } - - // Received RTCP report. --void ModuleRtpRtcpImpl::RemoteRTCPSenderInfo(uint32_t* packet_count, -- uint32_t* octet_count, -- int64_t* ntp_timestamp_ms) const { -- return rtcp_receiver_.RemoteRTCPSenderInfo(packet_count, octet_count, -- ntp_timestamp_ms); -+void ModuleRtpRtcpImpl::RemoteRTCPSenderInfo( -+ uint32_t* packet_count, uint32_t* octet_count, int64_t* ntp_timestamp_ms, -+ int64_t* remote_ntp_timestamp_ms) const { -+ return rtcp_receiver_.RemoteRTCPSenderInfo( -+ packet_count, octet_count, ntp_timestamp_ms, remote_ntp_timestamp_ms); - } - - std::vector ModuleRtpRtcpImpl::GetLatestReportBlockData() -diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/modules/rtp_rtcp/source/rtp_rtcp_impl.h -index f78a0da41a..e7bb109527 100644 ---- a/modules/rtp_rtcp/source/rtp_rtcp_impl.h -+++ b/modules/rtp_rtcp/source/rtp_rtcp_impl.h -@@ -184,7 +184,8 @@ class ABSL_DEPRECATED("") ModuleRtpRtcpImpl - - void RemoteRTCPSenderInfo(uint32_t* packet_count, - uint32_t* octet_count, -- int64_t* ntp_timestamp_ms) const override; -+ int64_t* ntp_timestamp_ms, -+ int64_t* remote_ntp_timestamp_ms) const override; - - // A snapshot of the most recent Report Block with additional data of - // interest to statistics. Used to implement RTCRemoteInboundRtpStreamStats. -diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc -index 241ed27a56..80d388a4fd 100644 ---- a/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc -+++ b/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc -@@ -506,11 +506,11 @@ void ModuleRtpRtcpImpl2::GetSendStreamDataCounters( - } - - // Received RTCP report. 
--void ModuleRtpRtcpImpl2::RemoteRTCPSenderInfo(uint32_t* packet_count, -- uint32_t* octet_count, -- int64_t* ntp_timestamp_ms) const { -- return rtcp_receiver_.RemoteRTCPSenderInfo(packet_count, octet_count, -- ntp_timestamp_ms); -+void ModuleRtpRtcpImpl2::RemoteRTCPSenderInfo( -+ uint32_t* packet_count, uint32_t* octet_count, int64_t* ntp_timestamp_ms, -+ int64_t* remote_ntp_timestamp_ms) const { -+ return rtcp_receiver_.RemoteRTCPSenderInfo( -+ packet_count, octet_count, ntp_timestamp_ms, remote_ntp_timestamp_ms); - } - - std::vector ModuleRtpRtcpImpl2::GetLatestReportBlockData() -diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl2.h b/modules/rtp_rtcp/source/rtp_rtcp_impl2.h -index f9c77a57b0..e9da4d4f23 100644 ---- a/modules/rtp_rtcp/source/rtp_rtcp_impl2.h -+++ b/modules/rtp_rtcp/source/rtp_rtcp_impl2.h -@@ -196,7 +196,8 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, - - void RemoteRTCPSenderInfo(uint32_t* packet_count, - uint32_t* octet_count, -- int64_t* ntp_timestamp_ms) const override; -+ int64_t* ntp_timestamp_ms, -+ int64_t* remote_ntp_timestamp_ms) const override; - - // A snapshot of the most recent Report Block with additional data of - // interest to statistics. Used to implement RTCRemoteInboundRtpStreamStats. -diff --git a/modules/rtp_rtcp/source/rtp_rtcp_interface.h b/modules/rtp_rtcp/source/rtp_rtcp_interface.h -index b43832b5fb..b0a85d00c6 100644 ---- a/modules/rtp_rtcp/source/rtp_rtcp_interface.h -+++ b/modules/rtp_rtcp/source/rtp_rtcp_interface.h -@@ -391,10 +391,11 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { - StreamDataCounters* rtx_counters) const = 0; - - -- // Returns packet count, octet count, and timestamp from RTCP sender report. -+ // Returns packet count, octet count, and timestamps from RTCP sender report. - virtual void RemoteRTCPSenderInfo(uint32_t* packet_count, - uint32_t* octet_count, -- int64_t* ntp_timestamp_ms) const = 0; -+ int64_t* ntp_timestamp_ms, -+ int64_t* remote_ntp_timestamp_ms) const = 0; - // A snapshot of Report Blocks with additional data of interest to statistics. - // Within this list, the sender-source SSRC pair is unique and per-pair the - // ReportBlockData represents the latest Report Block that was received for -diff --git a/video/rtp_video_stream_receiver2.cc b/video/rtp_video_stream_receiver2.cc -index be36dc8305..72d3596f96 100644 ---- a/video/rtp_video_stream_receiver2.cc -+++ b/video/rtp_video_stream_receiver2.cc -@@ -1056,9 +1056,10 @@ absl::optional RtpVideoStreamReceiver2::LastReceivedKeyframePacketMs() - // seem to be any support for these stats right now. So, we hack this in. - void RtpVideoStreamReceiver2::RemoteRTCPSenderInfo( - uint32_t* packet_count, uint32_t* octet_count, -- int64_t* ntp_timestamp_ms) const { -+ int64_t* ntp_timestamp_ms, int64_t* remote_ntp_timestamp_ms) const { - RTC_DCHECK_RUN_ON(&worker_task_checker_); -- rtp_rtcp_->RemoteRTCPSenderInfo(packet_count, octet_count, ntp_timestamp_ms); -+ rtp_rtcp_->RemoteRTCPSenderInfo(packet_count, octet_count, ntp_timestamp_ms, -+ remote_ntp_timestamp_ms); - } - - void RtpVideoStreamReceiver2::ManageFrame( -diff --git a/video/rtp_video_stream_receiver2.h b/video/rtp_video_stream_receiver2.h -index 53378e9e9b..dc9cea422d 100644 ---- a/video/rtp_video_stream_receiver2.h -+++ b/video/rtp_video_stream_receiver2.h -@@ -210,7 +210,8 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, - // stats at all, and even on the most recent libwebrtc code there does not - // seem to be any support for these stats right now. 
So, we hack this in. - void RemoteRTCPSenderInfo(uint32_t* packet_count, uint32_t* octet_count, -- int64_t* ntp_timestamp_ms) const; -+ int64_t* ntp_timestamp_ms, -+ int64_t* remote_ntp_timestamp_ms) const; - - private: - // Implements RtpVideoFrameReceiver. -diff --git a/video/video_receive_stream2.cc b/video/video_receive_stream2.cc -index ee784ef592..2263f1dbf8 100644 ---- a/video/video_receive_stream2.cc -+++ b/video/video_receive_stream2.cc -@@ -580,7 +580,8 @@ VideoReceiveStreamInterface::Stats VideoReceiveStream2::GetStats() const { - // seem to be any support for these stats right now. So, we hack this in. - rtp_video_stream_receiver_.RemoteRTCPSenderInfo( - &stats.rtcp_sender_packets_sent, &stats.rtcp_sender_octets_sent, -- &stats.rtcp_sender_ntp_timestamp_ms); -+ &stats.rtcp_sender_ntp_timestamp_ms, -+ &stats.rtcp_sender_remote_ntp_timestamp_ms); - - return stats; - } + #include -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0052.patch b/third_party/libwebrtc/moz-patch-stack/0052.patch index cbce6b9258fc..c4373a82d25f 100644 --- a/third_party/libwebrtc/moz-patch-stack/0052.patch +++ b/third_party/libwebrtc/moz-patch-stack/0052.patch @@ -1,26 +1,199 @@ From: Andreas Pehrson Date: Tue, 2 Nov 2021 14:35:00 +0000 -Subject: Bug 1729455 - Ensure the libwebrtc system clock is not used. r=bwc +Subject: Bug 1729455 - Inject RTCStatsTimestampMakerRealtimeClock into Call + instances. r=bwc -Differential Revision: https://phabricator.services.mozilla.com/D128244 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/3e8ac168ee3db089dd892bf140df53e15d6f0918 +This patch makes libwebrtc use our clock for timestamps. +It also makes sure there's no use of the libwebrtc realtime clock, other than +for relative time tracking (like timeouts), and that future libwebrtc updates +don't introduce unaudited use of it. + +Differential Revision: https://phabricator.services.mozilla.com/D127714 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/0744d68b8c944e69945de4ac5c4ca71332e78ad8 --- - rtc_base/system_time.cc | 2 ++ - 1 file changed, 2 insertions(+) + audio/channel_send.cc | 2 +- + call/call.cc | 2 ++ + call/call_factory.cc | 4 ++++ + call/degraded_call.cc | 2 ++ + modules/audio_coding/acm2/acm_receiver.cc | 2 +- + modules/rtp_rtcp/include/flexfec_receiver.h | 2 ++ + modules/rtp_rtcp/source/flexfec_receiver.cc | 2 ++ + rtc_base/task_utils/repeating_task.h | 4 ++-- + system_wrappers/include/clock.h | 2 +- + system_wrappers/source/clock.cc | 2 +- + 10 files changed, 18 insertions(+), 6 deletions(-) -diff --git a/rtc_base/system_time.cc b/rtc_base/system_time.cc -index 058e6c2990..1a5e447916 100644 ---- a/rtc_base/system_time.cc -+++ b/rtc_base/system_time.cc -@@ -12,6 +12,8 @@ - // rtc::SystemTimeNanos() must be provided externally. - #ifndef WEBRTC_EXCLUDE_SYSTEM_TIME +diff --git a/audio/channel_send.cc b/audio/channel_send.cc +index 4c1d056cc5..5ee97f290e 100644 +--- a/audio/channel_send.cc ++++ b/audio/channel_send.cc +@@ -428,7 +428,7 @@ ChannelSend::ChannelSend( + transport_controller->GetRtcpObserver(); + configuration.transport_feedback_callback = + transport_controller->transport_feedback_observer(); +- configuration.clock = (clock ? 
clock : Clock::GetRealTimeClock()); ++ configuration.clock = clock; + configuration.audio = true; + configuration.outgoing_transport = rtp_transport; -+#error Mozilla: Must not use the built-in libwebrtc clock -+ - #include +diff --git a/call/call.cc b/call/call.cc +index fa5d14d204..85297d9568 100644 +--- a/call/call.cc ++++ b/call/call.cc +@@ -478,12 +478,14 @@ std::string Call::Stats::ToString(int64_t time_ms) const { + return ss.str(); + } - #include ++/* Mozilla: Avoid this since it could use GetRealTimeClock(). + Call* Call::Create(const Call::Config& config) { + Clock* clock = Clock::GetRealTimeClock(); + return Create(config, clock, + RtpTransportControllerSendFactory().Create( + config.ExtractTransportConfig(), clock)); + } ++ */ + + Call* Call::Create(const Call::Config& config, + Clock* clock, +diff --git a/call/call_factory.cc b/call/call_factory.cc +index 380e80ce12..253f8cd7de 100644 +--- a/call/call_factory.cc ++++ b/call/call_factory.cc +@@ -95,6 +95,9 @@ Call* CallFactory::CreateCall(const Call::Config& config) { + + RtpTransportConfig transportConfig = config.ExtractTransportConfig(); + ++ RTC_CHECK(false); ++ return nullptr; ++ /* Mozilla: Avoid this since it could use GetRealTimeClock(). + Call* call = + Call::Create(config, Clock::GetRealTimeClock(), + config.rtp_transport_controller_send_factory->Create( +@@ -107,6 +110,7 @@ Call* CallFactory::CreateCall(const Call::Config& config) { + } + + return call; ++ */ + } + + std::unique_ptr CreateCallFactory() { +diff --git a/call/degraded_call.cc b/call/degraded_call.cc +index 3f47fcded0..114be134ab 100644 +--- a/call/degraded_call.cc ++++ b/call/degraded_call.cc +@@ -129,6 +129,7 @@ bool DegradedCall::FakeNetworkPipeTransportAdapter::SendRtcp( + return true; + } + ++/* Mozilla: Avoid this since it could use GetRealTimeClock(). + DegradedCall::DegradedCall( + std::unique_ptr call, + const std::vector& send_configs, +@@ -165,6 +166,7 @@ DegradedCall::DegradedCall( + } + } + } ++*/ + + DegradedCall::~DegradedCall() { + RTC_DCHECK_RUN_ON(call_->worker_thread()); +diff --git a/modules/audio_coding/acm2/acm_receiver.cc b/modules/audio_coding/acm2/acm_receiver.cc +index a77e472ec1..a5bf88e547 100644 +--- a/modules/audio_coding/acm2/acm_receiver.cc ++++ b/modules/audio_coding/acm2/acm_receiver.cc +@@ -50,7 +50,7 @@ std::unique_ptr CreateNetEq( + + AcmReceiver::Config::Config( + rtc::scoped_refptr decoder_factory) +- : clock(*Clock::GetRealTimeClock()), decoder_factory(decoder_factory) { ++ : clock(*Clock::GetRealTimeClockRaw()), decoder_factory(decoder_factory) { + // Post-decode VAD is disabled by default in NetEq, however, Audio + // Conference Mixer relies on VAD decisions and fails without them. + neteq_config.enable_post_decode_vad = true; +diff --git a/modules/rtp_rtcp/include/flexfec_receiver.h b/modules/rtp_rtcp/include/flexfec_receiver.h +index a869c8ad41..b6a33882d1 100644 +--- a/modules/rtp_rtcp/include/flexfec_receiver.h ++++ b/modules/rtp_rtcp/include/flexfec_receiver.h +@@ -30,9 +30,11 @@ class Clock; + + class FlexfecReceiver { + public: ++ /* Mozilla: Avoid this since it could use GetRealTimeClock(). 
+ FlexfecReceiver(uint32_t ssrc, + uint32_t protected_media_ssrc, + RecoveredPacketReceiver* recovered_packet_receiver); ++ */ + FlexfecReceiver(Clock* clock, + uint32_t ssrc, + uint32_t protected_media_ssrc, +diff --git a/modules/rtp_rtcp/source/flexfec_receiver.cc b/modules/rtp_rtcp/source/flexfec_receiver.cc +index 9f30a726b1..fa89d07c62 100644 +--- a/modules/rtp_rtcp/source/flexfec_receiver.cc ++++ b/modules/rtp_rtcp/source/flexfec_receiver.cc +@@ -31,6 +31,7 @@ constexpr TimeDelta kPacketLogInterval = TimeDelta::Seconds(10); + + } // namespace + ++/* Mozilla: Avoid this since it could use GetRealTimeClock(). + FlexfecReceiver::FlexfecReceiver( + uint32_t ssrc, + uint32_t protected_media_ssrc, +@@ -39,6 +40,7 @@ FlexfecReceiver::FlexfecReceiver( + ssrc, + protected_media_ssrc, + recovered_packet_receiver) {} ++ */ + + FlexfecReceiver::FlexfecReceiver( + Clock* clock, +diff --git a/rtc_base/task_utils/repeating_task.h b/rtc_base/task_utils/repeating_task.h +index c45de95ecc..28c691c3de 100644 +--- a/rtc_base/task_utils/repeating_task.h ++++ b/rtc_base/task_utils/repeating_task.h +@@ -57,7 +57,7 @@ class RepeatingTaskHandle { + absl::AnyInvocable closure, + TaskQueueBase::DelayPrecision precision = + TaskQueueBase::DelayPrecision::kLow, +- Clock* clock = Clock::GetRealTimeClock(), ++ Clock* clock = Clock::GetRealTimeClockRaw(), + const Location& location = Location::Current()); + + // DelayedStart is equivalent to Start except that the first invocation of the +@@ -68,7 +68,7 @@ class RepeatingTaskHandle { + absl::AnyInvocable closure, + TaskQueueBase::DelayPrecision precision = + TaskQueueBase::DelayPrecision::kLow, +- Clock* clock = Clock::GetRealTimeClock(), ++ Clock* clock = Clock::GetRealTimeClockRaw(), + const Location& location = Location::Current()); + + // Stops future invocations of the repeating task closure. Can only be called +diff --git a/system_wrappers/include/clock.h b/system_wrappers/include/clock.h +index 60296070cc..214b34c970 100644 +--- a/system_wrappers/include/clock.h ++++ b/system_wrappers/include/clock.h +@@ -49,7 +49,7 @@ class RTC_EXPORT Clock { + } + + // Returns an instance of the real-time system clock implementation. +- static Clock* GetRealTimeClock(); ++ static Clock* GetRealTimeClockRaw(); + }; + + class SimulatedClock : public Clock { +diff --git a/system_wrappers/source/clock.cc b/system_wrappers/source/clock.cc +index 88c99d6a68..f7460b831c 100644 +--- a/system_wrappers/source/clock.cc ++++ b/system_wrappers/source/clock.cc +@@ -57,7 +57,7 @@ class RealTimeClock : public Clock { + } + }; + +-Clock* Clock::GetRealTimeClock() { ++Clock* Clock::GetRealTimeClockRaw() { + static Clock* const clock = new RealTimeClock(); + return clock; + } -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0053.patch b/third_party/libwebrtc/moz-patch-stack/0053.patch index 0b486e91f18d..bf57f0375304 100644 --- a/third_party/libwebrtc/moz-patch-stack/0053.patch +++ b/third_party/libwebrtc/moz-patch-stack/0053.patch @@ -1,199 +1,91 @@ -From: Andreas Pehrson -Date: Tue, 2 Nov 2021 14:35:00 +0000 -Subject: Bug 1729455 - Inject RTCStatsTimestampMakerRealtimeClock into Call - instances. r=bwc +From: Landry Breuil +Date: Wed, 22 Dec 2021 00:09:00 +0000 +Subject: Bug 1654448 - P2 - readd partial support for BSD to webrtc + build;r=mjf -This patch makes libwebrtc use our clock for timestamps. 
-It also makes sure there's no use of the libwebrtc realtime clock, other than -for relative time tracking (like timeouts), and that future libwebrtc updates -don't introduce unaudited use of it. +only OpenBSD/amd64 is supported for now -Differential Revision: https://phabricator.services.mozilla.com/D127714 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/0744d68b8c944e69945de4ac5c4ca71332e78ad8 +Depends on D134432 + +Differential Revision: https://phabricator.services.mozilla.com/D134433 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/0300b32b7de70fb8976dc82d7d3bb3adb9685857 --- - audio/channel_send.cc | 2 +- - call/call.cc | 2 ++ - call/call_factory.cc | 4 ++++ - call/degraded_call.cc | 2 ++ - modules/audio_coding/acm2/acm_receiver.cc | 2 +- - modules/rtp_rtcp/include/flexfec_receiver.h | 2 ++ - modules/rtp_rtcp/source/flexfec_receiver.cc | 2 ++ - rtc_base/task_utils/repeating_task.h | 4 ++-- - system_wrappers/include/clock.h | 2 +- - system_wrappers/source/clock.cc | 2 +- - 10 files changed, 18 insertions(+), 6 deletions(-) + BUILD.gn | 3 +++ + modules/video_capture/BUILD.gn | 2 +- + modules/video_capture/linux/device_info_v4l2.h | 2 ++ + rtc_base/platform_thread_types.cc | 4 +++- + webrtc.gni | 2 +- + 5 files changed, 10 insertions(+), 3 deletions(-) -diff --git a/audio/channel_send.cc b/audio/channel_send.cc -index 4c1d056cc5..5ee97f290e 100644 ---- a/audio/channel_send.cc -+++ b/audio/channel_send.cc -@@ -428,7 +428,7 @@ ChannelSend::ChannelSend( - transport_controller->GetRtcpObserver(); - configuration.transport_feedback_callback = - transport_controller->transport_feedback_observer(); -- configuration.clock = (clock ? clock : Clock::GetRealTimeClock()); -+ configuration.clock = clock; - configuration.audio = true; - configuration.outgoing_transport = rtp_transport; - -diff --git a/call/call.cc b/call/call.cc -index b55492e53f..46c1dd3b9e 100644 ---- a/call/call.cc -+++ b/call/call.cc -@@ -478,12 +478,14 @@ std::string Call::Stats::ToString(int64_t time_ms) const { - return ss.str(); - } - -+/* Mozilla: Avoid this since it could use GetRealTimeClock(). - Call* Call::Create(const Call::Config& config) { - Clock* clock = Clock::GetRealTimeClock(); - return Create(config, clock, - RtpTransportControllerSendFactory().Create( - config.ExtractTransportConfig(), clock)); - } -+ */ - - Call* Call::Create(const Call::Config& config, - Clock* clock, -diff --git a/call/call_factory.cc b/call/call_factory.cc -index 380e80ce12..253f8cd7de 100644 ---- a/call/call_factory.cc -+++ b/call/call_factory.cc -@@ -95,6 +95,9 @@ Call* CallFactory::CreateCall(const Call::Config& config) { - - RtpTransportConfig transportConfig = config.ExtractTransportConfig(); - -+ RTC_CHECK(false); -+ return nullptr; -+ /* Mozilla: Avoid this since it could use GetRealTimeClock(). 
- Call* call = - Call::Create(config, Clock::GetRealTimeClock(), - config.rtp_transport_controller_send_factory->Create( -@@ -107,6 +110,7 @@ Call* CallFactory::CreateCall(const Call::Config& config) { +diff --git a/BUILD.gn b/BUILD.gn +index 49b2e7a57f..e51827a372 100644 +--- a/BUILD.gn ++++ b/BUILD.gn +@@ -220,6 +220,9 @@ config("common_inherited_config") { + if (is_linux || is_chromeos) { + defines += [ "WEBRTC_LINUX" ] } - - return call; -+ */ - } - - std::unique_ptr CreateCallFactory() { -diff --git a/call/degraded_call.cc b/call/degraded_call.cc -index 3f47fcded0..114be134ab 100644 ---- a/call/degraded_call.cc -+++ b/call/degraded_call.cc -@@ -129,6 +129,7 @@ bool DegradedCall::FakeNetworkPipeTransportAdapter::SendRtcp( - return true; - } - -+/* Mozilla: Avoid this since it could use GetRealTimeClock(). - DegradedCall::DegradedCall( - std::unique_ptr call, - const std::vector& send_configs, -@@ -165,6 +166,7 @@ DegradedCall::DegradedCall( - } ++ if (is_bsd) { ++ defines += [ "WEBRTC_BSD" ] ++ } + if (is_mac) { + defines += [ "WEBRTC_MAC" ] } +diff --git a/modules/video_capture/BUILD.gn b/modules/video_capture/BUILD.gn +index d473dbb74c..8f89918359 100644 +--- a/modules/video_capture/BUILD.gn ++++ b/modules/video_capture/BUILD.gn +@@ -71,7 +71,7 @@ if (!build_with_chromium || is_linux || is_chromeos) { + "video_capture_options.h", + ] + +- if (is_linux || is_chromeos) { ++ if (is_linux || is_bsd || is_chromeos) { + sources += [ + "linux/device_info_linux.cc", + "linux/device_info_v4l2.cc", +diff --git a/modules/video_capture/linux/device_info_v4l2.h b/modules/video_capture/linux/device_info_v4l2.h +index e3c2395f49..119cb07ab8 100644 +--- a/modules/video_capture/linux/device_info_v4l2.h ++++ b/modules/video_capture/linux/device_info_v4l2.h +@@ -16,7 +16,9 @@ + #include "modules/video_capture/device_info_impl.h" + + #include "rtc_base/platform_thread.h" ++#ifdef WEBRTC_LINUX + #include ++#endif + + struct v4l2_capability; + +diff --git a/rtc_base/platform_thread_types.cc b/rtc_base/platform_thread_types.cc +index d64ea689bb..c3c6955a7b 100644 +--- a/rtc_base/platform_thread_types.cc ++++ b/rtc_base/platform_thread_types.cc +@@ -50,7 +50,9 @@ PlatformThreadId CurrentThreadId() { + return static_cast(pthread_self()); + #else + // Default implementation for nacl and solaris. +- return reinterpret_cast(pthread_self()); ++ // WEBRTC_BSD: pthread_t is a pointer, so cannot be casted to pid_t ++ // (aka int32_t) on 64-bit archs. Required on OpenBSD. ++ return reinterpret_cast(pthread_self()); + #endif + #endif // defined(WEBRTC_POSIX) } -+*/ +diff --git a/webrtc.gni b/webrtc.gni +index f47db9bec4..11f76de10b 100644 +--- a/webrtc.gni ++++ b/webrtc.gni +@@ -348,7 +348,7 @@ rtc_opus_dir = "//third_party/opus" - DegradedCall::~DegradedCall() { - RTC_DCHECK_RUN_ON(call_->worker_thread()); -diff --git a/modules/audio_coding/acm2/acm_receiver.cc b/modules/audio_coding/acm2/acm_receiver.cc -index a77e472ec1..a5bf88e547 100644 ---- a/modules/audio_coding/acm2/acm_receiver.cc -+++ b/modules/audio_coding/acm2/acm_receiver.cc -@@ -50,7 +50,7 @@ std::unique_ptr CreateNetEq( + # Desktop capturer is supported only on Windows, OSX and Linux. 
+ rtc_desktop_capture_supported = +- (is_win && current_os != "winuwp") || is_mac || ++ (is_win && current_os != "winuwp") || is_mac || is_bsd || + ((is_linux || is_chromeos) && (rtc_use_x11_extensions || rtc_use_pipewire)) - AcmReceiver::Config::Config( - rtc::scoped_refptr decoder_factory) -- : clock(*Clock::GetRealTimeClock()), decoder_factory(decoder_factory) { -+ : clock(*Clock::GetRealTimeClockRaw()), decoder_factory(decoder_factory) { - // Post-decode VAD is disabled by default in NetEq, however, Audio - // Conference Mixer relies on VAD decisions and fails without them. - neteq_config.enable_post_decode_vad = true; -diff --git a/modules/rtp_rtcp/include/flexfec_receiver.h b/modules/rtp_rtcp/include/flexfec_receiver.h -index a869c8ad41..b6a33882d1 100644 ---- a/modules/rtp_rtcp/include/flexfec_receiver.h -+++ b/modules/rtp_rtcp/include/flexfec_receiver.h -@@ -30,9 +30,11 @@ class Clock; - - class FlexfecReceiver { - public: -+ /* Mozilla: Avoid this since it could use GetRealTimeClock(). - FlexfecReceiver(uint32_t ssrc, - uint32_t protected_media_ssrc, - RecoveredPacketReceiver* recovered_packet_receiver); -+ */ - FlexfecReceiver(Clock* clock, - uint32_t ssrc, - uint32_t protected_media_ssrc, -diff --git a/modules/rtp_rtcp/source/flexfec_receiver.cc b/modules/rtp_rtcp/source/flexfec_receiver.cc -index 9f30a726b1..fa89d07c62 100644 ---- a/modules/rtp_rtcp/source/flexfec_receiver.cc -+++ b/modules/rtp_rtcp/source/flexfec_receiver.cc -@@ -31,6 +31,7 @@ constexpr TimeDelta kPacketLogInterval = TimeDelta::Seconds(10); - - } // namespace - -+/* Mozilla: Avoid this since it could use GetRealTimeClock(). - FlexfecReceiver::FlexfecReceiver( - uint32_t ssrc, - uint32_t protected_media_ssrc, -@@ -39,6 +40,7 @@ FlexfecReceiver::FlexfecReceiver( - ssrc, - protected_media_ssrc, - recovered_packet_receiver) {} -+ */ - - FlexfecReceiver::FlexfecReceiver( - Clock* clock, -diff --git a/rtc_base/task_utils/repeating_task.h b/rtc_base/task_utils/repeating_task.h -index c45de95ecc..28c691c3de 100644 ---- a/rtc_base/task_utils/repeating_task.h -+++ b/rtc_base/task_utils/repeating_task.h -@@ -57,7 +57,7 @@ class RepeatingTaskHandle { - absl::AnyInvocable closure, - TaskQueueBase::DelayPrecision precision = - TaskQueueBase::DelayPrecision::kLow, -- Clock* clock = Clock::GetRealTimeClock(), -+ Clock* clock = Clock::GetRealTimeClockRaw(), - const Location& location = Location::Current()); - - // DelayedStart is equivalent to Start except that the first invocation of the -@@ -68,7 +68,7 @@ class RepeatingTaskHandle { - absl::AnyInvocable closure, - TaskQueueBase::DelayPrecision precision = - TaskQueueBase::DelayPrecision::kLow, -- Clock* clock = Clock::GetRealTimeClock(), -+ Clock* clock = Clock::GetRealTimeClockRaw(), - const Location& location = Location::Current()); - - // Stops future invocations of the repeating task closure. Can only be called -diff --git a/system_wrappers/include/clock.h b/system_wrappers/include/clock.h -index 60296070cc..214b34c970 100644 ---- a/system_wrappers/include/clock.h -+++ b/system_wrappers/include/clock.h -@@ -49,7 +49,7 @@ class RTC_EXPORT Clock { - } - - // Returns an instance of the real-time system clock implementation. 
-- static Clock* GetRealTimeClock(); -+ static Clock* GetRealTimeClockRaw(); - }; - - class SimulatedClock : public Clock { -diff --git a/system_wrappers/source/clock.cc b/system_wrappers/source/clock.cc -index 88c99d6a68..f7460b831c 100644 ---- a/system_wrappers/source/clock.cc -+++ b/system_wrappers/source/clock.cc -@@ -57,7 +57,7 @@ class RealTimeClock : public Clock { - } - }; - --Clock* Clock::GetRealTimeClock() { -+Clock* Clock::GetRealTimeClockRaw() { - static Clock* const clock = new RealTimeClock(); - return clock; - } + ############################################################################### -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0054.patch b/third_party/libwebrtc/moz-patch-stack/0054.patch index 8a92b510bbf2..d348eecb8dc0 100644 --- a/third_party/libwebrtc/moz-patch-stack/0054.patch +++ b/third_party/libwebrtc/moz-patch-stack/0054.patch @@ -1,91 +1,163 @@ -From: Landry Breuil -Date: Wed, 22 Dec 2021 00:09:00 +0000 -Subject: Bug 1654448 - P2 - readd partial support for BSD to webrtc - build;r=mjf +From: Michael Froman +Date: Mon, 4 Apr 2022 12:25:26 -0500 +Subject: Bug 1766646 - (fix) breakout Call::Stats and SharedModuleThread into + seperate files -only OpenBSD/amd64 is supported for now - -Depends on D134432 - -Differential Revision: https://phabricator.services.mozilla.com/D134433 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/0300b32b7de70fb8976dc82d7d3bb3adb9685857 --- - BUILD.gn | 3 +++ - modules/video_capture/BUILD.gn | 2 +- - modules/video_capture/linux/device_info_v4l2.h | 2 ++ - rtc_base/platform_thread_types.cc | 4 +++- - webrtc.gni | 2 +- - 5 files changed, 10 insertions(+), 3 deletions(-) + call/BUILD.gn | 6 ++++++ + call/call.cc | 13 ------------- + call/call.h | 13 ++----------- + call/call_basic_stats.cc | 20 ++++++++++++++++++++ + call/call_basic_stats.h | 21 +++++++++++++++++++++ + video/video_send_stream.h | 1 - + 6 files changed, 49 insertions(+), 25 deletions(-) + create mode 100644 call/call_basic_stats.cc + create mode 100644 call/call_basic_stats.h -diff --git a/BUILD.gn b/BUILD.gn -index ac93b47716..f43142e055 100644 ---- a/BUILD.gn -+++ b/BUILD.gn -@@ -212,6 +212,9 @@ config("common_inherited_config") { - if (is_linux || is_chromeos) { - defines += [ "WEBRTC_LINUX" ] - } -+ if (is_bsd) { -+ defines += [ "WEBRTC_BSD" ] +diff --git a/call/BUILD.gn b/call/BUILD.gn +index 173c220b9b..161cce992f 100644 +--- a/call/BUILD.gn ++++ b/call/BUILD.gn +@@ -33,6 +33,12 @@ rtc_library("call_interfaces") { + "syncable.cc", + "syncable.h", + ] ++ if (build_with_mozilla) { ++ sources += [ ++ "call_basic_stats.cc", ++ "call_basic_stats.h", ++ ] + } - if (is_mac) { - defines += [ "WEBRTC_MAC" ] - } -diff --git a/modules/video_capture/BUILD.gn b/modules/video_capture/BUILD.gn -index d473dbb74c..8f89918359 100644 ---- a/modules/video_capture/BUILD.gn -+++ b/modules/video_capture/BUILD.gn -@@ -71,7 +71,7 @@ if (!build_with_chromium || is_linux || is_chromeos) { - "video_capture_options.h", - ] -- if (is_linux || is_chromeos) { -+ if (is_linux || is_bsd || is_chromeos) { - sources += [ - "linux/device_info_linux.cc", - "linux/device_info_v4l2.cc", -diff --git a/modules/video_capture/linux/device_info_v4l2.h b/modules/video_capture/linux/device_info_v4l2.h -index e3c2395f49..119cb07ab8 100644 ---- a/modules/video_capture/linux/device_info_v4l2.h -+++ b/modules/video_capture/linux/device_info_v4l2.h -@@ -16,7 +16,9 @@ - #include "modules/video_capture/device_info_impl.h" + deps = [ + ":audio_sender_interface", +diff --git 
a/call/call.cc b/call/call.cc +index 85297d9568..bca6072042 100644 +--- a/call/call.cc ++++ b/call/call.cc +@@ -465,19 +465,6 @@ class Call final : public webrtc::Call, + }; + } // namespace internal - #include "rtc_base/platform_thread.h" -+#ifdef WEBRTC_LINUX - #include -+#endif +-std::string Call::Stats::ToString(int64_t time_ms) const { +- char buf[1024]; +- rtc::SimpleStringBuilder ss(buf); +- ss << "Call stats: " << time_ms << ", {"; +- ss << "send_bw_bps: " << send_bandwidth_bps << ", "; +- ss << "recv_bw_bps: " << recv_bandwidth_bps << ", "; +- ss << "max_pad_bps: " << max_padding_bitrate_bps << ", "; +- ss << "pacer_delay_ms: " << pacer_delay_ms << ", "; +- ss << "rtt_ms: " << rtt_ms; +- ss << '}'; +- return ss.str(); +-} +- + /* Mozilla: Avoid this since it could use GetRealTimeClock(). + Call* Call::Create(const Call::Config& config) { + Clock* clock = Clock::GetRealTimeClock(); +diff --git a/call/call.h b/call/call.h +index 366978392e..42daa95a6c 100644 +--- a/call/call.h ++++ b/call/call.h +@@ -21,6 +21,7 @@ + #include "api/task_queue/task_queue_base.h" + #include "call/audio_receive_stream.h" + #include "call/audio_send_stream.h" ++#include "call/call_basic_stats.h" + #include "call/call_config.h" + #include "call/flexfec_receive_stream.h" + #include "call/packet_receiver.h" +@@ -30,7 +31,6 @@ + #include "rtc_base/copy_on_write_buffer.h" + #include "rtc_base/network/sent_packet.h" + #include "rtc_base/network_route.h" +-#include "rtc_base/ref_count.h" - struct v4l2_capability; + namespace webrtc { -diff --git a/rtc_base/platform_thread_types.cc b/rtc_base/platform_thread_types.cc -index d64ea689bb..c3c6955a7b 100644 ---- a/rtc_base/platform_thread_types.cc -+++ b/rtc_base/platform_thread_types.cc -@@ -50,7 +50,9 @@ PlatformThreadId CurrentThreadId() { - return static_cast(pthread_self()); - #else - // Default implementation for nacl and solaris. -- return reinterpret_cast(pthread_self()); -+ // WEBRTC_BSD: pthread_t is a pointer, so cannot be casted to pid_t -+ // (aka int32_t) on 64-bit archs. Required on OpenBSD. -+ return reinterpret_cast(pthread_self()); - #endif - #endif // defined(WEBRTC_POSIX) - } -diff --git a/webrtc.gni b/webrtc.gni -index 9e49573c98..ae3f99cb67 100644 ---- a/webrtc.gni -+++ b/webrtc.gni -@@ -340,7 +340,7 @@ rtc_opus_dir = "//third_party/opus" +@@ -47,16 +47,7 @@ namespace webrtc { + class Call { + public: + using Config = CallConfig; +- +- struct Stats { +- std::string ToString(int64_t time_ms) const; +- +- int send_bandwidth_bps = 0; // Estimated available send bandwidth. +- int max_padding_bitrate_bps = 0; // Cumulative configured max padding. +- int recv_bandwidth_bps = 0; // Estimated available receive bandwidth. +- int64_t pacer_delay_ms = 0; +- int64_t rtt_ms = -1; +- }; ++ using Stats = CallBasicStats; - # Desktop capturer is supported only on Windows, OSX and Linux. 
- rtc_desktop_capture_supported = -- (is_win && current_os != "winuwp") || is_mac || -+ (is_win && current_os != "winuwp") || is_mac || is_bsd || - ((is_linux || is_chromeos) && (rtc_use_x11_extensions || rtc_use_pipewire)) + static Call* Create(const Call::Config& config); + static Call* Create(const Call::Config& config, +diff --git a/call/call_basic_stats.cc b/call/call_basic_stats.cc +new file mode 100644 +index 0000000000..74333a663b +--- /dev/null ++++ b/call/call_basic_stats.cc +@@ -0,0 +1,20 @@ ++#include "call/call_basic_stats.h" ++ ++#include "rtc_base/strings/string_builder.h" ++ ++namespace webrtc { ++ ++std::string CallBasicStats::ToString(int64_t time_ms) const { ++ char buf[1024]; ++ rtc::SimpleStringBuilder ss(buf); ++ ss << "Call stats: " << time_ms << ", {"; ++ ss << "send_bw_bps: " << send_bandwidth_bps << ", "; ++ ss << "recv_bw_bps: " << recv_bandwidth_bps << ", "; ++ ss << "max_pad_bps: " << max_padding_bitrate_bps << ", "; ++ ss << "pacer_delay_ms: " << pacer_delay_ms << ", "; ++ ss << "rtt_ms: " << rtt_ms; ++ ss << '}'; ++ return ss.str(); ++} ++ ++} // namespace webrtc +diff --git a/call/call_basic_stats.h b/call/call_basic_stats.h +new file mode 100644 +index 0000000000..98febe9405 +--- /dev/null ++++ b/call/call_basic_stats.h +@@ -0,0 +1,21 @@ ++#ifndef CALL_CALL_BASIC_STATS_H_ ++#define CALL_CALL_BASIC_STATS_H_ ++ ++#include ++ ++namespace webrtc { ++ ++// named to avoid conflicts with video/call_stats.h ++struct CallBasicStats { ++ std::string ToString(int64_t time_ms) const; ++ ++ int send_bandwidth_bps = 0; // Estimated available send bandwidth. ++ int max_padding_bitrate_bps = 0; // Cumulative configured max padding. ++ int recv_bandwidth_bps = 0; // Estimated available receive bandwidth. ++ int64_t pacer_delay_ms = 0; ++ int64_t rtt_ms = -1; ++}; ++ ++} // namespace webrtc ++ ++#endif // CALL_CALL_BASIC_STATS_H_ +diff --git a/video/video_send_stream.h b/video/video_send_stream.h +index 1f4717fbec..55103ac979 100644 +--- a/video/video_send_stream.h ++++ b/video/video_send_stream.h +@@ -36,7 +36,6 @@ namespace test { + class VideoSendStreamPeer; + } // namespace test - ############################################################################### +-class CallStats; + class IvfFileWriter; + class RateLimiter; + class RtpRtcp; -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0055.patch b/third_party/libwebrtc/moz-patch-stack/0055.patch index 7c2e5293a6e2..396dc2ee5414 100644 --- a/third_party/libwebrtc/moz-patch-stack/0055.patch +++ b/third_party/libwebrtc/moz-patch-stack/0055.patch @@ -1,163 +1,32 @@ From: Michael Froman -Date: Mon, 4 Apr 2022 12:25:26 -0500 -Subject: Bug 1766646 - (fix) breakout Call::Stats and SharedModuleThread into - seperate files +Date: Fri, 8 Apr 2022 11:36:36 -0500 +Subject: Bug 1766646 - (fix-b556b08668) avoid InlinedVector method that can + throw exception --- - call/BUILD.gn | 6 ++++++ - call/call.cc | 13 ------------- - call/call.h | 13 ++----------- - call/call_basic_stats.cc | 20 ++++++++++++++++++++ - call/call_basic_stats.h | 21 +++++++++++++++++++++ - video/video_send_stream.h | 1 - - 6 files changed, 49 insertions(+), 25 deletions(-) - create mode 100644 call/call_basic_stats.cc - create mode 100644 call/call_basic_stats.h + api/video_codecs/video_encoder.cc | 8 ++++++++ + 1 file changed, 8 insertions(+) -diff --git a/call/BUILD.gn b/call/BUILD.gn -index a85ad0c0d4..20e9241a83 100644 ---- a/call/BUILD.gn -+++ b/call/BUILD.gn -@@ -33,6 +33,12 @@ rtc_library("call_interfaces") { - "syncable.cc", - 
"syncable.h", - ] -+ if (build_with_mozilla) { -+ sources += [ -+ "call_basic_stats.cc", -+ "call_basic_stats.h", -+ ] -+ } - - deps = [ - ":audio_sender_interface", -diff --git a/call/call.cc b/call/call.cc -index 46c1dd3b9e..0421a21ee3 100644 ---- a/call/call.cc -+++ b/call/call.cc -@@ -465,19 +465,6 @@ class Call final : public webrtc::Call, - }; - } // namespace internal - --std::string Call::Stats::ToString(int64_t time_ms) const { -- char buf[1024]; -- rtc::SimpleStringBuilder ss(buf); -- ss << "Call stats: " << time_ms << ", {"; -- ss << "send_bw_bps: " << send_bandwidth_bps << ", "; -- ss << "recv_bw_bps: " << recv_bandwidth_bps << ", "; -- ss << "max_pad_bps: " << max_padding_bitrate_bps << ", "; -- ss << "pacer_delay_ms: " << pacer_delay_ms << ", "; -- ss << "rtt_ms: " << rtt_ms; -- ss << '}'; -- return ss.str(); --} -- - /* Mozilla: Avoid this since it could use GetRealTimeClock(). - Call* Call::Create(const Call::Config& config) { - Clock* clock = Clock::GetRealTimeClock(); -diff --git a/call/call.h b/call/call.h -index 366978392e..42daa95a6c 100644 ---- a/call/call.h -+++ b/call/call.h -@@ -21,6 +21,7 @@ - #include "api/task_queue/task_queue_base.h" - #include "call/audio_receive_stream.h" - #include "call/audio_send_stream.h" -+#include "call/call_basic_stats.h" - #include "call/call_config.h" - #include "call/flexfec_receive_stream.h" - #include "call/packet_receiver.h" -@@ -30,7 +31,6 @@ - #include "rtc_base/copy_on_write_buffer.h" - #include "rtc_base/network/sent_packet.h" - #include "rtc_base/network_route.h" --#include "rtc_base/ref_count.h" - - namespace webrtc { - -@@ -47,16 +47,7 @@ namespace webrtc { - class Call { - public: - using Config = CallConfig; -- -- struct Stats { -- std::string ToString(int64_t time_ms) const; -- -- int send_bandwidth_bps = 0; // Estimated available send bandwidth. -- int max_padding_bitrate_bps = 0; // Cumulative configured max padding. -- int recv_bandwidth_bps = 0; // Estimated available receive bandwidth. -- int64_t pacer_delay_ms = 0; -- int64_t rtt_ms = -1; -- }; -+ using Stats = CallBasicStats; - - static Call* Create(const Call::Config& config); - static Call* Create(const Call::Config& config, -diff --git a/call/call_basic_stats.cc b/call/call_basic_stats.cc -new file mode 100644 -index 0000000000..74333a663b ---- /dev/null -+++ b/call/call_basic_stats.cc -@@ -0,0 +1,20 @@ -+#include "call/call_basic_stats.h" -+ -+#include "rtc_base/strings/string_builder.h" -+ -+namespace webrtc { -+ -+std::string CallBasicStats::ToString(int64_t time_ms) const { -+ char buf[1024]; -+ rtc::SimpleStringBuilder ss(buf); -+ ss << "Call stats: " << time_ms << ", {"; -+ ss << "send_bw_bps: " << send_bandwidth_bps << ", "; -+ ss << "recv_bw_bps: " << recv_bandwidth_bps << ", "; -+ ss << "max_pad_bps: " << max_padding_bitrate_bps << ", "; -+ ss << "pacer_delay_ms: " << pacer_delay_ms << ", "; -+ ss << "rtt_ms: " << rtt_ms; -+ ss << '}'; -+ return ss.str(); -+} -+ -+} // namespace webrtc -diff --git a/call/call_basic_stats.h b/call/call_basic_stats.h -new file mode 100644 -index 0000000000..98febe9405 ---- /dev/null -+++ b/call/call_basic_stats.h -@@ -0,0 +1,21 @@ -+#ifndef CALL_CALL_BASIC_STATS_H_ -+#define CALL_CALL_BASIC_STATS_H_ -+ -+#include -+ -+namespace webrtc { -+ -+// named to avoid conflicts with video/call_stats.h -+struct CallBasicStats { -+ std::string ToString(int64_t time_ms) const; -+ -+ int send_bandwidth_bps = 0; // Estimated available send bandwidth. -+ int max_padding_bitrate_bps = 0; // Cumulative configured max padding. 
-+ int recv_bandwidth_bps = 0; // Estimated available receive bandwidth. -+ int64_t pacer_delay_ms = 0; -+ int64_t rtt_ms = -1; -+}; -+ -+} // namespace webrtc -+ -+#endif // CALL_CALL_BASIC_STATS_H_ -diff --git a/video/video_send_stream.h b/video/video_send_stream.h -index 1f4717fbec..55103ac979 100644 ---- a/video/video_send_stream.h -+++ b/video/video_send_stream.h -@@ -36,7 +36,6 @@ namespace test { - class VideoSendStreamPeer; - } // namespace test - --class CallStats; - class IvfFileWriter; - class RateLimiter; - class RtpRtcp; +diff --git a/api/video_codecs/video_encoder.cc b/api/video_codecs/video_encoder.cc +index b0fe078b37..d2de985d5c 100644 +--- a/api/video_codecs/video_encoder.cc ++++ b/api/video_codecs/video_encoder.cc +@@ -180,7 +180,15 @@ std::string VideoEncoder::EncoderInfo::ToString() const { + for (size_t i = 0; i < preferred_pixel_formats.size(); ++i) { + if (i > 0) + oss << ", "; ++#if defined(WEBRTC_MOZILLA_BUILD) ++ // This could assert, as opposed to throw using the form in the ++ // else, but since we're in a for loop that uses .size() we can ++ // be fairly sure that this is safe without doing a further ++ // check to make sure 'i' is in-range. ++ oss << VideoFrameBufferTypeToString(preferred_pixel_formats[i]); ++#else + oss << VideoFrameBufferTypeToString(preferred_pixel_formats.at(i)); ++#endif + } + oss << "]"; + if (is_qp_trusted.has_value()) { -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0056.patch b/third_party/libwebrtc/moz-patch-stack/0056.patch index e7c9d37660d7..09e0f1055e8b 100644 --- a/third_party/libwebrtc/moz-patch-stack/0056.patch +++ b/third_party/libwebrtc/moz-patch-stack/0056.patch @@ -1,32 +1,70 @@ From: Michael Froman -Date: Fri, 8 Apr 2022 11:36:36 -0500 -Subject: Bug 1766646 - (fix-b556b08668) avoid InlinedVector method that can - throw exception +Date: Thu, 28 Apr 2022 10:53:43 -0500 +Subject: Bug 1766646 - (fix-a0bb2ef2dc) add back VideoType enum values --- - api/video_codecs/video_encoder.cc | 8 ++++++++ - 1 file changed, 8 insertions(+) + common_video/libyuv/include/webrtc_libyuv.h | 3 +++ + common_video/libyuv/webrtc_libyuv.cc | 9 +++++++++ + 2 files changed, 12 insertions(+) -diff --git a/api/video_codecs/video_encoder.cc b/api/video_codecs/video_encoder.cc -index b0fe078b37..d2de985d5c 100644 ---- a/api/video_codecs/video_encoder.cc -+++ b/api/video_codecs/video_encoder.cc -@@ -180,7 +180,15 @@ std::string VideoEncoder::EncoderInfo::ToString() const { - for (size_t i = 0; i < preferred_pixel_formats.size(); ++i) { - if (i > 0) - oss << ", "; -+#if defined(WEBRTC_MOZILLA_BUILD) -+ // This could assert, as opposed to throw using the form in the -+ // else, but since we're in a for loop that uses .size() we can -+ // be fairly sure that this is safe without doing a further -+ // check to make sure 'i' is in-range. 
-+ oss << VideoFrameBufferTypeToString(preferred_pixel_formats[i]); -+#else - oss << VideoFrameBufferTypeToString(preferred_pixel_formats.at(i)); -+#endif +diff --git a/common_video/libyuv/include/webrtc_libyuv.h b/common_video/libyuv/include/webrtc_libyuv.h +index 68831c70ab..253a33294d 100644 +--- a/common_video/libyuv/include/webrtc_libyuv.h ++++ b/common_video/libyuv/include/webrtc_libyuv.h +@@ -35,11 +35,14 @@ enum class VideoType { + kBGR24, + kARGB, + kABGR, ++ kARGB4444, + kRGB565, ++ kARGB1555, + kYUY2, + kYV12, + kUYVY, + kMJPEG, ++ kNV21, + kBGRA, + kNV12, + }; +diff --git a/common_video/libyuv/webrtc_libyuv.cc b/common_video/libyuv/webrtc_libyuv.cc +index 31ba1feca3..05a4b184c2 100644 +--- a/common_video/libyuv/webrtc_libyuv.cc ++++ b/common_video/libyuv/webrtc_libyuv.cc +@@ -24,6 +24,7 @@ size_t CalcBufferSize(VideoType type, int width, int height) { + RTC_DCHECK_GE(height, 0); + switch (type) { + case VideoType::kI420: ++ case VideoType::kNV21: + case VideoType::kIYUV: + case VideoType::kYV12: + case VideoType::kNV12: { +@@ -31,7 +32,9 @@ size_t CalcBufferSize(VideoType type, int width, int height) { + int half_height = (height + 1) >> 1; + return width * height + half_width * half_height * 2; + } ++ case VideoType::kARGB4444: + case VideoType::kRGB565: ++ case VideoType::kARGB1555: + case VideoType::kYUY2: + case VideoType::kUYVY: + return width * height * 2; +@@ -104,10 +107,16 @@ int ConvertVideoType(VideoType video_type) { + return libyuv::FOURCC_UYVY; + case VideoType::kMJPEG: + return libyuv::FOURCC_MJPG; ++ case VideoType::kNV21: ++ return libyuv::FOURCC_NV21; + case VideoType::kARGB: + return libyuv::FOURCC_ARGB; + case VideoType::kBGRA: + return libyuv::FOURCC_BGRA; ++ case VideoType::kARGB4444: ++ return libyuv::FOURCC_R444; ++ case VideoType::kARGB1555: ++ return libyuv::FOURCC_RGBO; + case VideoType::kNV12: + return libyuv::FOURCC_NV12; } - oss << "]"; - if (is_qp_trusted.has_value()) { -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0057.patch b/third_party/libwebrtc/moz-patch-stack/0057.patch index a0f71c89d817..da277c8046b5 100644 --- a/third_party/libwebrtc/moz-patch-stack/0057.patch +++ b/third_party/libwebrtc/moz-patch-stack/0057.patch @@ -1,70 +1,76 @@ From: Michael Froman -Date: Thu, 28 Apr 2022 10:53:43 -0500 -Subject: Bug 1766646 - (fix-a0bb2ef2dc) add back VideoType enum values +Date: Thu, 19 May 2022 15:32:32 -0500 +Subject: Bug 1772380 - Build 1766646 - (fix-c89fdd716c) fixes for the + refactored PlatformThread API;r?mjf --- - common_video/libyuv/include/webrtc_libyuv.h | 3 +++ - common_video/libyuv/webrtc_libyuv.cc | 9 +++++++++ - 2 files changed, 12 insertions(+) + .../video_capture/linux/device_info_v4l2.cc | 20 ++++++------------- + .../video_capture/linux/device_info_v4l2.h | 3 +-- + 2 files changed, 7 insertions(+), 16 deletions(-) -diff --git a/common_video/libyuv/include/webrtc_libyuv.h b/common_video/libyuv/include/webrtc_libyuv.h -index 68831c70ab..253a33294d 100644 ---- a/common_video/libyuv/include/webrtc_libyuv.h -+++ b/common_video/libyuv/include/webrtc_libyuv.h -@@ -35,11 +35,14 @@ enum class VideoType { - kBGR24, - kARGB, - kABGR, -+ kARGB4444, - kRGB565, -+ kARGB1555, - kYUY2, - kYV12, - kUYVY, - kMJPEG, -+ kNV21, - kBGRA, - kNV12, - }; -diff --git a/common_video/libyuv/webrtc_libyuv.cc b/common_video/libyuv/webrtc_libyuv.cc -index 31ba1feca3..05a4b184c2 100644 ---- a/common_video/libyuv/webrtc_libyuv.cc -+++ b/common_video/libyuv/webrtc_libyuv.cc -@@ -24,6 +24,7 @@ size_t CalcBufferSize(VideoType type, int 
width, int height) { - RTC_DCHECK_GE(height, 0); - switch (type) { - case VideoType::kI420: -+ case VideoType::kNV21: - case VideoType::kIYUV: - case VideoType::kYV12: - case VideoType::kNV12: { -@@ -31,7 +32,9 @@ size_t CalcBufferSize(VideoType type, int width, int height) { - int half_height = (height + 1) >> 1; - return width * height + half_width * half_height * 2; +diff --git a/modules/video_capture/linux/device_info_v4l2.cc b/modules/video_capture/linux/device_info_v4l2.cc +index c5d33ff9a4..04caaea592 100644 +--- a/modules/video_capture/linux/device_info_v4l2.cc ++++ b/modules/video_capture/linux/device_info_v4l2.cc +@@ -151,11 +151,6 @@ int DeviceInfoV4l2::ProcessInotifyEvents() + return 0; + } + +-void DeviceInfoV4l2::InotifyEventThread(void* obj) +-{ +- static_cast (obj)->InotifyProcess(); +-} +- + void DeviceInfoV4l2::InotifyProcess() + { + _fd_v4l = inotify_init(); +@@ -181,16 +176,14 @@ void DeviceInfoV4l2::InotifyProcess() + + DeviceInfoV4l2::DeviceInfoV4l2() : DeviceInfoImpl() + #ifdef WEBRTC_LINUX +- , _inotifyEventThread(new rtc::PlatformThread( +- InotifyEventThread, this, "InotifyEventThread")) + , _isShutdown(false) + #endif + { + #ifdef WEBRTC_LINUX +- if (_inotifyEventThread) +- { +- _inotifyEventThread->Start(); +- } ++ _inotifyEventThread = rtc::PlatformThread::SpawnJoinable( ++ [this] { ++ InotifyProcess(); ++ }, "InotifyEventThread"); + #endif + } + +@@ -202,9 +195,8 @@ DeviceInfoV4l2::~DeviceInfoV4l2() { + #ifdef WEBRTC_LINUX + _isShutdown = true; + +- if (_inotifyEventThread) { +- _inotifyEventThread->Stop(); +- _inotifyEventThread = nullptr; ++ if (!_inotifyEventThread.empty()) { ++ _inotifyEventThread.Finalize(); } -+ case VideoType::kARGB4444: - case VideoType::kRGB565: -+ case VideoType::kARGB1555: - case VideoType::kYUY2: - case VideoType::kUYVY: - return width * height * 2; -@@ -104,10 +107,16 @@ int ConvertVideoType(VideoType video_type) { - return libyuv::FOURCC_UYVY; - case VideoType::kMJPEG: - return libyuv::FOURCC_MJPG; -+ case VideoType::kNV21: -+ return libyuv::FOURCC_NV21; - case VideoType::kARGB: - return libyuv::FOURCC_ARGB; - case VideoType::kBGRA: - return libyuv::FOURCC_BGRA; -+ case VideoType::kARGB4444: -+ return libyuv::FOURCC_R444; -+ case VideoType::kARGB1555: -+ return libyuv::FOURCC_RGBO; - case VideoType::kNV12: - return libyuv::FOURCC_NV12; - } + #endif + } +diff --git a/modules/video_capture/linux/device_info_v4l2.h b/modules/video_capture/linux/device_info_v4l2.h +index 119cb07ab8..0bec3eb765 100644 +--- a/modules/video_capture/linux/device_info_v4l2.h ++++ b/modules/video_capture/linux/device_info_v4l2.h +@@ -60,8 +60,7 @@ class DeviceInfoV4l2 : public DeviceInfoImpl { + int EventCheck(int fd); + int HandleEvents(int fd); + int ProcessInotifyEvents(); +- std::unique_ptr _inotifyEventThread; +- static void InotifyEventThread(void*); ++ rtc::PlatformThread _inotifyEventThread; + void InotifyProcess(); + int _fd_v4l, _fd_dev, _wd_v4l, _wd_dev; /* accessed on InotifyEventThread thread */ + std::atomic _isShutdown; -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0058.patch b/third_party/libwebrtc/moz-patch-stack/0058.patch index 940696a6e155..5fc44d5eb4bf 100644 --- a/third_party/libwebrtc/moz-patch-stack/0058.patch +++ b/third_party/libwebrtc/moz-patch-stack/0058.patch @@ -1,76 +1,174 @@ -From: Michael Froman -Date: Thu, 19 May 2022 15:32:32 -0500 -Subject: Bug 1772380 - Build 1766646 - (fix-c89fdd716c) fixes for the - refactored PlatformThread API;r?mjf +From: Andreas Pehrson +Date: Mon, 18 Jan 2021 11:07:00 
+0100 +Subject: Bug 1766646 - (fix-ae0d117d51) ifdef our Csrc impl vs upstream's + impl, see Bug 1771332. --- - .../video_capture/linux/device_info_v4l2.cc | 20 ++++++------------- - .../video_capture/linux/device_info_v4l2.h | 3 +-- - 2 files changed, 7 insertions(+), 16 deletions(-) + modules/rtp_rtcp/source/rtp_header_extensions.cc | 4 ++++ + modules/rtp_rtcp/source/rtp_header_extensions.h | 4 ++++ + modules/rtp_rtcp/source/rtp_packet.cc | 4 ++++ + modules/rtp_rtcp/source/rtp_sender.cc | 4 ++++ + test/fuzzers/rtp_packet_fuzzer.cc | 4 ++++ + 5 files changed, 20 insertions(+) -diff --git a/modules/video_capture/linux/device_info_v4l2.cc b/modules/video_capture/linux/device_info_v4l2.cc -index c5d33ff9a4..04caaea592 100644 ---- a/modules/video_capture/linux/device_info_v4l2.cc -+++ b/modules/video_capture/linux/device_info_v4l2.cc -@@ -151,11 +151,6 @@ int DeviceInfoV4l2::ProcessInotifyEvents() - return 0; +diff --git a/modules/rtp_rtcp/source/rtp_header_extensions.cc b/modules/rtp_rtcp/source/rtp_header_extensions.cc +index 88815b5601..5f32cb7b39 100644 +--- a/modules/rtp_rtcp/source/rtp_header_extensions.cc ++++ b/modules/rtp_rtcp/source/rtp_header_extensions.cc +@@ -185,6 +185,7 @@ bool AudioLevel::Write(rtc::ArrayView data, + return true; } --void DeviceInfoV4l2::InotifyEventThread(void* obj) --{ -- static_cast (obj)->InotifyProcess(); --} -- - void DeviceInfoV4l2::InotifyProcess() - { - _fd_v4l = inotify_init(); -@@ -181,16 +176,14 @@ void DeviceInfoV4l2::InotifyProcess() ++#if !defined(WEBRTC_MOZILLA_BUILD) + // An RTP Header Extension for Mixer-to-Client Audio Level Indication + // + // https://tools.ietf.org/html/rfc6465 +@@ -237,6 +238,7 @@ bool CsrcAudioLevel::Write(rtc::ArrayView data, + } + return true; + } ++#endif - DeviceInfoV4l2::DeviceInfoV4l2() : DeviceInfoImpl() - #ifdef WEBRTC_LINUX -- , _inotifyEventThread(new rtc::PlatformThread( -- InotifyEventThread, this, "InotifyEventThread")) - , _isShutdown(false) - #endif - { - #ifdef WEBRTC_LINUX -- if (_inotifyEventThread) -- { -- _inotifyEventThread->Start(); -- } -+ _inotifyEventThread = rtc::PlatformThread::SpawnJoinable( -+ [this] { -+ InotifyProcess(); -+ }, "InotifyEventThread"); - #endif + // From RFC 5450: Transmission Time Offsets in RTP Streams. + // +@@ -446,6 +448,7 @@ bool PlayoutDelayLimits::Write(rtc::ArrayView data, + return true; } -@@ -202,9 +195,8 @@ DeviceInfoV4l2::~DeviceInfoV4l2() { - #ifdef WEBRTC_LINUX - _isShutdown = true; ++#if defined(WEBRTC_MOZILLA_BUILD) + // CSRCAudioLevel + // Sample Audio Level Encoding Using the One-Byte Header Format + // Note that the range of len is 1 to 15 which is encoded as 0 to 14 +@@ -484,6 +487,7 @@ bool CsrcAudioLevel::Write(rtc::ArrayView data, + // This extension if used must have at least one audio level + return csrcAudioLevels.numAudioLevels; + } ++#endif -- if (_inotifyEventThread) { -- _inotifyEventThread->Stop(); -- _inotifyEventThread = nullptr; -+ if (!_inotifyEventThread.empty()) { -+ _inotifyEventThread.Finalize(); + // Video Content Type. 
+ // +diff --git a/modules/rtp_rtcp/source/rtp_header_extensions.h b/modules/rtp_rtcp/source/rtp_header_extensions.h +index e085f89c03..2b2f75c840 100644 +--- a/modules/rtp_rtcp/source/rtp_header_extensions.h ++++ b/modules/rtp_rtcp/source/rtp_header_extensions.h +@@ -94,6 +94,7 @@ class AudioLevel { + uint8_t audio_level); + }; + ++#if !defined(WEBRTC_MOZILLA_BUILD) + class CsrcAudioLevel { + public: + static constexpr RTPExtensionType kId = kRtpExtensionCsrcAudioLevel; +@@ -108,6 +109,7 @@ class CsrcAudioLevel { + static bool Write(rtc::ArrayView data, + rtc::ArrayView csrc_audio_levels); + }; ++#endif + + class TransmissionOffset { + public: +@@ -298,6 +300,7 @@ class ColorSpaceExtension { + static size_t WriteLuminance(uint8_t* data, float f, int denominator); + }; + ++#if defined(WEBRTC_MOZILLA_BUILD) + class CsrcAudioLevel { + public: + static constexpr RTPExtensionType kId = kRtpExtensionCsrcAudioLevel; +@@ -312,6 +315,7 @@ class CsrcAudioLevel { + static size_t ValueSize(const CsrcAudioLevelList& csrcAudioLevels); + static bool Write(rtc::ArrayView data, const CsrcAudioLevelList& csrcAudioLevels); + }; ++#endif + + // Base extension class for RTP header extensions which are strings. + // Subclasses must defined kId and kUri static constexpr members. +diff --git a/modules/rtp_rtcp/source/rtp_packet.cc b/modules/rtp_rtcp/source/rtp_packet.cc +index b152cdbd9e..7181b303e1 100644 +--- a/modules/rtp_rtcp/source/rtp_packet.cc ++++ b/modules/rtp_rtcp/source/rtp_packet.cc +@@ -187,7 +187,9 @@ void RtpPacket::ZeroMutableExtensions() { + break; + } + case RTPExtensionType::kRtpExtensionAudioLevel: ++#if !defined(WEBRTC_MOZILLA_BUILD) + case RTPExtensionType::kRtpExtensionCsrcAudioLevel: ++#endif + case RTPExtensionType::kRtpExtensionAbsoluteCaptureTime: + case RTPExtensionType::kRtpExtensionColorSpace: + case RTPExtensionType::kRtpExtensionGenericFrameDescriptor: +@@ -205,10 +207,12 @@ void RtpPacket::ZeroMutableExtensions() { + // Non-mutable extension. Don't change it. + break; + } ++#if defined(WEBRTC_MOZILLA_BUILD) + case RTPExtensionType::kRtpExtensionCsrcAudioLevel: { + // TODO: This is a Mozilla addition, we need to add a handler for this. 
+ RTC_CHECK(false); + } ++#endif } - #endif + } } -diff --git a/modules/video_capture/linux/device_info_v4l2.h b/modules/video_capture/linux/device_info_v4l2.h -index 119cb07ab8..0bec3eb765 100644 ---- a/modules/video_capture/linux/device_info_v4l2.h -+++ b/modules/video_capture/linux/device_info_v4l2.h -@@ -60,8 +60,7 @@ class DeviceInfoV4l2 : public DeviceInfoImpl { - int EventCheck(int fd); - int HandleEvents(int fd); - int ProcessInotifyEvents(); -- std::unique_ptr _inotifyEventThread; -- static void InotifyEventThread(void*); -+ rtc::PlatformThread _inotifyEventThread; - void InotifyProcess(); - int _fd_v4l, _fd_dev, _wd_v4l, _wd_dev; /* accessed on InotifyEventThread thread */ - std::atomic _isShutdown; +diff --git a/modules/rtp_rtcp/source/rtp_sender.cc b/modules/rtp_rtcp/source/rtp_sender.cc +index f3d90e3c0b..bdb79fe8cc 100644 +--- a/modules/rtp_rtcp/source/rtp_sender.cc ++++ b/modules/rtp_rtcp/source/rtp_sender.cc +@@ -104,7 +104,9 @@ bool IsNonVolatile(RTPExtensionType type) { + switch (type) { + case kRtpExtensionTransmissionTimeOffset: + case kRtpExtensionAudioLevel: ++#if !defined(WEBRTC_MOZILLA_BUILD) + case kRtpExtensionCsrcAudioLevel: ++#endif + case kRtpExtensionAbsoluteSendTime: + case kRtpExtensionTransportSequenceNumber: + case kRtpExtensionTransportSequenceNumber02: +@@ -128,10 +130,12 @@ bool IsNonVolatile(RTPExtensionType type) { + case kRtpExtensionNumberOfExtensions: + RTC_DCHECK_NOTREACHED(); + return false; ++#if defined(WEBRTC_MOZILLA_BUILD) + case kRtpExtensionCsrcAudioLevel: + // TODO: Mozilla implement for CsrcAudioLevel + RTC_CHECK(false); + return false; ++#endif + } + RTC_CHECK_NOTREACHED(); + } +diff --git a/test/fuzzers/rtp_packet_fuzzer.cc b/test/fuzzers/rtp_packet_fuzzer.cc +index 0e10a8fa3a..5d117529bb 100644 +--- a/test/fuzzers/rtp_packet_fuzzer.cc ++++ b/test/fuzzers/rtp_packet_fuzzer.cc +@@ -77,11 +77,13 @@ void FuzzOneInput(const uint8_t* data, size_t size) { + uint8_t audio_level; + packet.GetExtension(&voice_activity, &audio_level); + break; ++#if !defined(WEBRTC_MOZILLA_BUILD) + case kRtpExtensionCsrcAudioLevel: { + std::vector audio_levels; + packet.GetExtension(&audio_levels); + break; + } ++#endif + case kRtpExtensionAbsoluteSendTime: + uint32_t sendtime; + packet.GetExtension(&sendtime); +@@ -164,11 +166,13 @@ void FuzzOneInput(const uint8_t* data, size_t size) { + // This extension requires state to read and so complicated that + // deserves own fuzzer. + break; ++#if defined(WEBRTC_MOZILLA_BUILD) + case kRtpExtensionCsrcAudioLevel: { + CsrcAudioLevelList levels; + packet.GetExtension(&levels); + break; + } ++#endif + } + } + -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0059.patch b/third_party/libwebrtc/moz-patch-stack/0059.patch index 40fec323ea02..28551bb1a2ea 100644 --- a/third_party/libwebrtc/moz-patch-stack/0059.patch +++ b/third_party/libwebrtc/moz-patch-stack/0059.patch @@ -1,174 +1,25 @@ -From: Andreas Pehrson -Date: Mon, 18 Jan 2021 11:07:00 +0100 -Subject: Bug 1766646 - (fix-ae0d117d51) ifdef our Csrc impl vs upstream's - impl, see Bug 1771332. +From: Michael Froman +Date: Wed, 1 Jun 2022 12:47:00 -0500 +Subject: Bug 1766646 - (fix-f137b75a4d) specify default constructor on + config.emplace(...) 
--- - modules/rtp_rtcp/source/rtp_header_extensions.cc | 4 ++++ - modules/rtp_rtcp/source/rtp_header_extensions.h | 4 ++++ - modules/rtp_rtcp/source/rtp_packet.cc | 4 ++++ - modules/rtp_rtcp/source/rtp_sender.cc | 4 ++++ - test/fuzzers/rtp_packet_fuzzer.cc | 4 ++++ - 5 files changed, 20 insertions(+) + modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) -diff --git a/modules/rtp_rtcp/source/rtp_header_extensions.cc b/modules/rtp_rtcp/source/rtp_header_extensions.cc -index a57d9e7f62..de29fd2075 100644 ---- a/modules/rtp_rtcp/source/rtp_header_extensions.cc -+++ b/modules/rtp_rtcp/source/rtp_header_extensions.cc -@@ -185,6 +185,7 @@ bool AudioLevel::Write(rtc::ArrayView data, - return true; - } - -+#if !defined(WEBRTC_MOZILLA_BUILD) - // An RTP Header Extension for Mixer-to-Client Audio Level Indication - // - // https://tools.ietf.org/html/rfc6465 -@@ -237,6 +238,7 @@ bool CsrcAudioLevel::Write(rtc::ArrayView data, +diff --git a/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc b/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc +index 4c0f5fc5ee..1afac9fb42 100644 +--- a/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc ++++ b/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc +@@ -468,7 +468,7 @@ absl::optional LossBasedBweV2::CreateConfig( + if (!enabled.Get()) { + return config; } - return true; - } -+#endif - - // From RFC 5450: Transmission Time Offsets in RTP Streams. - // -@@ -446,6 +448,7 @@ bool PlayoutDelayLimits::Write(rtc::ArrayView data, - return true; - } - -+#if defined(WEBRTC_MOZILLA_BUILD) - // CSRCAudioLevel - // Sample Audio Level Encoding Using the One-Byte Header Format - // Note that the range of len is 1 to 15 which is encoded as 0 to 14 -@@ -484,6 +487,7 @@ bool CsrcAudioLevel::Write(rtc::ArrayView data, - // This extension if used must have at least one audio level - return csrcAudioLevels.numAudioLevels; - } -+#endif - - // Video Content Type. - // -diff --git a/modules/rtp_rtcp/source/rtp_header_extensions.h b/modules/rtp_rtcp/source/rtp_header_extensions.h -index e085f89c03..2b2f75c840 100644 ---- a/modules/rtp_rtcp/source/rtp_header_extensions.h -+++ b/modules/rtp_rtcp/source/rtp_header_extensions.h -@@ -94,6 +94,7 @@ class AudioLevel { - uint8_t audio_level); - }; - -+#if !defined(WEBRTC_MOZILLA_BUILD) - class CsrcAudioLevel { - public: - static constexpr RTPExtensionType kId = kRtpExtensionCsrcAudioLevel; -@@ -108,6 +109,7 @@ class CsrcAudioLevel { - static bool Write(rtc::ArrayView data, - rtc::ArrayView csrc_audio_levels); - }; -+#endif - - class TransmissionOffset { - public: -@@ -298,6 +300,7 @@ class ColorSpaceExtension { - static size_t WriteLuminance(uint8_t* data, float f, int denominator); - }; - -+#if defined(WEBRTC_MOZILLA_BUILD) - class CsrcAudioLevel { - public: - static constexpr RTPExtensionType kId = kRtpExtensionCsrcAudioLevel; -@@ -312,6 +315,7 @@ class CsrcAudioLevel { - static size_t ValueSize(const CsrcAudioLevelList& csrcAudioLevels); - static bool Write(rtc::ArrayView data, const CsrcAudioLevelList& csrcAudioLevels); - }; -+#endif - - // Base extension class for RTP header extensions which are strings. - // Subclasses must defined kId and kUri static constexpr members. 
-diff --git a/modules/rtp_rtcp/source/rtp_packet.cc b/modules/rtp_rtcp/source/rtp_packet.cc -index 9495841984..fd2f5c5ae8 100644 ---- a/modules/rtp_rtcp/source/rtp_packet.cc -+++ b/modules/rtp_rtcp/source/rtp_packet.cc -@@ -187,7 +187,9 @@ void RtpPacket::ZeroMutableExtensions() { - break; - } - case RTPExtensionType::kRtpExtensionAudioLevel: -+#if !defined(WEBRTC_MOZILLA_BUILD) - case RTPExtensionType::kRtpExtensionCsrcAudioLevel: -+#endif - case RTPExtensionType::kRtpExtensionAbsoluteCaptureTime: - case RTPExtensionType::kRtpExtensionColorSpace: - case RTPExtensionType::kRtpExtensionGenericFrameDescriptor: -@@ -205,10 +207,12 @@ void RtpPacket::ZeroMutableExtensions() { - // Non-mutable extension. Don't change it. - break; - } -+#if defined(WEBRTC_MOZILLA_BUILD) - case RTPExtensionType::kRtpExtensionCsrcAudioLevel: { - // TODO: This is a Mozilla addition, we need to add a handler for this. - RTC_CHECK(false); - } -+#endif - } - } - } -diff --git a/modules/rtp_rtcp/source/rtp_sender.cc b/modules/rtp_rtcp/source/rtp_sender.cc -index a60d742dd4..13cb30ee7f 100644 ---- a/modules/rtp_rtcp/source/rtp_sender.cc -+++ b/modules/rtp_rtcp/source/rtp_sender.cc -@@ -104,7 +104,9 @@ bool IsNonVolatile(RTPExtensionType type) { - switch (type) { - case kRtpExtensionTransmissionTimeOffset: - case kRtpExtensionAudioLevel: -+#if !defined(WEBRTC_MOZILLA_BUILD) - case kRtpExtensionCsrcAudioLevel: -+#endif - case kRtpExtensionAbsoluteSendTime: - case kRtpExtensionTransportSequenceNumber: - case kRtpExtensionTransportSequenceNumber02: -@@ -128,10 +130,12 @@ bool IsNonVolatile(RTPExtensionType type) { - case kRtpExtensionNumberOfExtensions: - RTC_DCHECK_NOTREACHED(); - return false; -+#if defined(WEBRTC_MOZILLA_BUILD) - case kRtpExtensionCsrcAudioLevel: - // TODO: Mozilla implement for CsrcAudioLevel - RTC_CHECK(false); - return false; -+#endif - } - RTC_CHECK_NOTREACHED(); - } -diff --git a/test/fuzzers/rtp_packet_fuzzer.cc b/test/fuzzers/rtp_packet_fuzzer.cc -index 0e10a8fa3a..5d117529bb 100644 ---- a/test/fuzzers/rtp_packet_fuzzer.cc -+++ b/test/fuzzers/rtp_packet_fuzzer.cc -@@ -77,11 +77,13 @@ void FuzzOneInput(const uint8_t* data, size_t size) { - uint8_t audio_level; - packet.GetExtension(&voice_activity, &audio_level); - break; -+#if !defined(WEBRTC_MOZILLA_BUILD) - case kRtpExtensionCsrcAudioLevel: { - std::vector audio_levels; - packet.GetExtension(&audio_levels); - break; - } -+#endif - case kRtpExtensionAbsoluteSendTime: - uint32_t sendtime; - packet.GetExtension(&sendtime); -@@ -164,11 +166,13 @@ void FuzzOneInput(const uint8_t* data, size_t size) { - // This extension requires state to read and so complicated that - // deserves own fuzzer. - break; -+#if defined(WEBRTC_MOZILLA_BUILD) - case kRtpExtensionCsrcAudioLevel: { - CsrcAudioLevelList levels; - packet.GetExtension(&levels); - break; - } -+#endif - } - } - +- config.emplace(); ++ config.emplace(Config()); + config->bandwidth_rampup_upper_bound_factor = + bandwidth_rampup_upper_bound_factor.Get(); + config->rampup_acceleration_max_factor = rampup_acceleration_max_factor.Get(); -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0060.patch b/third_party/libwebrtc/moz-patch-stack/0060.patch index 5a713e2f2e30..bb3da2b8a4fc 100644 --- a/third_party/libwebrtc/moz-patch-stack/0060.patch +++ b/third_party/libwebrtc/moz-patch-stack/0060.patch @@ -1,25 +1,27 @@ From: Michael Froman -Date: Wed, 1 Jun 2022 12:47:00 -0500 -Subject: Bug 1766646 - (fix-f137b75a4d) specify default constructor on - config.emplace(...) 
+Date: Tue, 21 Jun 2022 11:11:09 -0500 +Subject: Bug 1773223 - Generate webrtc moz.builds for all platforms at once. + r=mjf,firefox-build-system-reviewers,ahochheiden --- - modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) + build_overrides/build.gni | 4 ++++ + 1 file changed, 4 insertions(+) -diff --git a/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc b/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc -index 4c0f5fc5ee..1afac9fb42 100644 ---- a/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc -+++ b/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc -@@ -468,7 +468,7 @@ absl::optional LossBasedBweV2::CreateConfig( - if (!enabled.Get()) { - return config; - } -- config.emplace(); -+ config.emplace(Config()); - config->bandwidth_rampup_upper_bound_factor = - bandwidth_rampup_upper_bound_factor.Get(); - config->rampup_acceleration_max_factor = rampup_acceleration_max_factor.Get(); +diff --git a/build_overrides/build.gni b/build_overrides/build.gni +index 900367d0e1..e427ebba8e 100644 +--- a/build_overrides/build.gni ++++ b/build_overrides/build.gni +@@ -45,6 +45,10 @@ if (host_os == "mac" || host_os == "linux") { + use_system_xcode = _result == 0 + } + ++use_system_xcode = false ++xcode_version = "10.15" ++mac_xcode_version = "default" ++ + declare_args() { + # WebRTC doesn't depend on //base from production code but only for testing + # purposes. In any case, it doesn't depend on //third_party/perfetto which -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0061.patch b/third_party/libwebrtc/moz-patch-stack/0061.patch index dff6eb08f69f..8679a8557a07 100644 --- a/third_party/libwebrtc/moz-patch-stack/0061.patch +++ b/third_party/libwebrtc/moz-patch-stack/0061.patch @@ -1,27 +1,26 @@ From: Michael Froman -Date: Tue, 21 Jun 2022 11:11:09 -0500 -Subject: Bug 1773223 - Generate webrtc moz.builds for all platforms at once. - r=mjf,firefox-build-system-reviewers,ahochheiden +Date: Tue, 21 Jun 2022 11:17:46 -0500 +Subject: Bug 1772380 - to upstream - ref count this in lambda capture --- - build_overrides/build.gni | 4 ++++ - 1 file changed, 4 insertions(+) + modules/video_capture/linux/video_capture_v4l2.cc | 4 ++-- + 1 file changed, 2 insertions(+), 2 deletions(-) -diff --git a/build_overrides/build.gni b/build_overrides/build.gni -index 900367d0e1..e427ebba8e 100644 ---- a/build_overrides/build.gni -+++ b/build_overrides/build.gni -@@ -45,6 +45,10 @@ if (host_os == "mac" || host_os == "linux") { - use_system_xcode = _result == 0 - } - -+use_system_xcode = false -+xcode_version = "10.15" -+mac_xcode_version = "default" -+ - declare_args() { - # WebRTC doesn't depend on //base from production code but only for testing - # purposes. 
In any case, it doesn't depend on //third_party/perfetto which +diff --git a/modules/video_capture/linux/video_capture_v4l2.cc b/modules/video_capture/linux/video_capture_v4l2.cc +index 00cede01cb..d6813b13fd 100644 +--- a/modules/video_capture/linux/video_capture_v4l2.cc ++++ b/modules/video_capture/linux/video_capture_v4l2.cc +@@ -296,8 +296,8 @@ int32_t VideoCaptureModuleV4L2::StartCapture( + if (_captureThread.empty()) { + quit_ = false; + _captureThread = rtc::PlatformThread::SpawnJoinable( +- [this] { +- while (CaptureProcess()) { ++ [self = rtc::scoped_refptr(this)] { ++ while (self->CaptureProcess()) { + } + }, + "CaptureThread", -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0062.patch b/third_party/libwebrtc/moz-patch-stack/0062.patch index 7cc4982d1669..05d346684777 100644 --- a/third_party/libwebrtc/moz-patch-stack/0062.patch +++ b/third_party/libwebrtc/moz-patch-stack/0062.patch @@ -1,26 +1,51 @@ From: Michael Froman -Date: Tue, 21 Jun 2022 11:17:46 -0500 -Subject: Bug 1772380 - to upstream - ref count this in lambda capture +Date: Wed, 3 Aug 2022 20:21:25 -0500 +Subject: Bug 1780582 - work around generating VideoFrameBufferType;r=mjf --- - modules/video_capture/linux/video_capture_v4l2.cc | 4 ++-- - 1 file changed, 2 insertions(+), 2 deletions(-) + .../api/org/webrtc/VideoFrameBufferType.java | 33 +++++++++++++++++++ + 1 file changed, 33 insertions(+) + create mode 100644 sdk/android/api/org/webrtc/VideoFrameBufferType.java -diff --git a/modules/video_capture/linux/video_capture_v4l2.cc b/modules/video_capture/linux/video_capture_v4l2.cc -index 00cede01cb..d6813b13fd 100644 ---- a/modules/video_capture/linux/video_capture_v4l2.cc -+++ b/modules/video_capture/linux/video_capture_v4l2.cc -@@ -296,8 +296,8 @@ int32_t VideoCaptureModuleV4L2::StartCapture( - if (_captureThread.empty()) { - quit_ = false; - _captureThread = rtc::PlatformThread::SpawnJoinable( -- [this] { -- while (CaptureProcess()) { -+ [self = rtc::scoped_refptr(this)] { -+ while (self->CaptureProcess()) { - } - }, - "CaptureThread", +diff --git a/sdk/android/api/org/webrtc/VideoFrameBufferType.java b/sdk/android/api/org/webrtc/VideoFrameBufferType.java +new file mode 100644 +index 0000000000..7b05b88cba +--- /dev/null ++++ b/sdk/android/api/org/webrtc/VideoFrameBufferType.java +@@ -0,0 +1,33 @@ ++ ++// Copyright 2022 The Chromium Authors. All rights reserved. ++// Use of this source code is governed by a BSD-style license that can be ++// found in the LICENSE file. 
++ ++// This file is autogenerated by ++// java_cpp_enum.py ++// From ++// ../../api/video/video_frame_buffer.h ++ ++package org.webrtc; ++ ++import androidx.annotation.IntDef; ++ ++import java.lang.annotation.Retention; ++import java.lang.annotation.RetentionPolicy; ++ ++@IntDef({ ++ VideoFrameBufferType.NATIVE, VideoFrameBufferType.I420, VideoFrameBufferType.I420A, ++ VideoFrameBufferType.I422, VideoFrameBufferType.I444, VideoFrameBufferType.I010, ++ VideoFrameBufferType.I210, VideoFrameBufferType.NV12 ++}) ++@Retention(RetentionPolicy.SOURCE) ++public @interface VideoFrameBufferType { ++ int NATIVE = 0; ++ int I420 = 1; ++ int I420A = 2; ++ int I422 = 3; ++ int I444 = 4; ++ int I010 = 5; ++ int I210 = 6; ++ int NV12 = 7; ++} -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0063.patch b/third_party/libwebrtc/moz-patch-stack/0063.patch index db47a28e49b7..6a28945ad26a 100644 --- a/third_party/libwebrtc/moz-patch-stack/0063.patch +++ b/third_party/libwebrtc/moz-patch-stack/0063.patch @@ -1,51 +1,56 @@ -From: Michael Froman -Date: Wed, 3 Aug 2022 20:21:25 -0500 -Subject: Bug 1780582 - work around generating VideoFrameBufferType;r=mjf +From: Andreas Pehrson +Date: Mon, 5 Sep 2022 13:56:00 +0000 +Subject: Bug 1786502 - Lock access to DeviceInfo devicechange callbacks. + r=webrtc-reviewers,jib +Differential Revision: https://phabricator.services.mozilla.com/D155365 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/e826dfadfe1264c59d9b13e3c17d6f75a40f5c33 --- - .../api/org/webrtc/VideoFrameBufferType.java | 33 +++++++++++++++++++ - 1 file changed, 33 insertions(+) - create mode 100644 sdk/android/api/org/webrtc/VideoFrameBufferType.java + modules/video_capture/video_capture.h | 8 +++++++- + 1 file changed, 7 insertions(+), 1 deletion(-) -diff --git a/sdk/android/api/org/webrtc/VideoFrameBufferType.java b/sdk/android/api/org/webrtc/VideoFrameBufferType.java -new file mode 100644 -index 0000000000..7b05b88cba ---- /dev/null -+++ b/sdk/android/api/org/webrtc/VideoFrameBufferType.java -@@ -0,0 +1,33 @@ -+ -+// Copyright 2022 The Chromium Authors. All rights reserved. -+// Use of this source code is governed by a BSD-style license that can be -+// found in the LICENSE file. 
-+ -+// This file is autogenerated by -+// java_cpp_enum.py -+// From -+// ../../api/video/video_frame_buffer.h -+ -+package org.webrtc; -+ -+import androidx.annotation.IntDef; -+ -+import java.lang.annotation.Retention; -+import java.lang.annotation.RetentionPolicy; -+ -+@IntDef({ -+ VideoFrameBufferType.NATIVE, VideoFrameBufferType.I420, VideoFrameBufferType.I420A, -+ VideoFrameBufferType.I422, VideoFrameBufferType.I444, VideoFrameBufferType.I010, -+ VideoFrameBufferType.I210, VideoFrameBufferType.NV12 -+}) -+@Retention(RetentionPolicy.SOURCE) -+public @interface VideoFrameBufferType { -+ int NATIVE = 0; -+ int I420 = 1; -+ int I420A = 2; -+ int I422 = 3; -+ int I444 = 4; -+ int I010 = 5; -+ int I210 = 6; -+ int NV12 = 7; -+} +diff --git a/modules/video_capture/video_capture.h b/modules/video_capture/video_capture.h +index 58485f28e9..04203033b0 100644 +--- a/modules/video_capture/video_capture.h ++++ b/modules/video_capture/video_capture.h +@@ -16,6 +16,8 @@ + #include "modules/desktop_capture/desktop_capture_types.h" + #include "modules/video_capture/raw_video_sink_interface.h" + #include "modules/video_capture/video_capture_defines.h" ++#include "rtc_base/synchronization/mutex.h" ++#include "rtc_base/thread_annotations.h" + #include + + #if defined(ANDROID) +@@ -40,15 +42,18 @@ class VideoCaptureModule : public rtc::RefCountInterface { + virtual uint32_t NumberOfDevices() = 0; + virtual int32_t Refresh() = 0; + virtual void DeviceChange() { ++ MutexLock lock(&_inputCallbacksMutex); + for (auto inputCallBack : _inputCallBacks) { + inputCallBack->OnDeviceChange(); + } + } + virtual void RegisterVideoInputFeedBack(VideoInputFeedBack* callBack) { ++ MutexLock lock(&_inputCallbacksMutex); + _inputCallBacks.insert(callBack); + } + + virtual void DeRegisterVideoInputFeedBack(VideoInputFeedBack* callBack) { ++ MutexLock lock(&_inputCallbacksMutex); + auto it = _inputCallBacks.find(callBack); + if (it != _inputCallBacks.end()) { + _inputCallBacks.erase(it); +@@ -102,7 +107,8 @@ class VideoCaptureModule : public rtc::RefCountInterface { + + virtual ~DeviceInfo() {} + private: +- std::set _inputCallBacks; ++ Mutex _inputCallbacksMutex; ++ std::set _inputCallBacks RTC_GUARDED_BY(_inputCallbacksMutex); + }; + + // Register capture data callback -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0064.patch b/third_party/libwebrtc/moz-patch-stack/0064.patch index 52010b70ad1b..b9f01b49cf49 100644 --- a/third_party/libwebrtc/moz-patch-stack/0064.patch +++ b/third_party/libwebrtc/moz-patch-stack/0064.patch @@ -1,56 +1,52 @@ -From: Andreas Pehrson -Date: Mon, 5 Sep 2022 13:56:00 +0000 -Subject: Bug 1786502 - Lock access to DeviceInfo devicechange callbacks. - r=webrtc-reviewers,jib +From: Michael Froman +Date: Mon, 24 Oct 2022 13:00:00 -0500 +Subject: Bug 1797161 - pt1 - tweak BUILD.gn around task_queue_win usage. r?ng! -Differential Revision: https://phabricator.services.mozilla.com/D155365 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/e826dfadfe1264c59d9b13e3c17d6f75a40f5c33 +Add assurance that we will not build task_queue_win.cc to avoid +possible win32k API usage. 
+ +Differential Revision: https://phabricator.services.mozilla.com/D160115 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/f097eb8cbd8b7686ce306a46a4db691194fd39c1 --- - modules/video_capture/video_capture.h | 8 +++++++- - 1 file changed, 7 insertions(+), 1 deletion(-) + api/task_queue/BUILD.gn | 5 +++++ + rtc_base/BUILD.gn | 4 ++++ + 2 files changed, 9 insertions(+) -diff --git a/modules/video_capture/video_capture.h b/modules/video_capture/video_capture.h -index 58485f28e9..04203033b0 100644 ---- a/modules/video_capture/video_capture.h -+++ b/modules/video_capture/video_capture.h -@@ -16,6 +16,8 @@ - #include "modules/desktop_capture/desktop_capture_types.h" - #include "modules/video_capture/raw_video_sink_interface.h" - #include "modules/video_capture/video_capture_defines.h" -+#include "rtc_base/synchronization/mutex.h" -+#include "rtc_base/thread_annotations.h" - #include +diff --git a/api/task_queue/BUILD.gn b/api/task_queue/BUILD.gn +index 116a50762e..b9c50c1644 100644 +--- a/api/task_queue/BUILD.gn ++++ b/api/task_queue/BUILD.gn +@@ -31,6 +31,11 @@ rtc_library("task_queue") { + ] + } - #if defined(ANDROID) -@@ -40,15 +42,18 @@ class VideoCaptureModule : public rtc::RefCountInterface { - virtual uint32_t NumberOfDevices() = 0; - virtual int32_t Refresh() = 0; - virtual void DeviceChange() { -+ MutexLock lock(&_inputCallbacksMutex); - for (auto inputCallBack : _inputCallBacks) { - inputCallBack->OnDeviceChange(); - } - } - virtual void RegisterVideoInputFeedBack(VideoInputFeedBack* callBack) { -+ MutexLock lock(&_inputCallbacksMutex); - _inputCallBacks.insert(callBack); - } - - virtual void DeRegisterVideoInputFeedBack(VideoInputFeedBack* callBack) { -+ MutexLock lock(&_inputCallbacksMutex); - auto it = _inputCallBacks.find(callBack); - if (it != _inputCallBacks.end()) { - _inputCallBacks.erase(it); -@@ -102,7 +107,8 @@ class VideoCaptureModule : public rtc::RefCountInterface { - - virtual ~DeviceInfo() {} - private: -- std::set _inputCallBacks; -+ Mutex _inputCallbacksMutex; -+ std::set _inputCallBacks RTC_GUARDED_BY(_inputCallbacksMutex); - }; - - // Register capture data callback ++# Mozilla - we want to ensure that rtc_include_tests is set to false ++# to guarantee that default_task_queue_factory is not used so we ++# know that remaining win32k code in task_queue_win.cc is not built. ++# See Bug 1797161 for more info. ++assert(!rtc_include_tests, "Mozilla - verify rtc_include_tests is off") + if (rtc_include_tests) { + rtc_library("task_queue_test") { + visibility = [ "*" ] +diff --git a/rtc_base/BUILD.gn b/rtc_base/BUILD.gn +index 6ad19ce618..84b2f2ff32 100644 +--- a/rtc_base/BUILD.gn ++++ b/rtc_base/BUILD.gn +@@ -738,10 +738,14 @@ if (is_mac || is_ios) { + if (is_win) { + rtc_library("rtc_task_queue_win") { + visibility = [ "../api/task_queue:default_task_queue_factory" ] ++# See Bug 1797161 for more info. Remove from build until win32k ++# usage is removed. ++if (!build_with_mozilla) { + sources = [ + "task_queue_win.cc", + "task_queue_win.h", + ] ++} + deps = [ + ":checks", + ":logging", -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0065.patch b/third_party/libwebrtc/moz-patch-stack/0065.patch index 13a405d2bb30..1157ab23d5fd 100644 --- a/third_party/libwebrtc/moz-patch-stack/0065.patch +++ b/third_party/libwebrtc/moz-patch-stack/0065.patch @@ -1,52 +1,34 @@ From: Michael Froman -Date: Mon, 24 Oct 2022 13:00:00 -0500 -Subject: Bug 1797161 - pt1 - tweak BUILD.gn around task_queue_win usage. r?ng! 
+Date: Mon, 24 Oct 2022 14:03:00 -0500 +Subject: Bug 1797161 - pt3 - add static_assert to ensure we don't include + task_queue_win.cc in Mozilla builds. r?ng! -Add assurance that we will not build task_queue_win.cc to avoid -possible win32k API usage. - -Differential Revision: https://phabricator.services.mozilla.com/D160115 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/f097eb8cbd8b7686ce306a46a4db691194fd39c1 +Differential Revision: https://phabricator.services.mozilla.com/D160117 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/50b15e036924203147e34ec20e2689fe4a847645 --- - api/task_queue/BUILD.gn | 5 +++++ - rtc_base/BUILD.gn | 4 ++++ - 2 files changed, 9 insertions(+) + rtc_base/task_queue_win.cc | 9 +++++++++ + 1 file changed, 9 insertions(+) -diff --git a/api/task_queue/BUILD.gn b/api/task_queue/BUILD.gn -index 116a50762e..b9c50c1644 100644 ---- a/api/task_queue/BUILD.gn -+++ b/api/task_queue/BUILD.gn -@@ -31,6 +31,11 @@ rtc_library("task_queue") { - ] - } +diff --git a/rtc_base/task_queue_win.cc b/rtc_base/task_queue_win.cc +index 7e46d58e27..bf55a25c69 100644 +--- a/rtc_base/task_queue_win.cc ++++ b/rtc_base/task_queue_win.cc +@@ -8,6 +8,15 @@ + * be found in the AUTHORS file in the root of the source tree. + */ -+# Mozilla - we want to ensure that rtc_include_tests is set to false -+# to guarantee that default_task_queue_factory is not used so we -+# know that remaining win32k code in task_queue_win.cc is not built. -+# See Bug 1797161 for more info. -+assert(!rtc_include_tests, "Mozilla - verify rtc_include_tests is off") - if (rtc_include_tests) { - rtc_library("task_queue_test") { - visibility = [ "*" ] -diff --git a/rtc_base/BUILD.gn b/rtc_base/BUILD.gn -index f2fedb3063..033f6e0e64 100644 ---- a/rtc_base/BUILD.gn -+++ b/rtc_base/BUILD.gn -@@ -701,10 +701,14 @@ if (is_mac || is_ios) { - if (is_win) { - rtc_library("rtc_task_queue_win") { - visibility = [ "../api/task_queue:default_task_queue_factory" ] -+# See Bug 1797161 for more info. Remove from build until win32k -+# usage is removed. -+if (!build_with_mozilla) { - sources = [ - "task_queue_win.cc", - "task_queue_win.h", - ] -+} - deps = [ - ":checks", - ":logging", ++// Mozilla - this file should not be included in Mozilla builds until ++// win32k API usage is removed. This was once done in Bug 1395259, but ++// the upstreaming attempt stalled. Until win32k usage is officially ++// removed upstream, we have reverted to upstream's version of the file ++// (to reduce or elminate merge conflicts), and a static assert is ++// placed here to ensure this file isn't accidentally included in the ++// Mozilla build. ++static_assert(false, "This file should not be built, see Bug 1797161."); ++ + #include "rtc_base/task_queue_win.h" + + // clang-format off -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0066.patch b/third_party/libwebrtc/moz-patch-stack/0066.patch index 2324b06500e9..8215b847e209 100644 --- a/third_party/libwebrtc/moz-patch-stack/0066.patch +++ b/third_party/libwebrtc/moz-patch-stack/0066.patch @@ -1,34 +1,81 @@ -From: Michael Froman -Date: Mon, 24 Oct 2022 14:03:00 -0500 -Subject: Bug 1797161 - pt3 - add static_assert to ensure we don't include - task_queue_win.cc in Mozilla builds. r?ng! +From: Andreas Pehrson +Date: Mon, 12 Dec 2022 15:47:00 +0000 +Subject: Bug 1451394 - Expose mac camera capture backend in .gn and switch it + to gecko libyuv. 
r=webrtc-reviewers,mjf -Differential Revision: https://phabricator.services.mozilla.com/D160117 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/50b15e036924203147e34ec20e2689fe4a847645 +Differential Revision: https://phabricator.services.mozilla.com/D163682 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/b0658888969395dca938597783c8a377b9bea209 --- - rtc_base/task_queue_win.cc | 9 +++++++++ - 1 file changed, 9 insertions(+) + BUILD.gn | 4 ++++ + sdk/BUILD.gn | 6 ++++++ + 2 files changed, 10 insertions(+) -diff --git a/rtc_base/task_queue_win.cc b/rtc_base/task_queue_win.cc -index 7e46d58e27..bf55a25c69 100644 ---- a/rtc_base/task_queue_win.cc -+++ b/rtc_base/task_queue_win.cc -@@ -8,6 +8,15 @@ - * be found in the AUTHORS file in the root of the source tree. - */ +diff --git a/BUILD.gn b/BUILD.gn +index e51827a372..a7771d29c0 100644 +--- a/BUILD.gn ++++ b/BUILD.gn +@@ -592,6 +592,10 @@ if (!build_with_chromium) { + ] + } -+// Mozilla - this file should not be included in Mozilla builds until -+// win32k API usage is removed. This was once done in Bug 1395259, but -+// the upstreaming attempt stalled. Until win32k usage is officially -+// removed upstream, we have reverted to upstream's version of the file -+// (to reduce or elminate merge conflicts), and a static assert is -+// placed here to ensure this file isn't accidentally included in the -+// Mozilla build. -+static_assert(false, "This file should not be built, see Bug 1797161."); ++ if (build_with_mozilla && is_mac) { ++ deps += [ "sdk:videocapture_objc" ] ++ } + - #include "rtc_base/task_queue_win.h" + if (rtc_enable_protobuf) { + deps += [ "logging:rtc_event_log_proto" ] + } +diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn +index 4f5ceb5ed3..eea26dc31d 100644 +--- a/sdk/BUILD.gn ++++ b/sdk/BUILD.gn +@@ -533,6 +533,7 @@ if (is_ios || is_mac) { + } + } - // clang-format off ++ if (!build_with_mozilla) { + rtc_library("videosource_objc") { + sources = [ + "objc/api/peerconnection/RTCVideoSource+Private.h", +@@ -562,6 +563,7 @@ if (is_ios || is_mac) { + ":used_from_extension", + ] + } ++ } + + rtc_library("videoframebuffer_objc") { + visibility = [ "*" ] +@@ -594,6 +596,7 @@ if (is_ios || is_mac) { + ] + } + ++ if (!build_with_mozilla) { + rtc_library("metal_objc") { + visibility = [ "*" ] + allow_poison = [ +@@ -655,6 +658,7 @@ if (is_ios || is_mac) { + ":videoframebuffer_objc", + ] + } ++ } + + rtc_library("videocapture_objc") { + visibility = [ "*" ] +@@ -683,6 +687,7 @@ if (is_ios || is_mac) { + ] + } + ++ if (!build_with_mozilla) { + rtc_library("videocodec_objc") { + visibility = [ "*" ] + configs += [ "..:no_global_constructors" ] +@@ -1729,5 +1734,6 @@ if (is_ios || is_mac) { + "VideoToolbox.framework", + ] + } ++ } + } + } -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0067.patch b/third_party/libwebrtc/moz-patch-stack/0067.patch index 82e46afe4514..b8c1e6d0c7c4 100644 --- a/third_party/libwebrtc/moz-patch-stack/0067.patch +++ b/third_party/libwebrtc/moz-patch-stack/0067.patch @@ -1,81 +1,31 @@ From: Andreas Pehrson Date: Mon, 12 Dec 2022 15:47:00 +0000 -Subject: Bug 1451394 - Expose mac camera capture backend in .gn and switch it - to gecko libyuv. r=webrtc-reviewers,mjf +Subject: Bug 1451394 - Record video frame captures with PerformanceRecorder in + the new mac camera backend. 
r=padenot -Differential Revision: https://phabricator.services.mozilla.com/D163682 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/b0658888969395dca938597783c8a377b9bea209 +Also includes: +Bug 1806605 - Pass TrackingId instead of nsCString to CaptureStage. + +Differential Revision: https://phabricator.services.mozilla.com/D163687 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/a7362238c9e6fbe0d28200f6b41fc40a0c9a2158 --- - BUILD.gn | 4 ++++ - sdk/BUILD.gn | 6 ++++++ - 2 files changed, 10 insertions(+) + modules/video_capture/video_capture.h | 3 +++ + 1 file changed, 3 insertions(+) -diff --git a/BUILD.gn b/BUILD.gn -index f43142e055..72bfb3393c 100644 ---- a/BUILD.gn -+++ b/BUILD.gn -@@ -584,6 +584,10 @@ if (!build_with_chromium) { - ] - } +diff --git a/modules/video_capture/video_capture.h b/modules/video_capture/video_capture.h +index 04203033b0..6614032299 100644 +--- a/modules/video_capture/video_capture.h ++++ b/modules/video_capture/video_capture.h +@@ -154,6 +154,9 @@ class VideoCaptureModule : public rtc::RefCountInterface { + // Return whether the rotation is applied or left pending. + virtual bool GetApplyRotation() = 0; -+ if (build_with_mozilla && is_mac) { -+ deps += [ "sdk:videocapture_objc" ] -+ } ++ // Mozilla: TrackingId setter for use in profiler markers. ++ virtual void SetTrackingId(uint32_t aTrackingIdProcId) {} + - if (rtc_enable_protobuf) { - deps += [ "logging:rtc_event_log_proto" ] - } -diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn -index ff89b21721..bef4521dc6 100644 ---- a/sdk/BUILD.gn -+++ b/sdk/BUILD.gn -@@ -461,6 +461,7 @@ if (is_ios || is_mac) { - } - } - -+ if (!build_with_mozilla) { - rtc_library("videosource_objc") { - sources = [ - "objc/api/peerconnection/RTCVideoSource+Private.h", -@@ -490,6 +491,7 @@ if (is_ios || is_mac) { - ":used_from_extension", - ] - } -+ } - - rtc_library("videoframebuffer_objc") { - visibility = [ "*" ] -@@ -522,6 +524,7 @@ if (is_ios || is_mac) { - ] - } - -+ if (!build_with_mozilla) { - rtc_library("opengl_objc") { - sources = [ - "objc/components/renderer/opengl/RTCDefaultShader.h", -@@ -674,6 +677,7 @@ if (is_ios || is_mac) { - ":videoframebuffer_objc", - ] - } -+ } - - rtc_library("videocapture_objc") { - visibility = [ "*" ] -@@ -702,6 +706,7 @@ if (is_ios || is_mac) { - ] - } - -+ if (!build_with_mozilla) { - rtc_library("videocodec_objc") { - visibility = [ "*" ] - configs += [ "..:no_global_constructors" ] -@@ -1753,5 +1758,6 @@ if (is_ios || is_mac) { - "VideoToolbox.framework", - ] - } -+ } - } - } + protected: + ~VideoCaptureModule() override {} + }; -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0068.patch b/third_party/libwebrtc/moz-patch-stack/0068.patch index 4d94e635354c..3c4e3a740315 100644 --- a/third_party/libwebrtc/moz-patch-stack/0068.patch +++ b/third_party/libwebrtc/moz-patch-stack/0068.patch @@ -1,31 +1,349 @@ From: Andreas Pehrson -Date: Mon, 12 Dec 2022 15:47:00 +0000 -Subject: Bug 1451394 - Record video frame captures with PerformanceRecorder in - the new mac camera backend. r=padenot +Date: Tue, 23 Nov 2021 14:11:00 +0000 +Subject: Bug 1742181 - libwebrtc: Implement packetsDiscarded bookkeeping for + received video. r=ng + +Depends on D131707 + +Differential Revision: https://phabricator.services.mozilla.com/D131708 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/d0196a45a1f449874fc2a759e85e403c45c25575 Also includes: -Bug 1806605 - Pass TrackingId instead of nsCString to CaptureStage. 
-Differential Revision: https://phabricator.services.mozilla.com/D163687 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/a7362238c9e6fbe0d28200f6b41fc40a0c9a2158 +Bug 1804288 - (fix-de7ae5755b) reimplement Bug 1742181 - libwebrtc: Implement packetsDiscarded bookkeeping for received video. r=pehrsons + +Differential Revision: https://phabricator.services.mozilla.com/D163959 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/ee566d1bfb654d36e5d58dce637fb0580b989ac1 --- - modules/video_capture/video_capture.h | 3 +++ - 1 file changed, 3 insertions(+) + api/video/frame_buffer.cc | 25 ++++++++++++++++++++++--- + api/video/frame_buffer.h | 4 ++++ + call/video_receive_stream.h | 2 ++ + modules/video_coding/packet_buffer.cc | 10 +++++++--- + modules/video_coding/packet_buffer.h | 5 ++++- + video/receive_statistics_proxy.cc | 5 +++++ + video/receive_statistics_proxy.h | 1 + + video/rtp_video_stream_receiver2.cc | 5 ++++- + video/rtp_video_stream_receiver2.h | 3 +++ + video/video_receive_stream2.cc | 1 + + video/video_stream_buffer_controller.cc | 12 ++++++++++++ + video/video_stream_buffer_controller.h | 5 +++++ + 12 files changed, 70 insertions(+), 8 deletions(-) -diff --git a/modules/video_capture/video_capture.h b/modules/video_capture/video_capture.h -index 04203033b0..6614032299 100644 ---- a/modules/video_capture/video_capture.h -+++ b/modules/video_capture/video_capture.h -@@ -154,6 +154,9 @@ class VideoCaptureModule : public rtc::RefCountInterface { - // Return whether the rotation is applied or left pending. - virtual bool GetApplyRotation() = 0; +diff --git a/api/video/frame_buffer.cc b/api/video/frame_buffer.cc +index 4cdf2212a6..8267b8e6cb 100644 +--- a/api/video/frame_buffer.cc ++++ b/api/video/frame_buffer.cc +@@ -140,14 +140,29 @@ void FrameBuffer::DropNextDecodableTemporalUnit() { + } -+ // Mozilla: TrackingId setter for use in profiler markers. 
-+ virtual void SetTrackingId(uint32_t aTrackingIdProcId) {} + auto end_it = std::next(next_decodable_temporal_unit_->last_frame); +- num_dropped_frames_ += std::count_if( +- frames_.begin(), end_it, +- [](const auto& f) { return f.second.encoded_frame != nullptr; }); + - protected: - ~VideoCaptureModule() override {} ++ UpdateDroppedFramesAndDiscardedPackets(frames_.begin(), end_it); + + frames_.erase(frames_.begin(), end_it); + FindNextAndLastDecodableTemporalUnit(); + } + ++void FrameBuffer::UpdateDroppedFramesAndDiscardedPackets(FrameIterator begin_it, ++ FrameIterator end_it) { ++ unsigned int num_discarded_packets = 0; ++ unsigned int num_dropped_frames = ++ std::count_if(begin_it, end_it, [&](const auto& f) { ++ if (f.second.encoded_frame) { ++ const auto& packetInfos = f.second.encoded_frame->PacketInfos(); ++ num_discarded_packets += packetInfos.size(); ++ } ++ return f.second.encoded_frame != nullptr; ++ }); ++ ++ num_dropped_frames_ += num_dropped_frames; ++ num_discarded_packets_ += num_discarded_packets; ++} ++ + absl::optional FrameBuffer::LastContinuousFrameId() const { + return last_continuous_frame_id_; + } +@@ -167,6 +182,9 @@ int FrameBuffer::GetTotalNumberOfContinuousTemporalUnits() const { + int FrameBuffer::GetTotalNumberOfDroppedFrames() const { + return num_dropped_frames_; + } ++int FrameBuffer::GetTotalNumberOfDiscardedPackets() const { ++ return num_discarded_packets_; ++} + + size_t FrameBuffer::CurrentSize() const { + return frames_.size(); +@@ -269,6 +287,7 @@ void FrameBuffer::FindNextAndLastDecodableTemporalUnit() { + } + + void FrameBuffer::Clear() { ++ UpdateDroppedFramesAndDiscardedPackets(frames_.begin(), frames_.end()); + frames_.clear(); + next_decodable_temporal_unit_.reset(); + decodable_temporal_units_info_.reset(); +diff --git a/api/video/frame_buffer.h b/api/video/frame_buffer.h +index 94edf64d5a..81fd12da58 100644 +--- a/api/video/frame_buffer.h ++++ b/api/video/frame_buffer.h +@@ -66,6 +66,7 @@ class FrameBuffer { + + int GetTotalNumberOfContinuousTemporalUnits() const; + int GetTotalNumberOfDroppedFrames() const; ++ int GetTotalNumberOfDiscardedPackets() const; + size_t CurrentSize() const; + + private: +@@ -87,6 +88,8 @@ class FrameBuffer { + void PropagateContinuity(const FrameIterator& frame_it); + void FindNextAndLastDecodableTemporalUnit(); + void Clear(); ++ void UpdateDroppedFramesAndDiscardedPackets(FrameIterator begin_it, ++ FrameIterator end_it); + + const bool legacy_frame_id_jump_behavior_; + const size_t max_size_; +@@ -99,6 +102,7 @@ class FrameBuffer { + + int num_continuous_temporal_units_ = 0; + int num_dropped_frames_ = 0; ++ int num_discarded_packets_ = 0; }; + + } // namespace webrtc +diff --git a/call/video_receive_stream.h b/call/video_receive_stream.h +index 87ee39e142..0dc7dee71d 100644 +--- a/call/video_receive_stream.h ++++ b/call/video_receive_stream.h +@@ -112,6 +112,8 @@ class VideoReceiveStreamInterface : public MediaReceiveStreamInterface { + // https://www.w3.org/TR/webrtc-stats/#dom-rtcvideoreceiverstats-framesdropped + uint32_t frames_dropped = 0; + uint32_t frames_decoded = 0; ++ // https://w3c.github.io/webrtc-stats/#dom-rtcreceivedrtpstreamstats-packetsdiscarded ++ uint64_t packets_discarded = 0; + // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-totaldecodetime + TimeDelta total_decode_time = TimeDelta::Zero(); + // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-totalprocessingdelay +diff --git a/modules/video_coding/packet_buffer.cc 
b/modules/video_coding/packet_buffer.cc +index 52ef5c2d85..be45db6ef0 100644 +--- a/modules/video_coding/packet_buffer.cc ++++ b/modules/video_coding/packet_buffer.cc +@@ -115,25 +115,27 @@ PacketBuffer::InsertResult PacketBuffer::InsertPacket( + return result; + } + +-void PacketBuffer::ClearTo(uint16_t seq_num) { ++uint32_t PacketBuffer::ClearTo(uint16_t seq_num) { + // We have already cleared past this sequence number, no need to do anything. + if (is_cleared_to_first_seq_num_ && + AheadOf(first_seq_num_, seq_num)) { +- return; ++ return 0; + } + + // If the packet buffer was cleared between a frame was created and returned. + if (!first_packet_received_) +- return; ++ return 0; + + // Avoid iterating over the buffer more than once by capping the number of + // iterations to the `size_` of the buffer. + ++seq_num; ++ uint32_t num_cleared_packets = 0; + size_t diff = ForwardDiff(first_seq_num_, seq_num); + size_t iterations = std::min(diff, buffer_.size()); + for (size_t i = 0; i < iterations; ++i) { + auto& stored = buffer_[first_seq_num_ % buffer_.size()]; + if (stored != nullptr && AheadOf(seq_num, stored->seq_num)) { ++ ++num_cleared_packets; + stored = nullptr; + } + ++first_seq_num_; +@@ -149,6 +151,8 @@ void PacketBuffer::ClearTo(uint16_t seq_num) { + + received_padding_.erase(received_padding_.begin(), + received_padding_.lower_bound(seq_num)); ++ ++ return num_cleared_packets; + } + + void PacketBuffer::Clear() { +diff --git a/modules/video_coding/packet_buffer.h b/modules/video_coding/packet_buffer.h +index 53e08c95a1..47b2ffe199 100644 +--- a/modules/video_coding/packet_buffer.h ++++ b/modules/video_coding/packet_buffer.h +@@ -78,7 +78,10 @@ class PacketBuffer { + ABSL_MUST_USE_RESULT InsertResult + InsertPacket(std::unique_ptr packet); + ABSL_MUST_USE_RESULT InsertResult InsertPadding(uint16_t seq_num); +- void ClearTo(uint16_t seq_num); ++ ++ // Clear all packets older than |seq_num|. Returns the number of packets ++ // cleared. 
++ uint32_t ClearTo(uint16_t seq_num); + void Clear(); + + void ForceSpsPpsIdrIsH264Keyframe(); +diff --git a/video/receive_statistics_proxy.cc b/video/receive_statistics_proxy.cc +index 5bb30041e3..abf9ef9b5e 100644 +--- a/video/receive_statistics_proxy.cc ++++ b/video/receive_statistics_proxy.cc +@@ -798,6 +798,11 @@ void ReceiveStatisticsProxy::OnDroppedFrames(uint32_t frames_dropped) { + })); + } + ++void ReceiveStatisticsProxy::OnDiscardedPackets(uint32_t packets_discarded) { ++ RTC_DCHECK_RUN_ON(&main_thread_); ++ stats_.packets_discarded += packets_discarded; ++} ++ + void ReceiveStatisticsProxy::OnPreDecode(VideoCodecType codec_type, int qp) { + RTC_DCHECK_RUN_ON(&main_thread_); + last_codec_type_ = codec_type; +diff --git a/video/receive_statistics_proxy.h b/video/receive_statistics_proxy.h +index d8da3064fd..2fd89eedd0 100644 +--- a/video/receive_statistics_proxy.h ++++ b/video/receive_statistics_proxy.h +@@ -92,6 +92,7 @@ class ReceiveStatisticsProxy : public VideoStreamBufferControllerStatsObserver, + void OnDecodableFrame(TimeDelta jitter_buffer_delay, + TimeDelta target_delay, + TimeDelta minimum_delay) override; ++ void OnDiscardedPackets(uint32_t packets_discarded) override; + void OnFrameBufferTimingsUpdated(int estimated_max_decode_time_ms, + int current_delay_ms, + int target_delay_ms, +diff --git a/video/rtp_video_stream_receiver2.cc b/video/rtp_video_stream_receiver2.cc +index 2670b7fe58..98acbb1a51 100644 +--- a/video/rtp_video_stream_receiver2.cc ++++ b/video/rtp_video_stream_receiver2.cc +@@ -244,6 +244,7 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( + RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer, + RtcpCnameCallback* rtcp_cname_callback, + NackPeriodicProcessor* nack_periodic_processor, ++ VideoStreamBufferControllerStatsObserver* vcm_receive_statistics, + OnCompleteFrameCallback* complete_frame_callback, + rtc::scoped_refptr frame_decryptor, + rtc::scoped_refptr frame_transformer, +@@ -292,6 +293,7 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( + &rtcp_feedback_buffer_, + &rtcp_feedback_buffer_, + field_trials_)), ++ vcm_receive_statistics_(vcm_receive_statistics), + packet_buffer_(kPacketBufferStartSize, + PacketBufferMaxSize(field_trials_)), + reference_finder_(std::make_unique()), +@@ -1210,7 +1212,8 @@ void RtpVideoStreamReceiver2::FrameDecoded(int64_t picture_id) { + int64_t unwrapped_rtp_seq_num = rtp_seq_num_unwrapper_.Unwrap(seq_num); + packet_infos_.erase(packet_infos_.begin(), + packet_infos_.upper_bound(unwrapped_rtp_seq_num)); +- packet_buffer_.ClearTo(seq_num); ++ uint32_t num_packets_cleared = packet_buffer_.ClearTo(seq_num); ++ vcm_receive_statistics_->OnDiscardedPackets(num_packets_cleared); + reference_finder_->ClearTo(seq_num); + } + } +diff --git a/video/rtp_video_stream_receiver2.h b/video/rtp_video_stream_receiver2.h +index 0e96d7f2cd..10329005ba 100644 +--- a/video/rtp_video_stream_receiver2.h ++++ b/video/rtp_video_stream_receiver2.h +@@ -49,6 +49,7 @@ + #include "rtc_base/thread_annotations.h" + #include "video/buffered_frame_decryptor.h" + #include "video/unique_timestamp_counter.h" ++#include "video/video_stream_buffer_controller.h" + + namespace webrtc { + +@@ -91,6 +92,7 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, + RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer, + RtcpCnameCallback* rtcp_cname_callback, + NackPeriodicProcessor* nack_periodic_processor, ++ VideoStreamBufferControllerStatsObserver* vcm_receive_statistics, + // The KeyFrameRequestSender is optional; 
if not provided, key frame + // requests are sent via the internal RtpRtcp module. + OnCompleteFrameCallback* complete_frame_callback, +@@ -362,6 +364,7 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, + std::unique_ptr loss_notification_controller_ + RTC_GUARDED_BY(packet_sequence_checker_); + ++ VideoStreamBufferControllerStatsObserver* const vcm_receive_statistics_; + video_coding::PacketBuffer packet_buffer_ + RTC_GUARDED_BY(packet_sequence_checker_); + UniqueTimestampCounter frame_counter_ +diff --git a/video/video_receive_stream2.cc b/video/video_receive_stream2.cc +index 0f42fa50b7..808cb932fe 100644 +--- a/video/video_receive_stream2.cc ++++ b/video/video_receive_stream2.cc +@@ -211,6 +211,7 @@ VideoReceiveStream2::VideoReceiveStream2( + &stats_proxy_, + &stats_proxy_, + nack_periodic_processor, ++ &stats_proxy_, + this, // OnCompleteFrameCallback + std::move(config_.frame_decryptor), + std::move(config_.frame_transformer), +diff --git a/video/video_stream_buffer_controller.cc b/video/video_stream_buffer_controller.cc +index 455f064b01..20588fbcad 100644 +--- a/video/video_stream_buffer_controller.cc ++++ b/video/video_stream_buffer_controller.cc +@@ -263,6 +263,7 @@ void VideoStreamBufferController::OnFrameReady( + + // Update stats. + UpdateDroppedFrames(); ++ UpdateDiscardedPackets(); + UpdateFrameBufferTimings(min_receive_time, now); + UpdateTimingFrameInfo(); + +@@ -328,6 +329,17 @@ void VideoStreamBufferController::UpdateDroppedFrames() + buffer_->GetTotalNumberOfDroppedFrames(); + } + ++void VideoStreamBufferController::UpdateDiscardedPackets() ++ RTC_RUN_ON(&worker_sequence_checker_) { ++ const int discarded_packets = buffer_->GetTotalNumberOfDiscardedPackets() - ++ packets_discarded_before_last_new_frame_; ++ if (discarded_packets > 0) { ++ stats_proxy_->OnDiscardedPackets(discarded_packets); ++ } ++ packets_discarded_before_last_new_frame_ = ++ buffer_->GetTotalNumberOfDiscardedPackets(); ++} ++ + void VideoStreamBufferController::UpdateFrameBufferTimings( + Timestamp min_receive_time, + Timestamp now) { +diff --git a/video/video_stream_buffer_controller.h b/video/video_stream_buffer_controller.h +index bb67304d06..b6d7bd46ae 100644 +--- a/video/video_stream_buffer_controller.h ++++ b/video/video_stream_buffer_controller.h +@@ -52,6 +52,8 @@ class VideoStreamBufferControllerStatsObserver { + TimeDelta target_delay, + TimeDelta minimum_delay) = 0; + ++ virtual void OnDiscardedPackets(uint32_t packets_discarded) = 0; ++ + // Various jitter buffer delays determined by VCMTiming. 
+ virtual void OnFrameBufferTimingsUpdated(int estimated_max_decode_time_ms, + int current_delay_ms, +@@ -94,6 +96,7 @@ class VideoStreamBufferController { + void OnTimeout(TimeDelta delay); + void FrameReadyForDecode(uint32_t rtp_timestamp, Timestamp render_time); + void UpdateDroppedFrames() RTC_RUN_ON(&worker_sequence_checker_); ++ void UpdateDiscardedPackets() RTC_RUN_ON(&worker_sequence_checker_); + void UpdateFrameBufferTimings(Timestamp min_receive_time, Timestamp now); + void UpdateTimingFrameInfo(); + bool IsTooManyFramesQueued() const RTC_RUN_ON(&worker_sequence_checker_); +@@ -122,6 +125,8 @@ class VideoStreamBufferController { + RTC_GUARDED_BY(&worker_sequence_checker_); + int frames_dropped_before_last_new_frame_ + RTC_GUARDED_BY(&worker_sequence_checker_) = 0; ++ int packets_discarded_before_last_new_frame_ ++ RTC_GUARDED_BY(&worker_sequence_checker_) = 0; + VCMVideoProtection protection_mode_ + RTC_GUARDED_BY(&worker_sequence_checker_) = kProtectionNack; + -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0069.patch b/third_party/libwebrtc/moz-patch-stack/0069.patch index a3d319cdd29d..eded45f01d69 100644 --- a/third_party/libwebrtc/moz-patch-stack/0069.patch +++ b/third_party/libwebrtc/moz-patch-stack/0069.patch @@ -1,349 +1,49 @@ From: Andreas Pehrson -Date: Tue, 23 Nov 2021 14:11:00 +0000 -Subject: Bug 1742181 - libwebrtc: Implement packetsDiscarded bookkeeping for - received video. r=ng +Date: Thu, 6 Jan 2022 00:16:00 +0000 +Subject: Bug 1748478 - Propagate calculated discarded packets to stats. r=bwc -Depends on D131707 - -Differential Revision: https://phabricator.services.mozilla.com/D131708 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/d0196a45a1f449874fc2a759e85e403c45c25575 - -Also includes: - -Bug 1804288 - (fix-de7ae5755b) reimplement Bug 1742181 - libwebrtc: Implement packetsDiscarded bookkeeping for received video. 
r=pehrsons - -Differential Revision: https://phabricator.services.mozilla.com/D163959 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/ee566d1bfb654d36e5d58dce637fb0580b989ac1 +Differential Revision: https://phabricator.services.mozilla.com/D135061 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/56fbf0469e25fa0d589c51ca112ce534a7c0ab91 --- - api/video/frame_buffer.cc | 25 ++++++++++++++++++++++--- - api/video/frame_buffer.h | 4 ++++ - call/video_receive_stream.h | 2 ++ - modules/video_coding/packet_buffer.cc | 10 +++++++--- - modules/video_coding/packet_buffer.h | 5 ++++- - video/receive_statistics_proxy.cc | 5 +++++ - video/receive_statistics_proxy.h | 1 + - video/rtp_video_stream_receiver2.cc | 5 ++++- - video/rtp_video_stream_receiver2.h | 3 +++ - video/video_receive_stream2.cc | 1 + - video/video_stream_buffer_controller.cc | 12 ++++++++++++ - video/video_stream_buffer_controller.h | 5 +++++ - 12 files changed, 70 insertions(+), 8 deletions(-) + video/receive_statistics_proxy.cc | 9 +++++++-- + video/rtp_video_stream_receiver2.cc | 4 +++- + 2 files changed, 10 insertions(+), 3 deletions(-) -diff --git a/api/video/frame_buffer.cc b/api/video/frame_buffer.cc -index 4cdf2212a6..8267b8e6cb 100644 ---- a/api/video/frame_buffer.cc -+++ b/api/video/frame_buffer.cc -@@ -140,14 +140,29 @@ void FrameBuffer::DropNextDecodableTemporalUnit() { - } - - auto end_it = std::next(next_decodable_temporal_unit_->last_frame); -- num_dropped_frames_ += std::count_if( -- frames_.begin(), end_it, -- [](const auto& f) { return f.second.encoded_frame != nullptr; }); -+ -+ UpdateDroppedFramesAndDiscardedPackets(frames_.begin(), end_it); - - frames_.erase(frames_.begin(), end_it); - FindNextAndLastDecodableTemporalUnit(); - } - -+void FrameBuffer::UpdateDroppedFramesAndDiscardedPackets(FrameIterator begin_it, -+ FrameIterator end_it) { -+ unsigned int num_discarded_packets = 0; -+ unsigned int num_dropped_frames = -+ std::count_if(begin_it, end_it, [&](const auto& f) { -+ if (f.second.encoded_frame) { -+ const auto& packetInfos = f.second.encoded_frame->PacketInfos(); -+ num_discarded_packets += packetInfos.size(); -+ } -+ return f.second.encoded_frame != nullptr; -+ }); -+ -+ num_dropped_frames_ += num_dropped_frames; -+ num_discarded_packets_ += num_discarded_packets; -+} -+ - absl::optional FrameBuffer::LastContinuousFrameId() const { - return last_continuous_frame_id_; - } -@@ -167,6 +182,9 @@ int FrameBuffer::GetTotalNumberOfContinuousTemporalUnits() const { - int FrameBuffer::GetTotalNumberOfDroppedFrames() const { - return num_dropped_frames_; - } -+int FrameBuffer::GetTotalNumberOfDiscardedPackets() const { -+ return num_discarded_packets_; -+} - - size_t FrameBuffer::CurrentSize() const { - return frames_.size(); -@@ -269,6 +287,7 @@ void FrameBuffer::FindNextAndLastDecodableTemporalUnit() { - } - - void FrameBuffer::Clear() { -+ UpdateDroppedFramesAndDiscardedPackets(frames_.begin(), frames_.end()); - frames_.clear(); - next_decodable_temporal_unit_.reset(); - decodable_temporal_units_info_.reset(); -diff --git a/api/video/frame_buffer.h b/api/video/frame_buffer.h -index 94edf64d5a..81fd12da58 100644 ---- a/api/video/frame_buffer.h -+++ b/api/video/frame_buffer.h -@@ -66,6 +66,7 @@ class FrameBuffer { - - int GetTotalNumberOfContinuousTemporalUnits() const; - int GetTotalNumberOfDroppedFrames() const; -+ int GetTotalNumberOfDiscardedPackets() const; - size_t CurrentSize() const; - - private: -@@ -87,6 +88,8 @@ class FrameBuffer { - void PropagateContinuity(const 
FrameIterator& frame_it); - void FindNextAndLastDecodableTemporalUnit(); - void Clear(); -+ void UpdateDroppedFramesAndDiscardedPackets(FrameIterator begin_it, -+ FrameIterator end_it); - - const bool legacy_frame_id_jump_behavior_; - const size_t max_size_; -@@ -99,6 +102,7 @@ class FrameBuffer { - - int num_continuous_temporal_units_ = 0; - int num_dropped_frames_ = 0; -+ int num_discarded_packets_ = 0; - }; - - } // namespace webrtc -diff --git a/call/video_receive_stream.h b/call/video_receive_stream.h -index 8587d2b5db..aafe47275d 100644 ---- a/call/video_receive_stream.h -+++ b/call/video_receive_stream.h -@@ -112,6 +112,8 @@ class VideoReceiveStreamInterface : public MediaReceiveStreamInterface { - // https://www.w3.org/TR/webrtc-stats/#dom-rtcvideoreceiverstats-framesdropped - uint32_t frames_dropped = 0; - uint32_t frames_decoded = 0; -+ // https://w3c.github.io/webrtc-stats/#dom-rtcreceivedrtpstreamstats-packetsdiscarded -+ uint64_t packets_discarded = 0; - // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-totaldecodetime - TimeDelta total_decode_time = TimeDelta::Zero(); - // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-totalprocessingdelay -diff --git a/modules/video_coding/packet_buffer.cc b/modules/video_coding/packet_buffer.cc -index 52ef5c2d85..be45db6ef0 100644 ---- a/modules/video_coding/packet_buffer.cc -+++ b/modules/video_coding/packet_buffer.cc -@@ -115,25 +115,27 @@ PacketBuffer::InsertResult PacketBuffer::InsertPacket( - return result; - } - --void PacketBuffer::ClearTo(uint16_t seq_num) { -+uint32_t PacketBuffer::ClearTo(uint16_t seq_num) { - // We have already cleared past this sequence number, no need to do anything. - if (is_cleared_to_first_seq_num_ && - AheadOf(first_seq_num_, seq_num)) { -- return; -+ return 0; - } - - // If the packet buffer was cleared between a frame was created and returned. - if (!first_packet_received_) -- return; -+ return 0; - - // Avoid iterating over the buffer more than once by capping the number of - // iterations to the `size_` of the buffer. - ++seq_num; -+ uint32_t num_cleared_packets = 0; - size_t diff = ForwardDiff(first_seq_num_, seq_num); - size_t iterations = std::min(diff, buffer_.size()); - for (size_t i = 0; i < iterations; ++i) { - auto& stored = buffer_[first_seq_num_ % buffer_.size()]; - if (stored != nullptr && AheadOf(seq_num, stored->seq_num)) { -+ ++num_cleared_packets; - stored = nullptr; - } - ++first_seq_num_; -@@ -149,6 +151,8 @@ void PacketBuffer::ClearTo(uint16_t seq_num) { - - received_padding_.erase(received_padding_.begin(), - received_padding_.lower_bound(seq_num)); -+ -+ return num_cleared_packets; - } - - void PacketBuffer::Clear() { -diff --git a/modules/video_coding/packet_buffer.h b/modules/video_coding/packet_buffer.h -index 53e08c95a1..47b2ffe199 100644 ---- a/modules/video_coding/packet_buffer.h -+++ b/modules/video_coding/packet_buffer.h -@@ -78,7 +78,10 @@ class PacketBuffer { - ABSL_MUST_USE_RESULT InsertResult - InsertPacket(std::unique_ptr packet); - ABSL_MUST_USE_RESULT InsertResult InsertPadding(uint16_t seq_num); -- void ClearTo(uint16_t seq_num); -+ -+ // Clear all packets older than |seq_num|. Returns the number of packets -+ // cleared. 
-+ uint32_t ClearTo(uint16_t seq_num); - void Clear(); - - void ForceSpsPpsIdrIsH264Keyframe(); diff --git a/video/receive_statistics_proxy.cc b/video/receive_statistics_proxy.cc -index 049f21226c..ae35ee9169 100644 +index abf9ef9b5e..af670b053d 100644 --- a/video/receive_statistics_proxy.cc +++ b/video/receive_statistics_proxy.cc -@@ -840,6 +840,11 @@ void ReceiveStatisticsProxy::OnDroppedFrames(uint32_t frames_dropped) { - })); +@@ -799,8 +799,13 @@ void ReceiveStatisticsProxy::OnDroppedFrames(uint32_t frames_dropped) { + } + + void ReceiveStatisticsProxy::OnDiscardedPackets(uint32_t packets_discarded) { +- RTC_DCHECK_RUN_ON(&main_thread_); +- stats_.packets_discarded += packets_discarded; ++ // Can be called on either the decode queue or the worker thread ++ // See FrameBuffer2 for more details. ++ worker_thread_->PostTask( ++ SafeTask(task_safety_.flag(), [packets_discarded, this]() { ++ RTC_DCHECK_RUN_ON(&main_thread_); ++ stats_.packets_discarded += packets_discarded; ++ })); } -+void ReceiveStatisticsProxy::OnDiscardedPackets(uint32_t packets_discarded) { -+ RTC_DCHECK_RUN_ON(&main_thread_); -+ stats_.packets_discarded += packets_discarded; -+} -+ void ReceiveStatisticsProxy::OnPreDecode(VideoCodecType codec_type, int qp) { - RTC_DCHECK_RUN_ON(&main_thread_); - last_codec_type_ = codec_type; -diff --git a/video/receive_statistics_proxy.h b/video/receive_statistics_proxy.h -index d8da3064fd..2fd89eedd0 100644 ---- a/video/receive_statistics_proxy.h -+++ b/video/receive_statistics_proxy.h -@@ -92,6 +92,7 @@ class ReceiveStatisticsProxy : public VideoStreamBufferControllerStatsObserver, - void OnDecodableFrame(TimeDelta jitter_buffer_delay, - TimeDelta target_delay, - TimeDelta minimum_delay) override; -+ void OnDiscardedPackets(uint32_t packets_discarded) override; - void OnFrameBufferTimingsUpdated(int estimated_max_decode_time_ms, - int current_delay_ms, - int target_delay_ms, diff --git a/video/rtp_video_stream_receiver2.cc b/video/rtp_video_stream_receiver2.cc -index 72d3596f96..4946105caa 100644 +index 98acbb1a51..c236b619d4 100644 --- a/video/rtp_video_stream_receiver2.cc +++ b/video/rtp_video_stream_receiver2.cc -@@ -244,6 +244,7 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( - RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer, - RtcpCnameCallback* rtcp_cname_callback, - NackPeriodicProcessor* nack_periodic_processor, -+ VideoStreamBufferControllerStatsObserver* vcm_receive_statistics, - OnCompleteFrameCallback* complete_frame_callback, - rtc::scoped_refptr frame_decryptor, - rtc::scoped_refptr frame_transformer, -@@ -292,6 +293,7 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( - &rtcp_feedback_buffer_, - &rtcp_feedback_buffer_, - field_trials_)), -+ vcm_receive_statistics_(vcm_receive_statistics), - packet_buffer_(kPacketBufferStartSize, - PacketBufferMaxSize(field_trials_)), - reference_finder_(std::make_unique()), -@@ -1204,7 +1206,8 @@ void RtpVideoStreamReceiver2::FrameDecoded(int64_t picture_id) { - int64_t unwrapped_rtp_seq_num = rtp_seq_num_unwrapper_.Unwrap(seq_num); +@@ -1213,7 +1213,9 @@ void RtpVideoStreamReceiver2::FrameDecoded(int64_t picture_id) { packet_infos_.erase(packet_infos_.begin(), packet_infos_.upper_bound(unwrapped_rtp_seq_num)); -- packet_buffer_.ClearTo(seq_num); -+ uint32_t num_packets_cleared = packet_buffer_.ClearTo(seq_num); -+ vcm_receive_statistics_->OnDiscardedPackets(num_packets_cleared); + uint32_t num_packets_cleared = packet_buffer_.ClearTo(seq_num); +- 
vcm_receive_statistics_->OnDiscardedPackets(num_packets_cleared); ++ if (num_packets_cleared > 0) { ++ vcm_receive_statistics_->OnDiscardedPackets(num_packets_cleared); ++ } reference_finder_->ClearTo(seq_num); } } -diff --git a/video/rtp_video_stream_receiver2.h b/video/rtp_video_stream_receiver2.h -index dc9cea422d..a39e7bd006 100644 ---- a/video/rtp_video_stream_receiver2.h -+++ b/video/rtp_video_stream_receiver2.h -@@ -49,6 +49,7 @@ - #include "rtc_base/thread_annotations.h" - #include "video/buffered_frame_decryptor.h" - #include "video/unique_timestamp_counter.h" -+#include "video/video_stream_buffer_controller.h" - - namespace webrtc { - -@@ -91,6 +92,7 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, - RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer, - RtcpCnameCallback* rtcp_cname_callback, - NackPeriodicProcessor* nack_periodic_processor, -+ VideoStreamBufferControllerStatsObserver* vcm_receive_statistics, - // The KeyFrameRequestSender is optional; if not provided, key frame - // requests are sent via the internal RtpRtcp module. - OnCompleteFrameCallback* complete_frame_callback, -@@ -361,6 +363,7 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, - std::unique_ptr loss_notification_controller_ - RTC_GUARDED_BY(packet_sequence_checker_); - -+ VideoStreamBufferControllerStatsObserver* const vcm_receive_statistics_; - video_coding::PacketBuffer packet_buffer_ - RTC_GUARDED_BY(packet_sequence_checker_); - UniqueTimestampCounter frame_counter_ -diff --git a/video/video_receive_stream2.cc b/video/video_receive_stream2.cc -index 2263f1dbf8..c6e8252135 100644 ---- a/video/video_receive_stream2.cc -+++ b/video/video_receive_stream2.cc -@@ -211,6 +211,7 @@ VideoReceiveStream2::VideoReceiveStream2( - &stats_proxy_, - &stats_proxy_, - nack_periodic_processor, -+ &stats_proxy_, - this, // OnCompleteFrameCallback - std::move(config_.frame_decryptor), - std::move(config_.frame_transformer), -diff --git a/video/video_stream_buffer_controller.cc b/video/video_stream_buffer_controller.cc -index 455f064b01..20588fbcad 100644 ---- a/video/video_stream_buffer_controller.cc -+++ b/video/video_stream_buffer_controller.cc -@@ -263,6 +263,7 @@ void VideoStreamBufferController::OnFrameReady( - - // Update stats. 
- UpdateDroppedFrames(); -+ UpdateDiscardedPackets(); - UpdateFrameBufferTimings(min_receive_time, now); - UpdateTimingFrameInfo(); - -@@ -328,6 +329,17 @@ void VideoStreamBufferController::UpdateDroppedFrames() - buffer_->GetTotalNumberOfDroppedFrames(); - } - -+void VideoStreamBufferController::UpdateDiscardedPackets() -+ RTC_RUN_ON(&worker_sequence_checker_) { -+ const int discarded_packets = buffer_->GetTotalNumberOfDiscardedPackets() - -+ packets_discarded_before_last_new_frame_; -+ if (discarded_packets > 0) { -+ stats_proxy_->OnDiscardedPackets(discarded_packets); -+ } -+ packets_discarded_before_last_new_frame_ = -+ buffer_->GetTotalNumberOfDiscardedPackets(); -+} -+ - void VideoStreamBufferController::UpdateFrameBufferTimings( - Timestamp min_receive_time, - Timestamp now) { -diff --git a/video/video_stream_buffer_controller.h b/video/video_stream_buffer_controller.h -index bb67304d06..b6d7bd46ae 100644 ---- a/video/video_stream_buffer_controller.h -+++ b/video/video_stream_buffer_controller.h -@@ -52,6 +52,8 @@ class VideoStreamBufferControllerStatsObserver { - TimeDelta target_delay, - TimeDelta minimum_delay) = 0; - -+ virtual void OnDiscardedPackets(uint32_t packets_discarded) = 0; -+ - // Various jitter buffer delays determined by VCMTiming. - virtual void OnFrameBufferTimingsUpdated(int estimated_max_decode_time_ms, - int current_delay_ms, -@@ -94,6 +96,7 @@ class VideoStreamBufferController { - void OnTimeout(TimeDelta delay); - void FrameReadyForDecode(uint32_t rtp_timestamp, Timestamp render_time); - void UpdateDroppedFrames() RTC_RUN_ON(&worker_sequence_checker_); -+ void UpdateDiscardedPackets() RTC_RUN_ON(&worker_sequence_checker_); - void UpdateFrameBufferTimings(Timestamp min_receive_time, Timestamp now); - void UpdateTimingFrameInfo(); - bool IsTooManyFramesQueued() const RTC_RUN_ON(&worker_sequence_checker_); -@@ -122,6 +125,8 @@ class VideoStreamBufferController { - RTC_GUARDED_BY(&worker_sequence_checker_); - int frames_dropped_before_last_new_frame_ - RTC_GUARDED_BY(&worker_sequence_checker_) = 0; -+ int packets_discarded_before_last_new_frame_ -+ RTC_GUARDED_BY(&worker_sequence_checker_) = 0; - VCMVideoProtection protection_mode_ - RTC_GUARDED_BY(&worker_sequence_checker_) = kProtectionNack; - -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0070.patch b/third_party/libwebrtc/moz-patch-stack/0070.patch index 6808f32223ab..69fde29b61ad 100644 --- a/third_party/libwebrtc/moz-patch-stack/0070.patch +++ b/third_party/libwebrtc/moz-patch-stack/0070.patch @@ -1,49 +1,224 @@ From: Andreas Pehrson Date: Thu, 6 Jan 2022 00:16:00 +0000 -Subject: Bug 1748478 - Propagate calculated discarded packets to stats. r=bwc +Subject: Bug 1748458 - Add TRACE_EVENTs for dropped frames and packets for + received video. r=bwc -Differential Revision: https://phabricator.services.mozilla.com/D135061 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/56fbf0469e25fa0d589c51ca112ce534a7c0ab91 +This lets us see in the profiler how many received frames and packets we decide +to drop and the reasons why. + +Differential Revision: https://phabricator.services.mozilla.com/D135062 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/08e252da94c4752eccfd845eef13d8517953cc6a + +Also includes: + +Bug 1804288 - (fix-de7ae5755b) reimplement Bug 1748458 - Add TRACE_EVENTs for dropped frames and packets for received video. 
r=pehrsons + +Differential Revision: https://phabricator.services.mozilla.com/D163960 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/8e9a326a99cd5eaa6e447ff57c01ad9d79a09744 --- - video/receive_statistics_proxy.cc | 9 +++++++-- - video/rtp_video_stream_receiver2.cc | 4 +++- - 2 files changed, 10 insertions(+), 3 deletions(-) + api/video/frame_buffer.cc | 33 +++++++++++++++++++++++++ + video/receive_statistics_proxy.cc | 11 +++++++++ + video/rtp_video_stream_receiver2.cc | 4 +++ + video/video_stream_buffer_controller.cc | 7 ++++++ + 4 files changed, 55 insertions(+) +diff --git a/api/video/frame_buffer.cc b/api/video/frame_buffer.cc +index 8267b8e6cb..f5d93f5f76 100644 +--- a/api/video/frame_buffer.cc ++++ b/api/video/frame_buffer.cc +@@ -16,6 +16,7 @@ + #include "absl/container/inlined_vector.h" + #include "rtc_base/logging.h" + #include "rtc_base/numerics/sequence_number_util.h" ++#include "rtc_base/trace_event.h" + + namespace webrtc { + namespace { +@@ -68,7 +69,12 @@ FrameBuffer::FrameBuffer(int max_size, + decoded_frame_history_(max_decode_history) {} + + bool FrameBuffer::InsertFrame(std::unique_ptr frame) { ++ const uint32_t ssrc = ++ frame->PacketInfos().empty() ? 0 : frame->PacketInfos()[0].ssrc(); + if (!ValidReferences(*frame)) { ++ TRACE_EVENT2("webrtc", ++ "FrameBuffer::InsertFrame Frame dropped (Invalid references)", ++ "remote_ssrc", ssrc, "frame_id", frame->Id()); + RTC_DLOG(LS_WARNING) << "Frame " << frame->Id() + << " has invalid references, dropping frame."; + return false; +@@ -78,23 +84,35 @@ bool FrameBuffer::InsertFrame(std::unique_ptr frame) { + if (legacy_frame_id_jump_behavior_ && frame->is_keyframe() && + AheadOf(frame->Timestamp(), + *decoded_frame_history_.GetLastDecodedFrameTimestamp())) { ++ TRACE_EVENT2("webrtc", ++ "FrameBuffer::InsertFrame Frames dropped (OOO + PicId jump)", ++ "remote_ssrc", ssrc, "frame_id", frame->Id()); + RTC_DLOG(LS_WARNING) + << "Keyframe " << frame->Id() + << " has newer timestamp but older picture id, clearing buffer."; + Clear(); + } else { + // Already decoded past this frame. ++ TRACE_EVENT2("webrtc", ++ "FrameBuffer::InsertFrame Frame dropped (Out of order)", ++ "remote_ssrc", ssrc, "frame_id", frame->Id()); + return false; + } + } + + if (frames_.size() == max_size_) { + if (frame->is_keyframe()) { ++ TRACE_EVENT2("webrtc", ++ "FrameBuffer::InsertFrame Frames dropped (KF + Full buffer)", ++ "remote_ssrc", ssrc, "frame_id", frame->Id()); + RTC_DLOG(LS_WARNING) << "Keyframe " << frame->Id() + << " inserted into full buffer, clearing buffer."; + Clear(); + } else { + // No space for this frame. 
++ TRACE_EVENT2("webrtc", ++ "FrameBuffer::InsertFrame Frame dropped (Full buffer)", ++ "remote_ssrc", ssrc, "frame_id", frame->Id()); + return false; + } + } +@@ -149,16 +167,31 @@ void FrameBuffer::DropNextDecodableTemporalUnit() { + + void FrameBuffer::UpdateDroppedFramesAndDiscardedPackets(FrameIterator begin_it, + FrameIterator end_it) { ++ uint32_t dropped_ssrc = 0; ++ int64_t dropped_frame_id = 0; + unsigned int num_discarded_packets = 0; + unsigned int num_dropped_frames = + std::count_if(begin_it, end_it, [&](const auto& f) { + if (f.second.encoded_frame) { + const auto& packetInfos = f.second.encoded_frame->PacketInfos(); ++ dropped_frame_id = f.first; ++ if (!packetInfos.empty()) { ++ dropped_ssrc = packetInfos[0].ssrc(); ++ } + num_discarded_packets += packetInfos.size(); + } + return f.second.encoded_frame != nullptr; + }); + ++ if (num_dropped_frames > 0) { ++ TRACE_EVENT2("webrtc", "FrameBuffer Dropping Old Frames", "remote_ssrc", ++ dropped_ssrc, "frame_id", dropped_frame_id); ++ } ++ if (num_discarded_packets > 0) { ++ TRACE_EVENT2("webrtc", "FrameBuffer Discarding Old Packets", "remote_ssrc", ++ dropped_ssrc, "frame_id", dropped_frame_id); ++ } ++ + num_dropped_frames_ += num_dropped_frames; + num_discarded_packets_ += num_discarded_packets; + } diff --git a/video/receive_statistics_proxy.cc b/video/receive_statistics_proxy.cc -index ae35ee9169..78b541b8f5 100644 +index af670b053d..8d00af91ff 100644 --- a/video/receive_statistics_proxy.cc +++ b/video/receive_statistics_proxy.cc -@@ -841,8 +841,13 @@ void ReceiveStatisticsProxy::OnDroppedFrames(uint32_t frames_dropped) { - } +@@ -20,6 +20,7 @@ + #include "rtc_base/strings/string_builder.h" + #include "rtc_base/thread.h" + #include "rtc_base/time_utils.h" ++#include "rtc_base/trace_event.h" + #include "system_wrappers/include/clock.h" + #include "system_wrappers/include/metrics.h" + #include "video/video_receive_stream2.h" +@@ -760,6 +761,9 @@ void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe, + VideoContentType content_type) { + RTC_DCHECK_RUN_ON(&main_thread_); ++ TRACE_EVENT2("webrtc", "ReceiveStatisticsProxy::OnCompleteFrame", ++ "remote_ssrc", remote_ssrc_, "is_keyframe", is_keyframe); ++ + if (is_keyframe) { + ++stats_.frame_counts.key_frames; + } else { +@@ -791,6 +795,8 @@ void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe, + void ReceiveStatisticsProxy::OnDroppedFrames(uint32_t frames_dropped) { + // Can be called on either the decode queue or the worker thread + // See FrameBuffer2 for more details. ++ TRACE_EVENT2("webrtc", "ReceiveStatisticsProxy::OnDroppedFrames", ++ "remote_ssrc", remote_ssrc_, "frames_dropped", frames_dropped); + worker_thread_->PostTask( + SafeTask(task_safety_.flag(), [frames_dropped, this]() { + RTC_DCHECK_RUN_ON(&main_thread_); +@@ -801,6 +807,9 @@ void ReceiveStatisticsProxy::OnDroppedFrames(uint32_t frames_dropped) { void ReceiveStatisticsProxy::OnDiscardedPackets(uint32_t packets_discarded) { -- RTC_DCHECK_RUN_ON(&main_thread_); -- stats_.packets_discarded += packets_discarded; -+ // Can be called on either the decode queue or the worker thread -+ // See FrameBuffer2 for more details. -+ worker_thread_->PostTask( -+ SafeTask(task_safety_.flag(), [packets_discarded, this]() { -+ RTC_DCHECK_RUN_ON(&main_thread_); -+ stats_.packets_discarded += packets_discarded; -+ })); + // Can be called on either the decode queue or the worker thread + // See FrameBuffer2 for more details. 
++ TRACE_EVENT2("webrtc", "ReceiveStatisticsProxy::OnDiscardedPackets", ++ "remote_ssrc", remote_ssrc_, "packets_discarded", ++ packets_discarded); + worker_thread_->PostTask( + SafeTask(task_safety_.flag(), [packets_discarded, this]() { + RTC_DCHECK_RUN_ON(&main_thread_); +@@ -829,6 +838,8 @@ void ReceiveStatisticsProxy::OnStreamInactive() { + + void ReceiveStatisticsProxy::OnRttUpdate(int64_t avg_rtt_ms) { + RTC_DCHECK_RUN_ON(&main_thread_); ++ TRACE_EVENT2("webrtc", "ReceiveStatisticsProxy::OnRttUpdate", ++ "remote_ssrc", remote_ssrc_, "avg_rtt_ms", avg_rtt_ms); + avg_rtt_ms_ = avg_rtt_ms; } - void ReceiveStatisticsProxy::OnPreDecode(VideoCodecType codec_type, int qp) { diff --git a/video/rtp_video_stream_receiver2.cc b/video/rtp_video_stream_receiver2.cc -index 4946105caa..a4ba853b91 100644 +index c236b619d4..0926552e4e 100644 --- a/video/rtp_video_stream_receiver2.cc +++ b/video/rtp_video_stream_receiver2.cc -@@ -1207,7 +1207,9 @@ void RtpVideoStreamReceiver2::FrameDecoded(int64_t picture_id) { - packet_infos_.erase(packet_infos_.begin(), +@@ -44,6 +44,7 @@ + #include "rtc_base/checks.h" + #include "rtc_base/logging.h" + #include "rtc_base/strings/string_builder.h" ++#include "rtc_base/trace_event.h" + #include "system_wrappers/include/metrics.h" + #include "system_wrappers/include/ntp_time.h" + +@@ -1214,6 +1215,9 @@ void RtpVideoStreamReceiver2::FrameDecoded(int64_t picture_id) { packet_infos_.upper_bound(unwrapped_rtp_seq_num)); uint32_t num_packets_cleared = packet_buffer_.ClearTo(seq_num); -- vcm_receive_statistics_->OnDiscardedPackets(num_packets_cleared); -+ if (num_packets_cleared > 0) { -+ vcm_receive_statistics_->OnDiscardedPackets(num_packets_cleared); -+ } + if (num_packets_cleared > 0) { ++ TRACE_EVENT2("webrtc", ++ "RtpVideoStreamReceiver2::FrameDecoded Cleared Old Packets", ++ "remote_ssrc", config_.rtp.remote_ssrc, "seq_num", seq_num); + vcm_receive_statistics_->OnDiscardedPackets(num_packets_cleared); + } reference_finder_->ClearTo(seq_num); - } - } +diff --git a/video/video_stream_buffer_controller.cc b/video/video_stream_buffer_controller.cc +index 20588fbcad..9da894ab95 100644 +--- a/video/video_stream_buffer_controller.cc ++++ b/video/video_stream_buffer_controller.cc +@@ -31,6 +31,7 @@ + #include "rtc_base/checks.h" + #include "rtc_base/logging.h" + #include "rtc_base/thread_annotations.h" ++#include "rtc_base/trace_event.h" + #include "video/frame_decode_scheduler.h" + #include "video/frame_decode_timing.h" + #include "video/task_queue_frame_decode_scheduler.h" +@@ -152,6 +153,9 @@ absl::optional VideoStreamBufferController::InsertFrame( + std::unique_ptr frame) { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + FrameMetadata metadata(*frame); ++ const uint32_t ssrc = ++ frame->PacketInfos().empty() ? 
0 : frame->PacketInfos()[0].ssrc(); ++ const int64_t frameId = frame->Id(); + int complete_units = buffer_->GetTotalNumberOfContinuousTemporalUnits(); + if (buffer_->InsertFrame(std::move(frame))) { + RTC_DCHECK(metadata.receive_time) << "Frame receive time must be set!"; +@@ -162,6 +166,9 @@ absl::optional VideoStreamBufferController::InsertFrame( + *metadata.receive_time); + } + if (complete_units < buffer_->GetTotalNumberOfContinuousTemporalUnits()) { ++ TRACE_EVENT2("webrtc", ++ "VideoStreamBufferController::InsertFrame Frame Complete", ++ "remote_ssrc", ssrc, "frame_id", frameId); + stats_proxy_->OnCompleteFrame(metadata.is_keyframe, metadata.size, + metadata.contentType); + MaybeScheduleFrameForRelease(); -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0071.patch b/third_party/libwebrtc/moz-patch-stack/0071.patch index 6240427a81b5..7e36ef6fe7e8 100644 --- a/third_party/libwebrtc/moz-patch-stack/0071.patch +++ b/third_party/libwebrtc/moz-patch-stack/0071.patch @@ -1,224 +1,30 @@ From: Andreas Pehrson Date: Thu, 6 Jan 2022 00:16:00 +0000 -Subject: Bug 1748458 - Add TRACE_EVENTs for dropped frames and packets for - received video. r=bwc +Subject: Bug 1748458 - Add TRACE_EVENT for keyframe requests. r=bwc -This lets us see in the profiler how many received frames and packets we decide -to drop and the reasons why. - -Differential Revision: https://phabricator.services.mozilla.com/D135062 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/08e252da94c4752eccfd845eef13d8517953cc6a - -Also includes: - -Bug 1804288 - (fix-de7ae5755b) reimplement Bug 1748458 - Add TRACE_EVENTs for dropped frames and packets for received video. r=pehrsons - -Differential Revision: https://phabricator.services.mozilla.com/D163960 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/8e9a326a99cd5eaa6e447ff57c01ad9d79a09744 +Differential Revision: https://phabricator.services.mozilla.com/D135113 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/5b2a7894ef1cf096d0e8977754507c0820e757fc --- - api/video/frame_buffer.cc | 33 +++++++++++++++++++++++++ - video/receive_statistics_proxy.cc | 11 +++++++++ - video/rtp_video_stream_receiver2.cc | 4 +++ - video/video_stream_buffer_controller.cc | 7 ++++++ - 4 files changed, 55 insertions(+) + video/rtp_video_stream_receiver2.cc | 6 ++++++ + 1 file changed, 6 insertions(+) -diff --git a/api/video/frame_buffer.cc b/api/video/frame_buffer.cc -index 8267b8e6cb..f5d93f5f76 100644 ---- a/api/video/frame_buffer.cc -+++ b/api/video/frame_buffer.cc -@@ -16,6 +16,7 @@ - #include "absl/container/inlined_vector.h" - #include "rtc_base/logging.h" - #include "rtc_base/numerics/sequence_number_util.h" -+#include "rtc_base/trace_event.h" - - namespace webrtc { - namespace { -@@ -68,7 +69,12 @@ FrameBuffer::FrameBuffer(int max_size, - decoded_frame_history_(max_decode_history) {} - - bool FrameBuffer::InsertFrame(std::unique_ptr frame) { -+ const uint32_t ssrc = -+ frame->PacketInfos().empty() ? 
0 : frame->PacketInfos()[0].ssrc(); - if (!ValidReferences(*frame)) { -+ TRACE_EVENT2("webrtc", -+ "FrameBuffer::InsertFrame Frame dropped (Invalid references)", -+ "remote_ssrc", ssrc, "frame_id", frame->Id()); - RTC_DLOG(LS_WARNING) << "Frame " << frame->Id() - << " has invalid references, dropping frame."; - return false; -@@ -78,23 +84,35 @@ bool FrameBuffer::InsertFrame(std::unique_ptr frame) { - if (legacy_frame_id_jump_behavior_ && frame->is_keyframe() && - AheadOf(frame->Timestamp(), - *decoded_frame_history_.GetLastDecodedFrameTimestamp())) { -+ TRACE_EVENT2("webrtc", -+ "FrameBuffer::InsertFrame Frames dropped (OOO + PicId jump)", -+ "remote_ssrc", ssrc, "frame_id", frame->Id()); - RTC_DLOG(LS_WARNING) - << "Keyframe " << frame->Id() - << " has newer timestamp but older picture id, clearing buffer."; - Clear(); - } else { - // Already decoded past this frame. -+ TRACE_EVENT2("webrtc", -+ "FrameBuffer::InsertFrame Frame dropped (Out of order)", -+ "remote_ssrc", ssrc, "frame_id", frame->Id()); - return false; - } - } - - if (frames_.size() == max_size_) { - if (frame->is_keyframe()) { -+ TRACE_EVENT2("webrtc", -+ "FrameBuffer::InsertFrame Frames dropped (KF + Full buffer)", -+ "remote_ssrc", ssrc, "frame_id", frame->Id()); - RTC_DLOG(LS_WARNING) << "Keyframe " << frame->Id() - << " inserted into full buffer, clearing buffer."; - Clear(); - } else { - // No space for this frame. -+ TRACE_EVENT2("webrtc", -+ "FrameBuffer::InsertFrame Frame dropped (Full buffer)", -+ "remote_ssrc", ssrc, "frame_id", frame->Id()); - return false; - } - } -@@ -149,16 +167,31 @@ void FrameBuffer::DropNextDecodableTemporalUnit() { - - void FrameBuffer::UpdateDroppedFramesAndDiscardedPackets(FrameIterator begin_it, - FrameIterator end_it) { -+ uint32_t dropped_ssrc = 0; -+ int64_t dropped_frame_id = 0; - unsigned int num_discarded_packets = 0; - unsigned int num_dropped_frames = - std::count_if(begin_it, end_it, [&](const auto& f) { - if (f.second.encoded_frame) { - const auto& packetInfos = f.second.encoded_frame->PacketInfos(); -+ dropped_frame_id = f.first; -+ if (!packetInfos.empty()) { -+ dropped_ssrc = packetInfos[0].ssrc(); -+ } - num_discarded_packets += packetInfos.size(); - } - return f.second.encoded_frame != nullptr; - }); - -+ if (num_dropped_frames > 0) { -+ TRACE_EVENT2("webrtc", "FrameBuffer Dropping Old Frames", "remote_ssrc", -+ dropped_ssrc, "frame_id", dropped_frame_id); -+ } -+ if (num_discarded_packets > 0) { -+ TRACE_EVENT2("webrtc", "FrameBuffer Discarding Old Packets", "remote_ssrc", -+ dropped_ssrc, "frame_id", dropped_frame_id); -+ } -+ - num_dropped_frames_ += num_dropped_frames; - num_discarded_packets_ += num_discarded_packets; - } -diff --git a/video/receive_statistics_proxy.cc b/video/receive_statistics_proxy.cc -index 78b541b8f5..98eb94786f 100644 ---- a/video/receive_statistics_proxy.cc -+++ b/video/receive_statistics_proxy.cc -@@ -20,6 +20,7 @@ - #include "rtc_base/strings/string_builder.h" - #include "rtc_base/thread.h" - #include "rtc_base/time_utils.h" -+#include "rtc_base/trace_event.h" - #include "system_wrappers/include/clock.h" - #include "system_wrappers/include/metrics.h" - #include "video/video_receive_stream2.h" -@@ -802,6 +803,9 @@ void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe, - VideoContentType content_type) { - RTC_DCHECK_RUN_ON(&main_thread_); - -+ TRACE_EVENT2("webrtc", "ReceiveStatisticsProxy::OnCompleteFrame", -+ "remote_ssrc", remote_ssrc_, "is_keyframe", is_keyframe); -+ - if (is_keyframe) { - ++stats_.frame_counts.key_frames; - } 
else { -@@ -833,6 +837,8 @@ void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe, - void ReceiveStatisticsProxy::OnDroppedFrames(uint32_t frames_dropped) { - // Can be called on either the decode queue or the worker thread - // See FrameBuffer2 for more details. -+ TRACE_EVENT2("webrtc", "ReceiveStatisticsProxy::OnDroppedFrames", -+ "remote_ssrc", remote_ssrc_, "frames_dropped", frames_dropped); - worker_thread_->PostTask( - SafeTask(task_safety_.flag(), [frames_dropped, this]() { - RTC_DCHECK_RUN_ON(&main_thread_); -@@ -843,6 +849,9 @@ void ReceiveStatisticsProxy::OnDroppedFrames(uint32_t frames_dropped) { - void ReceiveStatisticsProxy::OnDiscardedPackets(uint32_t packets_discarded) { - // Can be called on either the decode queue or the worker thread - // See FrameBuffer2 for more details. -+ TRACE_EVENT2("webrtc", "ReceiveStatisticsProxy::OnDiscardedPackets", -+ "remote_ssrc", remote_ssrc_, "packets_discarded", -+ packets_discarded); - worker_thread_->PostTask( - SafeTask(task_safety_.flag(), [packets_discarded, this]() { - RTC_DCHECK_RUN_ON(&main_thread_); -@@ -871,6 +880,8 @@ void ReceiveStatisticsProxy::OnStreamInactive() { - - void ReceiveStatisticsProxy::OnRttUpdate(int64_t avg_rtt_ms) { - RTC_DCHECK_RUN_ON(&main_thread_); -+ TRACE_EVENT2("webrtc", "ReceiveStatisticsProxy::OnRttUpdate", -+ "remote_ssrc", remote_ssrc_, "avg_rtt_ms", avg_rtt_ms); - avg_rtt_ms_ = avg_rtt_ms; - } - diff --git a/video/rtp_video_stream_receiver2.cc b/video/rtp_video_stream_receiver2.cc -index a4ba853b91..9997fa25e3 100644 +index 0926552e4e..be8ee88c09 100644 --- a/video/rtp_video_stream_receiver2.cc +++ b/video/rtp_video_stream_receiver2.cc -@@ -44,6 +44,7 @@ - #include "rtc_base/checks.h" - #include "rtc_base/logging.h" - #include "rtc_base/strings/string_builder.h" -+#include "rtc_base/trace_event.h" - #include "system_wrappers/include/metrics.h" - #include "system_wrappers/include/ntp_time.h" +@@ -735,6 +735,12 @@ void RtpVideoStreamReceiver2::OnRtpPacket(const RtpPacketReceived& packet) { -@@ -1208,6 +1209,9 @@ void RtpVideoStreamReceiver2::FrameDecoded(int64_t picture_id) { - packet_infos_.upper_bound(unwrapped_rtp_seq_num)); - uint32_t num_packets_cleared = packet_buffer_.ClearTo(seq_num); - if (num_packets_cleared > 0) { -+ TRACE_EVENT2("webrtc", -+ "RtpVideoStreamReceiver2::FrameDecoded Cleared Old Packets", -+ "remote_ssrc", config_.rtp.remote_ssrc, "seq_num", seq_num); - vcm_receive_statistics_->OnDiscardedPackets(num_packets_cleared); - } - reference_finder_->ClearTo(seq_num); -diff --git a/video/video_stream_buffer_controller.cc b/video/video_stream_buffer_controller.cc -index 20588fbcad..9da894ab95 100644 ---- a/video/video_stream_buffer_controller.cc -+++ b/video/video_stream_buffer_controller.cc -@@ -31,6 +31,7 @@ - #include "rtc_base/checks.h" - #include "rtc_base/logging.h" - #include "rtc_base/thread_annotations.h" -+#include "rtc_base/trace_event.h" - #include "video/frame_decode_scheduler.h" - #include "video/frame_decode_timing.h" - #include "video/task_queue_frame_decode_scheduler.h" -@@ -152,6 +153,9 @@ absl::optional VideoStreamBufferController::InsertFrame( - std::unique_ptr frame) { - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - FrameMetadata metadata(*frame); -+ const uint32_t ssrc = -+ frame->PacketInfos().empty() ? 
0 : frame->PacketInfos()[0].ssrc(); -+ const int64_t frameId = frame->Id(); - int complete_units = buffer_->GetTotalNumberOfContinuousTemporalUnits(); - if (buffer_->InsertFrame(std::move(frame))) { - RTC_DCHECK(metadata.receive_time) << "Frame receive time must be set!"; -@@ -162,6 +166,9 @@ absl::optional VideoStreamBufferController::InsertFrame( - *metadata.receive_time); - } - if (complete_units < buffer_->GetTotalNumberOfContinuousTemporalUnits()) { -+ TRACE_EVENT2("webrtc", -+ "VideoStreamBufferController::InsertFrame Frame Complete", -+ "remote_ssrc", ssrc, "frame_id", frameId); - stats_proxy_->OnCompleteFrame(metadata.is_keyframe, metadata.size, - metadata.contentType); - MaybeScheduleFrameForRelease(); + void RtpVideoStreamReceiver2::RequestKeyFrame() { + RTC_DCHECK_RUN_ON(&worker_task_checker_); ++ TRACE_EVENT2("webrtc", "RtpVideoStreamReceiver2::RequestKeyFrame", ++ "remote_ssrc", config_.rtp.remote_ssrc, "method", ++ keyframe_request_method_ == KeyFrameReqMethod::kPliRtcp ? "PLI" ++ : keyframe_request_method_ == KeyFrameReqMethod::kFirRtcp ? "FIR" ++ : keyframe_request_method_ == KeyFrameReqMethod::kNone ? "None" ++ : "Other"); + // TODO(bugs.webrtc.org/10336): Allow the sender to ignore key frame requests + // issued by anything other than the LossNotificationController if it (the + // sender) is relying on LNTF alone. -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0072.patch b/third_party/libwebrtc/moz-patch-stack/0072.patch index 965386a32441..51b6e3220567 100644 --- a/third_party/libwebrtc/moz-patch-stack/0072.patch +++ b/third_party/libwebrtc/moz-patch-stack/0072.patch @@ -1,30 +1,26 @@ From: Andreas Pehrson -Date: Thu, 6 Jan 2022 00:16:00 +0000 -Subject: Bug 1748458 - Add TRACE_EVENT for keyframe requests. r=bwc +Date: Wed, 11 Jan 2023 22:42:00 +0000 +Subject: Bug 1800942 - Add DCHECKs to + TimestampExtrapolator::ExtrapolateLocalTime. 
r=mjf -Differential Revision: https://phabricator.services.mozilla.com/D135113 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/5b2a7894ef1cf096d0e8977754507c0820e757fc +Differential Revision: https://phabricator.services.mozilla.com/D166536 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/c5df7f40392464ffc63f44a53ddcaab2091741e0 --- - video/rtp_video_stream_receiver2.cc | 6 ++++++ - 1 file changed, 6 insertions(+) + modules/video_coding/timing/timestamp_extrapolator.cc | 1 + + 1 file changed, 1 insertion(+) -diff --git a/video/rtp_video_stream_receiver2.cc b/video/rtp_video_stream_receiver2.cc -index 9997fa25e3..738da0758b 100644 ---- a/video/rtp_video_stream_receiver2.cc -+++ b/video/rtp_video_stream_receiver2.cc -@@ -735,6 +735,12 @@ void RtpVideoStreamReceiver2::OnRtpPacket(const RtpPacketReceived& packet) { +diff --git a/modules/video_coding/timing/timestamp_extrapolator.cc b/modules/video_coding/timing/timestamp_extrapolator.cc +index a90df8bf7f..77e5508a76 100644 +--- a/modules/video_coding/timing/timestamp_extrapolator.cc ++++ b/modules/video_coding/timing/timestamp_extrapolator.cc +@@ -125,6 +125,7 @@ void TimestampExtrapolator::Update(Timestamp now, uint32_t ts90khz) { + absl::optional TimestampExtrapolator::ExtrapolateLocalTime( + uint32_t timestamp90khz) const { + int64_t unwrapped_ts90khz = unwrapper_.PeekUnwrap(timestamp90khz); ++ RTC_DCHECK_GE(unwrapped_ts90khz, 0); - void RtpVideoStreamReceiver2::RequestKeyFrame() { - RTC_DCHECK_RUN_ON(&worker_task_checker_); -+ TRACE_EVENT2("webrtc", "RtpVideoStreamReceiver2::RequestKeyFrame", -+ "remote_ssrc", config_.rtp.remote_ssrc, "method", -+ keyframe_request_method_ == KeyFrameReqMethod::kPliRtcp ? "PLI" -+ : keyframe_request_method_ == KeyFrameReqMethod::kFirRtcp ? "FIR" -+ : keyframe_request_method_ == KeyFrameReqMethod::kNone ? "None" -+ : "Other"); - // TODO(bugs.webrtc.org/10336): Allow the sender to ignore key frame requests - // issued by anything other than the LossNotificationController if it (the - // sender) is relying on LNTF alone. + if (!first_unwrapped_timestamp_) { + return absl::nullopt; -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0073.patch b/third_party/libwebrtc/moz-patch-stack/0073.patch index 2f0355706b9c..69a6d4978fcf 100644 --- a/third_party/libwebrtc/moz-patch-stack/0073.patch +++ b/third_party/libwebrtc/moz-patch-stack/0073.patch @@ -1,26 +1,28 @@ From: Andreas Pehrson -Date: Wed, 11 Jan 2023 22:42:00 +0000 -Subject: Bug 1800942 - Add DCHECKs to - TimestampExtrapolator::ExtrapolateLocalTime. r=mjf +Date: Wed, 8 Feb 2023 08:01:00 +0000 +Subject: Bug 1814692 - Don't attempt realtime scheduling rtc::PlatformThreads. 
+ r=webrtc-reviewers,bwc -Differential Revision: https://phabricator.services.mozilla.com/D166536 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/c5df7f40392464ffc63f44a53ddcaab2091741e0 +Differential Revision: https://phabricator.services.mozilla.com/D169036 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/9e64a965e26c8379261466e5273c3b383164b2c7 --- - modules/video_coding/timing/timestamp_extrapolator.cc | 1 + - 1 file changed, 1 insertion(+) + rtc_base/platform_thread.cc | 3 +++ + 1 file changed, 3 insertions(+) -diff --git a/modules/video_coding/timing/timestamp_extrapolator.cc b/modules/video_coding/timing/timestamp_extrapolator.cc -index a90df8bf7f..77e5508a76 100644 ---- a/modules/video_coding/timing/timestamp_extrapolator.cc -+++ b/modules/video_coding/timing/timestamp_extrapolator.cc -@@ -125,6 +125,7 @@ void TimestampExtrapolator::Update(Timestamp now, uint32_t ts90khz) { - absl::optional TimestampExtrapolator::ExtrapolateLocalTime( - uint32_t timestamp90khz) const { - int64_t unwrapped_ts90khz = unwrapper_.PeekUnwrap(timestamp90khz); -+ RTC_DCHECK_GE(unwrapped_ts90khz, 0); - - if (!first_unwrapped_timestamp_) { - return absl::nullopt; +diff --git a/rtc_base/platform_thread.cc b/rtc_base/platform_thread.cc +index 71a9f1b224..bcbb784b97 100644 +--- a/rtc_base/platform_thread.cc ++++ b/rtc_base/platform_thread.cc +@@ -50,6 +50,9 @@ bool SetPriority(ThreadPriority priority) { + // TODO(tommi): Switch to the same mechanism as Chromium uses for changing + // thread priorities. + return true; ++#elif defined(WEBRTC_MOZILLA_BUILD) && defined(WEBRTC_LINUX) ++ // Only realtime audio uses realtime scheduling in Firefox. ++ return true; + #else + const int policy = SCHED_FIFO; + const int min_prio = sched_get_priority_min(policy); -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0074.patch b/third_party/libwebrtc/moz-patch-stack/0074.patch index b587367fc3e1..3f4b4db0b258 100644 --- a/third_party/libwebrtc/moz-patch-stack/0074.patch +++ b/third_party/libwebrtc/moz-patch-stack/0074.patch @@ -1,28 +1,77 @@ -From: Andreas Pehrson -Date: Wed, 8 Feb 2023 08:01:00 +0000 -Subject: Bug 1814692 - Don't attempt realtime scheduling rtc::PlatformThreads. - r=webrtc-reviewers,bwc +From: Jan Grulich +Date: Fri, 10 Mar 2023 09:21:00 +0000 +Subject: Bug 1819035 - get EGL display based on the used platform in the + browser r=webrtc-reviewers,ng -Differential Revision: https://phabricator.services.mozilla.com/D169036 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/9e64a965e26c8379261466e5273c3b383164b2c7 +Because of a possible misconfiguration or a possible driver issue it +might happen that the browser will use a different driver on X11 and +end up using yet another one for wayland/gbm, which might lead to not +working screen sharing in the better case, but also to a crash in the +other driver (Nvidia). This adds a check for platform the browser runs +on, if it's XWayland or Wayland and based on that query EGL display for +that specific platform, rather than going for the Wayland one only. 
+ +Differential Revision: https://phabricator.services.mozilla.com/D171858 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/c8606497de1f461a6352456e0e511c2ae498d526 --- - rtc_base/platform_thread.cc | 3 +++ - 1 file changed, 3 insertions(+) + .../linux/wayland/egl_dmabuf.cc | 30 +++++++++++++++++-- + 1 file changed, 28 insertions(+), 2 deletions(-) -diff --git a/rtc_base/platform_thread.cc b/rtc_base/platform_thread.cc -index 71a9f1b224..bcbb784b97 100644 ---- a/rtc_base/platform_thread.cc -+++ b/rtc_base/platform_thread.cc -@@ -50,6 +50,9 @@ bool SetPriority(ThreadPriority priority) { - // TODO(tommi): Switch to the same mechanism as Chromium uses for changing - // thread priorities. - return true; -+#elif defined(WEBRTC_MOZILLA_BUILD) && defined(WEBRTC_LINUX) -+ // Only realtime audio uses realtime scheduling in Firefox. -+ return true; - #else - const int policy = SCHED_FIFO; - const int min_prio = sched_get_priority_min(policy); +diff --git a/modules/desktop_capture/linux/wayland/egl_dmabuf.cc b/modules/desktop_capture/linux/wayland/egl_dmabuf.cc +index b529077c6d..6a019c64b4 100644 +--- a/modules/desktop_capture/linux/wayland/egl_dmabuf.cc ++++ b/modules/desktop_capture/linux/wayland/egl_dmabuf.cc +@@ -13,6 +13,7 @@ + #include + #include + #include ++#include + #include + #include + #include +@@ -217,6 +218,26 @@ static void CloseLibrary(void* library) { + } + } + ++static bool IsWaylandDisplay() { ++ static auto sGdkWaylandDisplayGetType = ++ (GType (*)(void))dlsym(RTLD_DEFAULT, "gdk_wayland_display_get_type"); ++ if (!sGdkWaylandDisplayGetType) { ++ return false; ++ } ++ return (G_TYPE_CHECK_INSTANCE_TYPE ((gdk_display_get_default()), ++ sGdkWaylandDisplayGetType())); ++} ++ ++static bool IsX11Display() { ++ static auto sGdkX11DisplayGetType = ++ (GType (*)(void))dlsym(RTLD_DEFAULT, "gdk_x11_display_get_type"); ++ if (!sGdkX11DisplayGetType) { ++ return false; ++ } ++ return (G_TYPE_CHECK_INSTANCE_TYPE ((gdk_display_get_default()), ++ sGdkX11DisplayGetType())); ++} ++ + static void* g_lib_egl = nullptr; + + RTC_NO_SANITIZE("cfi-icall") +@@ -362,8 +383,13 @@ EglDmaBuf::EglDmaBuf() { + return; + } + +- egl_.display = EglGetPlatformDisplay(EGL_PLATFORM_WAYLAND_KHR, +- (void*)EGL_DEFAULT_DISPLAY, nullptr); ++ if (IsWaylandDisplay()) { ++ egl_.display = EglGetPlatformDisplay(EGL_PLATFORM_WAYLAND_KHR, ++ (void*)EGL_DEFAULT_DISPLAY, nullptr); ++ } else if (IsX11Display()) { ++ egl_.display = EglGetPlatformDisplay(EGL_PLATFORM_X11_KHR, ++ (void*)EGL_DEFAULT_DISPLAY, nullptr); ++ } + + if (egl_.display == EGL_NO_DISPLAY) { + RTC_LOG(LS_ERROR) << "Failed to obtain default EGL display: " -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0075.patch b/third_party/libwebrtc/moz-patch-stack/0075.patch index 12410016307d..c00ba9d6a876 100644 --- a/third_party/libwebrtc/moz-patch-stack/0075.patch +++ b/third_party/libwebrtc/moz-patch-stack/0075.patch @@ -1,139 +1,34 @@ -From: Jan Grulich -Date: Mon, 20 Feb 2023 21:25:00 +0000 -Subject: Bug 1817263 - fix OS picker behavior under Wayland r=ng,jib,stransky +From: Andreas Pehrson +Date: Mon, 27 Feb 2023 16:22:00 +0000 +Subject: Bug 1817024 - (fix-fdcfefa708) In PhysicalSocket avoid a non-trivial + designated initializer. r=mjf,webrtc-reviewers -Recent WebRTC backports and changes that are about to be backported from -upstream to Firefox breaks and will break how we work with PipWire based -desktop capturer. 
Currently when constructing device list, a fallback to -ScreenCapturerX11 is used, as we don't call set_allow_pipewire(), which -wouldn't make a difference anyway. In such case the only thing we need -is a placeholder for a screen that will request OS level prompt. We also -need a way to request both screens and windows in one xdg-desktop-portal -call as recent WebRTC made each type be called separately, therefore the -introduction of GenericCapturer. Lastly we need to make sure when there -is a MediaDevice requesting the OS prompt, that it will be checked as -first. +This fixes a build failure in the base-toolchain job with GCC 7.5.0: + In file included from Unified_cpp_threading_gn0.cpp:38:0: + .../third_party/libwebrtc/rtc_base/physical_socket_server.cc: In member function 'int rtc::PhysicalSocket::DoReadFromSocket(void*, size_t, rtc::SocketAddress*, int64_t*)': + .../third_party/libwebrtc/rtc_base/physical_socket_server.cc:463:51: sorry, unimplemented: non-trivial designated initializers not supported + msghdr msg = {.msg_iov = &iov, .msg_iovlen = 1}; + ^ -In order to use unmodified libwebrtc, Firefox would need to rework the -OS picker to request each type (screens and windows) separately so we -can just use regular ScreenCapturer and WindowCapturer. This should be -done ideally the way Chromium does it, where users can actually see -even the preview of what they picked over xdg-desktop-portal before it -is actually shared with requesting web page and they also have option -to make the request again in case they picked a wrong window or screen. - -Differential Revision: https://phabricator.services.mozilla.com/D169627 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/acd6266642951aacf8915a56777c780cae9e9af3 +Differential Revision: https://phabricator.services.mozilla.com/D171057 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/a3447f709befd84a282ca40f29b7a5ea76d5b68d --- - .../desktop_capture/desktop_capture_types.h | 2 +- - modules/desktop_capture/desktop_capturer.cc | 28 +++++++++++++++++++ - modules/desktop_capture/desktop_capturer.h | 13 +++++++++ - .../linux/wayland/screencast_portal.cc | 2 ++ - 4 files changed, 44 insertions(+), 1 deletion(-) + rtc_base/physical_socket_server.cc | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) -diff --git a/modules/desktop_capture/desktop_capture_types.h b/modules/desktop_capture/desktop_capture_types.h -index 381d1021c4..e777a45f92 100644 ---- a/modules/desktop_capture/desktop_capture_types.h -+++ b/modules/desktop_capture/desktop_capture_types.h -@@ -19,7 +19,7 @@ typedef int pid_t; // matching what used to be in - - namespace webrtc { - --enum class CaptureType { kWindow, kScreen }; -+enum class CaptureType { kWindow, kScreen, kAnyScreenContent }; - - // Type used to identify windows on the desktop. Values are platform-specific: - // - On Windows: HWND cast to intptr_t. 
-diff --git a/modules/desktop_capture/desktop_capturer.cc b/modules/desktop_capture/desktop_capturer.cc -index 4baa93cab9..7df6becb4e 100644 ---- a/modules/desktop_capture/desktop_capturer.cc -+++ b/modules/desktop_capture/desktop_capturer.cc -@@ -26,6 +26,10 @@ - #include "rtc_base/win/windows_version.h" - #endif // defined(RTC_ENABLE_WIN_WGC) - -+#if defined(WEBRTC_USE_PIPEWIRE) || defined(WEBRTC_USE_X11) -+#include "modules/desktop_capture/linux/wayland/base_capturer_pipewire.h" -+#endif -+ - namespace webrtc { - - void LogDesktopCapturerFullscreenDetectorUsage() { -@@ -84,6 +88,30 @@ std::unique_ptr DesktopCapturer::CreateWindowCapturer( - return capturer; - } - -+#if defined(WEBRTC_USE_PIPEWIRE) || defined(WEBRTC_USE_X11) -+// static -+std::unique_ptr DesktopCapturer::CreateGenericCapturer( -+ const DesktopCaptureOptions& options) { -+ std::unique_ptr capturer = CreateRawGenericCapturer(options); -+ if (capturer && options.detect_updated_region()) { -+ capturer.reset(new DesktopCapturerDifferWrapper(std::move(capturer))); -+ } -+ -+ return capturer; -+} -+ -+// static -+std::unique_ptr DesktopCapturer::CreateRawGenericCapturer( -+ const DesktopCaptureOptions& options) { -+ if (options.allow_pipewire() && DesktopCapturer::IsRunningUnderWayland()) { -+ return std::make_unique(options, -+ CaptureType::kAnyScreenContent); -+ } -+ -+ return nullptr; -+} -+#endif // defined(WEBRTC_USE_PIPEWIRE) || defined(WEBRTC_USE_X11) -+ - // static - std::unique_ptr DesktopCapturer::CreateScreenCapturer( - const DesktopCaptureOptions& options) { -diff --git a/modules/desktop_capture/desktop_capturer.h b/modules/desktop_capture/desktop_capturer.h -index 9b667739a8..f4e2861025 100644 ---- a/modules/desktop_capture/desktop_capturer.h -+++ b/modules/desktop_capture/desktop_capturer.h -@@ -179,6 +179,12 @@ class RTC_EXPORT DesktopCapturer { - // The return value if `pos` is out of the scope of the source is undefined. - virtual bool IsOccluded(const DesktopVector& pos); - -+#if defined(WEBRTC_USE_PIPEWIRE) || defined(WEBRTC_USE_X11) -+ // Creates a DesktopCapturer instance which targets to capture windows and screens. -+ static std::unique_ptr CreateGenericCapturer( -+ const DesktopCaptureOptions& options); -+#endif -+ - // Creates a DesktopCapturer instance which targets to capture windows. - static std::unique_ptr CreateWindowCapturer( - const DesktopCaptureOptions& options); -@@ -207,6 +213,13 @@ class RTC_EXPORT DesktopCapturer { - // CroppingWindowCapturer needs to create raw capturers without wrappers, so - // the following two functions are protected. - -+#if defined(WEBRTC_USE_PIPEWIRE) || defined(WEBRTC_USE_X11) -+ // Creates a platform specific DesktopCapturer instance which targets to -+ // capture windows and screens. -+ static std::unique_ptr CreateRawGenericCapturer( -+ const DesktopCaptureOptions& options); -+#endif -+ - // Creates a platform specific DesktopCapturer instance which targets to - // capture windows. 
- static std::unique_ptr CreateRawWindowCapturer( -diff --git a/modules/desktop_capture/linux/wayland/screencast_portal.cc b/modules/desktop_capture/linux/wayland/screencast_portal.cc -index a473802176..61ed84ebb5 100644 ---- a/modules/desktop_capture/linux/wayland/screencast_portal.cc -+++ b/modules/desktop_capture/linux/wayland/screencast_portal.cc -@@ -41,6 +41,8 @@ ScreenCastPortal::CaptureSourceType ScreenCastPortal::ToCaptureSourceType( - return ScreenCastPortal::CaptureSourceType::kScreen; - case CaptureType::kWindow: - return ScreenCastPortal::CaptureSourceType::kWindow; -+ case CaptureType::kAnyScreenContent: -+ return ScreenCastPortal::CaptureSourceType::kAnyScreenContent; - } - } - +diff --git a/rtc_base/physical_socket_server.cc b/rtc_base/physical_socket_server.cc +index 2dfdd9a5df..d0053dd82b 100644 +--- a/rtc_base/physical_socket_server.cc ++++ b/rtc_base/physical_socket_server.cc +@@ -462,7 +462,7 @@ int PhysicalSocket::DoReadFromSocket(void* buffer, + int received = 0; + if (read_scm_timestamp_experiment_) { + iovec iov = {.iov_base = buffer, .iov_len = length}; +- msghdr msg = {.msg_iov = &iov, .msg_iovlen = 1}; ++ msghdr msg = {.msg_name = nullptr, .msg_namelen = 0, .msg_iov = &iov, .msg_iovlen = 1}; + if (out_addr) { + out_addr->Clear(); + msg.msg_name = addr; -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0076.patch b/third_party/libwebrtc/moz-patch-stack/0076.patch index 700b2b45cb35..affbb277e232 100644 --- a/third_party/libwebrtc/moz-patch-stack/0076.patch +++ b/third_party/libwebrtc/moz-patch-stack/0076.patch @@ -1,100 +1,25 @@ -From: Jan Grulich -Date: Mon, 27 Feb 2023 13:57:00 +0000 -Subject: Bug 1819044 - fix build non-pipewire builds - r=webrtc-reviewers,pehrsons +From: Byron Campen +Date: Tue, 4 Apr 2023 16:34:00 -0500 +Subject: Bug 1822194 - (fix-3b51cd328e) - Add missing designated initializer + that gcc is sad about. -We should check only for PipeWire presence when building code specific -to PipeWire. - -Differential Revision: https://phabricator.services.mozilla.com/D171071 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/7a879ad084a6e9768479c73cc5c3f4e9d95a2ab9 - -Also includes: - - Bug 1819044 - fix build non-pipewire builds (attempt #2) r=webrtc-reviewers,pehrsons - - Make the new API available to everyone and just return an empty capturer - in case when building without PipeWire. It will not make any difference - because using X11 based capturers on Wayland is useless anyway so if we - fail for missing PipeWire on Wayland, it will have the same outcome. 
- - Differential Revision: https://phabricator.services.mozilla.com/D171192 - Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/ad247b0aac896d884eba5e40f0ec8a9f50d8b85b +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/108046c7cbb21c6cf19320c0804e9aee1a3eb4bf --- - modules/desktop_capture/desktop_capturer.cc | 7 +++---- - modules/desktop_capture/desktop_capturer.h | 4 ---- - 2 files changed, 3 insertions(+), 8 deletions(-) + modules/audio_processing/audio_processing_impl.cc | 1 + + 1 file changed, 1 insertion(+) -diff --git a/modules/desktop_capture/desktop_capturer.cc b/modules/desktop_capture/desktop_capturer.cc -index 7df6becb4e..1af19a1fd2 100644 ---- a/modules/desktop_capture/desktop_capturer.cc -+++ b/modules/desktop_capture/desktop_capturer.cc -@@ -26,7 +26,7 @@ - #include "rtc_base/win/windows_version.h" - #endif // defined(RTC_ENABLE_WIN_WGC) - --#if defined(WEBRTC_USE_PIPEWIRE) || defined(WEBRTC_USE_X11) -+#if defined(WEBRTC_USE_PIPEWIRE) - #include "modules/desktop_capture/linux/wayland/base_capturer_pipewire.h" - #endif - -@@ -88,7 +88,6 @@ std::unique_ptr DesktopCapturer::CreateWindowCapturer( - return capturer; - } - --#if defined(WEBRTC_USE_PIPEWIRE) || defined(WEBRTC_USE_X11) - // static - std::unique_ptr DesktopCapturer::CreateGenericCapturer( - const DesktopCaptureOptions& options) { -@@ -100,17 +99,17 @@ std::unique_ptr DesktopCapturer::CreateGenericCapturer( - return capturer; - } - --// static - std::unique_ptr DesktopCapturer::CreateRawGenericCapturer( - const DesktopCaptureOptions& options) { -+#if defined(WEBRTC_USE_PIPEWIRE) - if (options.allow_pipewire() && DesktopCapturer::IsRunningUnderWayland()) { - return std::make_unique(options, - CaptureType::kAnyScreenContent); - } -+#endif // defined(WEBRTC_USE_PIPEWIRE) - - return nullptr; - } --#endif // defined(WEBRTC_USE_PIPEWIRE) || defined(WEBRTC_USE_X11) - - // static - std::unique_ptr DesktopCapturer::CreateScreenCapturer( -diff --git a/modules/desktop_capture/desktop_capturer.h b/modules/desktop_capture/desktop_capturer.h -index f4e2861025..64f3187f51 100644 ---- a/modules/desktop_capture/desktop_capturer.h -+++ b/modules/desktop_capture/desktop_capturer.h -@@ -179,11 +179,9 @@ class RTC_EXPORT DesktopCapturer { - // The return value if `pos` is out of the scope of the source is undefined. - virtual bool IsOccluded(const DesktopVector& pos); - --#if defined(WEBRTC_USE_PIPEWIRE) || defined(WEBRTC_USE_X11) - // Creates a DesktopCapturer instance which targets to capture windows and screens. - static std::unique_ptr CreateGenericCapturer( - const DesktopCaptureOptions& options); --#endif - - // Creates a DesktopCapturer instance which targets to capture windows. - static std::unique_ptr CreateWindowCapturer( -@@ -213,12 +211,10 @@ class RTC_EXPORT DesktopCapturer { - // CroppingWindowCapturer needs to create raw capturers without wrappers, so - // the following two functions are protected. - --#if defined(WEBRTC_USE_PIPEWIRE) || defined(WEBRTC_USE_X11) - // Creates a platform specific DesktopCapturer instance which targets to - // capture windows and screens. - static std::unique_ptr CreateRawGenericCapturer( - const DesktopCaptureOptions& options); --#endif - - // Creates a platform specific DesktopCapturer instance which targets to - // capture windows. 
+diff --git a/modules/audio_processing/audio_processing_impl.cc b/modules/audio_processing/audio_processing_impl.cc +index c80cc76a3d..c304453388 100644 +--- a/modules/audio_processing/audio_processing_impl.cc ++++ b/modules/audio_processing/audio_processing_impl.cc +@@ -450,6 +450,7 @@ AudioProcessingImpl::GetGainController2ExperimentParams() { + }, + .adaptive_digital_controller = + { ++ .enabled = false, + .headroom_db = static_cast(headroom_db.Get()), + .max_gain_db = static_cast(max_gain_db.Get()), + .initial_gain_db = -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0077.patch b/third_party/libwebrtc/moz-patch-stack/0077.patch index fe8d43b2a4cb..95d608bbaad3 100644 --- a/third_party/libwebrtc/moz-patch-stack/0077.patch +++ b/third_party/libwebrtc/moz-patch-stack/0077.patch @@ -1,77 +1,27 @@ -From: Jan Grulich -Date: Fri, 10 Mar 2023 09:21:00 +0000 -Subject: Bug 1819035 - get EGL display based on the used platform in the - browser r=webrtc-reviewers,ng +From: Byron Campen +Date: Fri, 7 Apr 2023 20:28:00 +0000 +Subject: Bug 1819048: Remove this bad assertion. r=webrtc-reviewers,jib -Because of a possible misconfiguration or a possible driver issue it -might happen that the browser will use a different driver on X11 and -end up using yet another one for wayland/gbm, which might lead to not -working screen sharing in the better case, but also to a crash in the -other driver (Nvidia). This adds a check for platform the browser runs -on, if it's XWayland or Wayland and based on that query EGL display for -that specific platform, rather than going for the Wayland one only. - -Differential Revision: https://phabricator.services.mozilla.com/D171858 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/c8606497de1f461a6352456e0e511c2ae498d526 +Differential Revision: https://phabricator.services.mozilla.com/D174978 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/5a52e1b0c808edfda82f0abea668699eb68098dc --- - .../linux/wayland/egl_dmabuf.cc | 30 +++++++++++++++++-- - 1 file changed, 28 insertions(+), 2 deletions(-) + video/task_queue_frame_decode_scheduler.cc | 3 ++- + 1 file changed, 2 insertions(+), 1 deletion(-) -diff --git a/modules/desktop_capture/linux/wayland/egl_dmabuf.cc b/modules/desktop_capture/linux/wayland/egl_dmabuf.cc -index b529077c6d..6a019c64b4 100644 ---- a/modules/desktop_capture/linux/wayland/egl_dmabuf.cc -+++ b/modules/desktop_capture/linux/wayland/egl_dmabuf.cc -@@ -13,6 +13,7 @@ - #include - #include - #include -+#include - #include - #include - #include -@@ -217,6 +218,26 @@ static void CloseLibrary(void* library) { - } - } - -+static bool IsWaylandDisplay() { -+ static auto sGdkWaylandDisplayGetType = -+ (GType (*)(void))dlsym(RTLD_DEFAULT, "gdk_wayland_display_get_type"); -+ if (!sGdkWaylandDisplayGetType) { -+ return false; -+ } -+ return (G_TYPE_CHECK_INSTANCE_TYPE ((gdk_display_get_default()), -+ sGdkWaylandDisplayGetType())); -+} -+ -+static bool IsX11Display() { -+ static auto sGdkX11DisplayGetType = -+ (GType (*)(void))dlsym(RTLD_DEFAULT, "gdk_x11_display_get_type"); -+ if (!sGdkX11DisplayGetType) { -+ return false; -+ } -+ return (G_TYPE_CHECK_INSTANCE_TYPE ((gdk_display_get_default()), -+ sGdkX11DisplayGetType())); -+} -+ - static void* g_lib_egl = nullptr; - - RTC_NO_SANITIZE("cfi-icall") -@@ -362,8 +383,13 @@ EglDmaBuf::EglDmaBuf() { - return; - } - -- egl_.display = EglGetPlatformDisplay(EGL_PLATFORM_WAYLAND_KHR, -- (void*)EGL_DEFAULT_DISPLAY, nullptr); -+ if (IsWaylandDisplay()) { -+ egl_.display = 
EglGetPlatformDisplay(EGL_PLATFORM_WAYLAND_KHR, -+ (void*)EGL_DEFAULT_DISPLAY, nullptr); -+ } else if (IsX11Display()) { -+ egl_.display = EglGetPlatformDisplay(EGL_PLATFORM_X11_KHR, -+ (void*)EGL_DEFAULT_DISPLAY, nullptr); -+ } - - if (egl_.display == EGL_NO_DISPLAY) { - RTC_LOG(LS_ERROR) << "Failed to obtain default EGL display: " +diff --git a/video/task_queue_frame_decode_scheduler.cc b/video/task_queue_frame_decode_scheduler.cc +index cd109c2932..6dd7b47f17 100644 +--- a/video/task_queue_frame_decode_scheduler.cc ++++ b/video/task_queue_frame_decode_scheduler.cc +@@ -37,7 +37,8 @@ void TaskQueueFrameDecodeScheduler::ScheduleFrame( + uint32_t rtp, + FrameDecodeTiming::FrameSchedule schedule, + FrameReleaseCallback cb) { +- RTC_DCHECK(!stopped_) << "Can not schedule frames after stopped."; ++ // Mozilla modification, until https://bugs.webrtc.org/14944 is fixed ++ //RTC_DCHECK(!stopped_) << "Can not schedule frames after stopped."; + RTC_DCHECK(!scheduled_rtp_.has_value()) + << "Can not schedule two frames for release at the same time."; + RTC_DCHECK(cb); -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0078.patch b/third_party/libwebrtc/moz-patch-stack/0078.patch index 582db4bdf458..16516dff70f0 100644 --- a/third_party/libwebrtc/moz-patch-stack/0078.patch +++ b/third_party/libwebrtc/moz-patch-stack/0078.patch @@ -1,34 +1,63 @@ From: Andreas Pehrson -Date: Mon, 27 Feb 2023 16:22:00 +0000 -Subject: Bug 1817024 - (fix-fdcfefa708) In PhysicalSocket avoid a non-trivial - designated initializer. r=mjf,webrtc-reviewers +Date: Wed, 10 May 2023 07:06:00 +0000 +Subject: Bug 1810949 - cherry-pick upstream libwebrtc commit 91d5fc2ed6. + r=webrtc-reviewers,mjf -This fixes a build failure in the base-toolchain job with GCC 7.5.0: - In file included from Unified_cpp_threading_gn0.cpp:38:0: - .../third_party/libwebrtc/rtc_base/physical_socket_server.cc: In member function 'int rtc::PhysicalSocket::DoReadFromSocket(void*, size_t, rtc::SocketAddress*, int64_t*)': - .../third_party/libwebrtc/rtc_base/physical_socket_server.cc:463:51: sorry, unimplemented: non-trivial designated initializers not supported - msghdr msg = {.msg_iov = &iov, .msg_iovlen = 1}; - ^ +Upstream commit: https://webrtc.googlesource.com/src/+/91d5fc2ed6ef347d90182868320267d45cf9525b + Support more pixel formats in v4l2 camera backend -Differential Revision: https://phabricator.services.mozilla.com/D171057 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/a3447f709befd84a282ca40f29b7a5ea76d5b68d + These were tested with gstreamer and v4l2loopback, example setup: + $ sudo v4l2loopback-ctl add -n BGRA 10 + $ gst-launch-1.0 videotestsrc pattern=smpte-rp-219 ! \ + video/x-raw,format=BGRA ! 
v4l2sink device=/dev/video10 > /dev/null & + + Then conversion was confirmed with video_loopback: + $ ./video_loopback --capture_device_index=3 --logs 2>&1 | grep -i \ + capture + + Bug: webrtc:14830 + Change-Id: I35c8e453cf7f9a2923935b0ad82477a3144e8c12 + Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/291532 + Commit-Queue: Stefan Holmer + Reviewed-by: Mirko Bonadei + Reviewed-by: Stefan Holmer + Cr-Commit-Position: refs/heads/main@{#39979} + +Differential Revision: https://phabricator.services.mozilla.com/D177232 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/92dc582fdcf3a2fdb3fcdbcd96080d081de8f8d5 --- - rtc_base/physical_socket_server.cc | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) + .../video_capture/linux/device_info_v4l2.cc | 18 ++++++++++++++++++ + 1 file changed, 18 insertions(+) -diff --git a/rtc_base/physical_socket_server.cc b/rtc_base/physical_socket_server.cc -index 2dfdd9a5df..d0053dd82b 100644 ---- a/rtc_base/physical_socket_server.cc -+++ b/rtc_base/physical_socket_server.cc -@@ -462,7 +462,7 @@ int PhysicalSocket::DoReadFromSocket(void* buffer, - int received = 0; - if (read_scm_timestamp_experiment_) { - iovec iov = {.iov_base = buffer, .iov_len = length}; -- msghdr msg = {.msg_iov = &iov, .msg_iovlen = 1}; -+ msghdr msg = {.msg_name = nullptr, .msg_namelen = 0, .msg_iov = &iov, .msg_iovlen = 1}; - if (out_addr) { - out_addr->Clear(); - msg.msg_name = addr; +diff --git a/modules/video_capture/linux/device_info_v4l2.cc b/modules/video_capture/linux/device_info_v4l2.cc +index 04caaea592..abd2886f85 100644 +--- a/modules/video_capture/linux/device_info_v4l2.cc ++++ b/modules/video_capture/linux/device_info_v4l2.cc +@@ -57,6 +57,24 @@ + #define BUF_LEN ( 1024 * ( EVENT_SIZE + 16 ) ) + #endif + ++// These defines are here to support building on kernel 3.16 which some ++// downstream projects, e.g. Firefox, use. ++// TODO(apehrson): Remove them and their undefs when no longer needed. ++#ifndef V4L2_PIX_FMT_ABGR32 ++#define ABGR32_OVERRIDE 1 ++#define V4L2_PIX_FMT_ABGR32 v4l2_fourcc('A', 'R', '2', '4') ++#endif ++ ++#ifndef V4L2_PIX_FMT_ARGB32 ++#define ARGB32_OVERRIDE 1 ++#define V4L2_PIX_FMT_ARGB32 v4l2_fourcc('B', 'A', '2', '4') ++#endif ++ ++#ifndef V4L2_PIX_FMT_RGBA32 ++#define RGBA32_OVERRIDE 1 ++#define V4L2_PIX_FMT_RGBA32 v4l2_fourcc('A', 'B', '2', '4') ++#endif ++ + namespace webrtc { + namespace videocapturemodule { + #ifdef WEBRTC_LINUX -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0079.patch b/third_party/libwebrtc/moz-patch-stack/0079.patch index 4481170312e3..6a9539cc281f 100644 --- a/third_party/libwebrtc/moz-patch-stack/0079.patch +++ b/third_party/libwebrtc/moz-patch-stack/0079.patch @@ -1,25 +1,58 @@ -From: Byron Campen -Date: Tue, 4 Apr 2023 16:34:00 -0500 -Subject: Bug 1822194 - (fix-3b51cd328e) - Add missing designated initializer - that gcc is sad about. 
+From: Nico Grunbaum +Date: Tue, 6 Jun 2023 16:37:00 -0700 +Subject: Bug 1833237 - (fix-f0be3bee1f) remove reference to + portal:pipewire_base;r?pehrsons -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/108046c7cbb21c6cf19320c0804e9aee1a3eb4bf +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/8ff886a4d366b4be35b329d1ef733a6df542067c --- - modules/audio_processing/audio_processing_impl.cc | 1 + - 1 file changed, 1 insertion(+) + modules/video_capture/BUILD.gn | 4 ++++ + modules/video_capture/linux/device_info_pipewire.cc | 3 ++- + modules/video_capture/linux/device_info_pipewire.h | 3 ++- + 3 files changed, 8 insertions(+), 2 deletions(-) -diff --git a/modules/audio_processing/audio_processing_impl.cc b/modules/audio_processing/audio_processing_impl.cc -index c80cc76a3d..c304453388 100644 ---- a/modules/audio_processing/audio_processing_impl.cc -+++ b/modules/audio_processing/audio_processing_impl.cc -@@ -450,6 +450,7 @@ AudioProcessingImpl::GetGainController2ExperimentParams() { - }, - .adaptive_digital_controller = - { -+ .enabled = false, - .headroom_db = static_cast(headroom_db.Get()), - .max_gain_db = static_cast(max_gain_db.Get()), - .initial_gain_db = +diff --git a/modules/video_capture/BUILD.gn b/modules/video_capture/BUILD.gn +index 8f89918359..45a0272eee 100644 +--- a/modules/video_capture/BUILD.gn ++++ b/modules/video_capture/BUILD.gn +@@ -104,6 +104,10 @@ if (!build_with_chromium || is_linux || is_chromeos) { + "../../media:rtc_media_base", + "../portal", + ] ++ if (build_with_mozilla) { ++ configs -= [ "../portal:pipewire_base" ] ++ public_deps = [ "//third_party/pipewire" ] ++ } + } + } + if (is_win) { +diff --git a/modules/video_capture/linux/device_info_pipewire.cc b/modules/video_capture/linux/device_info_pipewire.cc +index 1dee78f5ee..2cb6161514 100644 +--- a/modules/video_capture/linux/device_info_pipewire.cc ++++ b/modules/video_capture/linux/device_info_pipewire.cc +@@ -47,7 +47,8 @@ int32_t DeviceInfoPipeWire::GetDeviceName(uint32_t deviceNumber, + char* deviceUniqueIdUTF8, + uint32_t deviceUniqueIdUTF8Length, + char* productUniqueIdUTF8, +- uint32_t productUniqueIdUTF8Length) { ++ uint32_t productUniqueIdUTF8Length, ++ pid_t* pid) { + if (deviceNumber >= NumberOfDevices()) + return -1; + +diff --git a/modules/video_capture/linux/device_info_pipewire.h b/modules/video_capture/linux/device_info_pipewire.h +index a006c85d1b..724717be5e 100644 +--- a/modules/video_capture/linux/device_info_pipewire.h ++++ b/modules/video_capture/linux/device_info_pipewire.h +@@ -29,7 +29,8 @@ class DeviceInfoPipeWire : public DeviceInfoImpl { + char* deviceUniqueIdUTF8, + uint32_t deviceUniqueIdUTF8Length, + char* productUniqueIdUTF8 = nullptr, +- uint32_t productUniqueIdUTF8Length = 0) override; ++ uint32_t productUniqueIdUTF8Length = 0, ++ pid_t* pid = 0) override; + /* + * Fills the membervariable _captureCapabilities with capabilites for the + * given device name. -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0080.patch b/third_party/libwebrtc/moz-patch-stack/0080.patch index c83b628ee922..4948dbeaad9b 100644 --- a/third_party/libwebrtc/moz-patch-stack/0080.patch +++ b/third_party/libwebrtc/moz-patch-stack/0080.patch @@ -1,27 +1,32 @@ -From: Byron Campen -Date: Fri, 7 Apr 2023 20:28:00 +0000 -Subject: Bug 1819048: Remove this bad assertion. 
r=webrtc-reviewers,jib +From: Jan-Ivar Bruaroey +Date: Wed, 28 Jun 2023 20:45:00 -0400 +Subject: Bug 1839451 - (fix-0f43da2248) Keep mozilla's + RTCPReceiver::RemoteRTCPSenderInfo function working. -Differential Revision: https://phabricator.services.mozilla.com/D174978 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/5a52e1b0c808edfda82f0abea668699eb68098dc +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/154c9cdb386d0f50c5e1549270e1af6ab4969602 --- - video/task_queue_frame_decode_scheduler.cc | 3 ++- - 1 file changed, 2 insertions(+), 1 deletion(-) + modules/rtp_rtcp/source/rtcp_receiver.cc | 8 ++++---- + 1 file changed, 4 insertions(+), 4 deletions(-) -diff --git a/video/task_queue_frame_decode_scheduler.cc b/video/task_queue_frame_decode_scheduler.cc -index cd109c2932..6dd7b47f17 100644 ---- a/video/task_queue_frame_decode_scheduler.cc -+++ b/video/task_queue_frame_decode_scheduler.cc -@@ -37,7 +37,8 @@ void TaskQueueFrameDecodeScheduler::ScheduleFrame( - uint32_t rtp, - FrameDecodeTiming::FrameSchedule schedule, - FrameReleaseCallback cb) { -- RTC_DCHECK(!stopped_) << "Can not schedule frames after stopped."; -+ // Mozilla modification, until https://bugs.webrtc.org/14944 is fixed -+ //RTC_DCHECK(!stopped_) << "Can not schedule frames after stopped."; - RTC_DCHECK(!scheduled_rtp_.has_value()) - << "Can not schedule two frames for release at the same time."; - RTC_DCHECK(cb); +diff --git a/modules/rtp_rtcp/source/rtcp_receiver.cc b/modules/rtp_rtcp/source/rtcp_receiver.cc +index 94de316421..bda6ad9a52 100644 +--- a/modules/rtp_rtcp/source/rtcp_receiver.cc ++++ b/modules/rtp_rtcp/source/rtcp_receiver.cc +@@ -368,10 +368,10 @@ void RTCPReceiver::RemoteRTCPSenderInfo(uint32_t* packet_count, + int64_t* ntp_timestamp_ms, + int64_t* remote_ntp_timestamp_ms) const { + MutexLock lock(&rtcp_receiver_lock_); +- *packet_count = remote_sender_packet_count_; +- *octet_count = remote_sender_octet_count_; +- *ntp_timestamp_ms = last_received_sr_ntp_.ToMs(); +- *remote_ntp_timestamp_ms = remote_sender_ntp_time_.ToMs(); ++ *packet_count = remote_sender_.packets_sent; ++ *octet_count = remote_sender_.bytes_sent; ++ *ntp_timestamp_ms = remote_sender_.last_arrival_timestamp.ToMs(); ++ *remote_ntp_timestamp_ms = remote_sender_.last_remote_timestamp.ToMs(); + } + + std::vector RTCPReceiver::GetLatestReportBlockData() const { -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0081.patch b/third_party/libwebrtc/moz-patch-stack/0081.patch index 8f859fc813f7..c58972069d69 100644 --- a/third_party/libwebrtc/moz-patch-stack/0081.patch +++ b/third_party/libwebrtc/moz-patch-stack/0081.patch @@ -1,63 +1,36 @@ -From: Andreas Pehrson -Date: Wed, 10 May 2023 07:06:00 +0000 -Subject: Bug 1810949 - cherry-pick upstream libwebrtc commit 91d5fc2ed6. - r=webrtc-reviewers,mjf +From: Nico Grunbaum +Date: Thu, 22 Jun 2023 16:23:00 +0000 +Subject: Bug 1837918 - libwebrtc update broke the build on + OpenBSD;r=mjf,webrtc-reviewers -Upstream commit: https://webrtc.googlesource.com/src/+/91d5fc2ed6ef347d90182868320267d45cf9525b - Support more pixel formats in v4l2 camera backend - - These were tested with gstreamer and v4l2loopback, example setup: - $ sudo v4l2loopback-ctl add -n BGRA 10 - $ gst-launch-1.0 videotestsrc pattern=smpte-rp-219 ! \ - video/x-raw,format=BGRA ! 
v4l2sink device=/dev/video10 > /dev/null & - - Then conversion was confirmed with video_loopback: - $ ./video_loopback --capture_device_index=3 --logs 2>&1 | grep -i \ - capture - - Bug: webrtc:14830 - Change-Id: I35c8e453cf7f9a2923935b0ad82477a3144e8c12 - Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/291532 - Commit-Queue: Stefan Holmer - Reviewed-by: Mirko Bonadei - Reviewed-by: Stefan Holmer - Cr-Commit-Position: refs/heads/main@{#39979} - -Differential Revision: https://phabricator.services.mozilla.com/D177232 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/92dc582fdcf3a2fdb3fcdbcd96080d081de8f8d5 +Differential Revision: https://phabricator.services.mozilla.com/D181791 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/2a6a838b7021bb285f9485c2ceda6ba2543e0d6f --- - .../video_capture/linux/device_info_v4l2.cc | 18 ++++++++++++++++++ - 1 file changed, 18 insertions(+) + modules/video_capture/video_capture_options.h | 4 ++-- + 1 file changed, 2 insertions(+), 2 deletions(-) -diff --git a/modules/video_capture/linux/device_info_v4l2.cc b/modules/video_capture/linux/device_info_v4l2.cc -index 04caaea592..abd2886f85 100644 ---- a/modules/video_capture/linux/device_info_v4l2.cc -+++ b/modules/video_capture/linux/device_info_v4l2.cc -@@ -57,6 +57,24 @@ - #define BUF_LEN ( 1024 * ( EVENT_SIZE + 16 ) ) +diff --git a/modules/video_capture/video_capture_options.h b/modules/video_capture/video_capture_options.h +index 6f72f7927e..37965305d9 100644 +--- a/modules/video_capture/video_capture_options.h ++++ b/modules/video_capture/video_capture_options.h +@@ -55,7 +55,7 @@ class RTC_EXPORT VideoCaptureOptions { + + void Init(Callback* callback); + +-#if defined(WEBRTC_LINUX) ++#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) + bool allow_v4l2() const { return allow_v4l2_; } + void set_allow_v4l2(bool allow) { allow_v4l2_ = allow; } + #endif +@@ -68,7 +68,7 @@ class RTC_EXPORT VideoCaptureOptions { #endif -+// These defines are here to support building on kernel 3.16 which some -+// downstream projects, e.g. Firefox, use. -+// TODO(apehrson): Remove them and their undefs when no longer needed. -+#ifndef V4L2_PIX_FMT_ABGR32 -+#define ABGR32_OVERRIDE 1 -+#define V4L2_PIX_FMT_ABGR32 v4l2_fourcc('A', 'R', '2', '4') -+#endif -+ -+#ifndef V4L2_PIX_FMT_ARGB32 -+#define ARGB32_OVERRIDE 1 -+#define V4L2_PIX_FMT_ARGB32 v4l2_fourcc('B', 'A', '2', '4') -+#endif -+ -+#ifndef V4L2_PIX_FMT_RGBA32 -+#define RGBA32_OVERRIDE 1 -+#define V4L2_PIX_FMT_RGBA32 v4l2_fourcc('A', 'B', '2', '4') -+#endif -+ - namespace webrtc { - namespace videocapturemodule { - #ifdef WEBRTC_LINUX + private: +-#if defined(WEBRTC_LINUX) ++#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) + bool allow_v4l2_ = false; + #endif + #if defined(WEBRTC_USE_PIPEWIRE) -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0082.patch b/third_party/libwebrtc/moz-patch-stack/0082.patch index 8b377c3ca443..b246b5f662c6 100644 --- a/third_party/libwebrtc/moz-patch-stack/0082.patch +++ b/third_party/libwebrtc/moz-patch-stack/0082.patch @@ -1,66 +1,27 @@ -From: Andreas Pehrson -Date: Tue, 23 May 2023 06:14:00 +0000 -Subject: Bug 1809672 - Refactor TabCapturerWebrtc creation. - r=ng,webrtc-reviewers +From: Michael Froman +Date: Wed, 5 Jul 2023 19:15:00 +0000 +Subject: Bug 1841864 - upstream commit 4baea5b07f should properly check size + of encoder_config_.simulcast_layers. r=jib -This restores libwebrtc's DesktopCapturer to their upstream state as far as -the tab capturer is concerned. 
- -Differential Revision: https://phabricator.services.mozilla.com/D174290 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/3778b2a0e3f93a2077303b91739cdd2eeb590726 +Differential Revision: https://phabricator.services.mozilla.com/D182813 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/a7179d8d75313b6c9c76a496e10d102da019ff4f --- - modules/desktop_capture/desktop_capturer.cc | 11 ----------- - modules/desktop_capture/desktop_capturer.h | 8 -------- - 2 files changed, 19 deletions(-) + video/video_stream_encoder.cc | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) -diff --git a/modules/desktop_capture/desktop_capturer.cc b/modules/desktop_capture/desktop_capturer.cc -index 1af19a1fd2..7f601821fc 100644 ---- a/modules/desktop_capture/desktop_capturer.cc -+++ b/modules/desktop_capture/desktop_capturer.cc -@@ -128,17 +128,6 @@ std::unique_ptr DesktopCapturer::CreateScreenCapturer( - return capturer; - } +diff --git a/video/video_stream_encoder.cc b/video/video_stream_encoder.cc +index 1e2409372a..0782a5f5f2 100644 +--- a/video/video_stream_encoder.cc ++++ b/video/video_stream_encoder.cc +@@ -1404,7 +1404,7 @@ void VideoStreamEncoder::ReconfigureEncoder() { --// static --std::unique_ptr DesktopCapturer::CreateTabCapturer( -- const DesktopCaptureOptions& options) { -- std::unique_ptr capturer = CreateRawTabCapturer(options); -- if (capturer && options.detect_updated_region()) { -- capturer.reset(new DesktopCapturerDifferWrapper(std::move(capturer))); -- } -- -- return capturer; --} -- - #if defined(WEBRTC_USE_PIPEWIRE) || defined(WEBRTC_USE_X11) - bool DesktopCapturer::IsRunningUnderWayland() { - const char* xdg_session_type = getenv("XDG_SESSION_TYPE"); -diff --git a/modules/desktop_capture/desktop_capturer.h b/modules/desktop_capture/desktop_capturer.h -index 64f3187f51..5efd2efc0f 100644 ---- a/modules/desktop_capture/desktop_capturer.h -+++ b/modules/desktop_capture/desktop_capturer.h -@@ -191,10 +191,6 @@ class RTC_EXPORT DesktopCapturer { - static std::unique_ptr CreateScreenCapturer( - const DesktopCaptureOptions& options); - -- // Creates a DesktopCapturer instance which targets to capture tab. -- static std::unique_ptr CreateTabCapturer( -- const DesktopCaptureOptions& options); -- - #if defined(WEBRTC_USE_PIPEWIRE) || defined(WEBRTC_USE_X11) - static bool IsRunningUnderWayland(); - -@@ -225,10 +221,6 @@ class RTC_EXPORT DesktopCapturer { - // capture screens. 
- static std::unique_ptr CreateRawScreenCapturer( - const DesktopCaptureOptions& options); -- -- // Creates a DesktopCapturer instance which targets to capture tabs -- static std::unique_ptr CreateRawTabCapturer( -- const DesktopCaptureOptions& options); - }; - - } // namespace webrtc + bool is_svc = false; + bool single_stream_or_non_first_inactive = true; +- for (size_t i = 1; i < encoder_config_.number_of_streams; ++i) { ++ for (size_t i = 1; i < encoder_config_.simulcast_layers.size(); ++i) { + if (encoder_config_.simulcast_layers[i].active) { + single_stream_or_non_first_inactive = false; + break; -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0083.patch b/third_party/libwebrtc/moz-patch-stack/0083.patch index 31b31993de6f..884a763aab6d 100644 --- a/third_party/libwebrtc/moz-patch-stack/0083.patch +++ b/third_party/libwebrtc/moz-patch-stack/0083.patch @@ -1,58 +1,27 @@ -From: Nico Grunbaum -Date: Tue, 6 Jun 2023 16:37:00 -0700 -Subject: Bug 1833237 - (fix-f0be3bee1f) remove reference to - portal:pipewire_base;r?pehrsons +From: Mike Hommey +Date: Fri, 7 Jul 2023 00:58:00 +0000 +Subject: Bug 1841577 - Don't set WEBRTC_ENABLE_AVX2 on platforms that don't + have AVX2. r=mjf,webrtc-reviewers -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/8ff886a4d366b4be35b329d1ef733a6df542067c +Differential Revision: https://phabricator.services.mozilla.com/D182695 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/46fb51c90709be64c35946a8cf69195121441024 --- - modules/video_capture/BUILD.gn | 4 ++++ - modules/video_capture/linux/device_info_pipewire.cc | 3 ++- - modules/video_capture/linux/device_info_pipewire.h | 3 ++- - 3 files changed, 8 insertions(+), 2 deletions(-) + webrtc.gni | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) -diff --git a/modules/video_capture/BUILD.gn b/modules/video_capture/BUILD.gn -index 8f89918359..45a0272eee 100644 ---- a/modules/video_capture/BUILD.gn -+++ b/modules/video_capture/BUILD.gn -@@ -104,6 +104,10 @@ if (!build_with_chromium || is_linux || is_chromeos) { - "../../media:rtc_media_base", - "../portal", - ] -+ if (build_with_mozilla) { -+ configs -= [ "../portal:pipewire_base" ] -+ public_deps = [ "//third_party/pipewire" ] -+ } - } - } - if (is_win) { -diff --git a/modules/video_capture/linux/device_info_pipewire.cc b/modules/video_capture/linux/device_info_pipewire.cc -index 1dee78f5ee..2cb6161514 100644 ---- a/modules/video_capture/linux/device_info_pipewire.cc -+++ b/modules/video_capture/linux/device_info_pipewire.cc -@@ -47,7 +47,8 @@ int32_t DeviceInfoPipeWire::GetDeviceName(uint32_t deviceNumber, - char* deviceUniqueIdUTF8, - uint32_t deviceUniqueIdUTF8Length, - char* productUniqueIdUTF8, -- uint32_t productUniqueIdUTF8Length) { -+ uint32_t productUniqueIdUTF8Length, -+ pid_t* pid) { - if (deviceNumber >= NumberOfDevices()) - return -1; +diff --git a/webrtc.gni b/webrtc.gni +index 11f76de10b..0a7b56a303 100644 +--- a/webrtc.gni ++++ b/webrtc.gni +@@ -298,7 +298,7 @@ declare_args() { -diff --git a/modules/video_capture/linux/device_info_pipewire.h b/modules/video_capture/linux/device_info_pipewire.h -index a006c85d1b..724717be5e 100644 ---- a/modules/video_capture/linux/device_info_pipewire.h -+++ b/modules/video_capture/linux/device_info_pipewire.h -@@ -29,7 +29,8 @@ class DeviceInfoPipeWire : public DeviceInfoImpl { - char* deviceUniqueIdUTF8, - uint32_t deviceUniqueIdUTF8Length, - char* productUniqueIdUTF8 = nullptr, -- uint32_t productUniqueIdUTF8Length = 0) override; -+ uint32_t 
productUniqueIdUTF8Length = 0, -+ pid_t* pid = 0) override; - /* - * Fills the membervariable _captureCapabilities with capabilites for the - * given device name. + # Set this to true to enable the avx2 support in webrtc. + # TODO: Make sure that AVX2 works also for non-clang compilers. +- if (is_clang == true) { ++ if (is_clang == true && (target_cpu == "x86" || target_cpu == "x64")) { + rtc_enable_avx2 = true + } else { + rtc_enable_avx2 = false -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0084.patch b/third_party/libwebrtc/moz-patch-stack/0084.patch index 75e0c20620d2..240063940e0c 100644 --- a/third_party/libwebrtc/moz-patch-stack/0084.patch +++ b/third_party/libwebrtc/moz-patch-stack/0084.patch @@ -1,32 +1,29 @@ -From: Jan-Ivar Bruaroey -Date: Wed, 28 Jun 2023 20:45:00 -0400 -Subject: Bug 1839451 - (fix-0f43da2248) Keep mozilla's - RTCPReceiver::RemoteRTCPSenderInfo function working. +From: Byron Campen +Date: Thu, 20 Jul 2023 14:24:00 +0000 +Subject: Bug 1838080: Remove this duplicate init (that's also on the wrong + thread). r=pehrsons,webrtc-reviewers -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/154c9cdb386d0f50c5e1549270e1af6ab4969602 +This was causing assertions. + +Differential Revision: https://phabricator.services.mozilla.com/D179731 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/6ac6592a04a839a6152d5ad5f0778f63dbbd6b1b --- - modules/rtp_rtcp/source/rtcp_receiver.cc | 8 ++++---- - 1 file changed, 4 insertions(+), 4 deletions(-) + audio/channel_send.cc | 2 -- + 1 file changed, 2 deletions(-) -diff --git a/modules/rtp_rtcp/source/rtcp_receiver.cc b/modules/rtp_rtcp/source/rtcp_receiver.cc -index eb7a7f8390..460fbdf5a9 100644 ---- a/modules/rtp_rtcp/source/rtcp_receiver.cc -+++ b/modules/rtp_rtcp/source/rtcp_receiver.cc -@@ -367,10 +367,10 @@ void RTCPReceiver::RemoteRTCPSenderInfo(uint32_t* packet_count, - int64_t* ntp_timestamp_ms, - int64_t* remote_ntp_timestamp_ms) const { - MutexLock lock(&rtcp_receiver_lock_); -- *packet_count = remote_sender_packet_count_; -- *octet_count = remote_sender_octet_count_; -- *ntp_timestamp_ms = last_received_sr_ntp_.ToMs(); -- *remote_ntp_timestamp_ms = remote_sender_ntp_time_.ToMs(); -+ *packet_count = remote_sender_.packets_sent; -+ *octet_count = remote_sender_.bytes_sent; -+ *ntp_timestamp_ms = remote_sender_.last_arrival_timestamp.ToMs(); -+ *remote_ntp_timestamp_ms = remote_sender_.last_remote_timestamp.ToMs(); +diff --git a/audio/channel_send.cc b/audio/channel_send.cc +index 5ee97f290e..f784b8eebc 100644 +--- a/audio/channel_send.cc ++++ b/audio/channel_send.cc +@@ -457,8 +457,6 @@ ChannelSend::ChannelSend( + + int error = audio_coding_->RegisterTransportCallback(this); + RTC_DCHECK_EQ(0, error); +- if (frame_transformer) +- InitFrameTransformerDelegate(std::move(frame_transformer)); } - std::vector RTCPReceiver::GetLatestReportBlockData() const { + ChannelSend::~ChannelSend() { -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0085.patch b/third_party/libwebrtc/moz-patch-stack/0085.patch index 55ea270408e5..f068906aa7f5 100644 --- a/third_party/libwebrtc/moz-patch-stack/0085.patch +++ b/third_party/libwebrtc/moz-patch-stack/0085.patch @@ -1,31 +1,42 @@ -From: Jan Grulich -Date: Tue, 4 Jul 2023 08:34:00 +0000 -Subject: Bug 1839829 - WebRTC backport: PipeWire video capture - initialize - pw_stream raw pointer member r=webrtc-reviewers,ng +From: Byron Campen +Date: Thu, 20 Jul 2023 14:24:00 +0000 +Subject: Bug 1838080: Work around a race in + 
ChannelSendFrameTransformerDelegate. r=pehrsons,webrtc-reviewers -This is a simple backport of an WebRTC upstream change. +This variable can be null when a ChannelSendFrameTransformerDelegate is in use, +because that does an async dispatch to the encoder queue in the handling for +transformed frames. If this is unset while that dispatch is in flight, we +nullptr crash. -Upstream commit: e21745a78b430ed4f2119b6342acbaa30a52b406 - -Differential Revision: https://phabricator.services.mozilla.com/D182671 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/2174ddbbb4733a8f9b9cef34d2e6c3a1ceb244d4 +Differential Revision: https://phabricator.services.mozilla.com/D180735 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/56555ecee7f36ae73abff1cbbd06807c2b65fc19 --- - modules/video_capture/linux/video_capture_pipewire.h | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) + audio/channel_send.cc | 8 ++++++-- + 1 file changed, 6 insertions(+), 2 deletions(-) -diff --git a/modules/video_capture/linux/video_capture_pipewire.h b/modules/video_capture/linux/video_capture_pipewire.h -index fe39f1dd92..316fb2449d 100644 ---- a/modules/video_capture/linux/video_capture_pipewire.h -+++ b/modules/video_capture/linux/video_capture_pipewire.h -@@ -50,7 +50,7 @@ class VideoCaptureModulePipeWire : public VideoCaptureImpl { - RTC_GUARDED_BY(capture_checker_); - bool started_ RTC_GUARDED_BY(api_lock_); +diff --git a/audio/channel_send.cc b/audio/channel_send.cc +index f784b8eebc..528ed6c616 100644 +--- a/audio/channel_send.cc ++++ b/audio/channel_send.cc +@@ -281,12 +281,16 @@ class RtpPacketSenderProxy : public RtpPacketSender { + void EnqueuePackets( + std::vector> packets) override { + MutexLock lock(&mutex_); +- rtp_packet_pacer_->EnqueuePackets(std::move(packets)); ++ if (rtp_packet_pacer_) { ++ rtp_packet_pacer_->EnqueuePackets(std::move(packets)); ++ } + } -- struct pw_stream* stream_ RTC_GUARDED_BY(capture_checker_); -+ struct pw_stream* stream_ RTC_GUARDED_BY(capture_checker_) = nullptr; - struct spa_hook stream_listener_ RTC_GUARDED_BY(capture_checker_); - }; - } // namespace videocapturemodule + void RemovePacketsForSsrc(uint32_t ssrc) override { + MutexLock lock(&mutex_); +- rtp_packet_pacer_->RemovePacketsForSsrc(ssrc); ++ if (rtp_packet_pacer_) { ++ rtp_packet_pacer_->RemovePacketsForSsrc(ssrc); ++ } + } + + private: -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0086.patch b/third_party/libwebrtc/moz-patch-stack/0086.patch index e476574a44af..0da10045d906 100644 --- a/third_party/libwebrtc/moz-patch-stack/0086.patch +++ b/third_party/libwebrtc/moz-patch-stack/0086.patch @@ -1,36 +1,69 @@ -From: Nico Grunbaum -Date: Thu, 22 Jun 2023 16:23:00 +0000 -Subject: Bug 1837918 - libwebrtc update broke the build on - OpenBSD;r=mjf,webrtc-reviewers +From: Byron Campen +Date: Thu, 20 Jul 2023 14:24:00 +0000 +Subject: Bug 1838080: Use the current TaskQueue, instead of the current + thread, to init this. r=pehrsons,webrtc-reviewers -Differential Revision: https://phabricator.services.mozilla.com/D181791 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/2a6a838b7021bb285f9485c2ceda6ba2543e0d6f +There are situations where the current thread is not set, but the current +TaskQueue is (but not vice versa). 
+ +Differential Revision: https://phabricator.services.mozilla.com/D180736 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/62e71a2f745c4b98d5ee7ce9e6386aa1b657be9b --- - modules/video_capture/video_capture_options.h | 4 ++-- - 1 file changed, 2 insertions(+), 2 deletions(-) + .../rtp_video_stream_receiver_frame_transformer_delegate.cc | 3 +-- + .../rtp_video_stream_receiver_frame_transformer_delegate.h | 5 ++--- + video/rtp_video_stream_receiver2.cc | 2 +- + 3 files changed, 4 insertions(+), 6 deletions(-) -diff --git a/modules/video_capture/video_capture_options.h b/modules/video_capture/video_capture_options.h -index 6f72f7927e..37965305d9 100644 ---- a/modules/video_capture/video_capture_options.h -+++ b/modules/video_capture/video_capture_options.h -@@ -55,7 +55,7 @@ class RTC_EXPORT VideoCaptureOptions { +diff --git a/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc b/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc +index 5db5664147..5dd316e206 100644 +--- a/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc ++++ b/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc +@@ -83,8 +83,7 @@ RtpVideoStreamReceiverFrameTransformerDelegate:: + RtpVideoFrameReceiver* receiver, + Clock* clock, + rtc::scoped_refptr frame_transformer, +- rtc::Thread* network_thread, +- uint32_t ssrc) ++ TaskQueueBase* network_thread, uint32_t ssrc) + : receiver_(receiver), + frame_transformer_(std::move(frame_transformer)), + network_thread_(network_thread), +diff --git a/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.h b/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.h +index 62a42fdddf..20f9a5caa9 100644 +--- a/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.h ++++ b/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.h +@@ -41,8 +41,7 @@ class RtpVideoStreamReceiverFrameTransformerDelegate + RtpVideoFrameReceiver* receiver, + Clock* clock, + rtc::scoped_refptr frame_transformer, +- rtc::Thread* network_thread, +- uint32_t ssrc); ++ TaskQueueBase* network_thread, uint32_t ssrc); - void Init(Callback* callback); - --#if defined(WEBRTC_LINUX) -+#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) - bool allow_v4l2() const { return allow_v4l2_; } - void set_allow_v4l2(bool allow) { allow_v4l2_ = allow; } - #endif -@@ -68,7 +68,7 @@ class RTC_EXPORT VideoCaptureOptions { - #endif - - private: --#if defined(WEBRTC_LINUX) -+#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) - bool allow_v4l2_ = false; - #endif - #if defined(WEBRTC_USE_PIPEWIRE) + void Init(); + void Reset(); +@@ -67,7 +66,7 @@ class RtpVideoStreamReceiverFrameTransformerDelegate + RtpVideoFrameReceiver* receiver_ RTC_GUARDED_BY(network_sequence_checker_); + rtc::scoped_refptr frame_transformer_ + RTC_GUARDED_BY(network_sequence_checker_); +- rtc::Thread* const network_thread_; ++ TaskQueueBase* const network_thread_; + const uint32_t ssrc_; + Clock* const clock_; + }; +diff --git a/video/rtp_video_stream_receiver2.cc b/video/rtp_video_stream_receiver2.cc +index be8ee88c09..faf5b3f45e 100644 +--- a/video/rtp_video_stream_receiver2.cc ++++ b/video/rtp_video_stream_receiver2.cc +@@ -341,7 +341,7 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( + if (frame_transformer) { + frame_transformer_delegate_ = + rtc::make_ref_counted( +- this, clock_, std::move(frame_transformer), rtc::Thread::Current(), ++ this, 
clock_, std::move(frame_transformer), TaskQueueBase::Current(), + config_.rtp.remote_ssrc); + frame_transformer_delegate_->Init(); + } -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0087.patch b/third_party/libwebrtc/moz-patch-stack/0087.patch index bb2a4e9d3315..24099d58f39c 100644 --- a/third_party/libwebrtc/moz-patch-stack/0087.patch +++ b/third_party/libwebrtc/moz-patch-stack/0087.patch @@ -1,27 +1,170 @@ -From: Michael Froman -Date: Wed, 5 Jul 2023 19:15:00 +0000 -Subject: Bug 1841864 - upstream commit 4baea5b07f should properly check size - of encoder_config_.simulcast_layers. r=jib +From: Michael Froman +Date: Thu, 27 Jul 2023 12:42:44 -0500 +Subject: Bug 1838080: Store the rid in TransformableVideoSenderFrame. + r=ng,webrtc-reviewers -Differential Revision: https://phabricator.services.mozilla.com/D182813 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/a7179d8d75313b6c9c76a496e10d102da019ff4f +This is necessary to reliably detect what rid a given keyframe is for, for the +purposes of resolving promises from RTCRtpScriptTransformer.generateKeyFrame. + +Differential Revision: https://phabricator.services.mozilla.com/D180737 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/2f1a0ba74bf71cfa0bc4e77714b8a5276a70cc36 --- - video/video_stream_encoder.cc | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) + api/frame_transformer_interface.h | 1 + + modules/rtp_rtcp/source/rtp_sender.h | 4 ++++ + modules/rtp_rtcp/source/rtp_sender_video.cc | 1 + + ...tp_sender_video_frame_transformer_delegate.cc | 16 +++++++++++----- + ...rtp_sender_video_frame_transformer_delegate.h | 2 ++ + ...stream_receiver_frame_transformer_delegate.cc | 5 +++++ + 6 files changed, 24 insertions(+), 5 deletions(-) -diff --git a/video/video_stream_encoder.cc b/video/video_stream_encoder.cc -index c3d855c1c6..eee99f41de 100644 ---- a/video/video_stream_encoder.cc -+++ b/video/video_stream_encoder.cc -@@ -1395,7 +1395,7 @@ void VideoStreamEncoder::ReconfigureEncoder() { +diff --git a/api/frame_transformer_interface.h b/api/frame_transformer_interface.h +index 8effdcf26d..5ad94a06f5 100644 +--- a/api/frame_transformer_interface.h ++++ b/api/frame_transformer_interface.h +@@ -59,6 +59,7 @@ class TransformableVideoFrameInterface : public TransformableFrameInterface { + public: + virtual ~TransformableVideoFrameInterface() = default; + virtual bool IsKeyFrame() const = 0; ++ virtual const std::string& GetRid() const = 0; - bool is_svc = false; - bool single_stream_or_non_first_inactive = true; -- for (size_t i = 1; i < encoder_config_.number_of_streams; ++i) { -+ for (size_t i = 1; i < encoder_config_.simulcast_layers.size(); ++i) { - if (encoder_config_.simulcast_layers[i].active) { - single_stream_or_non_first_inactive = false; - break; + virtual VideoFrameMetadata Metadata() const = 0; + +diff --git a/modules/rtp_rtcp/source/rtp_sender.h b/modules/rtp_rtcp/source/rtp_sender.h +index 633a69b34f..875d289a08 100644 +--- a/modules/rtp_rtcp/source/rtp_sender.h ++++ b/modules/rtp_rtcp/source/rtp_sender.h +@@ -142,6 +142,10 @@ class RTPSender { + + uint32_t SSRC() const RTC_LOCKS_EXCLUDED(send_mutex_) { return ssrc_; } + ++ const std::string& Rid() const RTC_LOCKS_EXCLUDED(send_mutex_) { ++ return rid_; ++ } ++ + absl::optional FlexfecSsrc() const RTC_LOCKS_EXCLUDED(send_mutex_) { + return flexfec_ssrc_; + } +diff --git a/modules/rtp_rtcp/source/rtp_sender_video.cc b/modules/rtp_rtcp/source/rtp_sender_video.cc +index f29c5ead0f..6542235e39 100644 +--- 
a/modules/rtp_rtcp/source/rtp_sender_video.cc ++++ b/modules/rtp_rtcp/source/rtp_sender_video.cc +@@ -159,6 +159,7 @@ RTPSenderVideo::RTPSenderVideo(const Config& config) + config.frame_transformer, + rtp_sender_->SSRC(), + rtp_sender_->Csrcs(), ++ rtp_sender_->Rid(), + config.task_queue_factory) + : nullptr) { + if (frame_transformer_delegate_) +diff --git a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc +index 140d6419d4..8d872a80e1 100644 +--- a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc ++++ b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc +@@ -30,7 +30,8 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface { + uint32_t rtp_timestamp, + TimeDelta expected_retransmission_time, + uint32_t ssrc, +- std::vector csrcs) ++ std::vector csrcs, ++ const std::string& rid) + : encoded_data_(encoded_image.GetEncodedData()), + pre_transform_payload_size_(encoded_image.size()), + header_(video_header), +@@ -42,7 +43,8 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface { + capture_time_identifier_(encoded_image.CaptureTimeIdentifier()), + expected_retransmission_time_(expected_retransmission_time), + ssrc_(ssrc), +- csrcs_(csrcs) { ++ csrcs_(csrcs), ++ rid_(rid) { + RTC_DCHECK_GE(payload_type_, 0); + RTC_DCHECK_LE(payload_type_, 127); + } +@@ -98,6 +100,8 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface { + + Direction GetDirection() const override { return Direction::kSender; } + ++ const std::string& GetRid() const override { return rid_; } ++ + private: + rtc::scoped_refptr encoded_data_; + const size_t pre_transform_payload_size_; +@@ -112,6 +116,7 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface { + + uint32_t ssrc_; + std::vector csrcs_; ++ const std::string rid_; + }; + } // namespace + +@@ -120,11 +125,12 @@ RTPSenderVideoFrameTransformerDelegate::RTPSenderVideoFrameTransformerDelegate( + rtc::scoped_refptr frame_transformer, + uint32_t ssrc, + std::vector csrcs, ++ const std::string& rid, + TaskQueueFactory* task_queue_factory) + : sender_(sender), + frame_transformer_(std::move(frame_transformer)), + ssrc_(ssrc), +- csrcs_(csrcs), ++ rid_(rid), + transformation_queue_(task_queue_factory->CreateTaskQueue( + "video_frame_transformer", + TaskQueueFactory::Priority::NORMAL)) {} +@@ -143,7 +149,7 @@ bool RTPSenderVideoFrameTransformerDelegate::TransformFrame( + TimeDelta expected_retransmission_time) { + frame_transformer_->Transform(std::make_unique( + encoded_image, video_header, payload_type, codec_type, rtp_timestamp, +- expected_retransmission_time, ssrc_, csrcs_)); ++ expected_retransmission_time, ssrc_, csrcs_, rid_)); + return true; + } + +@@ -237,7 +243,7 @@ std::unique_ptr CloneSenderVideoFrame( + encoded_image, new_header, original->GetPayloadType(), new_header.codec, + original->GetTimestamp(), + /*expected_retransmission_time=*/TimeDelta::PlusInfinity(), +- original->GetSsrc(), metadata.GetCsrcs()); ++ original->GetSsrc(), metadata.GetCsrcs(), original->GetRid()); + } + + } // namespace webrtc +diff --git a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h +index 8b32b8d7c8..7547785ea0 100644 +--- a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h ++++ 
b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h +@@ -59,6 +59,7 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback { + rtc::scoped_refptr frame_transformer, + uint32_t ssrc, + std::vector csrcs, ++ const std::string& rid, + TaskQueueFactory* send_transport_queue); + + void Init(); +@@ -106,6 +107,7 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback { + rtc::scoped_refptr frame_transformer_; + const uint32_t ssrc_; + std::vector csrcs_; ++ const std::string rid_; + // Used when the encoded frames arrives without a current task queue. This can + // happen if a hardware encoder was used. + std::unique_ptr transformation_queue_; +diff --git a/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc b/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc +index 5dd316e206..5c6b62d068 100644 +--- a/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc ++++ b/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc +@@ -56,6 +56,11 @@ class TransformableVideoReceiverFrame + return frame_->FrameType() == VideoFrameType::kVideoFrameKey; + } + ++ const std::string& GetRid() const override { ++ static const std::string empty; ++ return empty; ++ } ++ + VideoFrameMetadata Metadata() const override { return metadata_; } + + void SetMetadata(const VideoFrameMetadata&) override { -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0088.patch b/third_party/libwebrtc/moz-patch-stack/0088.patch index 34e5e8efb806..08ffca3e053b 100644 --- a/third_party/libwebrtc/moz-patch-stack/0088.patch +++ b/third_party/libwebrtc/moz-patch-stack/0088.patch @@ -1,27 +1,41 @@ -From: Mike Hommey -Date: Fri, 7 Jul 2023 00:58:00 +0000 -Subject: Bug 1841577 - Don't set WEBRTC_ENABLE_AVX2 on platforms that don't - have AVX2. r=mjf,webrtc-reviewers +From: Byron Campen +Date: Thu, 20 Jul 2023 14:24:00 +0000 +Subject: Bug 1838080: Ensure that last ref to transformation_queue_ is not + released on itself. r=pehrsons,webrtc-reviewers -Differential Revision: https://phabricator.services.mozilla.com/D182695 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/46fb51c90709be64c35946a8cf69195121441024 +Differential Revision: https://phabricator.services.mozilla.com/D181699 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/91d9e8b6a5c430a73561ffd2330865f04fcb1a6d --- - webrtc.gni | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) + .../rtp_sender_video_frame_transformer_delegate.cc | 9 +++++++++ + 1 file changed, 9 insertions(+) -diff --git a/webrtc.gni b/webrtc.gni -index ae3f99cb67..e30bc6b9bb 100644 ---- a/webrtc.gni -+++ b/webrtc.gni -@@ -290,7 +290,7 @@ declare_args() { +diff --git a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc +index 8d872a80e1..ded31d2283 100644 +--- a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc ++++ b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc +@@ -17,6 +17,7 @@ + #include "api/task_queue/task_queue_factory.h" + #include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h" + #include "rtc_base/checks.h" ++#include "rtc_base/event.h" - # Set this to true to enable the avx2 support in webrtc. - # TODO: Make sure that AVX2 works also for non-clang compilers. 
-- if (is_clang == true) { -+ if (is_clang == true && (target_cpu == "x86" || target_cpu == "x64")) { - rtc_enable_avx2 = true - } else { - rtc_enable_avx2 = false + namespace webrtc { + namespace { +@@ -224,6 +225,14 @@ void RTPSenderVideoFrameTransformerDelegate::Reset() { + MutexLock lock(&sender_lock_); + sender_ = nullptr; + } ++ // Wait until all pending tasks are executed, to ensure that the last ref ++ // standing is not on the transformation queue. ++ rtc::Event flush; ++ transformation_queue_->PostTask([this, &flush]() { ++ RTC_DCHECK_RUN_ON(transformation_queue_.get()); ++ flush.Set(); ++ }); ++ flush.Wait(rtc::Event::kForever); + } + + std::unique_ptr CloneSenderVideoFrame( -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0089.patch b/third_party/libwebrtc/moz-patch-stack/0089.patch index 0e2598e99a20..00ad4f293699 100644 --- a/third_party/libwebrtc/moz-patch-stack/0089.patch +++ b/third_party/libwebrtc/moz-patch-stack/0089.patch @@ -1,29 +1,31 @@ -From: Byron Campen -Date: Thu, 20 Jul 2023 14:24:00 +0000 -Subject: Bug 1838080: Remove this duplicate init (that's also on the wrong - thread). r=pehrsons,webrtc-reviewers +From: Michael Froman +Date: Sat, 12 Aug 2023 18:57:00 +0000 +Subject: Bug 1848045 - to upstream - fix misuse of rtc::strcpyn in + ApmDataDumper::SetOutputDirectory. r=ng,webrtc-reviewers -This was causing assertions. +rtc::strcpyn second param should be the size of the destination buffer, +not the size of the source string. This has been masked because FormFileName +helpfully adds a trailing path separator if one is missing. -Differential Revision: https://phabricator.services.mozilla.com/D179731 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/6ac6592a04a839a6152d5ad5f0778f63dbbd6b1b +Differential Revision: https://phabricator.services.mozilla.com/D185862 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/5d935bb2573445a198af03cd3d5186243880ecdf --- - audio/channel_send.cc | 2 -- - 1 file changed, 2 deletions(-) + modules/audio_processing/logging/apm_data_dumper.h | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) -diff --git a/audio/channel_send.cc b/audio/channel_send.cc -index 5ee97f290e..f784b8eebc 100644 ---- a/audio/channel_send.cc -+++ b/audio/channel_send.cc -@@ -457,8 +457,6 @@ ChannelSend::ChannelSend( +diff --git a/modules/audio_processing/logging/apm_data_dumper.h b/modules/audio_processing/logging/apm_data_dumper.h +index 4ab6baad83..76f8b34c0b 100644 +--- a/modules/audio_processing/logging/apm_data_dumper.h ++++ b/modules/audio_processing/logging/apm_data_dumper.h +@@ -91,7 +91,7 @@ class ApmDataDumper { + static void SetOutputDirectory(absl::string_view output_dir) { + #if WEBRTC_APM_DEBUG_DUMP == 1 + RTC_CHECK_LT(output_dir.size(), kOutputDirMaxLength); +- rtc::strcpyn(output_dir_, output_dir.size(), output_dir); ++ rtc::strcpyn(output_dir_, kOutputDirMaxLength, output_dir); + #endif + } - int error = audio_coding_->RegisterTransportCallback(this); - RTC_DCHECK_EQ(0, error); -- if (frame_transformer) -- InitFrameTransformerDelegate(std::move(frame_transformer)); - } - - ChannelSend::~ChannelSend() { -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0090.patch b/third_party/libwebrtc/moz-patch-stack/0090.patch index aa3e315bafc1..108196b88c06 100644 --- a/third_party/libwebrtc/moz-patch-stack/0090.patch +++ b/third_party/libwebrtc/moz-patch-stack/0090.patch @@ -1,42 +1,41 @@ -From: Byron Campen -Date: Thu, 20 Jul 2023 14:24:00 +0000 -Subject: Bug 1838080: Work around a race 
in - ChannelSendFrameTransformerDelegate. r=pehrsons,webrtc-reviewers +From: stransky +Date: Tue, 29 Aug 2023 12:43:00 +0000 +Subject: Bug 1821629 [DMABuf] Don't use DMABuf if it's disabled by Firefox gfx + config r=ng,webrtc-reviewers -This variable can be null when a ChannelSendFrameTransformerDelegate is in use, -because that does an async dispatch to the encoder queue in the handling for -transformed frames. If this is unset while that dispatch is in flight, we -nullptr crash. - -Differential Revision: https://phabricator.services.mozilla.com/D180735 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/56555ecee7f36ae73abff1cbbd06807c2b65fc19 +Differential Revision: https://phabricator.services.mozilla.com/D172224 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/158a888cad8869a2f0026fa7cfaaa13ecbfcf2ed --- - audio/channel_send.cc | 8 ++++++-- - 1 file changed, 6 insertions(+), 2 deletions(-) + .../linux/wayland/shared_screencast_stream.cc | 9 ++++++++- + 1 file changed, 8 insertions(+), 1 deletion(-) -diff --git a/audio/channel_send.cc b/audio/channel_send.cc -index f784b8eebc..528ed6c616 100644 ---- a/audio/channel_send.cc -+++ b/audio/channel_send.cc -@@ -281,12 +281,16 @@ class RtpPacketSenderProxy : public RtpPacketSender { - void EnqueuePackets( - std::vector> packets) override { - MutexLock lock(&mutex_); -- rtp_packet_pacer_->EnqueuePackets(std::move(packets)); -+ if (rtp_packet_pacer_) { -+ rtp_packet_pacer_->EnqueuePackets(std::move(packets)); -+ } - } +diff --git a/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc b/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc +index 0c26e7a7d5..22aa355e44 100644 +--- a/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc ++++ b/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc +@@ -28,6 +28,13 @@ + #include "rtc_base/synchronization/mutex.h" + #include "rtc_base/time_utils.h" - void RemovePacketsForSsrc(uint32_t ssrc) override { - MutexLock lock(&mutex_); -- rtp_packet_pacer_->RemovePacketsForSsrc(ssrc); -+ if (rtp_packet_pacer_) { -+ rtp_packet_pacer_->RemovePacketsForSsrc(ssrc); -+ } - } ++// Wrapper for gfxVars::UseDMABuf() as we can't include gfxVars here. ++// We don't want to use dmabuf of known broken systems. ++// See FEATURE_DMABUF for details. ++namespace mozilla::gfx { ++bool IsDMABufEnabled(); ++} ++ + namespace webrtc { + + const int kBytesPerPixel = 4; +@@ -294,7 +301,7 @@ void SharedScreenCastStreamPrivate::OnStreamParamChanged( + that->modifier_ = + has_modifier ? that->spa_video_format_.modifier : DRM_FORMAT_MOD_INVALID; + std::vector params; +- const int buffer_types = has_modifier ++ const int buffer_types = has_modifier && mozilla::gfx::IsDMABufEnabled() + ? (1 << SPA_DATA_DmaBuf) | (1 << SPA_DATA_MemFd) + : (1 << SPA_DATA_MemFd); - private: -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0091.patch b/third_party/libwebrtc/moz-patch-stack/0091.patch index a45c27821802..2a0ad07f535d 100644 --- a/third_party/libwebrtc/moz-patch-stack/0091.patch +++ b/third_party/libwebrtc/moz-patch-stack/0091.patch @@ -1,69 +1,50 @@ -From: Byron Campen -Date: Thu, 20 Jul 2023 14:24:00 +0000 -Subject: Bug 1838080: Use the current TaskQueue, instead of the current - thread, to init this. 
r=pehrsons,webrtc-reviewers +From: stransky +Date: Tue, 29 Aug 2023 12:43:00 +0000 +Subject: Bug 1821629 [Pipewire/DMABuf] Don't create dmabuf backend if it's + disabled r=ng,webrtc-reviewers -There are situations where the current thread is not set, but the current -TaskQueue is (but not vice versa). +Depends on D172224 -Differential Revision: https://phabricator.services.mozilla.com/D180736 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/62e71a2f745c4b98d5ee7ce9e6386aa1b657be9b +Differential Revision: https://phabricator.services.mozilla.com/D172229 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/237d19fe96dd7d25b6a817415ee4e6854678d648 --- - .../rtp_video_stream_receiver_frame_transformer_delegate.cc | 3 +-- - .../rtp_video_stream_receiver_frame_transformer_delegate.h | 5 ++--- - video/rtp_video_stream_receiver2.cc | 2 +- - 3 files changed, 4 insertions(+), 6 deletions(-) + .../linux/wayland/shared_screencast_stream.cc | 9 ++++++--- + 1 file changed, 6 insertions(+), 3 deletions(-) -diff --git a/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc b/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc -index 1e4d0540f0..163fcbdf67 100644 ---- a/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc -+++ b/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc -@@ -80,8 +80,7 @@ RtpVideoStreamReceiverFrameTransformerDelegate:: - RtpVideoFrameReceiver* receiver, - Clock* clock, - rtc::scoped_refptr frame_transformer, -- rtc::Thread* network_thread, -- uint32_t ssrc) -+ TaskQueueBase* network_thread, uint32_t ssrc) - : receiver_(receiver), - frame_transformer_(std::move(frame_transformer)), - network_thread_(network_thread), -diff --git a/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.h b/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.h -index 62a42fdddf..20f9a5caa9 100644 ---- a/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.h -+++ b/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.h -@@ -41,8 +41,7 @@ class RtpVideoStreamReceiverFrameTransformerDelegate - RtpVideoFrameReceiver* receiver, - Clock* clock, - rtc::scoped_refptr frame_transformer, -- rtc::Thread* network_thread, -- uint32_t ssrc); -+ TaskQueueBase* network_thread, uint32_t ssrc); - - void Init(); - void Reset(); -@@ -67,7 +66,7 @@ class RtpVideoStreamReceiverFrameTransformerDelegate - RtpVideoFrameReceiver* receiver_ RTC_GUARDED_BY(network_sequence_checker_); - rtc::scoped_refptr frame_transformer_ - RTC_GUARDED_BY(network_sequence_checker_); -- rtc::Thread* const network_thread_; -+ TaskQueueBase* const network_thread_; - const uint32_t ssrc_; - Clock* const clock_; - }; -diff --git a/video/rtp_video_stream_receiver2.cc b/video/rtp_video_stream_receiver2.cc -index 738da0758b..6daa18ae30 100644 ---- a/video/rtp_video_stream_receiver2.cc -+++ b/video/rtp_video_stream_receiver2.cc -@@ -341,7 +341,7 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( - if (frame_transformer) { - frame_transformer_delegate_ = - rtc::make_ref_counted( -- this, clock_, std::move(frame_transformer), rtc::Thread::Current(), -+ this, clock_, std::move(frame_transformer), TaskQueueBase::Current(), - config_.rtp.remote_ssrc); - frame_transformer_delegate_->Init(); +diff --git a/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc 
b/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc +index 22aa355e44..61c6957d27 100644 +--- a/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc ++++ b/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc +@@ -419,7 +419,9 @@ bool SharedScreenCastStreamPrivate::StartScreenCastStream( + RTC_LOG(LS_ERROR) << "Unable to open PipeWire library"; + return false; } +- egl_dmabuf_ = std::make_unique(); ++ if (mozilla::gfx::IsDMABufEnabled()) { ++ egl_dmabuf_ = std::make_unique(); ++ } + + pw_stream_node_id_ = stream_node_id; + +@@ -508,7 +510,8 @@ bool SharedScreenCastStreamPrivate::StartScreenCastStream( + for (uint32_t format : {SPA_VIDEO_FORMAT_BGRA, SPA_VIDEO_FORMAT_RGBA, + SPA_VIDEO_FORMAT_BGRx, SPA_VIDEO_FORMAT_RGBx}) { + // Modifiers can be used with PipeWire >= 0.3.33 +- if (has_required_pw_client_version && has_required_pw_server_version) { ++ if (egl_dmabuf_ && ++ has_required_pw_client_version && has_required_pw_server_version) { + modifiers_ = egl_dmabuf_->QueryDmaBufModifiers(format); + + if (!modifiers_.empty()) { +@@ -927,7 +930,7 @@ bool SharedScreenCastStreamPrivate::ProcessDMABuffer( + + const uint n_planes = spa_buffer->n_datas; + +- if (!n_planes) { ++ if (!n_planes || !egl_dmabuf_) { + return false; + } + -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0092.patch b/third_party/libwebrtc/moz-patch-stack/0092.patch index c82e244a7788..ff96a8bcdc2d 100644 --- a/third_party/libwebrtc/moz-patch-stack/0092.patch +++ b/third_party/libwebrtc/moz-patch-stack/0092.patch @@ -1,170 +1,44 @@ -From: Michael Froman -Date: Thu, 27 Jul 2023 12:42:44 -0500 -Subject: Bug 1838080: Store the rid in TransformableVideoSenderFrame. - r=ng,webrtc-reviewers +From: Robert Mader +Date: Wed, 6 Sep 2023 16:37:00 +0000 +Subject: Bug 1843786 - webrtc: Filter out non-camera nodes, + r=webrtc-reviewers,pehrsons,@webrtc-reviewers +MIME-Version: 1.0 +Content-Type: text/plain; charset=UTF-8 +Content-Transfer-Encoding: 8bit -This is necessary to reliably detect what rid a given keyframe is for, for the -purposes of resolving promises from RTCRtpScriptTransformer.generateKeyFrame. +This can be helpful in various situations, such as debugging with an +unrestricted Pipewire socket or for downstream projects like +B2G/Capyloon. Additionally it will help once we move from the camera +portal to the more generic device portal. -Differential Revision: https://phabricator.services.mozilla.com/D180737 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/2f1a0ba74bf71cfa0bc4e77714b8a5276a70cc36 +This is intended to get upstreamed but would be nice to carry downstream +already. 
+ +Original patch by Fabrice Desré + +Upstream commit: a717c7ada811411e31bd7308168b4dbee5b956fa + +Differential Revision: https://phabricator.services.mozilla.com/D183650 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/4c2505f533280d9012e28633e410a66b0db95412 --- - api/frame_transformer_interface.h | 1 + - modules/rtp_rtcp/source/rtp_sender.h | 4 ++++ - modules/rtp_rtcp/source/rtp_sender_video.cc | 1 + - ...tp_sender_video_frame_transformer_delegate.cc | 16 +++++++++++----- - ...rtp_sender_video_frame_transformer_delegate.h | 2 ++ - ...stream_receiver_frame_transformer_delegate.cc | 5 +++++ - 6 files changed, 24 insertions(+), 5 deletions(-) + modules/video_capture/linux/pipewire_session.cc | 4 ++++ + 1 file changed, 4 insertions(+) -diff --git a/api/frame_transformer_interface.h b/api/frame_transformer_interface.h -index c47bad4243..3e2d7d64ef 100644 ---- a/api/frame_transformer_interface.h -+++ b/api/frame_transformer_interface.h -@@ -57,6 +57,7 @@ class TransformableVideoFrameInterface : public TransformableFrameInterface { - public: - virtual ~TransformableVideoFrameInterface() = default; - virtual bool IsKeyFrame() const = 0; -+ virtual const std::string& GetRid() const = 0; +diff --git a/modules/video_capture/linux/pipewire_session.cc b/modules/video_capture/linux/pipewire_session.cc +index 3f52b3dd61..4d1b200aca 100644 +--- a/modules/video_capture/linux/pipewire_session.cc ++++ b/modules/video_capture/linux/pipewire_session.cc +@@ -360,6 +360,10 @@ void PipeWireSession::OnRegistryGlobal(void* data, + if (!spa_dict_lookup(props, PW_KEY_NODE_DESCRIPTION)) + return; - virtual VideoFrameMetadata Metadata() const = 0; - -diff --git a/modules/rtp_rtcp/source/rtp_sender.h b/modules/rtp_rtcp/source/rtp_sender.h -index 158d073b7a..1b053b4802 100644 ---- a/modules/rtp_rtcp/source/rtp_sender.h -+++ b/modules/rtp_rtcp/source/rtp_sender.h -@@ -143,6 +143,10 @@ class RTPSender { - - uint32_t SSRC() const RTC_LOCKS_EXCLUDED(send_mutex_) { return ssrc_; } - -+ const std::string& Rid() const RTC_LOCKS_EXCLUDED(send_mutex_) { -+ return rid_; -+ } ++ auto node_role = spa_dict_lookup(props, PW_KEY_MEDIA_ROLE); ++ if (!node_role || strcmp(node_role, "Camera")) ++ return; + - absl::optional FlexfecSsrc() const RTC_LOCKS_EXCLUDED(send_mutex_) { - return flexfec_ssrc_; - } -diff --git a/modules/rtp_rtcp/source/rtp_sender_video.cc b/modules/rtp_rtcp/source/rtp_sender_video.cc -index af447e95a7..1cbf0b8dc3 100644 ---- a/modules/rtp_rtcp/source/rtp_sender_video.cc -+++ b/modules/rtp_rtcp/source/rtp_sender_video.cc -@@ -169,6 +169,7 @@ RTPSenderVideo::RTPSenderVideo(const Config& config) - config.frame_transformer, - rtp_sender_->SSRC(), - rtp_sender_->Csrcs(), -+ rtp_sender_->Rid(), - config.task_queue_factory) - : nullptr) { - if (frame_transformer_delegate_) -diff --git a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc -index 29c33f7b0e..5a68019a8b 100644 ---- a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc -+++ b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc -@@ -31,7 +31,8 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface { - uint32_t rtp_timestamp, - absl::optional expected_retransmission_time_ms, - uint32_t ssrc, -- std::vector csrcs) -+ std::vector csrcs, -+ const std::string& rid) - : encoded_data_(encoded_image.GetEncodedData()), - pre_transform_payload_size_(encoded_image.size()), - header_(video_header), -@@ 
-43,7 +44,8 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface { - capture_time_identifier_(encoded_image.CaptureTimeIdentifier()), - expected_retransmission_time_ms_(expected_retransmission_time_ms), - ssrc_(ssrc), -- csrcs_(csrcs) { -+ csrcs_(csrcs), -+ rid_(rid) { - RTC_DCHECK_GE(payload_type_, 0); - RTC_DCHECK_LE(payload_type_, 127); - } -@@ -97,6 +99,8 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface { - - Direction GetDirection() const override { return Direction::kSender; } - -+ const std::string& GetRid() const override { return rid_; } -+ - private: - rtc::scoped_refptr encoded_data_; - const size_t pre_transform_payload_size_; -@@ -111,6 +115,7 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface { - - uint32_t ssrc_; - std::vector csrcs_; -+ const std::string rid_; - }; - } // namespace - -@@ -119,11 +124,12 @@ RTPSenderVideoFrameTransformerDelegate::RTPSenderVideoFrameTransformerDelegate( - rtc::scoped_refptr frame_transformer, - uint32_t ssrc, - std::vector csrcs, -+ const std::string& rid, - TaskQueueFactory* task_queue_factory) - : sender_(sender), - frame_transformer_(std::move(frame_transformer)), - ssrc_(ssrc), -- csrcs_(csrcs), -+ rid_(rid), - transformation_queue_(task_queue_factory->CreateTaskQueue( - "video_frame_transformer", - TaskQueueFactory::Priority::NORMAL)) {} -@@ -142,7 +148,7 @@ bool RTPSenderVideoFrameTransformerDelegate::TransformFrame( - absl::optional expected_retransmission_time_ms) { - frame_transformer_->Transform(std::make_unique( - encoded_image, video_header, payload_type, codec_type, rtp_timestamp, -- expected_retransmission_time_ms, ssrc_, csrcs_)); -+ expected_retransmission_time_ms, ssrc_, csrcs_, rid_)); - return true; + that->nodes_.emplace_back(that, id, props); + that->PipeWireSync(); } - -@@ -236,7 +242,7 @@ std::unique_ptr CloneSenderVideoFrame( - encoded_image, new_header, original->GetPayloadType(), new_header.codec, - original->GetTimestamp(), - absl::nullopt, // expected_retransmission_time_ms -- original->GetSsrc(), metadata.GetCsrcs()); -+ original->GetSsrc(), metadata.GetCsrcs(), original->GetRid()); - } - - } // namespace webrtc -diff --git a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h -index ac934eb332..a5bf65d073 100644 ---- a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h -+++ b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h -@@ -58,6 +58,7 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback { - rtc::scoped_refptr frame_transformer, - uint32_t ssrc, - std::vector csrcs, -+ const std::string& rid, - TaskQueueFactory* send_transport_queue); - - void Init(); -@@ -105,6 +106,7 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback { - rtc::scoped_refptr frame_transformer_; - const uint32_t ssrc_; - std::vector csrcs_; -+ const std::string rid_; - // Used when the encoded frames arrives without a current task queue. This can - // happen if a hardware encoder was used. 
- std::unique_ptr transformation_queue_; -diff --git a/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc b/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc -index 163fcbdf67..b920efadff 100644 ---- a/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc -+++ b/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc -@@ -53,6 +53,11 @@ class TransformableVideoReceiverFrame - return frame_->FrameType() == VideoFrameType::kVideoFrameKey; - } - -+ const std::string& GetRid() const override { -+ static const std::string empty; -+ return empty; -+ } -+ - VideoFrameMetadata Metadata() const override { return metadata_; } - - void SetMetadata(const VideoFrameMetadata&) override { -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0093.patch b/third_party/libwebrtc/moz-patch-stack/0093.patch index 9dbae05aa32e..07bfa892d1ee 100644 --- a/third_party/libwebrtc/moz-patch-stack/0093.patch +++ b/third_party/libwebrtc/moz-patch-stack/0093.patch @@ -1,41 +1,74 @@ -From: Byron Campen -Date: Thu, 20 Jul 2023 14:24:00 +0000 -Subject: Bug 1838080: Ensure that last ref to transformation_queue_ is not - released on itself. r=pehrsons,webrtc-reviewers +From: Robert Mader +Date: Wed, 6 Sep 2023 16:37:00 +0000 +Subject: Bug 1843786 - webrtc: Implement Pipewire camera rotation support, + r=webrtc-reviewers,pehrsons,@webrtc-reviewers -Differential Revision: https://phabricator.services.mozilla.com/D181699 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/91d9e8b6a5c430a73561ffd2330865f04fcb1a6d +Support the Pipewire videotransform meta via the already existing shared +infrastructure. This is needed for mobile devices which often have a 90 +degree rotated camera - which is likely the reason there is already +support in the shared code paths. + +Upstream commit: dc4c019c62abefc09e4903abea44bc36f3427452 + +Differential Revision: https://phabricator.services.mozilla.com/D183652 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/1db7a958b945838c64dcfcb638f79b99f729b364 --- - .../rtp_sender_video_frame_transformer_delegate.cc | 9 +++++++++ - 1 file changed, 9 insertions(+) + .../linux/video_capture_pipewire.cc | 28 +++++++++++++++++++ + 1 file changed, 28 insertions(+) -diff --git a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc -index 5a68019a8b..fb133ff73a 100644 ---- a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc -+++ b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc -@@ -17,6 +17,7 @@ - #include "api/task_queue/task_queue_factory.h" - #include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h" - #include "rtc_base/checks.h" -+#include "rtc_base/event.h" - - namespace webrtc { - namespace { -@@ -223,6 +224,14 @@ void RTPSenderVideoFrameTransformerDelegate::Reset() { - MutexLock lock(&sender_lock_); - sender_ = nullptr; - } -+ // Wait until all pending tasks are executed, to ensure that the last ref -+ // standing is not on the transformation queue. 
-+ rtc::Event flush; -+ transformation_queue_->PostTask([this, &flush]() { -+ RTC_DCHECK_RUN_ON(transformation_queue_.get()); -+ flush.Set(); -+ }); -+ flush.Wait(rtc::Event::kForever); +diff --git a/modules/video_capture/linux/video_capture_pipewire.cc b/modules/video_capture/linux/video_capture_pipewire.cc +index 46cfe9bc15..7b79f15a6c 100644 +--- a/modules/video_capture/linux/video_capture_pipewire.cc ++++ b/modules/video_capture/linux/video_capture_pipewire.cc +@@ -301,6 +301,10 @@ void VideoCaptureModulePipeWire::OnFormatChanged(const struct spa_pod* format) { + &builder, SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta, SPA_PARAM_META_type, + SPA_POD_Id(SPA_META_Header), SPA_PARAM_META_size, + SPA_POD_Int(sizeof(struct spa_meta_header))))); ++ params.push_back(reinterpret_cast(spa_pod_builder_add_object( ++ &builder, SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta, SPA_PARAM_META_type, ++ SPA_POD_Id(SPA_META_VideoTransform), SPA_PARAM_META_size, ++ SPA_POD_Int(sizeof(struct spa_meta_videotransform))))); + pw_stream_update_params(stream_, params.data(), params.size()); } - std::unique_ptr CloneSenderVideoFrame( +@@ -341,6 +345,19 @@ void VideoCaptureModulePipeWire::OnStreamProcess(void* data) { + that->ProcessBuffers(); + } + ++static VideoRotation VideorotationFromPipeWireTransform(uint32_t transform) { ++ switch (transform) { ++ case SPA_META_TRANSFORMATION_90: ++ return kVideoRotation_90; ++ case SPA_META_TRANSFORMATION_180: ++ return kVideoRotation_180; ++ case SPA_META_TRANSFORMATION_270: ++ return kVideoRotation_270; ++ default: ++ return kVideoRotation_0; ++ } ++} ++ + void VideoCaptureModulePipeWire::ProcessBuffers() { + RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); + +@@ -349,6 +366,17 @@ void VideoCaptureModulePipeWire::ProcessBuffers() { + h = static_cast( + spa_buffer_find_meta_data(buffer->buffer, SPA_META_Header, sizeof(*h))); + ++ struct spa_meta_videotransform* videotransform; ++ videotransform = ++ static_cast(spa_buffer_find_meta_data( ++ buffer->buffer, SPA_META_VideoTransform, sizeof(*videotransform))); ++ if (videotransform) { ++ VideoRotation rotation = ++ VideorotationFromPipeWireTransform(videotransform->transform); ++ SetCaptureRotation(rotation); ++ SetApplyRotation(rotation != kVideoRotation_0); ++ } ++ + if (h->flags & SPA_META_HEADER_FLAG_CORRUPTED) { + RTC_LOG(LS_INFO) << "Dropping corruped frame."; + } else { -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0094.patch b/third_party/libwebrtc/moz-patch-stack/0094.patch index af16cf065638..0343188b068e 100644 --- a/third_party/libwebrtc/moz-patch-stack/0094.patch +++ b/third_party/libwebrtc/moz-patch-stack/0094.patch @@ -1,49 +1,59 @@ -From: Jan Grulich -Date: Thu, 27 Jul 2023 09:49:00 +0000 -Subject: Bug 1841851 - WebRTC backport: PipeWire capturer: increase buffer - size to avoid buffer overflow r=jib,webrtc-reviewers,mjf +From: Michael Froman +Date: Thu, 28 Sep 2023 14:12:00 +0000 +Subject: Bug 1832465 - remove libXtst usage from libwebrtc. + r=ng,webrtc-reviewers -This is a simple backport of an WebRTC upstream change. 
- -Upstream commit: 8fcc6df79daf1810cd4ecdb8d2ef1d361abfdc9c - -Differential Revision: https://phabricator.services.mozilla.com/D183355 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/263be02fdeb0c3556e1da296eb18eb3d154eb99e +Differential Revision: https://phabricator.services.mozilla.com/D189386 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/0ec1b33b95dbb2d39355f28b2812fe25b4ad9f20 --- - .../linux/wayland/shared_screencast_stream.cc | 6 +++--- - 1 file changed, 3 insertions(+), 3 deletions(-) + modules/desktop_capture/BUILD.gn | 3 +++ + modules/desktop_capture/linux/x11/shared_x_display.cc | 4 ++++ + 2 files changed, 7 insertions(+) -diff --git a/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc b/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc -index 5b4e23c32d..0c26e7a7d5 100644 ---- a/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc -+++ b/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc -@@ -281,7 +281,7 @@ void SharedScreenCastStreamPrivate::OnStreamParamChanged( +diff --git a/modules/desktop_capture/BUILD.gn b/modules/desktop_capture/BUILD.gn +index c184e2fbbc..bcea427de7 100644 +--- a/modules/desktop_capture/BUILD.gn ++++ b/modules/desktop_capture/BUILD.gn +@@ -382,6 +382,9 @@ rtc_library("desktop_capture") { + "Xrandr", + "Xtst", + ] ++ if (build_with_mozilla) { ++ libs -= [ "Xtst" ] ++ } + } - that->stream_size_ = DesktopSize(width, height); + if (!is_win && !is_mac && !rtc_use_x11_extensions && !rtc_use_pipewire && +diff --git a/modules/desktop_capture/linux/x11/shared_x_display.cc b/modules/desktop_capture/linux/x11/shared_x_display.cc +index d690b0e2ba..3f3617b074 100644 +--- a/modules/desktop_capture/linux/x11/shared_x_display.cc ++++ b/modules/desktop_capture/linux/x11/shared_x_display.cc +@@ -11,7 +11,9 @@ + #include "modules/desktop_capture/linux/x11/shared_x_display.h" -- uint8_t buffer[1024] = {}; -+ uint8_t buffer[2048] = {}; - auto builder = spa_pod_builder{buffer, sizeof(buffer)}; + #include ++#if !defined(WEBRTC_MOZILLA_BUILD) + #include ++#endif - // Setup buffers and meta header for new format. 
-@@ -364,7 +364,7 @@ void SharedScreenCastStreamPrivate::OnRenegotiateFormat(void* data, uint64_t) { - { - PipeWireThreadLoopLock thread_loop_lock(that->pw_main_loop_); + #include -- uint8_t buffer[2048] = {}; -+ uint8_t buffer[4096] = {}; +@@ -95,6 +97,7 @@ void SharedXDisplay::ProcessPendingXEvents() { + } - spa_pod_builder builder = spa_pod_builder{buffer, sizeof(buffer)}; - -@@ -482,7 +482,7 @@ bool SharedScreenCastStreamPrivate::StartScreenCastStream( - - pw_stream_add_listener(pw_stream_, &spa_stream_listener_, - &pw_stream_events_, this); -- uint8_t buffer[2048] = {}; -+ uint8_t buffer[4096] = {}; - - spa_pod_builder builder = spa_pod_builder{buffer, sizeof(buffer)}; + void SharedXDisplay::IgnoreXServerGrabs() { ++#if !defined(WEBRTC_MOZILLA_BUILD) + int test_event_base = 0; + int test_error_base = 0; + int major = 0; +@@ -103,6 +106,7 @@ void SharedXDisplay::IgnoreXServerGrabs() { + &minor)) { + XTestGrabControl(display(), true); + } ++#endif + } + } // namespace webrtc -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0095.patch b/third_party/libwebrtc/moz-patch-stack/0095.patch index a5b6350e06ec..0bcf130de881 100644 --- a/third_party/libwebrtc/moz-patch-stack/0095.patch +++ b/third_party/libwebrtc/moz-patch-stack/0095.patch @@ -1,77 +1,94 @@ -From: Jan Grulich -Date: Tue, 8 Aug 2023 10:05:00 +0000 -Subject: Bug 1840150 - WebRTC backport: Video capture PipeWire: guard callback - to avoid concurrent access r=jib,webrtc-reviewers,mjf +From: Dan Baker +Date: Mon, 2 Oct 2023 17:17:00 +0000 +Subject: Bug 1851693 - (fix-279a05475d) Revert addition of race checker which + is causing tsan failurs until better fix is landed upstream.r=pehrsons -This is a simple backport of an WebRTC upstream change. +This merely reverses a race checker that is too strict for our use, maintaining the code we had before, so we can fix upstream in the meantime tracked with Bug 1856392. 
-Upstream commit: 666d707450eff6cc2946eba8edf67420f87ad11e - -Differential Revision: https://phabricator.services.mozilla.com/D184386 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/144cbf240a6b53de6f821bf04bfdb74218fea1a8 +Differential Revision: https://phabricator.services.mozilla.com/D189396 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/e626ce7279e6575e68d0e43de3dfd3ed59b00a75 --- - modules/video_capture/linux/pipewire_session.cc | 10 +++++++++- - modules/video_capture/linux/pipewire_session.h | 5 ++++- - 2 files changed, 13 insertions(+), 2 deletions(-) + modules/video_capture/linux/video_capture_v4l2.cc | 7 ------- + modules/video_capture/linux/video_capture_v4l2.h | 7 +++---- + 2 files changed, 3 insertions(+), 11 deletions(-) -diff --git a/modules/video_capture/linux/pipewire_session.cc b/modules/video_capture/linux/pipewire_session.cc -index 2adb1cdb50..3f52b3dd61 100644 ---- a/modules/video_capture/linux/pipewire_session.cc -+++ b/modules/video_capture/linux/pipewire_session.cc -@@ -232,7 +232,10 @@ PipeWireSession::~PipeWireSession() { +diff --git a/modules/video_capture/linux/video_capture_v4l2.cc b/modules/video_capture/linux/video_capture_v4l2.cc +index d6813b13fd..c887683dc8 100644 +--- a/modules/video_capture/linux/video_capture_v4l2.cc ++++ b/modules/video_capture/linux/video_capture_v4l2.cc +@@ -110,7 +110,6 @@ int32_t VideoCaptureModuleV4L2::Init(const char* deviceUniqueIdUTF8) { + + VideoCaptureModuleV4L2::~VideoCaptureModuleV4L2() { + RTC_DCHECK_RUN_ON(&api_checker_); +- RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); + + StopCapture(); + if (_deviceFd != -1) +@@ -120,7 +119,6 @@ VideoCaptureModuleV4L2::~VideoCaptureModuleV4L2() { + int32_t VideoCaptureModuleV4L2::StartCapture( + const VideoCaptureCapability& capability) { + RTC_DCHECK_RUN_ON(&api_checker_); +- RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); + + if (_captureStarted) { + if (capability == _requestedCapability) { +@@ -318,7 +316,6 @@ int32_t VideoCaptureModuleV4L2::StopCapture() { + _captureThread.Finalize(); + } + +- RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); + MutexLock lock(&capture_lock_); + if (_captureStarted) { + _captureStarted = false; +@@ -336,7 +333,6 @@ int32_t VideoCaptureModuleV4L2::StopCapture() { + // critical section protected by the caller + + bool VideoCaptureModuleV4L2::AllocateVideoBuffers() { +- RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); + struct v4l2_requestbuffers rbuffer; + memset(&rbuffer, 0, sizeof(v4l2_requestbuffers)); + +@@ -387,7 +383,6 @@ bool VideoCaptureModuleV4L2::AllocateVideoBuffers() { } - void PipeWireSession::Init(VideoCaptureOptions::Callback* callback, int fd) { -- callback_ = callback; -+ { -+ webrtc::MutexLock lock(&callback_lock_); -+ callback_ = callback; -+ } - - if (fd != kInvalidPipeWireFd) { - InitPipeWire(fd); -@@ -374,6 +377,8 @@ void PipeWireSession::OnRegistryGlobalRemove(void* data, uint32_t id) { + bool VideoCaptureModuleV4L2::DeAllocateVideoBuffers() { +- RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); + // unmap buffers + for (int i = 0; i < _buffersAllocatedByDevice; i++) + munmap(_pool[i].start, _pool[i].length); +@@ -405,12 +400,10 @@ bool VideoCaptureModuleV4L2::DeAllocateVideoBuffers() { } - void PipeWireSession::Finish(VideoCaptureOptions::Status status) { -+ webrtc::MutexLock lock(&callback_lock_); -+ - if (callback_) { - callback_->OnInitialized(status); - callback_ = nullptr; -@@ -381,6 +386,9 @@ void PipeWireSession::Finish(VideoCaptureOptions::Status status) { + bool 
VideoCaptureModuleV4L2::CaptureStarted() { +- RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); + return _captureStarted; } - void PipeWireSession::Cleanup() { -+ webrtc::MutexLock lock(&callback_lock_); -+ callback_ = nullptr; -+ - StopPipeWire(); - } + bool VideoCaptureModuleV4L2::CaptureProcess() { +- RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); -diff --git a/modules/video_capture/linux/pipewire_session.h b/modules/video_capture/linux/pipewire_session.h -index 982b468838..fdc06a6b2a 100644 ---- a/modules/video_capture/linux/pipewire_session.h -+++ b/modules/video_capture/linux/pipewire_session.h -@@ -24,6 +24,7 @@ - #include "modules/video_capture/linux/camera_portal.h" - #include "modules/video_capture/video_capture.h" - #include "modules/video_capture/video_capture_options.h" -+#include "rtc_base/synchronization/mutex.h" - - namespace webrtc { - namespace videocapturemodule { -@@ -117,7 +118,9 @@ class PipeWireSession : public rtc::RefCountedNonVirtual { - void Finish(VideoCaptureOptions::Status status); - void Cleanup(); - -- VideoCaptureOptions::Callback* callback_ = nullptr; -+ webrtc::Mutex callback_lock_; -+ VideoCaptureOptions::Callback* callback_ RTC_GUARDED_BY(&callback_lock_) = -+ nullptr; - - VideoCaptureOptions::Status status_; + int retVal = 0; + struct pollfd rSet; +diff --git a/modules/video_capture/linux/video_capture_v4l2.h b/modules/video_capture/linux/video_capture_v4l2.h +index 0191e41876..61358d0325 100644 +--- a/modules/video_capture/linux/video_capture_v4l2.h ++++ b/modules/video_capture/linux/video_capture_v4l2.h +@@ -45,12 +45,11 @@ class VideoCaptureModuleV4L2 : public VideoCaptureImpl { + Mutex capture_lock_ RTC_ACQUIRED_BEFORE(api_lock_); + bool quit_ RTC_GUARDED_BY(capture_lock_); + int32_t _deviceId RTC_GUARDED_BY(api_checker_); +- int32_t _deviceFd RTC_GUARDED_BY(capture_checker_); ++ int32_t _deviceFd; + int32_t _buffersAllocatedByDevice RTC_GUARDED_BY(capture_lock_); +- VideoCaptureCapability configured_capability_ +- RTC_GUARDED_BY(capture_checker_); +- bool _captureStarted RTC_GUARDED_BY(capture_checker_); ++ VideoCaptureCapability configured_capability_; ++ bool _captureStarted; + struct Buffer { + void* start; + size_t length; -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0096.patch b/third_party/libwebrtc/moz-patch-stack/0096.patch index db0575926544..3626b2100584 100644 --- a/third_party/libwebrtc/moz-patch-stack/0096.patch +++ b/third_party/libwebrtc/moz-patch-stack/0096.patch @@ -1,31 +1,33 @@ From: Michael Froman -Date: Sat, 12 Aug 2023 18:57:00 +0000 -Subject: Bug 1848045 - to upstream - fix misuse of rtc::strcpyn in - ApmDataDumper::SetOutputDirectory. r=ng,webrtc-reviewers +Date: Thu, 5 Oct 2023 14:21:00 +0000 +Subject: Bug 1857037 - pt1 - add shim gni files to limit BUILD.gn changes. + r=ng,webrtc-reviewers -rtc::strcpyn second param should be the size of the destination buffer, -not the size of the source string. This has been masked because FormFileName -helpfully adds a trailing path separator if one is missing. 
- -Differential Revision: https://phabricator.services.mozilla.com/D185862 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/5d935bb2573445a198af03cd3d5186243880ecdf +Differential Revision: https://phabricator.services.mozilla.com/D190104 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/a84d39db037cbe34aa19588b0d18335eb5e2d79b --- - modules/audio_processing/logging/apm_data_dumper.h | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) + testing/libfuzzer/fuzzer_test.gni | 2 ++ + testing/test.gni | 2 ++ + 2 files changed, 4 insertions(+) + create mode 100644 testing/libfuzzer/fuzzer_test.gni + create mode 100644 testing/test.gni -diff --git a/modules/audio_processing/logging/apm_data_dumper.h b/modules/audio_processing/logging/apm_data_dumper.h -index 4ab6baad83..76f8b34c0b 100644 ---- a/modules/audio_processing/logging/apm_data_dumper.h -+++ b/modules/audio_processing/logging/apm_data_dumper.h -@@ -91,7 +91,7 @@ class ApmDataDumper { - static void SetOutputDirectory(absl::string_view output_dir) { - #if WEBRTC_APM_DEBUG_DUMP == 1 - RTC_CHECK_LT(output_dir.size(), kOutputDirMaxLength); -- rtc::strcpyn(output_dir_, output_dir.size(), output_dir); -+ rtc::strcpyn(output_dir_, kOutputDirMaxLength, output_dir); - #endif - } - +diff --git a/testing/libfuzzer/fuzzer_test.gni b/testing/libfuzzer/fuzzer_test.gni +new file mode 100644 +index 0000000000..8fdf3cdad2 +--- /dev/null ++++ b/testing/libfuzzer/fuzzer_test.gni +@@ -0,0 +1,2 @@ ++# "empty" file in place of importing new testing/libfuzzer ++# to allow BUILD.gn imports to succeed. +diff --git a/testing/test.gni b/testing/test.gni +new file mode 100644 +index 0000000000..f46fa82778 +--- /dev/null ++++ b/testing/test.gni +@@ -0,0 +1,2 @@ ++# "empty" file in place of importing new testing/test.gni ++# to allow BUILD.gn imports to succeed. 
-- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0097.patch b/third_party/libwebrtc/moz-patch-stack/0097.patch index 8990682d6173..c9ba580d92a7 100644 --- a/third_party/libwebrtc/moz-patch-stack/0097.patch +++ b/third_party/libwebrtc/moz-patch-stack/0097.patch @@ -1,41 +1,43 @@ -From: stransky -Date: Tue, 29 Aug 2023 12:43:00 +0000 -Subject: Bug 1821629 [DMABuf] Don't use DMABuf if it's disabled by Firefox gfx - config r=ng,webrtc-reviewers +From: Michael Froman +Date: Tue, 14 Feb 2023 03:27:00 +0000 +Subject: Bug 1816173 - pt12 - add shim config for + third_party/libwebrtc/testing/{gmock|gtest} r=ng -Differential Revision: https://phabricator.services.mozilla.com/D172224 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/158a888cad8869a2f0026fa7cfaaa13ecbfcf2ed +We don't vendor third_party/libwebrtc/third_party/gmock + third_party/libwebrtc/third_party/gtest, so: +- add BUILD.gn to avoid scattered BUILD.gn changes + +Differential Revision: https://phabricator.services.mozilla.com/D169674 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/4ea9d2db79c42a144235e45c47c71adf1dd01fdc --- - .../linux/wayland/shared_screencast_stream.cc | 9 ++++++++- - 1 file changed, 8 insertions(+), 1 deletion(-) + testing/gmock/BUILD.gn | 5 +++++ + testing/gtest/BUILD.gn | 5 +++++ + 2 files changed, 10 insertions(+) + create mode 100644 testing/gmock/BUILD.gn + create mode 100644 testing/gtest/BUILD.gn -diff --git a/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc b/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc -index 0c26e7a7d5..22aa355e44 100644 ---- a/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc -+++ b/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc -@@ -28,6 +28,13 @@ - #include "rtc_base/synchronization/mutex.h" - #include "rtc_base/time_utils.h" - -+// Wrapper for gfxVars::UseDMABuf() as we can't include gfxVars here. -+// We don't want to use dmabuf of known broken systems. -+// See FEATURE_DMABUF for details. -+namespace mozilla::gfx { -+bool IsDMABufEnabled(); -+} +diff --git a/testing/gmock/BUILD.gn b/testing/gmock/BUILD.gn +new file mode 100644 +index 0000000000..a2a1efdea9 +--- /dev/null ++++ b/testing/gmock/BUILD.gn +@@ -0,0 +1,5 @@ ++import("//third_party/libaom/options.gni") ++import("../../webrtc.gni") + - namespace webrtc { - - const int kBytesPerPixel = 4; -@@ -294,7 +301,7 @@ void SharedScreenCastStreamPrivate::OnStreamParamChanged( - that->modifier_ = - has_modifier ? that->spa_video_format_.modifier : DRM_FORMAT_MOD_INVALID; - std::vector params; -- const int buffer_types = has_modifier -+ const int buffer_types = has_modifier && mozilla::gfx::IsDMABufEnabled() - ? 
(1 << SPA_DATA_DmaBuf) | (1 << SPA_DATA_MemFd) - : (1 << SPA_DATA_MemFd); - ++rtc_library("gmock") { ++} +diff --git a/testing/gtest/BUILD.gn b/testing/gtest/BUILD.gn +new file mode 100644 +index 0000000000..c9c2703c37 +--- /dev/null ++++ b/testing/gtest/BUILD.gn +@@ -0,0 +1,5 @@ ++import("//third_party/libaom/options.gni") ++import("../../webrtc.gni") ++ ++rtc_library("gtest") { ++} -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0098.patch b/third_party/libwebrtc/moz-patch-stack/0098.patch index 35fcc8ac6542..c226991e275e 100644 --- a/third_party/libwebrtc/moz-patch-stack/0098.patch +++ b/third_party/libwebrtc/moz-patch-stack/0098.patch @@ -1,50 +1,31 @@ -From: stransky -Date: Tue, 29 Aug 2023 12:43:00 +0000 -Subject: Bug 1821629 [Pipewire/DMABuf] Don't create dmabuf backend if it's - disabled r=ng,webrtc-reviewers +From: Andreas Pehrson +Date: Wed, 18 Oct 2023 17:25:00 +0000 +Subject: Bug 1857862 - (fix-32a8169a65) Don't call non-constexpr + RTC_CHECK_NOTREACHED from constexpr VideoFrameTypeToString under gcc-8. + r=webrtc-reviewers,mjf -Depends on D172224 - -Differential Revision: https://phabricator.services.mozilla.com/D172229 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/237d19fe96dd7d25b6a817415ee4e6854678d648 +Differential Revision: https://phabricator.services.mozilla.com/D191308 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/8a4449ba24fa3192b44863ed8ba96f6f94a6e88d --- - .../linux/wayland/shared_screencast_stream.cc | 9 ++++++--- - 1 file changed, 6 insertions(+), 3 deletions(-) + api/video/video_frame_type.h | 4 ++++ + 1 file changed, 4 insertions(+) -diff --git a/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc b/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc -index 22aa355e44..61c6957d27 100644 ---- a/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc -+++ b/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc -@@ -419,7 +419,9 @@ bool SharedScreenCastStreamPrivate::StartScreenCastStream( - RTC_LOG(LS_ERROR) << "Unable to open PipeWire library"; - return false; - } -- egl_dmabuf_ = std::make_unique(); -+ if (mozilla::gfx::IsDMABufEnabled()) { -+ egl_dmabuf_ = std::make_unique(); -+ } - - pw_stream_node_id_ = stream_node_id; - -@@ -508,7 +510,8 @@ bool SharedScreenCastStreamPrivate::StartScreenCastStream( - for (uint32_t format : {SPA_VIDEO_FORMAT_BGRA, SPA_VIDEO_FORMAT_RGBA, - SPA_VIDEO_FORMAT_BGRx, SPA_VIDEO_FORMAT_RGBx}) { - // Modifiers can be used with PipeWire >= 0.3.33 -- if (has_required_pw_client_version && has_required_pw_server_version) { -+ if (egl_dmabuf_ && -+ has_required_pw_client_version && has_required_pw_server_version) { - modifiers_ = egl_dmabuf_->QueryDmaBufModifiers(format); - - if (!modifiers_.empty()) { -@@ -927,7 +930,7 @@ bool SharedScreenCastStreamPrivate::ProcessDMABuffer( - - const uint n_planes = spa_buffer->n_datas; - -- if (!n_planes) { -+ if (!n_planes || !egl_dmabuf_) { - return false; +diff --git a/api/video/video_frame_type.h b/api/video/video_frame_type.h +index 9079829ff8..3665a80cd8 100644 +--- a/api/video/video_frame_type.h ++++ b/api/video/video_frame_type.h +@@ -34,7 +34,11 @@ inline constexpr absl::string_view VideoFrameTypeToString( + case VideoFrameType::kVideoFrameDelta: + return "delta"; } ++// Mozilla: ++// gcc-8 complains about a constexpr function calling a non-constexpr ditto. 
++#if defined(__clang__) || (defined(__GNUC__) && __GNUC__ >= 9) + RTC_CHECK_NOTREACHED(); ++#endif + return ""; + } -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0099.patch b/third_party/libwebrtc/moz-patch-stack/0099.patch index 526da4a73684..f79d01a85e84 100644 --- a/third_party/libwebrtc/moz-patch-stack/0099.patch +++ b/third_party/libwebrtc/moz-patch-stack/0099.patch @@ -1,44 +1,46 @@ -From: Robert Mader -Date: Wed, 6 Sep 2023 16:37:00 +0000 -Subject: Bug 1843786 - webrtc: Filter out non-camera nodes, - r=webrtc-reviewers,pehrsons,@webrtc-reviewers -MIME-Version: 1.0 -Content-Type: text/plain; charset=UTF-8 -Content-Transfer-Encoding: 8bit +From: Andreas Pehrson +Date: Wed, 18 Oct 2023 18:51:00 +0000 +Subject: Bug 1857862 - (fix-34d82df2ba) Guard clang-specific pragmas in + libwebrtc/api/call/transport.h. r=webrtc-reviewers,mjf -This can be helpful in various situations, such as debugging with an -unrestricted Pipewire socket or for downstream projects like -B2G/Capyloon. Additionally it will help once we move from the camera -portal to the more generic device portal. - -This is intended to get upstreamed but would be nice to carry downstream -already. - -Original patch by Fabrice Desré - -Upstream commit: a717c7ada811411e31bd7308168b4dbee5b956fa - -Differential Revision: https://phabricator.services.mozilla.com/D183650 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/4c2505f533280d9012e28633e410a66b0db95412 +Differential Revision: https://phabricator.services.mozilla.com/D191312 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/3b5018d89fa118d1f227346ddd67a1fe1db4d4c1 --- - modules/video_capture/linux/pipewire_session.cc | 4 ++++ - 1 file changed, 4 insertions(+) + api/call/transport.h | 11 +++++++++++ + 1 file changed, 11 insertions(+) -diff --git a/modules/video_capture/linux/pipewire_session.cc b/modules/video_capture/linux/pipewire_session.cc -index 3f52b3dd61..4d1b200aca 100644 ---- a/modules/video_capture/linux/pipewire_session.cc -+++ b/modules/video_capture/linux/pipewire_session.cc -@@ -360,6 +360,10 @@ void PipeWireSession::OnRegistryGlobal(void* data, - if (!spa_dict_lookup(props, PW_KEY_NODE_DESCRIPTION)) - return; - -+ auto node_role = spa_dict_lookup(props, PW_KEY_MEDIA_ROLE); -+ if (!node_role || strcmp(node_role, "Camera")) -+ return; -+ - that->nodes_.emplace_back(that, id, props); - that->PipeWireSync(); - } +diff --git a/api/call/transport.h b/api/call/transport.h +index 6c6cbb8941..52c577ba83 100644 +--- a/api/call/transport.h ++++ b/api/call/transport.h +@@ -48,8 +48,15 @@ class Transport { + // New style functions. Default implementations are to accomodate + // subclasses that haven't been converted to new style yet. + // TODO(bugs.webrtc.org/14870): Deprecate and remove old functions. ++ // Mozilla: Add GCC pragmas for now. 
They will be removed soon: ++ // https://webrtc.googlesource.com/src/+/e14d122a7b24bf78c02b8a4ce23716f79451dd23 ++#if defined(__clang__) + #pragma clang diagnostic push + #pragma clang diagnostic ignored "-Wdeprecated-declarations" ++#elif defined(__GNUC__) ++#pragma GCC diagnostic push ++#pragma GCC diagnostic ignored "-Wdeprecated-declarations" ++#endif + virtual bool SendRtp(rtc::ArrayView packet, + const PacketOptions& options) { + return SendRtp(packet.data(), packet.size(), options); +@@ -57,7 +64,11 @@ class Transport { + virtual bool SendRtcp(rtc::ArrayView packet) { + return SendRtcp(packet.data(), packet.size()); + } ++#if defined(__clang__) + #pragma clang diagnostic pop ++#elif defined(__GNUC__) ++#pragma GCC diagnostic pop ++#endif + // Old style functions. + [[deprecated("Use ArrayView version")]] virtual bool + SendRtp(const uint8_t* packet, size_t length, const PacketOptions& options) { -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0100.patch b/third_party/libwebrtc/moz-patch-stack/0100.patch index 4a1727657b58..f41e4c209ea1 100644 --- a/third_party/libwebrtc/moz-patch-stack/0100.patch +++ b/third_party/libwebrtc/moz-patch-stack/0100.patch @@ -1,74 +1,44 @@ -From: Robert Mader -Date: Wed, 6 Sep 2023 16:37:00 +0000 -Subject: Bug 1843786 - webrtc: Implement Pipewire camera rotation support, - r=webrtc-reviewers,pehrsons,@webrtc-reviewers +From: Andreas Pehrson +Date: Wed, 18 Oct 2023 17:21:00 +0000 +Subject: Bug 1859786 - Fix lock annotation warning in Mozilla-specific edit on + top of video_capture_impl.cc. r=webrtc-reviewers,mjf -Support the Pipewire videotransform meta via the already existing shared -infrastructure. This is needed for mobile devices which often have a 90 -degree rotated camera - which is likely the reason there is already -support in the shared code paths. +The annotations were added in M116: +https://hg.mozilla.org/mozilla-central/rev/9cd372df013948ad822ae936752d725d77474fb5 -Upstream commit: dc4c019c62abefc09e4903abea44bc36f3427452 +Note that this was never unsafe, since _dataCallbacks is only written on the +same thread that we are patching here. This patch however, adds helpful static +analysis. 
-Differential Revision: https://phabricator.services.mozilla.com/D183652 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/1db7a958b945838c64dcfcb638f79b99f729b364 +Differential Revision: https://phabricator.services.mozilla.com/D191301 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/56ff441b644400f09d2d0453dbd8991ea25db7b1 --- - .../linux/video_capture_pipewire.cc | 28 +++++++++++++++++++ - 1 file changed, 28 insertions(+) + modules/video_capture/video_capture_impl.cc | 11 +++++++---- + 1 file changed, 7 insertions(+), 4 deletions(-) -diff --git a/modules/video_capture/linux/video_capture_pipewire.cc b/modules/video_capture/linux/video_capture_pipewire.cc -index 46cfe9bc15..7b79f15a6c 100644 ---- a/modules/video_capture/linux/video_capture_pipewire.cc -+++ b/modules/video_capture/linux/video_capture_pipewire.cc -@@ -301,6 +301,10 @@ void VideoCaptureModulePipeWire::OnFormatChanged(const struct spa_pod* format) { - &builder, SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta, SPA_PARAM_META_type, - SPA_POD_Id(SPA_META_Header), SPA_PARAM_META_size, - SPA_POD_Int(sizeof(struct spa_meta_header))))); -+ params.push_back(reinterpret_cast(spa_pod_builder_add_object( -+ &builder, SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta, SPA_PARAM_META_type, -+ SPA_POD_Id(SPA_META_VideoTransform), SPA_PARAM_META_size, -+ SPA_POD_Int(sizeof(struct spa_meta_videotransform))))); - pw_stream_update_params(stream_, params.data(), params.size()); +diff --git a/modules/video_capture/video_capture_impl.cc b/modules/video_capture/video_capture_impl.cc +index 02404697ad..1bddaf824d 100644 +--- a/modules/video_capture/video_capture_impl.cc ++++ b/modules/video_capture/video_capture_impl.cc +@@ -119,11 +119,14 @@ void VideoCaptureImpl::DeRegisterCaptureDataCallback( } -@@ -341,6 +345,19 @@ void VideoCaptureModulePipeWire::OnStreamProcess(void* data) { - that->ProcessBuffers(); - } - -+static VideoRotation VideorotationFromPipeWireTransform(uint32_t transform) { -+ switch (transform) { -+ case SPA_META_TRANSFORMATION_90: -+ return kVideoRotation_90; -+ case SPA_META_TRANSFORMATION_180: -+ return kVideoRotation_180; -+ case SPA_META_TRANSFORMATION_270: -+ return kVideoRotation_270; -+ default: -+ return kVideoRotation_0; -+ } -+} -+ - void VideoCaptureModulePipeWire::ProcessBuffers() { - RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); - -@@ -349,6 +366,17 @@ void VideoCaptureModulePipeWire::ProcessBuffers() { - h = static_cast( - spa_buffer_find_meta_data(buffer->buffer, SPA_META_Header, sizeof(*h))); - -+ struct spa_meta_videotransform* videotransform; -+ videotransform = -+ static_cast(spa_buffer_find_meta_data( -+ buffer->buffer, SPA_META_VideoTransform, sizeof(*videotransform))); -+ if (videotransform) { -+ VideoRotation rotation = -+ VideorotationFromPipeWireTransform(videotransform->transform); -+ SetCaptureRotation(rotation); -+ SetApplyRotation(rotation != kVideoRotation_0); + int32_t VideoCaptureImpl::StopCaptureIfAllClientsClose() { +- if (_dataCallBacks.empty()) { +- return StopCapture(); +- } else { +- return 0; ++ RTC_DCHECK_RUN_ON(&api_checker_); ++ { ++ MutexLock lock(&api_lock_); ++ if (!_dataCallBacks.empty()) { ++ return 0; + } -+ - if (h->flags & SPA_META_HEADER_FLAG_CORRUPTED) { - RTC_LOG(LS_INFO) << "Dropping corruped frame."; - } else { + } ++ return StopCapture(); + } + + int32_t VideoCaptureImpl::DeliverCapturedFrame(VideoFrame& captureFrame) { -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0101.patch b/third_party/libwebrtc/moz-patch-stack/0101.patch index 
bfc32a7cf1ee..826b9091d6bc 100644 --- a/third_party/libwebrtc/moz-patch-stack/0101.patch +++ b/third_party/libwebrtc/moz-patch-stack/0101.patch @@ -1,59 +1,30 @@ -From: Michael Froman -Date: Thu, 28 Sep 2023 14:12:00 +0000 -Subject: Bug 1832465 - remove libXtst usage from libwebrtc. - r=ng,webrtc-reviewers +From: Andreas Pehrson +Date: Wed, 18 Oct 2023 17:21:00 +0000 +Subject: Bug 1859786 - Fix clang-tidy warning in video_capture_impl.cc. + r=webrtc-reviewers,mjf -Differential Revision: https://phabricator.services.mozilla.com/D189386 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/0ec1b33b95dbb2d39355f28b2812fe25b4ad9f20 +clang-tidy says: + 'auto dataCallBack' can be declared as 'auto *dataCallBack' + +Differential Revision: https://phabricator.services.mozilla.com/D191302 +Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/26c84d214137a1b0de0902c7038756964e5786f4 --- - modules/desktop_capture/BUILD.gn | 3 +++ - modules/desktop_capture/linux/x11/shared_x_display.cc | 4 ++++ - 2 files changed, 7 insertions(+) + modules/video_capture/video_capture_impl.cc | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) -diff --git a/modules/desktop_capture/BUILD.gn b/modules/desktop_capture/BUILD.gn -index c184e2fbbc..bcea427de7 100644 ---- a/modules/desktop_capture/BUILD.gn -+++ b/modules/desktop_capture/BUILD.gn -@@ -382,6 +382,9 @@ rtc_library("desktop_capture") { - "Xrandr", - "Xtst", - ] -+ if (build_with_mozilla) { -+ libs -= [ "Xtst" ] -+ } +diff --git a/modules/video_capture/video_capture_impl.cc b/modules/video_capture/video_capture_impl.cc +index 1bddaf824d..15dfb7fe1f 100644 +--- a/modules/video_capture/video_capture_impl.cc ++++ b/modules/video_capture/video_capture_impl.cc +@@ -134,7 +134,7 @@ int32_t VideoCaptureImpl::DeliverCapturedFrame(VideoFrame& captureFrame) { + + UpdateFrameCount(); // frame count used for local frame rate callback. 
+ +- for (auto dataCallBack : _dataCallBacks) { ++ for (auto* dataCallBack : _dataCallBacks) { + dataCallBack->OnFrame(captureFrame); } - if (!is_win && !is_mac && !rtc_use_x11_extensions && !rtc_use_pipewire && -diff --git a/modules/desktop_capture/linux/x11/shared_x_display.cc b/modules/desktop_capture/linux/x11/shared_x_display.cc -index d690b0e2ba..3f3617b074 100644 ---- a/modules/desktop_capture/linux/x11/shared_x_display.cc -+++ b/modules/desktop_capture/linux/x11/shared_x_display.cc -@@ -11,7 +11,9 @@ - #include "modules/desktop_capture/linux/x11/shared_x_display.h" - - #include -+#if !defined(WEBRTC_MOZILLA_BUILD) - #include -+#endif - - #include - -@@ -95,6 +97,7 @@ void SharedXDisplay::ProcessPendingXEvents() { - } - - void SharedXDisplay::IgnoreXServerGrabs() { -+#if !defined(WEBRTC_MOZILLA_BUILD) - int test_event_base = 0; - int test_error_base = 0; - int major = 0; -@@ -103,6 +106,7 @@ void SharedXDisplay::IgnoreXServerGrabs() { - &minor)) { - XTestGrabControl(display(), true); - } -+#endif - } - - } // namespace webrtc -- -2.37.3 +2.34.1 diff --git a/third_party/libwebrtc/moz-patch-stack/0102.patch b/third_party/libwebrtc/moz-patch-stack/0102.patch deleted file mode 100644 index 7368b19094a2..000000000000 --- a/third_party/libwebrtc/moz-patch-stack/0102.patch +++ /dev/null @@ -1,94 +0,0 @@ -From: Dan Baker -Date: Mon, 2 Oct 2023 17:17:00 +0000 -Subject: Bug 1851693 - (fix-279a05475d) Revert addition of race checker which - is causing tsan failurs until better fix is landed upstream.r=pehrsons - -This merely reverses a race checker that is too strict for our use, maintaining the code we had before, so we can fix upstream in the meantime tracked with Bug 1856392. - -Differential Revision: https://phabricator.services.mozilla.com/D189396 -Mercurial Revision: https://hg.mozilla.org/mozilla-central/rev/e626ce7279e6575e68d0e43de3dfd3ed59b00a75 ---- - modules/video_capture/linux/video_capture_v4l2.cc | 7 ------- - modules/video_capture/linux/video_capture_v4l2.h | 7 +++---- - 2 files changed, 3 insertions(+), 11 deletions(-) - -diff --git a/modules/video_capture/linux/video_capture_v4l2.cc b/modules/video_capture/linux/video_capture_v4l2.cc -index d6813b13fd..c887683dc8 100644 ---- a/modules/video_capture/linux/video_capture_v4l2.cc -+++ b/modules/video_capture/linux/video_capture_v4l2.cc -@@ -110,7 +110,6 @@ int32_t VideoCaptureModuleV4L2::Init(const char* deviceUniqueIdUTF8) { - - VideoCaptureModuleV4L2::~VideoCaptureModuleV4L2() { - RTC_DCHECK_RUN_ON(&api_checker_); -- RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); - - StopCapture(); - if (_deviceFd != -1) -@@ -120,7 +119,6 @@ VideoCaptureModuleV4L2::~VideoCaptureModuleV4L2() { - int32_t VideoCaptureModuleV4L2::StartCapture( - const VideoCaptureCapability& capability) { - RTC_DCHECK_RUN_ON(&api_checker_); -- RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); - - if (_captureStarted) { - if (capability == _requestedCapability) { -@@ -318,7 +316,6 @@ int32_t VideoCaptureModuleV4L2::StopCapture() { - _captureThread.Finalize(); - } - -- RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); - MutexLock lock(&capture_lock_); - if (_captureStarted) { - _captureStarted = false; -@@ -336,7 +333,6 @@ int32_t VideoCaptureModuleV4L2::StopCapture() { - // critical section protected by the caller - - bool VideoCaptureModuleV4L2::AllocateVideoBuffers() { -- RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); - struct v4l2_requestbuffers rbuffer; - memset(&rbuffer, 0, sizeof(v4l2_requestbuffers)); - -@@ -387,7 +383,6 @@ bool 
VideoCaptureModuleV4L2::AllocateVideoBuffers() { - } - - bool VideoCaptureModuleV4L2::DeAllocateVideoBuffers() { -- RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); - // unmap buffers - for (int i = 0; i < _buffersAllocatedByDevice; i++) - munmap(_pool[i].start, _pool[i].length); -@@ -405,12 +400,10 @@ bool VideoCaptureModuleV4L2::DeAllocateVideoBuffers() { - } - - bool VideoCaptureModuleV4L2::CaptureStarted() { -- RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); - return _captureStarted; - } - - bool VideoCaptureModuleV4L2::CaptureProcess() { -- RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); - - int retVal = 0; - struct pollfd rSet; -diff --git a/modules/video_capture/linux/video_capture_v4l2.h b/modules/video_capture/linux/video_capture_v4l2.h -index 0191e41876..61358d0325 100644 ---- a/modules/video_capture/linux/video_capture_v4l2.h -+++ b/modules/video_capture/linux/video_capture_v4l2.h -@@ -45,12 +45,11 @@ class VideoCaptureModuleV4L2 : public VideoCaptureImpl { - Mutex capture_lock_ RTC_ACQUIRED_BEFORE(api_lock_); - bool quit_ RTC_GUARDED_BY(capture_lock_); - int32_t _deviceId RTC_GUARDED_BY(api_checker_); -- int32_t _deviceFd RTC_GUARDED_BY(capture_checker_); -+ int32_t _deviceFd; - - int32_t _buffersAllocatedByDevice RTC_GUARDED_BY(capture_lock_); -- VideoCaptureCapability configured_capability_ -- RTC_GUARDED_BY(capture_checker_); -- bool _captureStarted RTC_GUARDED_BY(capture_checker_); -+ VideoCaptureCapability configured_capability_; -+ bool _captureStarted; - struct Buffer { - void* start; - size_t length; --- -2.37.3 - diff --git a/third_party/libwebrtc/moz-patch-stack/0e9556a90c.no-op-cherry-pick-msg b/third_party/libwebrtc/moz-patch-stack/0e9556a90c.no-op-cherry-pick-msg deleted file mode 100644 index 331d5d0fdfc7..000000000000 --- a/third_party/libwebrtc/moz-patch-stack/0e9556a90c.no-op-cherry-pick-msg +++ /dev/null @@ -1 +0,0 @@ -We cherry-picked this in bug 1817263 diff --git a/third_party/libwebrtc/moz-patch-stack/1d835705c9.no-op-cherry-pick-msg b/third_party/libwebrtc/moz-patch-stack/1d835705c9.no-op-cherry-pick-msg new file mode 100644 index 000000000000..d494932f1e65 --- /dev/null +++ b/third_party/libwebrtc/moz-patch-stack/1d835705c9.no-op-cherry-pick-msg @@ -0,0 +1 @@ +We cherry-picked this in bug 1857862 diff --git a/third_party/libwebrtc/moz-patch-stack/299cdc9057.no-op-cherry-pick-msg b/third_party/libwebrtc/moz-patch-stack/299cdc9057.no-op-cherry-pick-msg deleted file mode 100644 index 0bf09898fc1c..000000000000 --- a/third_party/libwebrtc/moz-patch-stack/299cdc9057.no-op-cherry-pick-msg +++ /dev/null @@ -1 +0,0 @@ -We already cherry-picked this when we vendored 8fbabf340d. diff --git a/third_party/libwebrtc/moz-patch-stack/666d707450.no-op-cherry-pick-msg b/third_party/libwebrtc/moz-patch-stack/666d707450.no-op-cherry-pick-msg deleted file mode 100644 index 843f6c0696f4..000000000000 --- a/third_party/libwebrtc/moz-patch-stack/666d707450.no-op-cherry-pick-msg +++ /dev/null @@ -1 +0,0 @@ -We cherry-picked this in bug 1840150. 
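Editor's note (not part of the vendored diff): patches 0098 and 0099 above exist only to keep the tree building with GCC as well as clang. 0098 compiles the non-constexpr RTC_CHECK_NOTREACHED() call inside a constexpr function only for clang and gcc >= 9, and 0099 adds a GCC branch next to the clang-only -Wdeprecated-declarations pragmas. The following is a minimal, self-contained sketch of both guard patterns under assumed names; ColorToString, LegacySend and CallLegacySend are hypothetical and do not appear in the vendored sources, and std::abort() stands in for RTC_CHECK_NOTREACHED().

// Illustrative sketch only -- not part of the vendored libwebrtc sources.
#include <cstdio>
#include <cstdlib>
#include <string_view>

// Pattern from 0098: gcc-8 rejects a constexpr function whose body contains a
// call to a non-constexpr function, even on a path that is never taken during
// constant evaluation, so the "unreachable" call is guarded by compiler/version.
constexpr std::string_view ColorToString(int color) {
  switch (color) {
    case 0:
      return "red";
    case 1:
      return "green";
  }
#if defined(__clang__) || (defined(__GNUC__) && __GNUC__ >= 9)
  std::abort();  // stand-in for RTC_CHECK_NOTREACHED(); deliberately non-constexpr
#endif
  return "";
}

// Pattern from 0099: suppress -Wdeprecated-declarations around a call to a
// deprecated function under both clang and gcc; the clang pragmas are kept out
// of gcc's sight so it does not emit unknown-pragma warnings.
[[deprecated("use the new API")]] inline void LegacySend() { std::puts("send"); }

inline void CallLegacySend() {
#if defined(__clang__)
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
#elif defined(__GNUC__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
#endif
  LegacySend();
#if defined(__clang__)
#pragma clang diagnostic pop
#elif defined(__GNUC__)
#pragma GCC diagnostic pop
#endif
}

int main() {
  std::string_view v = ColorToString(1);
  std::printf("%.*s\n", static_cast<int>(v.size()), v.data());
  CallLegacySend();
}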
diff --git a/third_party/libwebrtc/moz-patch-stack/8fcc6df79d.no-op-cherry-pick-msg b/third_party/libwebrtc/moz-patch-stack/8fcc6df79d.no-op-cherry-pick-msg deleted file mode 100644 index 31f37745a0ed..000000000000 --- a/third_party/libwebrtc/moz-patch-stack/8fcc6df79d.no-op-cherry-pick-msg +++ /dev/null @@ -1 +0,0 @@ -We cherry-picked this in bug 1841851 diff --git a/third_party/libwebrtc/moz-patch-stack/9d8fb97b3c.no-op-cherry-pick-msg b/third_party/libwebrtc/moz-patch-stack/9d8fb97b3c.no-op-cherry-pick-msg new file mode 100644 index 000000000000..7c20bc90c6be --- /dev/null +++ b/third_party/libwebrtc/moz-patch-stack/9d8fb97b3c.no-op-cherry-pick-msg @@ -0,0 +1 @@ +We already cherry-picked this when we vendored 402f60c2ea. diff --git a/third_party/libwebrtc/moz-patch-stack/c0ed83eac2.no-op-cherry-pick-msg b/third_party/libwebrtc/moz-patch-stack/c0ed83eac2.no-op-cherry-pick-msg deleted file mode 100644 index a1aaaa0425c5..000000000000 --- a/third_party/libwebrtc/moz-patch-stack/c0ed83eac2.no-op-cherry-pick-msg +++ /dev/null @@ -1 +0,0 @@ -We already cherry-picked this when we vendored 2abe6e2214. diff --git a/third_party/libwebrtc/moz-patch-stack/e21745a78b.no-op-cherry-pick-msg b/third_party/libwebrtc/moz-patch-stack/e21745a78b.no-op-cherry-pick-msg deleted file mode 100644 index 9750b3ecd3cc..000000000000 --- a/third_party/libwebrtc/moz-patch-stack/e21745a78b.no-op-cherry-pick-msg +++ /dev/null @@ -1 +0,0 @@ -We cherry-picked this in bug 1839829 diff --git a/third_party/libwebrtc/moz-patch-stack/eec1810760.no-op-cherry-pick-msg b/third_party/libwebrtc/moz-patch-stack/eec1810760.no-op-cherry-pick-msg deleted file mode 100644 index e6b9a0b40e7f..000000000000 --- a/third_party/libwebrtc/moz-patch-stack/eec1810760.no-op-cherry-pick-msg +++ /dev/null @@ -1 +0,0 @@ -We already cherry-picked this when we vendored 43670de877. 
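Editor's note (not part of the vendored diff): the video_capture_impl.cc and video_capture_v4l2.* changes above revolve around Clang's thread-safety and sequence-checker annotations (RTC_GUARDED_BY, RTC_DCHECK_RUN_ON). Below is a minimal sketch of the RTC_GUARDED_BY + MutexLock pattern those patches adjust; webrtc::Mutex, webrtc::MutexLock and RTC_GUARDED_BY are the real primitives used in the tree, while the FrameCounter class and its members are hypothetical examples.

// Illustrative sketch only -- not part of the vendored libwebrtc sources.
#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"

class FrameCounter {
 public:
  void Increment() {
    webrtc::MutexLock lock(&mutex_);
    ++count_;  // OK: mutex_ is held, as RTC_GUARDED_BY(mutex_) requires.
  }

  int count() const {
    webrtc::MutexLock lock(&mutex_);
    return count_;
  }

 private:
  mutable webrtc::Mutex mutex_;
  // Clang's -Wthread-safety analysis flags any access to count_ made without
  // holding mutex_, which is the kind of static check the patches above keep.
  int count_ RTC_GUARDED_BY(mutex_) = 0;
};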
diff --git a/third_party/libwebrtc/moz.build b/third_party/libwebrtc/moz.build index aa47ddba57a5..f528cb110818 100644 --- a/third_party/libwebrtc/moz.build +++ b/third_party/libwebrtc/moz.build @@ -150,7 +150,6 @@ DIRS += [ "/third_party/libwebrtc/logging/rtc_event_video_gn", "/third_party/libwebrtc/logging/rtc_stream_config_gn", "/third_party/libwebrtc/media/codec_gn", - "/third_party/libwebrtc/media/delayable_gn", "/third_party/libwebrtc/media/media_channel_gn", "/third_party/libwebrtc/media/media_channel_impl_gn", "/third_party/libwebrtc/media/media_constants_gn", @@ -311,6 +310,7 @@ DIRS += [ "/third_party/libwebrtc/rtc_base/async_resolver_interface_gn", "/third_party/libwebrtc/rtc_base/audio_format_to_string_gn", "/third_party/libwebrtc/rtc_base/bit_buffer_gn", + "/third_party/libwebrtc/rtc_base/bitrate_tracker_gn", "/third_party/libwebrtc/rtc_base/bitstream_reader_gn", "/third_party/libwebrtc/rtc_base/buffer_gn", "/third_party/libwebrtc/rtc_base/byte_buffer_gn", @@ -340,6 +340,7 @@ DIRS += [ "/third_party/libwebrtc/rtc_base/experiments/rate_control_settings_gn", "/third_party/libwebrtc/rtc_base/experiments/rtt_mult_experiment_gn", "/third_party/libwebrtc/rtc_base/experiments/stable_target_rate_experiment_gn", + "/third_party/libwebrtc/rtc_base/frequency_tracker_gn", "/third_party/libwebrtc/rtc_base/gtest_prod_gn", "/third_party/libwebrtc/rtc_base/histogram_percentile_counter_gn", "/third_party/libwebrtc/rtc_base/ignore_wundef_gn", diff --git a/third_party/libwebrtc/p2p/BUILD.gn b/third_party/libwebrtc/p2p/BUILD.gn index 105536b7c0ac..529413c439e0 100644 --- a/third_party/libwebrtc/p2p/BUILD.gn +++ b/third_party/libwebrtc/p2p/BUILD.gn @@ -67,6 +67,8 @@ rtc_library("rtc_p2p") { "base/pseudo_tcp.h", "base/regathering_controller.cc", "base/regathering_controller.h", + "base/stun_dictionary.cc", + "base/stun_dictionary.h", "base/stun_port.cc", "base/stun_port.h", "base/stun_request.cc", @@ -202,6 +204,7 @@ if (rtc_include_tests) { deps = [ ":rtc_p2p", "../rtc_base:net_helpers", + "../rtc_base:net_test_helpers", "../rtc_base:task_queue_for_test", "../rtc_base:threading", "../rtc_base/memory:always_valid_pointer", @@ -271,6 +274,7 @@ if (rtc_include_tests) { "base/port_unittest.cc", "base/pseudo_tcp_unittest.cc", "base/regathering_controller_unittest.cc", + "base/stun_dictionary_unittest.cc", "base/stun_port_unittest.cc", "base/stun_request_unittest.cc", "base/stun_server_unittest.cc", @@ -312,6 +316,7 @@ if (rtc_include_tests) { "../rtc_base:mdns_responder_interface", "../rtc_base:net_helper", "../rtc_base:net_helpers", + "../rtc_base:net_test_helpers", "../rtc_base:network", "../rtc_base:network_constants", "../rtc_base:proxy_info", diff --git a/third_party/libwebrtc/p2p/base/basic_ice_controller.cc b/third_party/libwebrtc/p2p/base/basic_ice_controller.cc index 55f187cb9afd..a0ff4cf144aa 100644 --- a/third_party/libwebrtc/p2p/base/basic_ice_controller.cc +++ b/third_party/libwebrtc/p2p/base/basic_ice_controller.cc @@ -540,7 +540,8 @@ BasicIceController::SortAndSwitchConnection(IceSwitchReason reason) { }); RTC_LOG(LS_VERBOSE) << "Sorting " << connections_.size() - << " available connections"; + << " available connections due to: " + << IceSwitchReasonToString(reason); for (size_t i = 0; i < connections_.size(); ++i) { RTC_LOG(LS_VERBOSE) << connections_[i]->ToString(); } diff --git a/third_party/libwebrtc/p2p/base/connection.cc b/third_party/libwebrtc/p2p/base/connection.cc index c5e6993c8745..1ef42cc76f71 100644 --- a/third_party/libwebrtc/p2p/base/connection.cc +++ 
b/third_party/libwebrtc/p2p/base/connection.cc @@ -704,6 +704,28 @@ void Connection::SendStunBindingResponse(const StunMessage* message) { } } + const StunByteStringAttribute* delta = + message->GetByteString(STUN_ATTR_GOOG_DELTA); + if (delta) { + if (field_trials_->answer_goog_delta && goog_delta_consumer_) { + auto ack = (*goog_delta_consumer_)(delta); + if (ack) { + RTC_LOG(LS_INFO) << "Sending GOOG_DELTA_ACK" + << " delta len: " << delta->length(); + response.AddAttribute(std::move(ack)); + } else { + RTC_LOG(LS_ERROR) << "GOOG_DELTA consumer did not return ack!"; + } + } else { + RTC_LOG(LS_WARNING) << "Ignore GOOG_DELTA" + << " len: " << delta->length() + << " answer_goog_delta = " + << field_trials_->answer_goog_delta + << " goog_delta_consumer_ = " + << goog_delta_consumer_.has_value(); + } + } + response.AddMessageIntegrity(local_candidate().password()); response.AddFingerprint(); @@ -933,7 +955,8 @@ int64_t Connection::last_ping_sent() const { return last_ping_sent_; } -void Connection::Ping(int64_t now) { +void Connection::Ping(int64_t now, + std::unique_ptr delta) { RTC_DCHECK_RUN_ON(network_thread_); if (!port_) return; @@ -948,10 +971,11 @@ void Connection::Ping(int64_t now) { nomination = nomination_; } - auto req = - std::make_unique(requests_, this, BuildPingRequest()); + bool has_delta = delta != nullptr; + auto req = std::make_unique( + requests_, this, BuildPingRequest(std::move(delta))); - if (ShouldSendGoogPing(req->msg())) { + if (!has_delta && ShouldSendGoogPing(req->msg())) { auto message = std::make_unique(GOOG_PING_REQUEST, req->id()); message->AddMessageIntegrity32(remote_candidate_.password()); req.reset(new ConnectionRequest(requests_, this, std::move(message))); @@ -966,7 +990,8 @@ void Connection::Ping(int64_t now) { num_pings_sent_++; } -std::unique_ptr Connection::BuildPingRequest() { +std::unique_ptr Connection::BuildPingRequest( + std::unique_ptr delta) { auto message = std::make_unique(STUN_BINDING_REQUEST); // Note that the order of attributes does not impact the parsing on the // receiver side. The attribute is retrieved then by iterating and matching @@ -1022,6 +1047,13 @@ std::unique_ptr Connection::BuildPingRequest() { list->AddTypeAtIndex(kSupportGoogPingVersionRequestIndex, kGoogPingVersion); message->AddAttribute(std::move(list)); } + + if (delta) { + RTC_DCHECK(delta->type() == STUN_ATTR_GOOG_DELTA); + RTC_LOG(LS_INFO) << "Sending GOOG_DELTA: len: " << delta->length(); + message->AddAttribute(std::move(delta)); + } + message->AddMessageIntegrity(remote_candidate_.password()); message->AddFingerprint(); @@ -1393,6 +1425,34 @@ void Connection::OnConnectionRequestResponse(StunRequest* request, cached_stun_binding_ = request->msg()->Clone(); } } + + // Did we send a delta ? + const bool sent_goog_delta = + request->msg()->GetByteString(STUN_ATTR_GOOG_DELTA) != nullptr; + // Did we get a GOOG_DELTA_ACK ? + const StunUInt64Attribute* delta_ack = + response->GetUInt64(STUN_ATTR_GOOG_DELTA_ACK); + + if (goog_delta_ack_consumer_) { + if (sent_goog_delta && delta_ack) { + RTC_LOG(LS_VERBOSE) << "Got GOOG_DELTA_ACK len: " << delta_ack->length(); + (*goog_delta_ack_consumer_)(delta_ack); + } else if (sent_goog_delta) { + // We sent DELTA but did not get a DELTA_ACK. + // This means that remote does not support GOOG_DELTA + RTC_LOG(LS_INFO) << "NO DELTA ACK => disable GOOG_DELTA"; + (*goog_delta_ack_consumer_)( + webrtc::RTCError(webrtc::RTCErrorType::UNSUPPORTED_OPERATION)); + } else if (delta_ack) { + // We did NOT send DELTA but got a DELTA_ACK. 
+ // That is internal error. + RTC_LOG(LS_ERROR) << "DELTA ACK w/o DELTA => disable GOOG_DELTA"; + (*goog_delta_ack_consumer_)( + webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR)); + } + } else if (delta_ack) { + RTC_LOG(LS_ERROR) << "Discard GOOG_DELTA_ACK, no consumer"; + } } void Connection::OnConnectionRequestErrorResponse(ConnectionRequest* request, diff --git a/third_party/libwebrtc/p2p/base/connection.h b/third_party/libwebrtc/p2p/base/connection.h index 4e6c7d91bee8..8e439855fad9 100644 --- a/third_party/libwebrtc/p2p/base/connection.h +++ b/third_party/libwebrtc/p2p/base/connection.h @@ -13,6 +13,7 @@ #include #include +#include #include #include "absl/strings/string_view.h" @@ -205,12 +206,15 @@ class RTC_EXPORT Connection : public CandidatePairInterface { // Called when this connection should try checking writability again. int64_t last_ping_sent() const; - void Ping(int64_t now); + void Ping(int64_t now, + std::unique_ptr delta = nullptr); void ReceivedPingResponse( int rtt, absl::string_view request_id, const absl::optional& nomination = absl::nullopt); - std::unique_ptr BuildPingRequest() RTC_RUN_ON(network_thread_); + std::unique_ptr BuildPingRequest( + std::unique_ptr delta) + RTC_RUN_ON(network_thread_); int64_t last_ping_response_received() const; const absl::optional& last_ping_id_received() const; @@ -319,7 +323,7 @@ class RTC_EXPORT Connection : public CandidatePairInterface { std::unique_ptr BuildPingRequestForTest() { RTC_DCHECK_RUN_ON(network_thread_); - return BuildPingRequest(); + return BuildPingRequest(nullptr); } // Public for unit tests. @@ -333,6 +337,20 @@ class RTC_EXPORT Connection : public CandidatePairInterface { remote_candidate_.set_password(pwd); } + void SetStunDictConsumer( + std::function( + const StunByteStringAttribute*)> goog_delta_consumer, + std::function)> + goog_delta_ack_consumer) { + goog_delta_consumer_ = std::move(goog_delta_consumer); + goog_delta_ack_consumer_ = std::move(goog_delta_ack_consumer); + } + + void ClearStunDictConsumer() { + goog_delta_consumer_ = absl::nullopt; + goog_delta_ack_consumer_ = absl::nullopt; + } + protected: // A ConnectionRequest is a simple STUN ping used to determine writability. class ConnectionRequest; @@ -475,6 +493,13 @@ class RTC_EXPORT Connection : public CandidatePairInterface { const IceFieldTrials* field_trials_; rtc::EventBasedExponentialMovingAverage rtt_estimate_ RTC_GUARDED_BY(network_thread_); + + absl::optional( + const StunByteStringAttribute*)>> + goog_delta_consumer_; + absl::optional< + std::function)>> + goog_delta_ack_consumer_; }; // ProxyConnection defers all the interesting work to the port. 
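Editor's note (not part of the vendored diff): the Connection changes above carry a writer's pending dictionary updates inside connectivity-check pings as STUN_ATTR_GOOG_DELTA and acknowledge them with STUN_ATTR_GOOG_DELTA_ACK. The sketch below round-trips one attribute using only the StunDictionaryWriter/StunDictionaryView API introduced later in this commit (p2p/base/stun_dictionary.h); it mirrors the Sync() helper in the new stun_dictionary_unittest.cc. The key value 100 and the RoundTripExample function are arbitrary examples, and the in-process call to ApplyDelta stands in for the ping/response exchange done by Connection and P2PTransportChannel.

// Illustrative sketch only -- not part of the vendored libwebrtc sources.
#include <memory>

#include "p2p/base/stun_dictionary.h"
#include "rtc_base/checks.h"

namespace {

void RoundTripExample() {
  cricket::StunDictionaryWriter writer;  // local side, sends deltas in pings
  cricket::StunDictionaryView view;      // remote side, applies received deltas

  // Stage a value locally; key 100 is an arbitrary example key.
  writer.SetUInt32(100)->SetValue(27);
  RTC_DCHECK(writer.Pending(100));

  // Serialize the pending changes into a STUN_ATTR_GOOG_DELTA attribute. In
  // the real code path P2PTransportChannel attaches this to Connection::Ping.
  std::unique_ptr<cricket::StunByteStringAttribute> delta = writer.CreateDelta();
  RTC_DCHECK(delta);

  // The remote view applies the delta and produces a GOOG_DELTA_ACK attribute.
  auto result = view.ApplyDelta(*delta);
  RTC_DCHECK(result.ok());
  RTC_DCHECK_EQ(view.GetUInt32(100)->value(), 27u);

  // Feeding the ack back to the writer clears the pending modification.
  writer.ApplyDeltaAck(*result.value().first);
  RTC_DCHECK(!writer.Pending(100));
}

}  // namespace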
diff --git a/third_party/libwebrtc/p2p/base/fake_port_allocator.h b/third_party/libwebrtc/p2p/base/fake_port_allocator.h index 05c631361f21..20f6780b5836 100644 --- a/third_party/libwebrtc/p2p/base/fake_port_allocator.h +++ b/third_party/libwebrtc/p2p/base/fake_port_allocator.h @@ -22,6 +22,7 @@ #include "p2p/base/udp_port.h" #include "rtc_base/memory/always_valid_pointer.h" #include "rtc_base/net_helpers.h" +#include "rtc_base/net_test_helpers.h" #include "rtc_base/task_queue_for_test.h" #include "rtc_base/thread.h" diff --git a/third_party/libwebrtc/p2p/base/ice_transport_internal.h b/third_party/libwebrtc/p2p/base/ice_transport_internal.h index 55f12382aabd..98deb492b0d9 100644 --- a/third_party/libwebrtc/p2p/base/ice_transport_internal.h +++ b/third_party/libwebrtc/p2p/base/ice_transport_internal.h @@ -14,6 +14,7 @@ #include #include +#include #include #include "absl/strings/string_view.h" @@ -24,6 +25,7 @@ #include "p2p/base/connection.h" #include "p2p/base/packet_transport_internal.h" #include "p2p/base/port.h" +#include "p2p/base/stun_dictionary.h" #include "p2p/base/transport_description.h" #include "rtc_base/network_constants.h" #include "rtc_base/system/rtc_export.h" @@ -293,6 +295,11 @@ class RTC_EXPORT IceTransportInternal : public rtc::PacketTransportInternal { virtual absl::optional GetSelectedCandidatePair() const = 0; + virtual absl::optional> + GetDictionaryWriter() { + return absl::nullopt; + } + sigslot::signal1 SignalGatheringState; // Handles sending and receiving of candidates. @@ -330,6 +337,37 @@ class RTC_EXPORT IceTransportInternal : public rtc::PacketTransportInternal { // Invoked when the transport is being destroyed. sigslot::signal1 SignalDestroyed; + + // Invoked when remote dictionary has been updated, + // i.e. modifications to attributes from remote ice agent has + // reflected in our StunDictionaryView. + template + void AddDictionaryViewUpdatedCallback(const void* tag, F&& callback) { + dictionary_view_updated_callback_list_.AddReceiver( + tag, std::forward(callback)); + } + void RemoveDictionaryViewUpdatedCallback(const void* tag) { + dictionary_view_updated_callback_list_.RemoveReceivers(tag); + } + + // Invoked when local dictionary has been synchronized, + // i.e. remote ice agent has reported acknowledged updates from us. 
+ template + void AddDictionaryWriterSyncedCallback(const void* tag, F&& callback) { + dictionary_writer_synced_callback_list_.AddReceiver( + tag, std::forward(callback)); + } + void RemoveDictionaryWriterSyncedCallback(const void* tag) { + dictionary_writer_synced_callback_list_.RemoveReceivers(tag); + } + + protected: + webrtc::CallbackList> + dictionary_view_updated_callback_list_; + webrtc::CallbackList + dictionary_writer_synced_callback_list_; }; } // namespace cricket diff --git a/third_party/libwebrtc/p2p/base/p2p_transport_channel.cc b/third_party/libwebrtc/p2p/base/p2p_transport_channel.cc index af4b80093037..d916983fe26f 100644 --- a/third_party/libwebrtc/p2p/base/p2p_transport_channel.cc +++ b/third_party/libwebrtc/p2p/base/p2p_transport_channel.cc @@ -286,6 +286,13 @@ void P2PTransportChannel::AddConnection(Connection* connection) { connection->set_ice_event_log(&ice_event_log_); connection->SetIceFieldTrials(&ice_field_trials_); + connection->SetStunDictConsumer( + [this](const StunByteStringAttribute* delta) { + return GoogDeltaReceived(delta); + }, + [this](webrtc::RTCErrorOr delta_ack) { + GoogDeltaAckReceived(std::move(delta_ack)); + }); LogCandidatePairConfig(connection, webrtc::IceCandidatePairConfigType::kAdded); @@ -727,7 +734,10 @@ void P2PTransportChannel::ParseFieldTrials( &ice_field_trials_.dead_connection_timeout_ms, // Stop gathering on strongly connected. "stop_gather_on_strongly_connected", - &ice_field_trials_.stop_gather_on_strongly_connected) + &ice_field_trials_.stop_gather_on_strongly_connected, + // GOOG_DELTA + "enable_goog_delta", &ice_field_trials_.enable_goog_delta, + "answer_goog_delta", &ice_field_trials_.answer_goog_delta) ->Parse(field_trials->Lookup("WebRTC-IceFieldTrials")); if (ice_field_trials_.dead_connection_timeout_ms < 30000) { @@ -782,6 +792,10 @@ void P2PTransportChannel::ParseFieldTrials( ice_field_trials_.extra_ice_ping = field_trials->IsEnabled("WebRTC-ExtraICEPing"); + + if (!ice_field_trials_.enable_goog_delta) { + stun_dict_writer_.Disable(); + } } const IceConfig& P2PTransportChannel::config() const { @@ -2058,7 +2072,7 @@ void P2PTransportChannel::PingConnection(Connection* conn) { conn->set_nomination(nomination); conn->set_use_candidate_attr(use_candidate_attr); last_ping_sent_ms_ = rtc::TimeMillis(); - conn->Ping(last_ping_sent_ms_); + conn->Ping(last_ping_sent_ms_, stun_dict_writer_.CreateDelta()); } uint32_t P2PTransportChannel::GetNominationAttr(Connection* conn) const { @@ -2129,11 +2143,12 @@ void P2PTransportChannel::OnConnectionDestroyed(Connection* connection) { } } -void P2PTransportChannel::RemoveConnection(const Connection* connection) { +void P2PTransportChannel::RemoveConnection(Connection* connection) { RTC_DCHECK_RUN_ON(network_thread_); auto it = absl::c_find(connections_, connection); RTC_DCHECK(it != connections_.end()); connections_.erase(it); + connection->ClearStunDictConsumer(); ice_controller_->OnConnectionDestroyed(connection); } @@ -2307,4 +2322,34 @@ void P2PTransportChannel::LogCandidatePairConfig( conn->ToLogDescription()); } +std::unique_ptr P2PTransportChannel::GoogDeltaReceived( + const StunByteStringAttribute* delta) { + auto error = stun_dict_view_.ApplyDelta(*delta); + if (error.ok()) { + auto& result = error.value(); + RTC_LOG(LS_INFO) << "Applied GOOG_DELTA"; + dictionary_view_updated_callback_list_.Send(this, stun_dict_view_, + result.second); + return std::move(result.first); + } else { + RTC_LOG(LS_ERROR) << "Failed to apply GOOG_DELTA: " + << error.error().message(); + } + return 
nullptr; +} + +void P2PTransportChannel::GoogDeltaAckReceived( + webrtc::RTCErrorOr error_or_ack) { + if (error_or_ack.ok()) { + RTC_LOG(LS_ERROR) << "Applied GOOG_DELTA_ACK"; + auto ack = error_or_ack.value(); + stun_dict_writer_.ApplyDeltaAck(*ack); + dictionary_writer_synced_callback_list_.Send(this, stun_dict_writer_); + } else { + stun_dict_writer_.Disable(); + RTC_LOG(LS_ERROR) << "Failed GOOG_DELTA_ACK: " + << error_or_ack.error().message(); + } +} + } // namespace cricket diff --git a/third_party/libwebrtc/p2p/base/p2p_transport_channel.h b/third_party/libwebrtc/p2p/base/p2p_transport_channel.h index a0729c163d36..dc27b202c269 100644 --- a/third_party/libwebrtc/p2p/base/p2p_transport_channel.h +++ b/third_party/libwebrtc/p2p/base/p2p_transport_channel.h @@ -61,6 +61,7 @@ #include "p2p/base/port_allocator.h" #include "p2p/base/port_interface.h" #include "p2p/base/regathering_controller.h" +#include "p2p/base/stun_dictionary.h" #include "p2p/base/transport_description.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/checks.h" @@ -216,7 +217,7 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal, int check_receiving_interval() const; absl::optional network_route() const override; - void RemoveConnection(const Connection* connection); + void RemoveConnection(Connection* connection); // Helper method used only in unittest. rtc::DiffServCodePoint DefaultDscpValue() const; @@ -254,6 +255,11 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal, return ss.Release(); } + absl::optional> + GetDictionaryWriter() override { + return stun_dict_writer_; + } + private: P2PTransportChannel( absl::string_view transport_name, @@ -494,6 +500,10 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal, Candidate candidate, const webrtc::AsyncDnsResolverResult& result); + std::unique_ptr GoogDeltaReceived( + const StunByteStringAttribute*); + void GoogDeltaAckReceived(webrtc::RTCErrorOr); + // Bytes/packets sent/received on this channel. uint64_t bytes_sent_ = 0; uint64_t bytes_received_ = 0; @@ -509,6 +519,12 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal, // Parsed field trials. IceFieldTrials ice_field_trials_; + + // A dictionary of attributes that will be reflected to peer. + StunDictionaryWriter stun_dict_writer_; + + // A dictionary that tracks attributes from peer. 
+ StunDictionaryView stun_dict_view_; }; } // namespace cricket diff --git a/third_party/libwebrtc/p2p/base/p2p_transport_channel_ice_field_trials.h b/third_party/libwebrtc/p2p/base/p2p_transport_channel_ice_field_trials.h index f19823b21eb4..96a7756484ee 100644 --- a/third_party/libwebrtc/p2p/base/p2p_transport_channel_ice_field_trials.h +++ b/third_party/libwebrtc/p2p/base/p2p_transport_channel_ice_field_trials.h @@ -70,6 +70,10 @@ struct IceFieldTrials { bool piggyback_ice_check_acknowledgement = false; bool extra_ice_ping = false; + + // Announce/enable GOOG_DELTA + bool enable_goog_delta = true; // send GOOG DELTA + bool answer_goog_delta = true; // answer GOOG DELTA }; } // namespace cricket diff --git a/third_party/libwebrtc/p2p/base/p2p_transport_channel_unittest.cc b/third_party/libwebrtc/p2p/base/p2p_transport_channel_unittest.cc index 02cc483d312a..ca8ca8de5559 100644 --- a/third_party/libwebrtc/p2p/base/p2p_transport_channel_unittest.cc +++ b/third_party/libwebrtc/p2p/base/p2p_transport_channel_unittest.cc @@ -65,6 +65,7 @@ using ::testing::DoAll; using ::testing::InSequence; using ::testing::InvokeArgument; using ::testing::InvokeWithoutArgs; +using ::testing::MockFunction; using ::testing::Return; using ::testing::ReturnRef; using ::testing::SaveArg; @@ -3417,6 +3418,38 @@ TEST_F(P2PTransportChannelMultihomedTest, TestVpnOnlyVpn) { kDefaultTimeout, clock); } +TEST_F(P2PTransportChannelMultihomedTest, StunDictionaryPerformsSync) { + rtc::ScopedFakeClock clock; + AddAddress(0, kPublicAddrs[0], "eth0", rtc::ADAPTER_TYPE_CELLULAR); + AddAddress(0, kAlternateAddrs[0], "vpn0", rtc::ADAPTER_TYPE_VPN, + rtc::ADAPTER_TYPE_ETHERNET); + AddAddress(1, kPublicAddrs[1]); + + // Create channels and let them go writable, as usual. + CreateChannels(); + + MockFunction)> + view_updated_func; + ep2_ch1()->AddDictionaryViewUpdatedCallback( + "tag", view_updated_func.AsStdFunction()); + MockFunction + writer_synced_func; + ep1_ch1()->AddDictionaryWriterSyncedCallback( + "tag", writer_synced_func.AsStdFunction()); + auto& dict_writer = ep1_ch1()->GetDictionaryWriter()->get(); + dict_writer.SetByteString(12)->CopyBytes("keso"); + EXPECT_CALL(view_updated_func, Call) + .WillOnce([&](auto* channel, auto& view, auto keys) { + EXPECT_EQ(keys.size(), 1u); + EXPECT_EQ(keys[0], 12); + EXPECT_EQ(view.GetByteString(12)->string_view(), "keso"); + }); + EXPECT_CALL(writer_synced_func, Call).Times(1); + EXPECT_TRUE_SIMULATED_WAIT(CheckConnected(ep1_ch1(), ep2_ch1()), + kMediumTimeout, clock); +} + // A collection of tests which tests a single P2PTransportChannel by sending // pings. class P2PTransportChannelPingTest : public ::testing::Test, diff --git a/third_party/libwebrtc/p2p/base/port_unittest.cc b/third_party/libwebrtc/p2p/base/port_unittest.cc index 1b1c15613635..b27afe2f3932 100644 --- a/third_party/libwebrtc/p2p/base/port_unittest.cc +++ b/third_party/libwebrtc/p2p/base/port_unittest.cc @@ -20,6 +20,7 @@ #include #include +#include "absl/memory/memory.h" #include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/candidate.h" @@ -3833,7 +3834,6 @@ class ConnectionTest : public PortTest { void OnConnectionStateChange(Connection* connection) { num_state_changes_++; } - private: std::unique_ptr lport_; std::unique_ptr rport_; }; @@ -3922,4 +3922,93 @@ TEST_F(ConnectionTest, ConnectionForgetLearnedStateDoesNotTriggerStateChange) { EXPECT_EQ(num_state_changes_, 2); } +// Test normal happy case. +// Sending a delta and getting a delta ack in response. 
+TEST_F(ConnectionTest, SendReceiveGoogDelta) { + constexpr int64_t ms = 10; + Connection* lconn = CreateConnection(ICEROLE_CONTROLLING); + Connection* rconn = CreateConnection(ICEROLE_CONTROLLED); + + std::unique_ptr delta = + absl::WrapUnique(new StunByteStringAttribute(STUN_ATTR_GOOG_DELTA)); + delta->CopyBytes("DELTA"); + + std::unique_ptr delta_ack = + absl::WrapUnique(new StunUInt64Attribute(STUN_ATTR_GOOG_DELTA_ACK, 133)); + + bool received_goog_delta = false; + bool received_goog_delta_ack = false; + lconn->SetStunDictConsumer( + // DeltaReceived + [](const StunByteStringAttribute* delta) + -> std::unique_ptr { return nullptr; }, + // DeltaAckReceived + [&](webrtc::RTCErrorOr error_or_ack) { + received_goog_delta_ack = true; + EXPECT_TRUE(error_or_ack.ok()); + EXPECT_EQ(error_or_ack.value()->value(), 133ull); + }); + + rconn->SetStunDictConsumer( + // DeltaReceived + [&](const StunByteStringAttribute* delta) + -> std::unique_ptr { + received_goog_delta = true; + EXPECT_EQ(delta->string_view(), "DELTA"); + return std::move(delta_ack); + }, + // DeltaAckReceived + [](webrtc::RTCErrorOr error_or__ack) {}); + + lconn->Ping(rtc::TimeMillis(), std::move(delta)); + ASSERT_TRUE_WAIT(lport_->last_stun_msg(), kDefaultTimeout); + ASSERT_TRUE(lport_->last_stun_buf()); + rconn->OnReadPacket(lport_->last_stun_buf()->data(), + lport_->last_stun_buf()->size(), /* packet_time_us */ -1); + EXPECT_TRUE(received_goog_delta); + + clock_.AdvanceTime(webrtc::TimeDelta::Millis(ms)); + ASSERT_TRUE_WAIT(rport_->last_stun_msg(), kDefaultTimeout); + ASSERT_TRUE(rport_->last_stun_buf()); + lconn->OnReadPacket(rport_->last_stun_buf()->data(), + rport_->last_stun_buf()->size(), /* packet_time_us */ -1); + EXPECT_TRUE(received_goog_delta_ack); +} + +// Test that sending a goog delta and not getting +// a delta ack in reply gives an error callback. +TEST_F(ConnectionTest, SendGoogDeltaNoReply) { + constexpr int64_t ms = 10; + Connection* lconn = CreateConnection(ICEROLE_CONTROLLING); + Connection* rconn = CreateConnection(ICEROLE_CONTROLLED); + + std::unique_ptr delta = + absl::WrapUnique(new StunByteStringAttribute(STUN_ATTR_GOOG_DELTA)); + delta->CopyBytes("DELTA"); + + bool received_goog_delta_ack_error = false; + lconn->SetStunDictConsumer( + // DeltaReceived + [](const StunByteStringAttribute* delta) + -> std::unique_ptr { return nullptr; }, + // DeltaAckReceived + [&](webrtc::RTCErrorOr error_or_ack) { + received_goog_delta_ack_error = true; + EXPECT_FALSE(error_or_ack.ok()); + }); + + lconn->Ping(rtc::TimeMillis(), std::move(delta)); + ASSERT_TRUE_WAIT(lport_->last_stun_msg(), kDefaultTimeout); + ASSERT_TRUE(lport_->last_stun_buf()); + rconn->OnReadPacket(lport_->last_stun_buf()->data(), + lport_->last_stun_buf()->size(), /* packet_time_us */ -1); + + clock_.AdvanceTime(webrtc::TimeDelta::Millis(ms)); + ASSERT_TRUE_WAIT(rport_->last_stun_msg(), kDefaultTimeout); + ASSERT_TRUE(rport_->last_stun_buf()); + lconn->OnReadPacket(rport_->last_stun_buf()->data(), + rport_->last_stun_buf()->size(), /* packet_time_us */ -1); + EXPECT_TRUE(received_goog_delta_ack_error); +} + } // namespace cricket diff --git a/third_party/libwebrtc/p2p/base/stun_dictionary.cc b/third_party/libwebrtc/p2p/base/stun_dictionary.cc new file mode 100644 index 000000000000..bf6a1e49c2a7 --- /dev/null +++ b/third_party/libwebrtc/p2p/base/stun_dictionary.cc @@ -0,0 +1,357 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "p2p/base/stun_dictionary.h" + +#include +#include +#include + +#include "rtc_base/logging.h" + +namespace cricket { + +const StunAddressAttribute* StunDictionaryView::GetAddress(int key) const { + const StunAttribute* attr = GetOrNull(key, STUN_VALUE_ADDRESS); + if (attr == nullptr) { + return nullptr; + } + return reinterpret_cast(attr); +} + +const StunUInt32Attribute* StunDictionaryView::GetUInt32(int key) const { + const StunAttribute* attr = GetOrNull(key, STUN_VALUE_UINT32); + if (attr == nullptr) { + return nullptr; + } + return reinterpret_cast(attr); +} + +const StunUInt64Attribute* StunDictionaryView::GetUInt64(int key) const { + const StunAttribute* attr = GetOrNull(key, STUN_VALUE_UINT64); + if (attr == nullptr) { + return nullptr; + } + return reinterpret_cast(attr); +} + +const StunByteStringAttribute* StunDictionaryView::GetByteString( + int key) const { + const StunAttribute* attr = GetOrNull(key, STUN_VALUE_BYTE_STRING); + if (attr == nullptr) { + return nullptr; + } + return reinterpret_cast(attr); +} + +const StunUInt16ListAttribute* StunDictionaryView::GetUInt16List( + int key) const { + const StunAttribute* attr = GetOrNull(key, STUN_VALUE_UINT16_LIST); + if (attr == nullptr) { + return nullptr; + } + return reinterpret_cast(attr); +} + +const StunAttribute* StunDictionaryView::GetOrNull( + int key, + absl::optional type) const { + const auto it = attrs_.find(key); + if (it == attrs_.end()) { + return nullptr; + } + + if (type && it->second->value_type() != *type) { + RTC_LOG(LS_WARNING) << "Get key: " << key << " with type: " << *type + << " found different type: " + << it->second->value_type(); + return nullptr; + } + return (*it).second.get(); +} + +webrtc::RTCErrorOr< + std::pair>>> +StunDictionaryView::ParseDelta(const StunByteStringAttribute& delta) { + rtc::ByteBufferReader buf(delta.bytes(), delta.length()); + uint16_t magic; + if (!buf.ReadUInt16(&magic)) { + return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, + "Failed to read magic number"); + } + if (magic != kDeltaMagic) { + return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, + "Invalid magic number"); + } + + uint16_t delta_version; + if (!buf.ReadUInt16(&delta_version)) { + return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, + "Failed to read version"); + } + + if (delta_version != kDeltaVersion) { + return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, + "Unsupported delta version"); + } + + // Now read all the attributes + std::deque> attrs; + while (buf.Length()) { + uint16_t key, length, value_type; + if (!buf.ReadUInt16(&key)) { + return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, + "Failed to read attribute key"); + } + if (!buf.ReadUInt16(&length)) { + return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, + "Failed to read attribute length"); + } + if (!buf.ReadUInt16(&value_type)) { + return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, + "Failed to read value type"); + } + + StunAttributeValueType value_type_enum = + static_cast(value_type); + std::unique_ptr attr( + StunAttribute::Create(value_type_enum, key, length, nullptr)); + if (!attr) { + return 
webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, + "Failed to create attribute"); + } + if (attr->length() != length) { + return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, + "Inconsistent attribute length"); + } + if (!attr->Read(&buf)) { + return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, + "Failed to read attribute content"); + } + attrs.push_back(std::move(attr)); + } + + // The first attribute should be the version... + if (attrs.empty()) { + return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, + "Empty delta!"); + } + + if (attrs[0]->type() != kVersionKey || + attrs[0]->value_type() != STUN_VALUE_UINT64) { + return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, + "Missing version!"); + } + + uint64_t version_in_delta = + reinterpret_cast(attrs[0].get())->value(); + attrs.pop_front(); + + return std::make_pair(std::max(version_in_delta, version_in_delta), + std::move(attrs)); +} + +// Apply a delta return an StunUInt64Attribute to ack the update. +webrtc::RTCErrorOr< + std::pair, std::vector>> +StunDictionaryView::ApplyDelta(const StunByteStringAttribute& delta) { + auto parsed_delta = ParseDelta(delta); + if (!parsed_delta.ok()) { + return webrtc::RTCError(parsed_delta.error()); + } + + uint64_t version_in_delta = parsed_delta.value().first; + + // Check that update does not overflow max_bytes_stored_. + int new_bytes_stored = bytes_stored_; + for (auto& attr : parsed_delta.value().second) { + auto old_version = version_per_key_.find(attr->type()); + if (old_version == version_per_key_.end() || + version_in_delta > old_version->second) { + size_t new_length = attr->length(); + size_t old_length = GetLength(attr->type()); + if (old_version == version_per_key_.end()) { + new_length += sizeof(int64_t); + } + + new_bytes_stored = new_bytes_stored + new_length - old_length; + if (new_bytes_stored <= 0) { + RTC_LOG(LS_WARNING) + << "attr: " << attr->type() << " old_length: " << old_length + << " new_length: " << new_length + << " bytes_stored_: " << bytes_stored_ + << " new_bytes_stored: " << new_bytes_stored; + return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER); + } + if (new_bytes_stored > max_bytes_stored_) { + RTC_LOG(LS_INFO) << "attr: " << attr->type() + << " old_length: " << old_length + << " new_length: " << new_length + << " bytes_stored_: " << bytes_stored_ + << " new_bytes_stored: " << new_bytes_stored; + } + } + } + if (new_bytes_stored > max_bytes_stored_) { + RTC_LOG(LS_INFO) << " bytes_stored_: " << bytes_stored_ + << " new_bytes_stored: " << new_bytes_stored; + return webrtc::RTCError(webrtc::RTCErrorType::RESOURCE_EXHAUSTED); + } + + // Apply the update. 
+ std::vector keys; + for (auto& attr : parsed_delta.value().second) { + if (version_in_delta > version_per_key_[attr->type()]) { + version_per_key_[attr->type()] = version_in_delta; + keys.push_back(attr->type()); + if (attr->value_type() == STUN_VALUE_BYTE_STRING && attr->length() == 0) { + attrs_.erase(attr->type()); + } else { + attrs_[attr->type()] = std::move(attr); + } + } + } + bytes_stored_ = new_bytes_stored; + + return std::make_pair(std::make_unique( + STUN_ATTR_GOOG_DELTA_ACK, version_in_delta), + std::move(keys)); +} + +size_t StunDictionaryView::GetLength(int key) const { + auto attr = GetOrNull(key); + if (attr != nullptr) { + return attr->length(); + } + return 0; +} + +void StunDictionaryWriter::Disable() { + disabled_ = true; +} + +void StunDictionaryWriter::Delete(int key) { + if (disabled_) { + return; + } + + if (dictionary_) { + if (dictionary_->attrs_.find(key) == dictionary_->attrs_.end()) { + return; + } + } + + // remove any pending updates. + pending_.erase( + std::remove_if(pending_.begin(), pending_.end(), + [key](const auto& p) { return p.second->type() == key; }), + pending_.end()); + + // Create tombstone. + auto tombstone = std::make_unique(key); + + // add a pending entry. + pending_.push_back(std::make_pair(++version_, tombstone.get())); + + // store the tombstone. + tombstones_[key] = std::move(tombstone); + + if (dictionary_) { + // remove value + dictionary_->attrs_.erase(key); + } +} + +void StunDictionaryWriter::Set(std::unique_ptr attr) { + if (disabled_) { + return; + } + int key = attr->type(); + // remove any pending updates. + pending_.erase( + std::remove_if(pending_.begin(), pending_.end(), + [key](const auto& p) { return p.second->type() == key; }), + pending_.end()); + + // remove any existing key. + tombstones_.erase(key); + + // create pending entry. + pending_.push_back(std::make_pair(++version_, attr.get())); + + if (dictionary_) { + // store attribute. + dictionary_->attrs_[key] = std::move(attr); + } +} + +// Create an StunByteStringAttribute containing the pending (e.g not ack:ed) +// modifications. +std::unique_ptr StunDictionaryWriter::CreateDelta() { + if (disabled_) { + return nullptr; + } + if (pending_.empty()) { + return nullptr; + } + + rtc::ByteBufferWriter buf; + buf.WriteUInt16(StunDictionaryView::kDeltaMagic); // 0,1 + buf.WriteUInt16(StunDictionaryView::kDeltaVersion); // 2,3 + + // max version in Delta. + buf.WriteUInt16(StunDictionaryView::kVersionKey); // 4,5 + buf.WriteUInt16(8); // 6,7 + buf.WriteUInt16(STUN_VALUE_UINT64); // 8,9 + buf.WriteUInt64(pending_.back().first); // 10-17 + // attributes + for (const auto& attr : pending_) { + buf.WriteUInt16(attr.second->type()); + buf.WriteUInt16(static_cast(attr.second->length())); + buf.WriteUInt16(attr.second->value_type()); + if (!attr.second->Write(&buf)) { + RTC_LOG(LS_ERROR) << "Failed to write key: " << attr.second->type(); + return nullptr; + } + } + return std::make_unique(STUN_ATTR_GOOG_DELTA, + buf.Data(), buf.Length()); +} + +// Apply a delta ack, i.e prune list of pending changes. +void StunDictionaryWriter::ApplyDeltaAck(const StunUInt64Attribute& ack) { + uint64_t acked_version = ack.value(); + auto entries_to_remove = std::remove_if( + pending_.begin(), pending_.end(), + [acked_version](const auto& p) { return p.first <= acked_version; }); + + // remove tombstones. 
+ for (auto it = entries_to_remove; it != pending_.end(); ++it) { + tombstones_.erase((*it).second->type()); + } + pending_.erase(entries_to_remove, pending_.end()); +} + +// Check if a key has a pending change (i.e a change +// that has not been acked). +bool StunDictionaryWriter::Pending(int key) const { + for (const auto& attr : pending_) { + if (attr.second->type() == key) { + return true; + } + } + return false; +} + +int StunDictionaryWriter::Pending() const { + return pending_.size(); +} + +} // namespace cricket diff --git a/third_party/libwebrtc/p2p/base/stun_dictionary.h b/third_party/libwebrtc/p2p/base/stun_dictionary.h new file mode 100644 index 000000000000..f93a1f151f7a --- /dev/null +++ b/third_party/libwebrtc/p2p/base/stun_dictionary.h @@ -0,0 +1,204 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef P2P_BASE_STUN_DICTIONARY_H_ +#define P2P_BASE_STUN_DICTIONARY_H_ + +#include +#include +#include +#include +#include + +#include "api/rtc_error.h" +#include "api/transport/stun.h" + +namespace cricket { + +// A StunDictionaryView is a dictionary of StunAttributes. +// - the StunAttributes can be read using the |Get|-methods. +// - the dictionary is updated by using the |ApplyDelta|-method. +// +// A StunDictionaryWriter is used to create |delta|s for the |ApplyDelta|-method +// - It keeps track of which updates has been applied at StunDictionaryView. +// - It optionally keeps a local StunDictionaryView contains modification made +// `locally` +// +// A pair StunDictionaryView(A)/StunDictionaryWriter(B) are linked so that +// modifications to B is transfered to A using the STUN_ATTR_GOOG_DELTA +// (StunByteStringAttribute) and the modification is ack:ed using +// STUN_ATTR_GOOG_DELTA_ACK (StunUInt64Attribute). +// +// Note: +// 1) It is possible to update one StunDictionaryView from multiple writers, +// but this only works of the different writers write disjoint keys (which +// is not checked/enforced by these classes). +// 2) The opposite, one writer updating multiple StunDictionaryView, is not +// possible. +class StunDictionaryView { + public: + // A reserved key used to transport the version number + static constexpr uint16_t kVersionKey = 0xFFFF; + + // A magic number used when transporting deltas. + static constexpr uint16_t kDeltaMagic = 0x7788; + + // The version number for the delta format. + static constexpr uint16_t kDeltaVersion = 0x1; + + // Gets the desired attribute value, or NULL if no such attribute type exists. + // The pointer returned is guaranteed to be valid until ApplyDelta is called. 
+ const StunAddressAttribute* GetAddress(int key) const; + const StunUInt32Attribute* GetUInt32(int key) const; + const StunUInt64Attribute* GetUInt64(int key) const; + const StunByteStringAttribute* GetByteString(int key) const; + const StunUInt16ListAttribute* GetUInt16List(int key) const; + + bool empty() const { return attrs_.empty(); } + size_t size() const { return attrs_.size(); } + int bytes_stored() const { return bytes_stored_; } + void set_max_bytes_stored(int max_bytes_stored) { + max_bytes_stored_ = max_bytes_stored; + } + + // Apply a delta and return + // a pair with + // - StunUInt64Attribute to ack the |delta|. + // - vector of keys that was modified. + webrtc::RTCErrorOr< + std::pair, std::vector>> + ApplyDelta(const StunByteStringAttribute& delta); + + private: + friend class StunDictionaryWriter; + + const StunAttribute* GetOrNull( + int key, + absl::optional = absl::nullopt) const; + size_t GetLength(int key) const; + static webrtc::RTCErrorOr< + std::pair>>> + ParseDelta(const StunByteStringAttribute& delta); + + std::map> attrs_; + std::map version_per_key_; + + int max_bytes_stored_ = 16384; + int bytes_stored_ = 0; +}; + +class StunDictionaryWriter { + public: + StunDictionaryWriter() { + dictionary_ = std::make_unique(); + } + explicit StunDictionaryWriter( + std::unique_ptr dictionary) { + dictionary_ = std::move(dictionary); + } + + // A pending modification. + template + class Modification { + public: + ~Modification() { commit(); } + + T* operator->() { return attr_.get(); } + + void abort() { attr_ = nullptr; } + void commit() { + if (attr_) { + writer_->Set(std::move(attr_)); + } + } + + private: + friend class StunDictionaryWriter; + Modification(StunDictionaryWriter* writer, std::unique_ptr attr) + : writer_(writer), attr_(std::move(attr)) {} + StunDictionaryWriter* writer_; + std::unique_ptr attr_; + + Modification(const Modification&) = + delete; // not copyable (but movable). + Modification& operator=(Modification&) = + delete; // not copyable (but movable). + }; + + // Record a modification. + Modification SetAddress(int key) { + return Modification( + this, StunAttribute::CreateAddress(key)); + } + Modification SetUInt32(int key) { + return Modification(this, + StunAttribute::CreateUInt32(key)); + } + Modification SetUInt64(int key) { + return Modification(this, + StunAttribute::CreateUInt64(key)); + } + Modification SetByteString(int key) { + return Modification( + this, StunAttribute::CreateByteString(key)); + } + Modification SetUInt16List(int key) { + return Modification( + this, StunAttribute::CreateUInt16ListAttribute(key)); + } + + // Delete a key. + void Delete(int key); + + // Check if a key has a pending change (i.e a change + // that has not been acked). + bool Pending(int key) const; + + // Return number of of pending modifications. + int Pending() const; + + // Create an StunByteStringAttribute containing the pending (e.g not ack:ed) + // modifications. + std::unique_ptr CreateDelta(); + + // Apply an delta ack. + void ApplyDeltaAck(const StunUInt64Attribute&); + + // Return pointer to (optional) StunDictionaryView. + const StunDictionaryView* dictionary() { return dictionary_.get(); } + const StunDictionaryView* operator->() { return dictionary_.get(); } + + // Disable writer, + // i.e CreateDelta always return null, and no modifications are made. + // This is called if remote peer does not support GOOG_DELTA. 
+ void Disable(); + bool disabled() const { return disabled_; } + + private: + void Set(std::unique_ptr attr); + + bool disabled_ = false; + + // version of modification. + int64_t version_ = 1; + + // (optional) StunDictionaryView. + std::unique_ptr dictionary_; + + // sorted list of changes that has not been yet been ack:ed. + std::vector> pending_; + + // tombstones, i.e values that has been deleted but not yet acked. + std::map> tombstones_; +}; + +} // namespace cricket + +#endif // P2P_BASE_STUN_DICTIONARY_H_ diff --git a/third_party/libwebrtc/p2p/base/stun_dictionary_unittest.cc b/third_party/libwebrtc/p2p/base/stun_dictionary_unittest.cc new file mode 100644 index 000000000000..b6af420d786d --- /dev/null +++ b/third_party/libwebrtc/p2p/base/stun_dictionary_unittest.cc @@ -0,0 +1,337 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "p2p/base/stun_dictionary.h" + +#include + +#include "rtc_base/gunit.h" +#include "rtc_base/logging.h" +#include "test/gtest.h" + +namespace { + +void Sync(cricket::StunDictionaryView& dictionary, + cricket::StunDictionaryWriter& writer) { + int pending = writer.Pending(); + auto delta = writer.CreateDelta(); + if (delta == nullptr) { + EXPECT_EQ(pending, 0); + } else { + EXPECT_NE(pending, 0); + auto delta_ack = dictionary.ApplyDelta(*delta); + if (!delta_ack.ok()) { + RTC_LOG(LS_ERROR) << "delta_ack.error(): " << delta_ack.error().message(); + } + EXPECT_TRUE(delta_ack.ok()); + ASSERT_NE(delta_ack.value().first.get(), nullptr); + writer.ApplyDeltaAck(*delta_ack.value().first); + EXPECT_FALSE(writer.Pending()); + } +} + +void XorToggle(cricket::StunByteStringAttribute& attr, size_t byte) { + ASSERT_TRUE(attr.length() > byte); + uint8_t val = attr.GetByte(byte); + uint8_t new_val = val ^ (128 - (byte & 255)); + attr.SetByte(byte, new_val); +} + +std::unique_ptr Crop( + const cricket::StunByteStringAttribute& attr, + int new_length) { + auto new_attr = + std::make_unique(attr.type()); + std::string content = std::string(attr.string_view()); + content.erase(new_length); + new_attr->CopyBytes(content); + return new_attr; +} + +} // namespace + +namespace cricket { + +constexpr int kKey1 = 100; + +TEST(StunDictionary, CreateEmptyDictionaryWriter) { + StunDictionaryView dictionary; + StunDictionaryWriter writer; + EXPECT_TRUE(dictionary.empty()); + EXPECT_TRUE(writer->empty()); + EXPECT_EQ(writer.Pending(), 0); + EXPECT_EQ(writer.CreateDelta().get(), nullptr); +} + +TEST(StunDictionary, SetAndGet) { + StunDictionaryWriter writer; + writer.SetUInt32(kKey1)->SetValue(27); + EXPECT_EQ(writer->GetUInt32(kKey1)->value(), 27u); + EXPECT_EQ(writer->GetUInt64(kKey1), nullptr); + EXPECT_EQ(writer->GetByteString(kKey1), nullptr); + EXPECT_EQ(writer->GetAddress(kKey1), nullptr); + EXPECT_EQ(writer->GetUInt16List(kKey1), nullptr); +} + +TEST(StunDictionary, SetAndApply) { + StunDictionaryWriter writer; + writer.SetUInt32(kKey1)->SetValue(27); + + StunDictionaryView dictionary; + EXPECT_TRUE(dictionary.empty()); + + Sync(dictionary, writer); + EXPECT_EQ(dictionary.GetUInt32(kKey1)->value(), 27u); + EXPECT_EQ(dictionary.bytes_stored(), 12); +} + +TEST(StunDictionary, SetSetAndApply) { + 
StunDictionaryWriter writer; + writer.SetUInt32(kKey1)->SetValue(27); + writer.SetUInt32(kKey1)->SetValue(29); + + StunDictionaryView dictionary; + EXPECT_TRUE(dictionary.empty()); + + Sync(dictionary, writer); + EXPECT_EQ(dictionary.GetUInt32(kKey1)->value(), 29u); + EXPECT_EQ(dictionary.bytes_stored(), 12); +} + +TEST(StunDictionary, SetAndApplyAndSetAndApply) { + StunDictionaryWriter writer; + writer.SetUInt32(kKey1)->SetValue(27); + + StunDictionaryView dictionary; + EXPECT_TRUE(dictionary.empty()); + + Sync(dictionary, writer); + EXPECT_EQ(dictionary.GetUInt32(kKey1)->value(), 27u); + EXPECT_EQ(dictionary.bytes_stored(), 12); + + writer.SetUInt32(kKey1)->SetValue(29); + Sync(dictionary, writer); + EXPECT_EQ(dictionary.GetUInt32(kKey1)->value(), 29u); + EXPECT_EQ(dictionary.bytes_stored(), 12); +} + +TEST(StunDictionary, ChangeType) { + StunDictionaryWriter writer; + writer.SetUInt32(kKey1)->SetValue(27); + EXPECT_EQ(writer->GetUInt32(kKey1)->value(), 27u); + + writer.SetUInt64(kKey1)->SetValue(29); + EXPECT_EQ(writer->GetUInt32(kKey1), nullptr); + EXPECT_EQ(writer->GetUInt64(kKey1)->value(), 29ull); +} + +TEST(StunDictionary, ChangeTypeApply) { + StunDictionaryWriter writer; + writer.SetUInt32(kKey1)->SetValue(27); + EXPECT_EQ(writer->GetUInt32(kKey1)->value(), 27u); + + StunDictionaryView dictionary; + EXPECT_TRUE(dictionary.empty()); + Sync(dictionary, writer); + EXPECT_EQ(writer->GetUInt32(kKey1)->value(), 27u); + + writer.SetUInt64(kKey1)->SetValue(29); + EXPECT_EQ(writer->GetUInt32(kKey1), nullptr); + EXPECT_EQ(writer->GetUInt64(kKey1)->value(), 29ull); + + Sync(dictionary, writer); + EXPECT_EQ(dictionary.GetUInt32(kKey1), nullptr); + EXPECT_EQ(dictionary.GetUInt64(kKey1)->value(), 29ull); + EXPECT_EQ(dictionary.bytes_stored(), 16); +} + +TEST(StunDictionary, Pending) { + StunDictionaryWriter writer; + EXPECT_EQ(writer.Pending(), 0); + EXPECT_FALSE(writer.Pending(kKey1)); + + writer.SetUInt32(kKey1)->SetValue(27); + EXPECT_EQ(writer.Pending(), 1); + EXPECT_TRUE(writer.Pending(kKey1)); + + writer.SetUInt32(kKey1)->SetValue(29); + EXPECT_EQ(writer.Pending(), 1); + EXPECT_TRUE(writer.Pending(kKey1)); + + writer.SetUInt32(kKey1 + 1)->SetValue(31); + EXPECT_EQ(writer.Pending(), 2); + EXPECT_TRUE(writer.Pending(kKey1)); + EXPECT_TRUE(writer.Pending(kKey1 + 1)); + + StunDictionaryView dictionary; + + Sync(dictionary, writer); + EXPECT_EQ(writer.Pending(), 0); + EXPECT_FALSE(writer.Pending(kKey1)); + + writer.SetUInt32(kKey1)->SetValue(32); + EXPECT_EQ(writer.Pending(), 1); + EXPECT_TRUE(writer.Pending(kKey1)); +} + +TEST(StunDictionary, Delete) { + StunDictionaryWriter writer; + StunDictionaryView dictionary; + + writer.SetUInt32(kKey1)->SetValue(27); + Sync(dictionary, writer); + EXPECT_EQ(dictionary.GetUInt32(kKey1)->value(), 27u); + EXPECT_EQ(dictionary.bytes_stored(), 12); + + writer.Delete(kKey1); + Sync(dictionary, writer); + EXPECT_EQ(dictionary.GetUInt32(kKey1), nullptr); + EXPECT_EQ(dictionary.bytes_stored(), 8); + + writer.Delete(kKey1); + EXPECT_EQ(writer.Pending(), 0); +} + +TEST(StunDictionary, MultiWriter) { + StunDictionaryWriter writer1; + StunDictionaryWriter writer2; + StunDictionaryView dictionary; + + writer1.SetUInt32(kKey1)->SetValue(27); + Sync(dictionary, writer1); + EXPECT_EQ(dictionary.GetUInt32(kKey1)->value(), 27u); + + writer2.SetUInt32(kKey1 + 1)->SetValue(28); + Sync(dictionary, writer2); + EXPECT_EQ(dictionary.GetUInt32(kKey1 + 1)->value(), 28u); + + writer1.Delete(kKey1); + Sync(dictionary, writer1); + EXPECT_EQ(dictionary.GetUInt32(kKey1), nullptr); + + 
writer2.Delete(kKey1 + 1); + Sync(dictionary, writer2); + EXPECT_EQ(dictionary.GetUInt32(kKey1 + 1), nullptr); +} + +TEST(StunDictionary, BytesStoredIsCountedCorrectlyAfterMultipleUpdates) { + StunDictionaryWriter writer; + StunDictionaryView dictionary; + + for (int i = 0; i < 10; i++) { + writer.SetUInt32(kKey1)->SetValue(27); + writer.SetUInt64(kKey1 + 1)->SetValue(28); + Sync(dictionary, writer); + EXPECT_EQ(dictionary.bytes_stored(), 28); + EXPECT_EQ(dictionary.GetUInt32(kKey1)->value(), 27u); + EXPECT_EQ(dictionary.GetUInt64(kKey1 + 1)->value(), 28ull); + writer.Delete(kKey1); + Sync(dictionary, writer); + EXPECT_EQ(dictionary.bytes_stored(), 24); + EXPECT_EQ(dictionary.GetUInt32(kKey1), nullptr); + EXPECT_EQ(dictionary.GetUInt64(kKey1 + 1)->value(), 28ull); + writer.Delete(kKey1 + 1); + Sync(dictionary, writer); + EXPECT_EQ(dictionary.bytes_stored(), 16); + EXPECT_EQ(dictionary.GetUInt32(kKey1), nullptr); + EXPECT_EQ(dictionary.GetUInt64(kKey1 + 1), nullptr); + } +} + +TEST(StunDictionary, MaxBytesStoredCausesErrorOnOverflow) { + StunDictionaryWriter writer; + StunDictionaryView dictionary; + + dictionary.set_max_bytes_stored(30); + + writer.SetUInt32(kKey1)->SetValue(27); + writer.SetUInt64(kKey1 + 1)->SetValue(28); + Sync(dictionary, writer); + EXPECT_EQ(dictionary.bytes_stored(), 28); + EXPECT_EQ(dictionary.GetUInt32(kKey1)->value(), 27u); + EXPECT_EQ(dictionary.GetUInt64(kKey1 + 1)->value(), 28ull); + + writer.SetByteString(kKey1 + 2)->CopyBytes("k"); + { + auto delta = writer.CreateDelta(); + auto delta_ack = dictionary.ApplyDelta(*delta); + EXPECT_FALSE(delta_ack.ok()); + } + EXPECT_EQ(dictionary.GetUInt32(kKey1)->value(), 27u); + EXPECT_EQ(dictionary.GetUInt64(kKey1 + 1)->value(), 28ull); + EXPECT_EQ(dictionary.GetByteString(kKey1 + 2), nullptr); + + writer.Delete(kKey1 + 1); + Sync(dictionary, writer); + EXPECT_EQ(dictionary.GetUInt32(kKey1)->value(), 27u); + EXPECT_EQ(dictionary.GetUInt64(kKey1 + 1), nullptr); + EXPECT_EQ(dictionary.GetByteString(kKey1 + 2)->string_view(), "k"); +} + +TEST(StunDictionary, DataTypes) { + StunDictionaryWriter writer; + StunDictionaryView dictionary; + + rtc::SocketAddress addr("127.0.0.1", 8080); + + writer.SetUInt32(kKey1)->SetValue(27); + writer.SetUInt64(kKey1 + 1)->SetValue(28); + writer.SetAddress(kKey1 + 2)->SetAddress(addr); + writer.SetByteString(kKey1 + 3)->CopyBytes("keso"); + writer.SetUInt16List(kKey1 + 4)->AddTypeAtIndex(0, 7); + + Sync(dictionary, writer); + EXPECT_EQ(dictionary.GetUInt32(kKey1)->value(), 27u); + EXPECT_EQ(dictionary.GetUInt64(kKey1 + 1)->value(), 28ull); + EXPECT_EQ(dictionary.GetAddress(kKey1 + 2)->GetAddress(), addr); + EXPECT_EQ(dictionary.GetByteString(kKey1 + 3)->string_view(), "keso"); + EXPECT_EQ(dictionary.GetUInt16List(kKey1 + 4)->GetType(0), 7); +} + +TEST(StunDictionary, ParseError) { + StunDictionaryWriter writer; + StunDictionaryView dictionary; + + rtc::SocketAddress addr("127.0.0.1", 8080); + + writer.SetUInt32(kKey1)->SetValue(27); + writer.SetUInt64(kKey1 + 1)->SetValue(28); + writer.SetAddress(kKey1 + 2)->SetAddress(addr); + writer.SetByteString(kKey1 + 3)->CopyBytes("keso"); + writer.SetUInt16List(kKey1 + 4)->AddTypeAtIndex(0, 7); + + auto delta = writer.CreateDelta(); + + // The first 10 bytes are in the header... + // any modification makes parsing fail. + for (int i = 0; i < 10; i++) { + XorToggle(*delta, i); + EXPECT_FALSE(dictionary.ApplyDelta(*delta).ok()); + XorToggle(*delta, i); // toogle back + } + + // Remove bytes from the delta. 
+ for (size_t i = 0; i < delta->length(); i++) { + // The delta does not contain a footer, + // so it it possible to Crop at special values (attribute boundaries) + // and apply will still work. + const std::vector valid_crop_length = {18, 28, 42, 56, 66, 74}; + bool valid = std::find(valid_crop_length.begin(), valid_crop_length.end(), + i) != valid_crop_length.end(); + auto cropped_delta = Crop(*delta, i); + if (valid) { + EXPECT_TRUE(dictionary.ApplyDelta(*cropped_delta).ok()); + } else { + EXPECT_FALSE(dictionary.ApplyDelta(*cropped_delta).ok()); + } + } +} + +} // namespace cricket diff --git a/third_party/libwebrtc/p2p/client/basic_port_allocator_unittest.cc b/third_party/libwebrtc/p2p/client/basic_port_allocator_unittest.cc index 710d32a98fbe..44e39c39acc3 100644 --- a/third_party/libwebrtc/p2p/client/basic_port_allocator_unittest.cc +++ b/third_party/libwebrtc/p2p/client/basic_port_allocator_unittest.cc @@ -34,6 +34,7 @@ #include "rtc_base/nat_types.h" #include "rtc_base/net_helper.h" #include "rtc_base/net_helpers.h" +#include "rtc_base/net_test_helpers.h" #include "rtc_base/network.h" #include "rtc_base/network_constants.h" #include "rtc_base/network_monitor.h" diff --git a/third_party/libwebrtc/pc/BUILD.gn b/third_party/libwebrtc/pc/BUILD.gn index 2f671d269438..8b2e1282f844 100644 --- a/third_party/libwebrtc/pc/BUILD.gn +++ b/third_party/libwebrtc/pc/BUILD.gn @@ -2450,7 +2450,6 @@ if (rtc_include_tests && !build_with_chromium) { "../common_video", "../logging:fake_rtc_event_log", "../media:codec", - "../media:delayable", "../media:media_channel", "../media:media_constants", "../media:rid_description", @@ -2559,27 +2558,6 @@ if (rtc_include_tests && !build_with_chromium) { ] } - rtc_test("peer_connection_mediachannel_split_unittests") { - testonly = true - sources = [ "peer_connection_mediachannel_split_unittest.cc" ] - deps = [ - ":integration_test_helpers", - ":session_description", - "../api:libjingle_peerconnection_api", - "../api:media_stream_interface", - "../api:rtc_error", - "../api:rtc_stats_api", - "../api:scoped_refptr", - "../media:codec", - "../media:media_constants", - "../rtc_base:gunit_helpers", - "../rtc_base:rtc_base_tests_utils", - "../test:test_main", - "../test:test_support", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - } - rtc_library("data_channel_controller_unittest") { testonly = true sources = [ "data_channel_controller_unittest.cc" ] @@ -2758,7 +2736,7 @@ if (rtc_include_tests && !build_with_chromium) { "test/mock_peer_connection_observers.h", "test/mock_rtp_receiver_internal.h", "test/mock_rtp_sender_internal.h", - "test/mock_voice_media_channel.h", + "test/mock_voice_media_receive_channel_interface.h", "test/peer_connection_test_wrapper.cc", "test/peer_connection_test_wrapper.h", "test/rtc_stats_obtainer.h", diff --git a/third_party/libwebrtc/pc/audio_rtp_receiver_unittest.cc b/third_party/libwebrtc/pc/audio_rtp_receiver_unittest.cc index 5a3383f94c00..9eb20c982fc3 100644 --- a/third_party/libwebrtc/pc/audio_rtp_receiver_unittest.cc +++ b/third_party/libwebrtc/pc/audio_rtp_receiver_unittest.cc @@ -12,7 +12,7 @@ #include -#include "pc/test/mock_voice_media_channel.h" +#include "pc/test/mock_voice_media_receive_channel_interface.h" #include "rtc_base/gunit.h" #include "rtc_base/thread.h" #include "test/gmock.h" @@ -38,28 +38,26 @@ class AudioRtpReceiverTest : public ::testing::Test { rtc::make_ref_counted(worker_, std::string(), std::vector(), - false)), - media_channel_(cricket::MediaChannel::Role::kReceive, - 
rtc::Thread::Current()) { - EXPECT_CALL(media_channel_, SetRawAudioSink(kSsrc, _)); - EXPECT_CALL(media_channel_, SetBaseMinimumPlayoutDelayMs(kSsrc, _)); + false)) { + EXPECT_CALL(receive_channel_, SetRawAudioSink(kSsrc, _)); + EXPECT_CALL(receive_channel_, SetBaseMinimumPlayoutDelayMs(kSsrc, _)); } ~AudioRtpReceiverTest() { - EXPECT_CALL(media_channel_, SetOutputVolume(kSsrc, kVolumeMuted)); + EXPECT_CALL(receive_channel_, SetOutputVolume(kSsrc, kVolumeMuted)); receiver_->SetMediaChannel(nullptr); } rtc::AutoThread main_thread_; rtc::Thread* worker_; rtc::scoped_refptr receiver_; - cricket::MockVoiceMediaChannel media_channel_; + cricket::MockVoiceMediaReceiveChannelInterface receive_channel_; }; TEST_F(AudioRtpReceiverTest, SetOutputVolumeIsCalled) { std::atomic_int set_volume_calls(0); - EXPECT_CALL(media_channel_, SetOutputVolume(kSsrc, kDefaultVolume)) + EXPECT_CALL(receive_channel_, SetOutputVolume(kSsrc, kDefaultVolume)) .WillOnce(InvokeWithoutArgs([&] { set_volume_calls++; return true; @@ -67,11 +65,11 @@ TEST_F(AudioRtpReceiverTest, SetOutputVolumeIsCalled) { receiver_->track(); receiver_->track()->set_enabled(true); - receiver_->SetMediaChannel(media_channel_.AsVoiceReceiveChannel()); - EXPECT_CALL(media_channel_, SetDefaultRawAudioSink(_)).Times(0); + receiver_->SetMediaChannel(&receive_channel_); + EXPECT_CALL(receive_channel_, SetDefaultRawAudioSink(_)).Times(0); receiver_->SetupMediaChannel(kSsrc); - EXPECT_CALL(media_channel_, SetOutputVolume(kSsrc, kVolume)) + EXPECT_CALL(receive_channel_, SetOutputVolume(kSsrc, kVolume)) .WillOnce(InvokeWithoutArgs([&] { set_volume_calls++; return true; @@ -87,11 +85,11 @@ TEST_F(AudioRtpReceiverTest, VolumesSetBeforeStartingAreRespected) { receiver_->OnSetVolume(kVolume); receiver_->track()->set_enabled(true); - receiver_->SetMediaChannel(media_channel_.AsVoiceReceiveChannel()); + receiver_->SetMediaChannel(&receive_channel_); // The previosly set initial volume should be propagated to the provided // media_channel_ as soon as SetupMediaChannel is called. - EXPECT_CALL(media_channel_, SetOutputVolume(kSsrc, kVolume)); + EXPECT_CALL(receive_channel_, SetOutputVolume(kSsrc, kVolume)); receiver_->SetupMediaChannel(kSsrc); } @@ -102,13 +100,13 @@ TEST_F(AudioRtpReceiverTest, VolumesSetBeforeStartingAreRespected) { TEST(AudioRtpReceiver, OnChangedNotificationsAfterConstruction) { webrtc::test::RunLoop loop; auto* thread = rtc::Thread::Current(); // Points to loop's thread. - cricket::MockVoiceMediaChannel media_channel( - cricket::MediaChannel::Role::kReceive, thread); + cricket::MockVoiceMediaReceiveChannelInterface receive_channel; auto receiver = rtc::make_ref_counted( - thread, std::string(), std::vector(), true, &media_channel); + thread, std::string(), std::vector(), true, + &receive_channel); - EXPECT_CALL(media_channel, SetDefaultRawAudioSink(_)).Times(1); - EXPECT_CALL(media_channel, SetDefaultOutputVolume(kDefaultVolume)).Times(1); + EXPECT_CALL(receive_channel, SetDefaultRawAudioSink(_)).Times(1); + EXPECT_CALL(receive_channel, SetDefaultOutputVolume(kDefaultVolume)).Times(1); receiver->SetupUnsignaledMediaChannel(); loop.Flush(); @@ -119,10 +117,10 @@ TEST(AudioRtpReceiver, OnChangedNotificationsAfterConstruction) { // for the worker thread. This notification should trigger the volume // of the media channel to be set to kVolumeMuted. // Flush the worker thread, but set the expectation first for the call. 
- EXPECT_CALL(media_channel, SetDefaultOutputVolume(kVolumeMuted)).Times(1); + EXPECT_CALL(receive_channel, SetDefaultOutputVolume(kVolumeMuted)).Times(1); loop.Flush(); - EXPECT_CALL(media_channel, SetDefaultOutputVolume(kVolumeMuted)).Times(1); + EXPECT_CALL(receive_channel, SetDefaultOutputVolume(kVolumeMuted)).Times(1); receiver->SetMediaChannel(nullptr); } diff --git a/third_party/libwebrtc/pc/channel.cc b/third_party/libwebrtc/pc/channel.cc index ba1f17527bfb..46cd377693f9 100644 --- a/third_party/libwebrtc/pc/channel.cc +++ b/third_party/libwebrtc/pc/channel.cc @@ -78,11 +78,11 @@ struct StreamFinder { } // namespace template -void RtpParametersFromMediaDescription( +void MediaChannelParametersFromMediaDescription( const MediaContentDescriptionImpl* desc, const RtpHeaderExtensions& extensions, bool is_stream_active, - RtpParameters* params) { + MediaChannelParameters* params) { params->is_stream_active = is_stream_active; params->codecs = desc->codecs(); // TODO(bugs.webrtc.org/11513): See if we really need @@ -98,14 +98,14 @@ template void RtpSendParametersFromMediaDescription( const MediaContentDescriptionImpl* desc, webrtc::RtpExtension::Filter extensions_filter, - RtpSendParameters* send_params) { + SenderParameters* send_params) { RtpHeaderExtensions extensions = webrtc::RtpExtension::DeduplicateHeaderExtensions( desc->rtp_header_extensions(), extensions_filter); const bool is_stream_active = webrtc::RtpTransceiverDirectionHasRecv(desc->direction()); - RtpParametersFromMediaDescription(desc, extensions, is_stream_active, - send_params); + MediaChannelParametersFromMediaDescription(desc, extensions, is_stream_active, + send_params); send_params->max_bandwidth_bps = desc->bandwidth(); send_params->extmap_allow_mixed = desc->extmap_allow_mixed(); } @@ -140,32 +140,6 @@ BaseChannel::BaseChannel( RTC_DLOG(LS_INFO) << "Created channel: " << ToString(); } -BaseChannel::BaseChannel(rtc::Thread* worker_thread, - rtc::Thread* network_thread, - rtc::Thread* signaling_thread, - std::unique_ptr media_channel, - absl::string_view mid, - bool srtp_required, - webrtc::CryptoOptions crypto_options, - UniqueRandomIdGenerator* ssrc_generator) - : media_channel_(std::move(media_channel)), - worker_thread_(worker_thread), - network_thread_(network_thread), - signaling_thread_(signaling_thread), - alive_(PendingTaskSafetyFlag::Create()), - srtp_required_(srtp_required), - extensions_filter_( - crypto_options.srtp.enable_encrypted_rtp_header_extensions - ? 
webrtc::RtpExtension::kPreferEncryptedExtension - : webrtc::RtpExtension::kDiscardEncryptedExtension), - demuxer_criteria_(mid), - ssrc_generator_(ssrc_generator) { - RTC_DCHECK_RUN_ON(worker_thread_); - RTC_DCHECK(media_channel_); - RTC_DCHECK(ssrc_generator_); - RTC_DLOG(LS_INFO) << "Created channel: " << ToString(); -} - BaseChannel::~BaseChannel() { TRACE_EVENT0("webrtc", "BaseChannel::~BaseChannel"); RTC_DCHECK_RUN_ON(worker_thread_); @@ -178,15 +152,9 @@ BaseChannel::~BaseChannel() { } std::string BaseChannel::ToString() const { - if (media_send_channel_) { - return StringFormat( - "{mid: %s, media_type: %s}", mid().c_str(), - MediaTypeToString(media_send_channel_->media_type()).c_str()); - } else { - return StringFormat( - "{mid: %s, media_type: %s}", mid().c_str(), - MediaTypeToString(media_channel_->media_type()).c_str()); - } + return StringFormat( + "{mid: %s, media_type: %s}", mid().c_str(), + MediaTypeToString(media_send_channel_->media_type()).c_str()); } bool BaseChannel::ConnectToRtpTransport_n() { @@ -866,26 +834,6 @@ VoiceChannel::VoiceChannel( crypto_options, ssrc_generator) {} -VoiceChannel::VoiceChannel( - rtc::Thread* worker_thread, - rtc::Thread* network_thread, - rtc::Thread* signaling_thread, - std::unique_ptr media_channel_impl, - absl::string_view mid, - bool srtp_required, - webrtc::CryptoOptions crypto_options, - UniqueRandomIdGenerator* ssrc_generator) - : BaseChannel(worker_thread, - network_thread, - signaling_thread, - std::move(media_channel_impl), - mid, - srtp_required, - crypto_options, - ssrc_generator) { - InitCallback(); -} - VoiceChannel::~VoiceChannel() { TRACE_EVENT0("webrtc", "VoiceChannel::~VoiceChannel"); // this can't be done in the base class, since it calls a virtual @@ -934,8 +882,8 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content, bool update_header_extensions = true; media_send_channel()->SetExtmapAllowMixed(content->extmap_allow_mixed()); - AudioRecvParameters recv_params = last_recv_params_; - RtpParametersFromMediaDescription( + AudioReceiverParameters recv_params = last_recv_params_; + MediaChannelParametersFromMediaDescription( content->as_audio(), header_extensions, webrtc::RtpTransceiverDirectionHasRecv(content->direction()), &recv_params); @@ -987,7 +935,7 @@ bool VoiceChannel::SetRemoteContent_w(const MediaContentDescription* content, TRACE_EVENT0("webrtc", "VoiceChannel::SetRemoteContent_w"); RTC_LOG(LS_INFO) << "Setting remote voice description for " << ToString(); - AudioSendParameters send_params = last_send_params_; + AudioSenderParameter send_params = last_send_params_; RtpSendParametersFromMediaDescription(content->as_audio(), extensions_filter(), &send_params); send_params.mid = mid(); @@ -1041,23 +989,6 @@ VideoChannel::VideoChannel( send_channel()->SendCodecRtxTime()); }); } -VideoChannel::VideoChannel( - rtc::Thread* worker_thread, - rtc::Thread* network_thread, - rtc::Thread* signaling_thread, - std::unique_ptr media_channel_impl, - absl::string_view mid, - bool srtp_required, - webrtc::CryptoOptions crypto_options, - UniqueRandomIdGenerator* ssrc_generator) - : BaseChannel(worker_thread, - network_thread, - signaling_thread, - std::move(media_channel_impl), - mid, - srtp_required, - crypto_options, - ssrc_generator) {} VideoChannel::~VideoChannel() { TRACE_EVENT0("webrtc", "VideoChannel::~VideoChannel"); @@ -1091,14 +1022,14 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content, bool update_header_extensions = true; 
media_send_channel()->SetExtmapAllowMixed(content->extmap_allow_mixed()); - VideoRecvParameters recv_params = last_recv_params_; + VideoReceiverParameters recv_params = last_recv_params_; - RtpParametersFromMediaDescription( + MediaChannelParametersFromMediaDescription( content->as_video(), header_extensions, webrtc::RtpTransceiverDirectionHasRecv(content->direction()), &recv_params); - VideoSendParameters send_params = last_send_params_; + VideoSenderParameters send_params = last_send_params_; bool needs_send_params_update = false; if (type == SdpType::kAnswer || type == SdpType::kPrAnswer) { @@ -1177,13 +1108,13 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content, const VideoContentDescription* video = content->as_video(); - VideoSendParameters send_params = last_send_params_; + VideoSenderParameters send_params = last_send_params_; RtpSendParametersFromMediaDescription(video, extensions_filter(), &send_params); send_params.mid = mid(); send_params.conference_mode = video->conference_mode(); - VideoRecvParameters recv_params = last_recv_params_; + VideoReceiverParameters recv_params = last_recv_params_; bool needs_recv_params_update = false; if (type == SdpType::kAnswer || type == SdpType::kPrAnswer) { diff --git a/third_party/libwebrtc/pc/channel.h b/third_party/libwebrtc/pc/channel.h index e082c2d506e1..5078225e61aa 100644 --- a/third_party/libwebrtc/pc/channel.h +++ b/third_party/libwebrtc/pc/channel.h @@ -94,16 +94,6 @@ class BaseChannel : public ChannelInterface, bool srtp_required, webrtc::CryptoOptions crypto_options, rtc::UniqueRandomIdGenerator* ssrc_generator); - // Constructor for use when the MediaChannel is not split - // TODO(bugs.webrtc.org/13931): Delete when split channel project is complete. - BaseChannel(rtc::Thread* worker_thread, - rtc::Thread* network_thread, - rtc::Thread* signaling_thread, - std::unique_ptr media_channel_impl, - absl::string_view mid, - bool srtp_required, - webrtc::CryptoOptions crypto_options, - rtc::UniqueRandomIdGenerator* ssrc_generator); virtual ~BaseChannel(); rtc::Thread* worker_thread() const { return worker_thread_; } @@ -316,13 +306,6 @@ class BaseChannel : public ChannelInterface, // Return description of media channel to facilitate logging std::string ToString() const; - // MediaChannel implementation pointers. - // Either the `media_channel_` is set, or the `media_send_channel_` - // and the `media_receive_channel_` is set. - // TODO(bugs.webrtc.org/13931): Delete `media_channel_` when split channel - // project is complete. - const std::unique_ptr media_channel_; - const std::unique_ptr media_send_channel_; const std::unique_ptr media_receive_channel_; @@ -396,16 +379,6 @@ class VoiceChannel : public BaseChannel { bool srtp_required, webrtc::CryptoOptions crypto_options, rtc::UniqueRandomIdGenerator* ssrc_generator); - // Constructor for use when the MediaChannel is not split - // TODO(bugs.webrtc.org/13931): Delete when split channel project is complete. 
- VoiceChannel(rtc::Thread* worker_thread, - rtc::Thread* network_thread, - rtc::Thread* signaling_thread, - std::unique_ptr media_channel_impl, - absl::string_view mid, - bool srtp_required, - webrtc::CryptoOptions crypto_options, - rtc::UniqueRandomIdGenerator* ssrc_generator); ~VoiceChannel(); @@ -416,17 +389,11 @@ class VoiceChannel : public BaseChannel { VoiceChannel* AsVoiceChannel() override { return this; } VoiceMediaSendChannelInterface* send_channel() { - if (media_send_channel_) { - return media_send_channel_->AsVoiceSendChannel(); - } - return media_channel_->AsVoiceSendChannel(); + return media_send_channel_->AsVoiceSendChannel(); } VoiceMediaReceiveChannelInterface* receive_channel() { - if (media_receive_channel_) { - return media_receive_channel_->AsVoiceReceiveChannel(); - } - return media_channel_->AsVoiceReceiveChannel(); + return media_receive_channel_->AsVoiceReceiveChannel(); } VoiceMediaSendChannelInterface* media_send_channel() override { @@ -462,12 +429,12 @@ class VoiceChannel : public BaseChannel { std::string& error_desc) RTC_RUN_ON(worker_thread()) override; - // Last AudioSendParameters sent down to the media_channel() via + // Last AudioSenderParameter sent down to the media_channel() via // SetSendParameters. - AudioSendParameters last_send_params_ RTC_GUARDED_BY(worker_thread()); - // Last AudioRecvParameters sent down to the media_channel() via + AudioSenderParameter last_send_params_ RTC_GUARDED_BY(worker_thread()); + // Last AudioReceiverParameters sent down to the media_channel() via // SetRecvParameters. - AudioRecvParameters last_recv_params_ RTC_GUARDED_BY(worker_thread()); + AudioReceiverParameters last_recv_params_ RTC_GUARDED_BY(worker_thread()); }; // VideoChannel is a specialization for video. @@ -483,16 +450,6 @@ class VideoChannel : public BaseChannel { bool srtp_required, webrtc::CryptoOptions crypto_options, rtc::UniqueRandomIdGenerator* ssrc_generator); - // Constructor for use when the MediaChannel is not split - // TODO(bugs.webrtc.org/13931): Delete when split channel project is complete. - VideoChannel(rtc::Thread* worker_thread, - rtc::Thread* network_thread, - rtc::Thread* signaling_thread, - std::unique_ptr media_channel_impl, - absl::string_view mid, - bool srtp_required, - webrtc::CryptoOptions crypto_options, - rtc::UniqueRandomIdGenerator* ssrc_generator); ~VideoChannel(); VideoChannel* AsVideoChannel() override { return this; } @@ -502,17 +459,11 @@ class VideoChannel : public BaseChannel { } VideoMediaSendChannelInterface* send_channel() { - if (media_send_channel_) { - return media_send_channel_->AsVideoSendChannel(); - } - return media_channel_->AsVideoSendChannel(); + return media_send_channel_->AsVideoSendChannel(); } VideoMediaReceiveChannelInterface* receive_channel() { - if (media_receive_channel_) { - return media_receive_channel_->AsVideoReceiveChannel(); - } - return media_channel_->AsVideoReceiveChannel(); + return media_receive_channel_->AsVideoReceiveChannel(); } VideoMediaSendChannelInterface* media_send_channel() override { @@ -547,12 +498,12 @@ class VideoChannel : public BaseChannel { std::string& error_desc) RTC_RUN_ON(worker_thread()) override; - // Last VideoSendParameters sent down to the media_channel() via + // Last VideoSenderParameters sent down to the media_channel() via // SetSendParameters. 
- VideoSendParameters last_send_params_ RTC_GUARDED_BY(worker_thread()); - // Last VideoRecvParameters sent down to the media_channel() via + VideoSenderParameters last_send_params_ RTC_GUARDED_BY(worker_thread()); + // Last VideoReceiverParameters sent down to the media_channel() via // SetRecvParameters. - VideoRecvParameters last_recv_params_ RTC_GUARDED_BY(worker_thread()); + VideoReceiverParameters last_recv_params_ RTC_GUARDED_BY(worker_thread()); }; } // namespace cricket diff --git a/third_party/libwebrtc/pc/channel_interface.h b/third_party/libwebrtc/pc/channel_interface.h index 7495ad8931bf..8d6a9fe74548 100644 --- a/third_party/libwebrtc/pc/channel_interface.h +++ b/third_party/libwebrtc/pc/channel_interface.h @@ -28,7 +28,6 @@ class VideoBitrateAllocatorFactory; namespace cricket { -class MediaChannel; class VoiceChannel; class VideoChannel; class MediaContentDescription; diff --git a/third_party/libwebrtc/pc/channel_unittest.cc b/third_party/libwebrtc/pc/channel_unittest.cc index 28676291bf97..0d7f0b0cd0ad 100644 --- a/third_party/libwebrtc/pc/channel_unittest.cc +++ b/third_party/libwebrtc/pc/channel_unittest.cc @@ -49,7 +49,8 @@ #include "test/scoped_key_value_config.h" using cricket::DtlsTransportInternal; -using cricket::FakeVoiceMediaChannel; +using cricket::FakeVoiceMediaReceiveChannel; +using cricket::FakeVoiceMediaSendChannel; using cricket::RidDescription; using cricket::RidDirection; using cricket::StreamParams; @@ -77,9 +78,10 @@ enum class NetworkIsWorker { Yes, No }; } // namespace template {}; class VideoTraits : public Traits { } void CreateChannels(int flags1, int flags2) { - CreateChannels(std::make_unique( - cricket::MediaChannel::Role::kSend, nullptr, + CreateChannels(std::make_unique( typename T::Options(), network_thread_), - std::make_unique( - cricket::MediaChannel::Role::kReceive, nullptr, + std::make_unique( typename T::Options(), network_thread_), - std::make_unique( - cricket::MediaChannel::Role::kSend, nullptr, + std::make_unique( typename T::Options(), network_thread_), - std::make_unique( - cricket::MediaChannel::Role::kReceive, nullptr, + std::make_unique( typename T::Options(), network_thread_), flags1, flags2); } - void CreateChannels(std::unique_ptr ch1s, - std::unique_ptr ch1r, - std::unique_ptr ch2s, - std::unique_ptr ch2r, + void CreateChannels(std::unique_ptr ch1s, + std::unique_ptr ch1r, + std::unique_ptr ch2s, + std::unique_ptr ch2r, int flags1, int flags2) { RTC_DCHECK(!channel1_); @@ -275,8 +276,8 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { std::unique_ptr CreateChannel( rtc::Thread* worker_thread, rtc::Thread* network_thread, - std::unique_ptr ch_send, - std::unique_ptr ch_receive, + std::unique_ptr ch_send, + std::unique_ptr ch_receive, webrtc::RtpTransportInternal* rtp_transport, int flags); @@ -439,11 +440,11 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { return result; } - void SendRtp(typename T::MediaChannel* media_channel, rtc::Buffer data) { + void SendRtp(typename T::MediaSendChannel* media_channel, rtc::Buffer data) { network_thread_->PostTask(webrtc::SafeTask( network_thread_safety_, [media_channel, data = std::move(data)]() { - media_channel->SendRtp(data.data(), data.size(), - rtc::PacketOptions()); + media_channel->SendPacket(data.data(), data.size(), + rtc::PacketOptions()); })); } @@ -586,7 +587,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { if (verify_playout_) { EXPECT_FALSE(media_receive_channel1_impl()->playout()); } - 
EXPECT_TRUE(media_send_channel1_impl()->codecs().empty()); + EXPECT_TRUE(media_send_channel1_impl()->send_codecs().empty()); EXPECT_TRUE(media_receive_channel1_impl()->recv_streams().empty()); EXPECT_TRUE(media_send_channel1_impl()->rtp_packets().empty()); // Basic sanity test for send and receive channel objects @@ -606,11 +607,11 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { CreateContent(0, kPcmuCodec, kH264Codec, &content); std::string err; EXPECT_TRUE(channel1_->SetLocalContent(&content, SdpType::kOffer, err)); - EXPECT_EQ(0U, media_send_channel1_impl()->codecs().size()); + EXPECT_EQ(0U, media_send_channel1_impl()->send_codecs().size()); EXPECT_TRUE(channel1_->SetRemoteContent(&content, SdpType::kAnswer, err)); - ASSERT_EQ(1U, media_send_channel1_impl()->codecs().size()); + ASSERT_EQ(1U, media_send_channel1_impl()->send_codecs().size()); EXPECT_TRUE(CodecMatches(content.codecs()[0], - media_send_channel1_impl()->codecs()[0])); + media_send_channel1_impl()->send_codecs()[0])); } // Test that SetLocalContent and SetRemoteContent properly configure @@ -654,11 +655,11 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { std::string err; EXPECT_TRUE(channel1_->SetLocalContent(&content, SdpType::kOffer, err)); CreateContent(0, kPcmuCodec, kH264Codec, &content); - EXPECT_EQ(0U, media_send_channel1_impl()->codecs().size()); + EXPECT_EQ(0U, media_send_channel1_impl()->send_codecs().size()); EXPECT_TRUE(channel1_->SetRemoteContent(&content, SdpType::kAnswer, err)); - ASSERT_EQ(1U, media_send_channel1_impl()->codecs().size()); + ASSERT_EQ(1U, media_send_channel1_impl()->send_codecs().size()); EXPECT_TRUE(CodecMatches(content.codecs()[0], - media_send_channel1_impl()->codecs()[0])); + media_send_channel1_impl()->send_codecs()[0])); } // Test that SetLocalContent and SetRemoteContent properly set RTCP @@ -911,7 +912,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { CreateChannels(DTLS, DTLS); SendInitiate(); - typename T::MediaChannel* media_send_channel1_impl = + typename T::MediaSendChannel* media_send_channel1_impl = this->media_send_channel1_impl(); ASSERT_TRUE(media_send_channel1_impl); @@ -973,12 +974,12 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { EXPECT_TRUE(SendAccept()); EXPECT_FALSE(IsSrtpActive(channel1_)); EXPECT_TRUE(media_send_channel1_impl()->sending()); - EXPECT_EQ(1U, media_send_channel1_impl()->codecs().size()); + EXPECT_EQ(1U, media_send_channel1_impl()->send_codecs().size()); if (verify_playout_) { EXPECT_TRUE(media_receive_channel2_impl()->playout()); } EXPECT_TRUE(media_send_channel2_impl()->sending()); - EXPECT_EQ(1U, media_send_channel2_impl()->codecs().size()); + EXPECT_EQ(1U, media_send_channel2_impl()->send_codecs().size()); } // Send voice RTP data to the other side and ensure it gets there. 
@@ -1457,46 +1458,47 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { } // Accessors that return the standard VideoMedia{Send|Receive}ChannelInterface - typename T::MediaSendChannel* media_send_channel1() { + typename T::MediaSendChannelInterface* media_send_channel1() { return channel1_->media_send_channel(); } - typename T::MediaSendChannel* media_send_channel2() { + typename T::MediaSendChannelInterface* media_send_channel2() { return channel2_->media_send_channel(); } - typename T::MediaReceiveChannel* media_receive_channel1() { + typename T::MediaReceiveChannelInterface* media_receive_channel1() { return channel1_->media_receive_channel(); } - typename T::MediaReceiveChannel* media_receive_channel2() { + typename T::MediaReceiveChannelInterface* media_receive_channel2() { return channel2_->media_receive_channel(); } - // Accessors that return the FakeMediaChannel object. + // Accessors that return the FakeMediaSendChannel object. // Note that these depend on getting the object back that was // passed to the channel constructor. - // T::MediaChannel is either FakeVoiceMediaChannel or FakeVideoMediaChannel. - typename T::MediaChannel* media_send_channel1_impl() { + // T::MediaSendChannel is either FakeVoiceMediaSendChannel or + // FakeVideoMediaSendChannel. + typename T::MediaSendChannel* media_send_channel1_impl() { RTC_DCHECK(channel1_); - return static_cast( + return static_cast( channel1_->media_send_channel()); } - typename T::MediaChannel* media_send_channel2_impl() { + typename T::MediaSendChannel* media_send_channel2_impl() { RTC_DCHECK(channel2_); RTC_DCHECK(channel2_->media_send_channel()); - return static_cast( + return static_cast( channel2_->media_send_channel()); } - typename T::MediaChannel* media_receive_channel1_impl() { + typename T::MediaReceiveChannel* media_receive_channel1_impl() { RTC_DCHECK(channel1_); RTC_DCHECK(channel1_->media_receive_channel()); - return static_cast( + return static_cast( channel1_->media_receive_channel()); } - typename T::MediaChannel* media_receive_channel2_impl() { + typename T::MediaReceiveChannel* media_receive_channel2_impl() { RTC_DCHECK(channel2_); RTC_DCHECK(channel2_->media_receive_channel()); - return static_cast( + return static_cast( channel2_->media_receive_channel()); } @@ -1538,8 +1540,8 @@ template <> std::unique_ptr ChannelTest::CreateChannel( rtc::Thread* worker_thread, rtc::Thread* network_thread, - std::unique_ptr send_ch, - std::unique_ptr receive_ch, + std::unique_ptr send_ch, + std::unique_ptr receive_ch, webrtc::RtpTransportInternal* rtp_transport, int flags) { rtc::Thread* signaling_thread = rtc::Thread::Current(); @@ -1625,8 +1627,8 @@ template <> std::unique_ptr ChannelTest::CreateChannel( rtc::Thread* worker_thread, rtc::Thread* network_thread, - std::unique_ptr send_ch, - std::unique_ptr receive_ch, + std::unique_ptr send_ch, + std::unique_ptr receive_ch, webrtc::RtpTransportInternal* rtp_transport, int flags) { rtc::Thread* signaling_thread = rtc::Thread::Current(); diff --git a/third_party/libwebrtc/pc/jsep_transport.cc b/third_party/libwebrtc/pc/jsep_transport.cc index ec186626b7ae..c7d41c8a4c33 100644 --- a/third_party/libwebrtc/pc/jsep_transport.cc +++ b/third_party/libwebrtc/pc/jsep_transport.cc @@ -494,15 +494,15 @@ bool JsepTransport::SetSdes(const std::vector& cryptos, // If setting an SDES answer succeeded, apply the negotiated parameters // to the SRTP transport. 
if ((type == SdpType::kPrAnswer || type == SdpType::kAnswer) && ret) { - if (sdes_negotiator_.send_cipher_suite() && - sdes_negotiator_.recv_cipher_suite()) { + if (sdes_negotiator_.send_crypto_suite() && + sdes_negotiator_.recv_crypto_suite()) { RTC_DCHECK(send_extension_ids_); RTC_DCHECK(recv_extension_ids_); ret = sdes_transport_->SetRtpParams( - *(sdes_negotiator_.send_cipher_suite()), + *(sdes_negotiator_.send_crypto_suite()), sdes_negotiator_.send_key().data(), static_cast(sdes_negotiator_.send_key().size()), - *(send_extension_ids_), *(sdes_negotiator_.recv_cipher_suite()), + *(send_extension_ids_), *(sdes_negotiator_.recv_crypto_suite()), sdes_negotiator_.recv_key().data(), static_cast(sdes_negotiator_.recv_key().size()), *(recv_extension_ids_)); diff --git a/third_party/libwebrtc/pc/legacy_stats_collector.cc b/third_party/libwebrtc/pc/legacy_stats_collector.cc index 6533fc30680d..3bc65ee3ee1a 100644 --- a/third_party/libwebrtc/pc/legacy_stats_collector.cc +++ b/third_party/libwebrtc/pc/legacy_stats_collector.cc @@ -301,7 +301,7 @@ void ExtractStats(const cricket::VideoReceiverInfo& info, bool use_standard_bytes_stats) { ExtractCommonReceiveProperties(info, report); report->AddString(StatsReport::kStatsValueNameCodecImplementationName, - info.decoder_implementation_name); + info.decoder_implementation_name.value_or("unknown")); int64_t bytes_received = info.payload_bytes_received; if (!use_standard_bytes_stats) { bytes_received += info.header_and_padding_bytes_received; @@ -366,7 +366,7 @@ void ExtractStats(const cricket::VideoSenderInfo& info, ExtractCommonSendProperties(info, report, use_standard_bytes_stats); report->AddString(StatsReport::kStatsValueNameCodecImplementationName, - info.encoder_implementation_name); + info.encoder_implementation_name.value_or("unknown")); report->AddBoolean(StatsReport::kStatsValueNameBandwidthLimitedResolution, (info.adapt_reason & 0x2) > 0); report->AddBoolean(StatsReport::kStatsValueNameCpuLimitedResolution, diff --git a/third_party/libwebrtc/pc/media_session.cc b/third_party/libwebrtc/pc/media_session.cc index f9117237bfab..a2ea39f890f9 100644 --- a/third_party/libwebrtc/pc/media_session.cc +++ b/third_party/libwebrtc/pc/media_session.cc @@ -113,6 +113,52 @@ cricket::RtpHeaderExtensions UnstoppedOrPresentRtpHeaderExtensions( namespace cricket { +static bool IsRtxCodec(const Codec& codec) { + return absl::EqualsIgnoreCase(codec.name, kRtxCodecName); +} + +static bool IsRtxCodec(const webrtc::RtpCodecCapability& capability) { + return absl::EqualsIgnoreCase(capability.name, kRtxCodecName); +} + +static bool ContainsRtxCodec(const std::vector& codecs) { + for (const auto& codec : codecs) { + if (IsRtxCodec(codec)) { + return true; + } + } + return false; +} + +static bool IsRedCodec(const Codec& codec) { + return absl::EqualsIgnoreCase(codec.name, kRedCodecName); +} + +static bool IsRedCodec(const webrtc::RtpCodecCapability& capability) { + return absl::EqualsIgnoreCase(capability.name, kRedCodecName); +} + +static bool IsFlexfecCodec(const Codec& codec) { + return absl::EqualsIgnoreCase(codec.name, kFlexfecCodecName); +} + +static bool ContainsFlexfecCodec(const std::vector& codecs) { + for (const auto& codec : codecs) { + if (IsFlexfecCodec(codec)) { + return true; + } + } + return false; +} + +static bool IsUlpfecCodec(const Codec& codec) { + return absl::EqualsIgnoreCase(codec.name, kUlpfecCodecName); +} + +static bool IsComfortNoiseCodec(const Codec& codec) { + return absl::EqualsIgnoreCase(codec.name, kComfortNoiseCodecName); +} + static 
RtpTransceiverDirection NegotiateRtpTransceiverDirection( RtpTransceiverDirection offer, RtpTransceiverDirection wants) { @@ -152,18 +198,18 @@ static bool CreateCryptoParams(int tag, std::string key = rtc::Base64::Encode(master_key); crypto_out->tag = tag; - crypto_out->cipher_suite = cipher; + crypto_out->crypto_suite = cipher; crypto_out->key_params = kInline; crypto_out->key_params += key; return true; } -static bool AddCryptoParams(const std::string& cipher_suite, +static bool AddCryptoParams(const std::string& crypto_suite, CryptoParamsVec* cryptos_out) { int size = static_cast(cryptos_out->size()); cryptos_out->resize(size + 1); - return CreateCryptoParams(size, cipher_suite, &cryptos_out->at(size)); + return CreateCryptoParams(size, crypto_suite, &cryptos_out->at(size)); } void AddMediaCryptos(const CryptoParamsVec& cryptos, @@ -273,11 +319,11 @@ static bool SelectCrypto(const MediaContentDescription* offer, for (const CryptoParams& crypto : cryptos) { if ((crypto_options.srtp.enable_gcm_crypto_suites && - rtc::IsGcmCryptoSuiteName(crypto.cipher_suite)) || - rtc::kCsAesCm128HmacSha1_80 == crypto.cipher_suite || - (rtc::kCsAesCm128HmacSha1_32 == crypto.cipher_suite && audio && + rtc::IsGcmCryptoSuiteName(crypto.crypto_suite)) || + rtc::kCsAesCm128HmacSha1_80 == crypto.crypto_suite || + (rtc::kCsAesCm128HmacSha1_32 == crypto.crypto_suite && audio && !bundle && crypto_options.srtp.enable_aes128_sha1_32_crypto_cipher)) { - return CreateCryptoParams(crypto.tag, crypto.cipher_suite, crypto_out); + return CreateCryptoParams(crypto.tag, crypto.crypto_suite, crypto_out); } } return false; @@ -496,7 +542,7 @@ static bool GetCryptosByName(const SessionDescription* sdesc, return true; } -// Prunes the `target_cryptos` by removing the crypto params (cipher_suite) +// Prunes the `target_cryptos` by removing the crypto params (crypto_suite) // which are not available in `filter`. static void PruneCryptos(const CryptoParamsVec& filter, CryptoParamsVec* target_cryptos) { @@ -506,11 +552,11 @@ static void PruneCryptos(const CryptoParamsVec& filter, target_cryptos->erase( std::remove_if(target_cryptos->begin(), target_cryptos->end(), - // Returns true if the `crypto`'s cipher_suite is not + // Returns true if the `crypto`'s crypto_suite is not // found in `filter`. 
[&filter](const CryptoParams& crypto) { for (const CryptoParams& entry : filter) { - if (entry.cipher_suite == crypto.cipher_suite) + if (entry.crypto_suite == crypto.crypto_suite) return false; } return true; @@ -610,51 +656,6 @@ static std::vector GetActiveContents( return active_contents; } -template -static bool ContainsRtxCodec(const std::vector& codecs) { - for (const auto& codec : codecs) { - if (IsRtxCodec(codec)) { - return true; - } - } - return false; -} - -template -static bool IsRedCodec(const C& codec) { - return absl::EqualsIgnoreCase(codec.name, kRedCodecName); -} - -template -static bool IsRtxCodec(const C& codec) { - return absl::EqualsIgnoreCase(codec.name, kRtxCodecName); -} - -template -static bool ContainsFlexfecCodec(const std::vector& codecs) { - for (const auto& codec : codecs) { - if (IsFlexfecCodec(codec)) { - return true; - } - } - return false; -} - -template -static bool IsFlexfecCodec(const C& codec) { - return absl::EqualsIgnoreCase(codec.name, kFlexfecCodecName); -} - -template -static bool IsUlpfecCodec(const C& codec) { - return absl::EqualsIgnoreCase(codec.name, kUlpfecCodecName); -} - -template -static bool IsComfortNoiseCodec(const C& codec) { - return absl::EqualsIgnoreCase(codec.name, kComfortNoiseCodecName); -} - // Create a media content to be offered for the given `sender_options`, // according to the given options.rtcp_mux, session_options.is_muc, codecs, // secure_transport, crypto, and current_streams. If we don't currently have diff --git a/third_party/libwebrtc/pc/media_session_unittest.cc b/third_party/libwebrtc/pc/media_session_unittest.cc index 8a6c8f6019cf..a4979b86fea4 100644 --- a/third_party/libwebrtc/pc/media_session_unittest.cc +++ b/third_party/libwebrtc/pc/media_session_unittest.cc @@ -50,7 +50,7 @@ #define ASSERT_CRYPTO(cd, s, cs) \ ASSERT_EQ(s, cd->cryptos().size()); \ - ASSERT_EQ(cs, cd->cryptos()[0].cipher_suite) + ASSERT_EQ(cs, cd->cryptos()[0].crypto_suite) typedef std::vector Candidates; @@ -423,8 +423,8 @@ void PreferGcmCryptoParameters(CryptoParamsVec* cryptos) { cryptos->erase( std::remove_if(cryptos->begin(), cryptos->end(), [](const cricket::CryptoParams& crypto) { - return crypto.cipher_suite != kCsAeadAes256Gcm && - crypto.cipher_suite != kCsAeadAes128Gcm; + return crypto.crypto_suite != kCsAeadAes256Gcm && + crypto.crypto_suite != kCsAeadAes128Gcm; }), cryptos->end()); } @@ -486,7 +486,7 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test { if (c1.size() != c2.size()) return false; for (size_t i = 0; i < c1.size(); ++i) - if (c1[i].tag != c2[i].tag || c1[i].cipher_suite != c2[i].cipher_suite || + if (c1[i].tag != c2[i].tag || c1[i].crypto_suite != c2[i].crypto_suite || c1[i].key_params != c2[i].key_params || c1[i].session_params != c2[i].session_params) return false; @@ -632,7 +632,7 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test { video_media_desc->cryptos())); EXPECT_EQ(1u, audio_media_desc->cryptos().size()); EXPECT_EQ(kDefaultSrtpCryptoSuite, - audio_media_desc->cryptos()[0].cipher_suite); + audio_media_desc->cryptos()[0].crypto_suite); // Verify the selected crypto is one from the reference audio // media content. 
diff --git a/third_party/libwebrtc/pc/peer_connection.cc b/third_party/libwebrtc/pc/peer_connection.cc index 08d3a8a32f45..d697f6596063 100644 --- a/third_party/libwebrtc/pc/peer_connection.cc +++ b/third_party/libwebrtc/pc/peer_connection.cc @@ -1780,9 +1780,9 @@ bool PeerConnection::StartRtcEventLog(std::unique_ptr output, bool PeerConnection::StartRtcEventLog( std::unique_ptr output) { - int64_t output_period_ms = webrtc::RtcEventLog::kImmediateOutput; - if (trials().IsEnabled("WebRTC-RtcEventLogNewFormat")) { - output_period_ms = 5000; + int64_t output_period_ms = 5000; + if (trials().IsDisabled("WebRTC-RtcEventLogNewFormat")) { + output_period_ms = webrtc::RtcEventLog::kImmediateOutput; } return StartRtcEventLog(std::move(output), output_period_ms); } diff --git a/third_party/libwebrtc/pc/peer_connection_bundle_unittest.cc b/third_party/libwebrtc/pc/peer_connection_bundle_unittest.cc index 7edbe893167b..715a8002a246 100644 --- a/third_party/libwebrtc/pc/peer_connection_bundle_unittest.cc +++ b/third_party/libwebrtc/pc/peer_connection_bundle_unittest.cc @@ -730,16 +730,17 @@ TEST_P(PeerConnectionBundleTest, BundleOnFirstMidInAnswer) { } // This tests that applying description with conflicted RTP demuxing criteria -// will fail. -TEST_P(PeerConnectionBundleTest, - ApplyDescriptionWithConflictedDemuxCriteriaFail) { +// will fail when using BUNDLE. +TEST_P(PeerConnectionBundleTest, ApplyDescriptionWithSameSsrcsBundledFails) { auto caller = CreatePeerConnectionWithAudioVideo(); auto callee = CreatePeerConnectionWithAudioVideo(); RTCOfferAnswerOptions options; - options.use_rtp_mux = false; + options.use_rtp_mux = true; auto offer = caller->CreateOffer(options); - // Modified the SDP to make two m= sections have the same SSRC. + EXPECT_TRUE( + caller->SetLocalDescription(CloneSessionDescription(offer.get()))); + // Modify the remote SDP to make two m= sections have the same SSRC. ASSERT_GE(offer->description()->contents().size(), 2U); offer->description() ->contents()[0] @@ -751,20 +752,42 @@ TEST_P(PeerConnectionBundleTest, .media_description() ->mutable_streams()[0] .ssrcs[0] = 1111222; + EXPECT_TRUE(callee->SetRemoteDescription(std::move(offer))); + + // When BUNDLE is enabled, applying the description is expected to fail + // because the demuxing criteria can not be satisfied. + auto answer = callee->CreateAnswer(options); + EXPECT_FALSE(callee->SetLocalDescription(std::move(answer))); +} + +// A variant of the above, without BUNDLE duplicate SSRCs are allowed. +TEST_P(PeerConnectionBundleTest, + ApplyDescriptionWithSameSsrcsUnbundledSucceeds) { + auto caller = CreatePeerConnectionWithAudioVideo(); + auto callee = CreatePeerConnectionWithAudioVideo(); + + RTCOfferAnswerOptions options; + options.use_rtp_mux = false; + auto offer = caller->CreateOffer(options); EXPECT_TRUE( caller->SetLocalDescription(CloneSessionDescription(offer.get()))); + // Modify the remote SDP to make two m= sections have the same SSRC. + ASSERT_GE(offer->description()->contents().size(), 2U); + offer->description() + ->contents()[0] + .media_description() + ->mutable_streams()[0] + .ssrcs[0] = 1111222; + offer->description() + ->contents()[1] + .media_description() + ->mutable_streams()[0] + .ssrcs[0] = 1111222; EXPECT_TRUE(callee->SetRemoteDescription(std::move(offer))); - EXPECT_TRUE(callee->CreateAnswerAndSetAsLocal(options)); - // Enable BUNDLE in subsequent offer/answer exchange and two m= sections are - // expectd to use one RtpTransport underneath. 
- options.use_rtp_mux = true; - EXPECT_TRUE( - callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal(options))); + // Without BUNDLE, demuxing is done per-transport. auto answer = callee->CreateAnswer(options); - // When BUNDLE is enabled, applying the description is expected to fail - // because the demuxing criteria is conflicted. - EXPECT_FALSE(callee->SetLocalDescription(std::move(answer))); + EXPECT_TRUE(callee->SetLocalDescription(std::move(answer))); } // This tests that changing the pre-negotiated BUNDLE tag is not supported. diff --git a/third_party/libwebrtc/pc/peer_connection_crypto_unittest.cc b/third_party/libwebrtc/pc/peer_connection_crypto_unittest.cc index 1aedbf295fe2..dc350b2be079 100644 --- a/third_party/libwebrtc/pc/peer_connection_crypto_unittest.cc +++ b/third_party/libwebrtc/pc/peer_connection_crypto_unittest.cc @@ -199,7 +199,7 @@ SdpContentPredicate HaveSdesGcmCryptos(size_t num_crypto_suites) { } for (size_t i = 0; i < cryptos.size(); ++i) { if (cryptos[i].key_params.size() == 67U && - cryptos[i].cipher_suite == "AEAD_AES_256_GCM") + cryptos[i].crypto_suite == "AEAD_AES_256_GCM") return true; } return false; diff --git a/third_party/libwebrtc/pc/peer_connection_factory.cc b/third_party/libwebrtc/pc/peer_connection_factory.cc index eaa69d3e7c56..67874b88d6fa 100644 --- a/third_party/libwebrtc/pc/peer_connection_factory.cc +++ b/third_party/libwebrtc/pc/peer_connection_factory.cc @@ -293,9 +293,9 @@ rtc::scoped_refptr PeerConnectionFactory::CreateAudioTrack( std::unique_ptr PeerConnectionFactory::CreateRtcEventLog_w() { RTC_DCHECK_RUN_ON(worker_thread()); - auto encoding_type = RtcEventLog::EncodingType::Legacy; - if (IsTrialEnabled("WebRTC-RtcEventLogNewFormat")) - encoding_type = RtcEventLog::EncodingType::NewFormat; + auto encoding_type = RtcEventLog::EncodingType::NewFormat; + if (field_trials().IsDisabled("WebRTC-RtcEventLogNewFormat")) + encoding_type = RtcEventLog::EncodingType::Legacy; return event_log_factory_ ? 
event_log_factory_->Create(encoding_type) : std::make_unique(); } diff --git a/third_party/libwebrtc/pc/peer_connection_field_trial_tests.cc b/third_party/libwebrtc/pc/peer_connection_field_trial_tests.cc index c3b3a2db7ff7..7799c9d6e360 100644 --- a/third_party/libwebrtc/pc/peer_connection_field_trial_tests.cc +++ b/third_party/libwebrtc/pc/peer_connection_field_trial_tests.cc @@ -237,6 +237,7 @@ TEST_F(PeerConnectionFieldTrialTest, ApplyFakeNetworkConfig) { auto video_track_source = rtc::make_ref_counted( config, clock_, /*is_screencast=*/false); + video_track_source->Start(); caller->AddTrack(pc_factory_->CreateVideoTrack(video_track_source, "v")); WrapperPtr callee = CreatePeerConnection(); diff --git a/third_party/libwebrtc/pc/peer_connection_media_unittest.cc b/third_party/libwebrtc/pc/peer_connection_media_unittest.cc index 72bd9bda79fe..87e018b83e6e 100644 --- a/third_party/libwebrtc/pc/peer_connection_media_unittest.cc +++ b/third_party/libwebrtc/pc/peer_connection_media_unittest.cc @@ -43,6 +43,7 @@ #include "api/task_queue/task_queue_factory.h" #include "media/base/codec.h" #include "media/base/fake_media_engine.h" +#include "media/base/media_channel.h" #include "media/base/media_constants.h" #include "media/base/media_engine.h" #include "media/base/stream_params.h" @@ -50,9 +51,11 @@ #include "p2p/base/p2p_constants.h" #include "p2p/base/port_allocator.h" #include "p2p/base/transport_info.h" +#include "pc/channel_interface.h" #include "pc/media_session.h" #include "pc/peer_connection_wrapper.h" #include "pc/rtp_media_utils.h" +#include "pc/rtp_transceiver.h" #include "pc/session_description.h" #include "pc/test/mock_peer_connection_observers.h" #include "rtc_base/checks.h" @@ -78,6 +81,47 @@ using ::testing::ElementsAre; using ::testing::NotNull; using ::testing::Values; +cricket::MediaSendChannelInterface* SendChannelInternal( + rtc::scoped_refptr transceiver) { + auto transceiver_with_internal = static_cast>*>( + transceiver.get()); + auto transceiver_internal = + static_cast(transceiver_with_internal->internal()); + return transceiver_internal->channel()->media_send_channel(); +} + +cricket::MediaReceiveChannelInterface* ReceiveChannelInternal( + rtc::scoped_refptr transceiver) { + auto transceiver_with_internal = static_cast>*>( + transceiver.get()); + auto transceiver_internal = + static_cast(transceiver_with_internal->internal()); + return transceiver_internal->channel()->media_receive_channel(); +} + +cricket::FakeVideoMediaSendChannel* VideoMediaSendChannel( + rtc::scoped_refptr transceiver) { + return static_cast( + SendChannelInternal(transceiver)); +} +cricket::FakeVideoMediaReceiveChannel* VideoMediaReceiveChannel( + rtc::scoped_refptr transceiver) { + return static_cast( + ReceiveChannelInternal(transceiver)); +} +cricket::FakeVoiceMediaSendChannel* VoiceMediaSendChannel( + rtc::scoped_refptr transceiver) { + return static_cast( + SendChannelInternal(transceiver)); +} +cricket::FakeVoiceMediaReceiveChannel* VoiceMediaReceiveChannel( + rtc::scoped_refptr transceiver) { + return static_cast( + ReceiveChannelInternal(transceiver)); +} + class PeerConnectionWrapperForMediaTest : public PeerConnectionWrapper { public: using PeerConnectionWrapper::PeerConnectionWrapper; @@ -272,7 +316,8 @@ std::vector GetIds( // Test that exchanging an offer and answer with each side having an audio and // video stream creates the appropriate send/recv streams in the underlying // media engine on both sides. 
-TEST_P(PeerConnectionMediaTest, AudioVideoOfferAnswerCreateSendRecvStreams) { +TEST_F(PeerConnectionMediaTestUnifiedPlan, + AudioVideoOfferAnswerCreateSendRecvStreams) { const std::string kCallerAudioId = "caller_a"; const std::string kCallerVideoId = "caller_v"; const std::string kCalleeAudioId = "callee_a"; @@ -290,33 +335,37 @@ TEST_P(PeerConnectionMediaTest, AudioVideoOfferAnswerCreateSendRecvStreams) { ASSERT_TRUE( caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal())); - auto* caller_voice_send = caller->media_engine()->GetVoiceSendChannel(0); + auto* caller_voice_send = + VoiceMediaSendChannel(caller->pc()->GetTransceivers()[0]); auto* caller_voice_receive = - caller->media_engine()->GetVoiceReceiveChannel(0); + VoiceMediaReceiveChannel(caller->pc()->GetTransceivers()[0]); EXPECT_THAT(GetIds(caller_voice_receive->recv_streams()), ElementsAre(kCalleeAudioId)); EXPECT_THAT(GetIds(caller_voice_send->send_streams()), ElementsAre(kCallerAudioId)); - auto* caller_video_send = caller->media_engine()->GetVideoSendChannel(0); + auto* caller_video_send = + VideoMediaSendChannel(caller->pc()->GetTransceivers()[1]); auto* caller_video_receive = - caller->media_engine()->GetVideoReceiveChannel(0); + VideoMediaReceiveChannel(caller->pc()->GetTransceivers()[1]); EXPECT_THAT(GetIds(caller_video_receive->recv_streams()), ElementsAre(kCalleeVideoId)); EXPECT_THAT(GetIds(caller_video_send->send_streams()), ElementsAre(kCallerVideoId)); - auto* callee_voice_send = callee->media_engine()->GetVoiceSendChannel(0); + auto* callee_voice_send = + VoiceMediaSendChannel(callee->pc()->GetTransceivers()[0]); auto* callee_voice_receive = - callee->media_engine()->GetVoiceReceiveChannel(0); + VoiceMediaReceiveChannel(callee->pc()->GetTransceivers()[0]); EXPECT_THAT(GetIds(callee_voice_receive->recv_streams()), ElementsAre(kCallerAudioId)); EXPECT_THAT(GetIds(callee_voice_send->send_streams()), ElementsAre(kCalleeAudioId)); - auto* callee_video_send = callee->media_engine()->GetVideoSendChannel(0); + auto* callee_video_send = + VideoMediaSendChannel(callee->pc()->GetTransceivers()[1]); auto* callee_video_receive = - callee->media_engine()->GetVideoReceiveChannel(0); + VideoMediaReceiveChannel(callee->pc()->GetTransceivers()[1]); EXPECT_THAT(GetIds(callee_video_receive->recv_streams()), ElementsAre(kCallerVideoId)); EXPECT_THAT(GetIds(callee_video_send->send_streams()), @@ -342,10 +391,7 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); - ASSERT_FALSE(callee->media_engine()->GetVoiceSendChannel(0)); - ASSERT_FALSE(callee->media_engine()->GetVideoSendChannel(0)); - ASSERT_FALSE(callee->media_engine()->GetVoiceReceiveChannel(0)); - ASSERT_FALSE(callee->media_engine()->GetVideoReceiveChannel(0)); + ASSERT_TRUE(callee->pc()->GetTransceivers().empty()); } // Test that removing streams from a subsequent offer causes the receive streams @@ -365,14 +411,8 @@ TEST_F(PeerConnectionMediaTestPlanB, EmptyRemoteOfferRemovesRecvStreams) { ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); - auto callee_voice_send = callee->media_engine()->GetVoiceSendChannel(0); - auto callee_video_send = callee->media_engine()->GetVideoSendChannel(0); - auto callee_voice_receive = callee->media_engine()->GetVoiceReceiveChannel(0); - auto callee_video_receive = callee->media_engine()->GetVideoReceiveChannel(0); - EXPECT_EQ(1u, callee_voice_send->send_streams().size()); - EXPECT_EQ(0u, callee_voice_receive->recv_streams().size()); - EXPECT_EQ(1u, 
callee_video_send->send_streams().size()); - EXPECT_EQ(0u, callee_video_receive->recv_streams().size()); + EXPECT_TRUE(callee->pc()->GetReceivers().empty()); + EXPECT_EQ(2u, callee->pc()->GetSenders().size()); } // Test enabling of simulcast with Plan B semantics. @@ -444,8 +484,7 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); - EXPECT_FALSE(callee->media_engine()->GetVoiceReceiveChannel(0)); - EXPECT_FALSE(callee->media_engine()->GetVideoReceiveChannel(0)); + EXPECT_TRUE(callee->pc()->GetTransceivers().empty()); } // Test that removing streams from a subsequent answer causes the send streams @@ -465,14 +504,8 @@ TEST_F(PeerConnectionMediaTestPlanB, EmptyLocalAnswerRemovesSendStreams) { ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); - auto callee_voice_send = callee->media_engine()->GetVoiceSendChannel(0); - auto callee_voice_receive = callee->media_engine()->GetVoiceReceiveChannel(0); - auto callee_video_send = callee->media_engine()->GetVideoSendChannel(0); - auto callee_video_receive = callee->media_engine()->GetVideoReceiveChannel(0); - EXPECT_EQ(0u, callee_voice_send->send_streams().size()); - EXPECT_EQ(1u, callee_voice_receive->recv_streams().size()); - EXPECT_EQ(0u, callee_video_send->send_streams().size()); - EXPECT_EQ(1u, callee_video_receive->recv_streams().size()); + EXPECT_TRUE(callee->pc()->GetSenders().empty()); + EXPECT_EQ(2u, callee->pc()->GetReceivers().size()); } // Test that a new stream in a subsequent offer causes a new receive stream to @@ -489,11 +522,12 @@ TEST_P(PeerConnectionMediaTest, NewStreamInRemoteOfferAddsRecvStreams) { ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); - auto a1 = callee->media_engine()->GetVoiceReceiveChannel(0); - auto a2 = callee->media_engine()->GetVoiceReceiveChannel(1); - auto v1 = callee->media_engine()->GetVideoReceiveChannel(0); - auto v2 = callee->media_engine()->GetVideoReceiveChannel(1); if (IsUnifiedPlan()) { + auto a1 = VoiceMediaReceiveChannel(callee->pc()->GetTransceivers()[0]); + auto a2 = VoiceMediaReceiveChannel(callee->pc()->GetTransceivers()[2]); + auto v1 = VideoMediaReceiveChannel(callee->pc()->GetTransceivers()[1]); + auto v2 = VideoMediaReceiveChannel(callee->pc()->GetTransceivers()[3]); + ASSERT_TRUE(a1); EXPECT_EQ(1u, a1->recv_streams().size()); ASSERT_TRUE(a2); @@ -503,12 +537,7 @@ TEST_P(PeerConnectionMediaTest, NewStreamInRemoteOfferAddsRecvStreams) { ASSERT_TRUE(v2); EXPECT_EQ(1u, v2->recv_streams().size()); } else { - ASSERT_TRUE(a1); - EXPECT_EQ(2u, a1->recv_streams().size()); - ASSERT_FALSE(a2); - ASSERT_TRUE(v1); - EXPECT_EQ(2u, v1->recv_streams().size()); - ASSERT_FALSE(v2); + EXPECT_EQ(4u, callee->pc()->GetReceivers().size()); } } @@ -535,17 +564,18 @@ TEST_P(PeerConnectionMediaTest, NewStreamInLocalAnswerAddsSendStreams) { ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get(), offer_options, answer_options)); - auto callee_voice = callee->media_engine()->GetVoiceSendChannel(0); - ASSERT_TRUE(callee_voice); - auto callee_video = callee->media_engine()->GetVideoSendChannel(0); - ASSERT_TRUE(callee_video); - if (IsUnifiedPlan()) { + auto callee_voice = + VoiceMediaSendChannel(callee->pc()->GetTransceivers()[0]); + ASSERT_TRUE(callee_voice); + auto callee_video = + VideoMediaSendChannel(callee->pc()->GetTransceivers()[1]); + ASSERT_TRUE(callee_video); + EXPECT_EQ(1u, callee_voice->send_streams().size()); EXPECT_EQ(1u, callee_video->send_streams().size()); } else { - EXPECT_EQ(2u, callee_voice->send_streams().size()); - 
EXPECT_EQ(2u, callee_video->send_streams().size()); + EXPECT_EQ(4u, callee->pc()->GetSenders().size()); } } @@ -1088,158 +1118,18 @@ INSTANTIATE_TEST_SUITE_P( ChangeMediaTypeAudioToVideo, kMLinesOutOfOrder)))); -// Test that the correct media engine send/recv streams are created when doing -// a series of offer/answers where audio/video are both sent, then audio is -// rejected, then both audio/video sent again. -TEST_P(PeerConnectionMediaTest, TestAVOfferWithAudioOnlyAnswer) { - if (IsUnifiedPlan()) { - // offer_to_receive_ is not implemented when creating answers with Unified - // Plan semantics specified. - return; - } - - RTCOfferAnswerOptions options_reject_video; - options_reject_video.offer_to_receive_audio = - RTCOfferAnswerOptions::kOfferToReceiveMediaTrue; - options_reject_video.offer_to_receive_video = 0; - - auto caller = CreatePeerConnection(); - caller->AddAudioTrack("a"); - caller->AddVideoTrack("v"); - auto callee = CreatePeerConnection(); - - // Caller initially offers to send/recv audio and video. - ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); - // Callee accepts the audio as recv only but rejects the video. - ASSERT_TRUE(caller->SetRemoteDescription( - callee->CreateAnswerAndSetAsLocal(options_reject_video))); - - auto caller_voice_send = caller->media_engine()->GetVoiceSendChannel(0); - auto caller_voice_receive = caller->media_engine()->GetVoiceReceiveChannel(0); - ASSERT_TRUE(caller_voice_send && caller_voice_receive); - EXPECT_EQ(0u, caller_voice_receive->recv_streams().size()); - EXPECT_EQ(1u, caller_voice_send->send_streams().size()); - auto caller_video = caller->media_engine()->GetVideoSendChannel(0); - EXPECT_FALSE(caller_video); - - // Callee adds its own audio/video stream and offers to receive audio/video - // too. - callee->AddAudioTrack("a"); - auto callee_video_track = callee->AddVideoTrack("v"); - ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); - ASSERT_TRUE( - caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal())); - - auto callee_voice_send = callee->media_engine()->GetVoiceSendChannel(0); - auto callee_voice_receive = callee->media_engine()->GetVoiceReceiveChannel(0); - ASSERT_TRUE(callee_voice_send && callee_voice_receive); - EXPECT_EQ(1u, callee_voice_receive->recv_streams().size()); - EXPECT_EQ(1u, callee_voice_send->send_streams().size()); - auto callee_video_send = callee->media_engine()->GetVideoSendChannel(0); - auto callee_video_receive = callee->media_engine()->GetVideoReceiveChannel(0); - ASSERT_TRUE(callee_video_send && callee_video_receive); - EXPECT_EQ(1u, callee_video_receive->recv_streams().size()); - EXPECT_EQ(1u, callee_video_send->send_streams().size()); - - // Callee removes video but keeps audio and rejects the video once again. 
- callee->pc()->RemoveTrackOrError(callee_video_track); - ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); - ASSERT_TRUE( - callee->SetLocalDescription(callee->CreateAnswer(options_reject_video))); - - callee_voice_send = callee->media_engine()->GetVoiceSendChannel(0); - callee_voice_receive = callee->media_engine()->GetVoiceReceiveChannel(0); - ASSERT_TRUE(callee_voice_send && callee_voice_receive); - EXPECT_EQ(1u, callee_voice_receive->recv_streams().size()); - EXPECT_EQ(1u, callee_voice_send->send_streams().size()); - auto callee_video = callee->media_engine()->GetVideoSendChannel(0); - EXPECT_FALSE(callee_video); -} - -// Test that the correct media engine send/recv streams are created when doing -// a series of offer/answers where audio/video are both sent, then video is -// rejected, then both audio/video sent again. -TEST_P(PeerConnectionMediaTest, TestAVOfferWithVideoOnlyAnswer) { - if (IsUnifiedPlan()) { - // offer_to_receive_ is not implemented when creating answers with Unified - // Plan semantics specified. - return; - } - - // Disable the bundling here. If the media is bundled on audio - // transport, then we can't reject the audio because switching the bundled - // transport is not currently supported. - // (https://bugs.chromium.org/p/webrtc/issues/detail?id=6704) - RTCOfferAnswerOptions options_no_bundle; - options_no_bundle.use_rtp_mux = false; - RTCOfferAnswerOptions options_reject_audio = options_no_bundle; - options_reject_audio.offer_to_receive_audio = 0; - options_reject_audio.offer_to_receive_video = - RTCOfferAnswerOptions::kMaxOfferToReceiveMedia; - - auto caller = CreatePeerConnection(); - caller->AddAudioTrack("a"); - caller->AddVideoTrack("v"); - auto callee = CreatePeerConnection(); - - // Caller initially offers to send/recv audio and video. - ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); - // Callee accepts the video as recv only but rejects the audio. - ASSERT_TRUE(caller->SetRemoteDescription( - callee->CreateAnswerAndSetAsLocal(options_reject_audio))); - - auto caller_voice = caller->media_engine()->GetVoiceSendChannel(0); - EXPECT_FALSE(caller_voice); - auto caller_video_send = caller->media_engine()->GetVideoSendChannel(0); - auto caller_video_receive = caller->media_engine()->GetVideoReceiveChannel(0); - ASSERT_TRUE(caller_video_send && caller_video_receive); - EXPECT_EQ(0u, caller_video_receive->recv_streams().size()); - EXPECT_EQ(1u, caller_video_send->send_streams().size()); - - // Callee adds its own audio/video stream and offers to receive audio/video - // too. 
- auto callee_audio_track = callee->AddAudioTrack("a"); - callee->AddVideoTrack("v"); - ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); - ASSERT_TRUE(caller->SetRemoteDescription( - callee->CreateAnswerAndSetAsLocal(options_no_bundle))); - - auto callee_voice_send = callee->media_engine()->GetVoiceSendChannel(0); - auto callee_voice_receive = callee->media_engine()->GetVoiceReceiveChannel(0); - ASSERT_TRUE(callee_voice_send && callee_voice_receive); - EXPECT_EQ(1u, callee_voice_receive->recv_streams().size()); - EXPECT_EQ(1u, callee_voice_send->send_streams().size()); - auto callee_video_send = callee->media_engine()->GetVideoSendChannel(0); - auto callee_video_receive = callee->media_engine()->GetVideoReceiveChannel(0); - ASSERT_TRUE(callee_video_send && callee_video_receive); - EXPECT_EQ(1u, callee_video_receive->recv_streams().size()); - EXPECT_EQ(1u, callee_video_send->send_streams().size()); - - // Callee removes audio but keeps video and rejects the audio once again. - callee->pc()->RemoveTrackOrError(callee_audio_track); - ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); - ASSERT_TRUE( - callee->SetLocalDescription(callee->CreateAnswer(options_reject_audio))); - - auto callee_voice = callee->media_engine()->GetVoiceReceiveChannel(0); - EXPECT_FALSE(callee_voice); - callee_video_send = callee->media_engine()->GetVideoSendChannel(0); - callee_video_receive = callee->media_engine()->GetVideoReceiveChannel(0); - ASSERT_TRUE(callee_video_send && callee_video_receive); - EXPECT_EQ(1u, callee_video_receive->recv_streams().size()); - EXPECT_EQ(1u, callee_video_send->send_streams().size()); -} - // Tests that if the underlying video encoder fails to be initialized (signaled // by failing to set send codecs), the PeerConnection signals the error to the // client. -TEST_P(PeerConnectionMediaTest, MediaEngineErrorPropagatedToClients) { +TEST_F(PeerConnectionMediaTestUnifiedPlan, + MediaEngineErrorPropagatedToClients) { auto caller = CreatePeerConnectionWithAudioVideo(); auto callee = CreatePeerConnectionWithAudioVideo(); ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); - auto video_channel = caller->media_engine()->GetVideoSendChannel(0); + auto video_channel = + VideoMediaSendChannel(caller->pc()->GetTransceivers()[1]); video_channel->set_fail_set_send_codecs(true); std::string error; @@ -1255,14 +1145,15 @@ TEST_P(PeerConnectionMediaTest, MediaEngineErrorPropagatedToClients) { // Tests that if the underlying video encoder fails once then subsequent // attempts at setting the local/remote description will also fail, even if // SetSendCodecs no longer fails. 
-TEST_P(PeerConnectionMediaTest, +TEST_F(PeerConnectionMediaTestUnifiedPlan, FailToApplyDescriptionIfVideoEncoderHasEverFailed) { auto caller = CreatePeerConnectionWithAudioVideo(); auto callee = CreatePeerConnectionWithAudioVideo(); ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); - auto video_channel = caller->media_engine()->GetVideoSendChannel(0); + auto video_channel = + VideoMediaSendChannel(caller->pc()->GetTransceivers()[1]); video_channel->set_fail_set_send_codecs(true); EXPECT_FALSE( diff --git a/third_party/libwebrtc/pc/peer_connection_mediachannel_split_unittest.cc b/third_party/libwebrtc/pc/peer_connection_mediachannel_split_unittest.cc deleted file mode 100644 index 97e5ecdc4041..000000000000 --- a/third_party/libwebrtc/pc/peer_connection_mediachannel_split_unittest.cc +++ /dev/null @@ -1,209 +0,0 @@ -/* - * Copyright 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// Integration tests for PeerConnection to exercise the options of -// either splitting or not splitting the MediaChannel object. -// These tests exercise a full stack over a simulated network. - -// TODO(bugs.webrtc.org/13931): Delete these tests when split is landed. - -#include -#include - -#include "absl/types/optional.h" -#include "api/media_stream_interface.h" -#include "api/peer_connection_interface.h" -#include "api/rtc_error.h" -#include "api/scoped_refptr.h" -#include "api/stats/rtc_stats.h" -#include "api/stats/rtc_stats_report.h" -#include "api/stats/rtcstats_objects.h" -#include "media/base/codec.h" -#include "media/base/media_constants.h" -#include "pc/session_description.h" -#include "pc/test/integration_test_helpers.h" -#include "rtc_base/gunit.h" -#include "rtc_base/virtual_socket_server.h" -#include "test/gtest.h" - -namespace webrtc { - -namespace { - -class PeerConnectionMediaChannelSplitTest - : public PeerConnectionIntegrationBaseTest, - public ::testing::WithParamInterface { - protected: - PeerConnectionMediaChannelSplitTest() - : PeerConnectionIntegrationBaseTest(SdpSemantics::kUnifiedPlan, - /* field_trials = */ GetParam()) {} -}; - -int NacksReceivedCount(PeerConnectionIntegrationWrapper& pc) { - rtc::scoped_refptr report = pc.NewGetStats(); - auto sender_stats = report->GetStatsOfType(); - if (sender_stats.size() != 1) { - ADD_FAILURE(); - return 0; - } - if (!sender_stats[0]->nack_count.is_defined()) { - return 0; - } - return *sender_stats[0]->nack_count; -} - -int NacksSentCount(PeerConnectionIntegrationWrapper& pc) { - rtc::scoped_refptr report = pc.NewGetStats(); - auto receiver_stats = report->GetStatsOfType(); - if (receiver_stats.size() != 1) { - ADD_FAILURE(); - return 0; - } - if (!receiver_stats[0]->nack_count.is_defined()) { - return 0; - } - return *receiver_stats[0]->nack_count; -} - -// Test disabled because it is flaky. 
-TEST_P(PeerConnectionMediaChannelSplitTest, - DISABLED_AudioPacketLossCausesNack) { - RTCConfiguration config; - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config)); - ConnectFakeSignaling(); - auto audio_transceiver_or_error = - caller()->pc()->AddTransceiver(caller()->CreateLocalAudioTrack()); - ASSERT_TRUE(audio_transceiver_or_error.ok()); - auto send_transceiver = audio_transceiver_or_error.MoveValue(); - // Munge the SDP to include NACK and RRTR on Opus, and remove all other - // codecs. - caller()->SetGeneratedSdpMunger([](cricket::SessionDescription* desc) { - for (ContentInfo& content : desc->contents()) { - cricket::AudioContentDescription* media = - content.media_description()->as_audio(); - std::vector codecs = media->codecs(); - std::vector codecs_out; - for (cricket::AudioCodec codec : codecs) { - if (codec.name == "opus") { - codec.AddFeedbackParam(cricket::FeedbackParam( - cricket::kRtcpFbParamNack, cricket::kParamValueEmpty)); - codec.AddFeedbackParam(cricket::FeedbackParam( - cricket::kRtcpFbParamRrtr, cricket::kParamValueEmpty)); - codecs_out.push_back(codec); - } - } - EXPECT_FALSE(codecs_out.empty()); - media->set_codecs(codecs_out); - } - }); - - caller()->CreateAndSetAndSignalOffer(); - // Check for failure in helpers - ASSERT_FALSE(HasFailure()); - MediaExpectations media_expectations; - media_expectations.CalleeExpectsSomeAudio(1); - ExpectNewFrames(media_expectations); - ASSERT_FALSE(HasFailure()); - - virtual_socket_server()->set_drop_probability(0.2); - - // Wait until callee has sent at least one NACK. - // Note that due to stats caching, this might only be visible 50 ms - // after the nack was in fact sent. - EXPECT_TRUE_WAIT(NacksSentCount(*callee()) > 0, kDefaultTimeout); - ASSERT_FALSE(HasFailure()); - - virtual_socket_server()->set_drop_probability(0.0); - // Wait until caller has received at least one NACK - EXPECT_TRUE_WAIT(NacksReceivedCount(*caller()) > 0, kDefaultTimeout); -} - -TEST_P(PeerConnectionMediaChannelSplitTest, VideoPacketLossCausesNack) { - RTCConfiguration config; - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config)); - ConnectFakeSignaling(); - auto video_transceiver_or_error = - caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack()); - ASSERT_TRUE(video_transceiver_or_error.ok()); - auto send_transceiver = video_transceiver_or_error.MoveValue(); - // Munge the SDP to include NACK and RRTR on VP8, and remove all other - // codecs. - caller()->SetGeneratedSdpMunger([](cricket::SessionDescription* desc) { - for (ContentInfo& content : desc->contents()) { - cricket::VideoContentDescription* media = - content.media_description()->as_video(); - std::vector codecs = media->codecs(); - std::vector codecs_out; - for (cricket::VideoCodec codec : codecs) { - if (codec.name == "VP8") { - ASSERT_TRUE(codec.HasFeedbackParam(cricket::FeedbackParam( - cricket::kRtcpFbParamNack, cricket::kParamValueEmpty))); - codecs_out.push_back(codec); - } - } - EXPECT_FALSE(codecs_out.empty()); - media->set_codecs(codecs_out); - } - }); - - caller()->CreateAndSetAndSignalOffer(); - // Check for failure in helpers - ASSERT_FALSE(HasFailure()); - MediaExpectations media_expectations; - media_expectations.CalleeExpectsSomeVideo(1); - ExpectNewFrames(media_expectations); - ASSERT_FALSE(HasFailure()); - - virtual_socket_server()->set_drop_probability(0.2); - - // Wait until callee has sent at least one NACK. - // Note that due to stats caching, this might only be visible 50 ms - // after the nack was in fact sent. 
- EXPECT_TRUE_WAIT(NacksSentCount(*callee()) > 0, kDefaultTimeout); - ASSERT_FALSE(HasFailure()); - - // Wait until caller has received at least one NACK - EXPECT_TRUE_WAIT(NacksReceivedCount(*caller()) > 0, kDefaultTimeout); -} - -// Test that we can get capture start ntp time. -TEST_P(PeerConnectionMediaChannelSplitTest, - GetCaptureStartNtpTimeWithOldStatsApi) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - caller()->AddAudioTrack(); - - callee()->AddAudioTrack(); - - // Do offer/answer, wait for the callee to receive some frames. - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - - // Get the remote audio track created on the receiver, so they can be used as - // GetStats filters. - auto receivers = callee()->pc()->GetReceivers(); - ASSERT_EQ(1u, receivers.size()); - auto remote_audio_track = receivers[0]->track(); - - // Get the audio output level stats. Note that the level is not available - // until an RTCP packet has been received. - EXPECT_TRUE_WAIT(callee()->OldGetStatsForTrack(remote_audio_track.get()) - ->CaptureStartNtpTime() > 0, - 2 * kMaxWaitForFramesMs); -} - -INSTANTIATE_TEST_SUITE_P(PeerConnectionMediaChannelSplitTest, - PeerConnectionMediaChannelSplitTest, - Values("WebRTC-SplitMediaChannel/Disabled/", - "WebRTC-SplitMediaChannel/Enabled/")); - -} // namespace - -} // namespace webrtc diff --git a/third_party/libwebrtc/pc/peer_connection_simulcast_unittest.cc b/third_party/libwebrtc/pc/peer_connection_simulcast_unittest.cc index 1adb8a52e080..6b6a96c4739b 100644 --- a/third_party/libwebrtc/pc/peer_connection_simulcast_unittest.cc +++ b/third_party/libwebrtc/pc/peer_connection_simulcast_unittest.cc @@ -221,15 +221,6 @@ class PeerConnectionSimulcastMetricsTests public ::testing::WithParamInterface { protected: PeerConnectionSimulcastMetricsTests() { webrtc::metrics::Reset(); } - - std::map LocalDescriptionSamples() { - return metrics::Samples( - "WebRTC.PeerConnection.Simulcast.ApplyLocalDescription"); - } - std::map RemoteDescriptionSamples() { - return metrics::Samples( - "WebRTC.PeerConnection.Simulcast.ApplyRemoteDescription"); - } }; #endif @@ -614,156 +605,6 @@ TEST_F(PeerConnectionSimulcastTests, SimulcastSldModificationRejected) { } #if RTC_METRICS_ENABLED -// -// Checks the logged metrics when simulcast is not used. -TEST_F(PeerConnectionSimulcastMetricsTests, NoSimulcastUsageIsLogged) { - auto local = CreatePeerConnectionWrapper(); - auto remote = CreatePeerConnectionWrapper(); - auto layers = ::CreateLayers(0, true); - AddTransceiver(local.get(), layers); - ExchangeOfferAnswer(local.get(), remote.get(), layers); - - EXPECT_THAT(LocalDescriptionSamples(), - ElementsAre(Pair(kSimulcastApiVersionNone, 2))); - EXPECT_THAT(RemoteDescriptionSamples(), - ElementsAre(Pair(kSimulcastApiVersionNone, 2))); -} - -// Checks the logged metrics when spec-compliant simulcast is used. -TEST_F(PeerConnectionSimulcastMetricsTests, SpecComplianceIsLogged) { - auto local = CreatePeerConnectionWrapper(); - auto remote = CreatePeerConnectionWrapper(); - auto layers = ::CreateLayers(3, true); - AddTransceiver(local.get(), layers); - ExchangeOfferAnswer(local.get(), remote.get(), layers); - - // Expecting 2 invocations of each, because we have 2 peer connections. - // Only the local peer connection will be aware of simulcast. - // The remote peer connection will think that there is no simulcast. 
- EXPECT_THAT(LocalDescriptionSamples(), - ElementsAre(Pair(kSimulcastApiVersionNone, 1), - Pair(kSimulcastApiVersionSpecCompliant, 1))); - EXPECT_THAT(RemoteDescriptionSamples(), - ElementsAre(Pair(kSimulcastApiVersionNone, 1), - Pair(kSimulcastApiVersionSpecCompliant, 1))); -} - -// Checks the logged metrics when and incoming request to send spec-compliant -// simulcast is received from the remote party. -TEST_F(PeerConnectionSimulcastMetricsTests, IncomingSimulcastIsLogged) { - auto local = CreatePeerConnectionWrapper(); - auto remote = CreatePeerConnectionWrapper(); - auto layers = ::CreateLayers(3, true); - AddTransceiver(local.get(), layers); - auto offer = local->CreateOfferAndSetAsLocal(); - EXPECT_THAT(LocalDescriptionSamples(), - ElementsAre(Pair(kSimulcastApiVersionSpecCompliant, 1))); - - // Remove simulcast as a sender and set it up as a receiver. - RemoveSimulcast(offer.get()); - AddRequestToReceiveSimulcast(layers, offer.get()); - std::string error; - EXPECT_TRUE(remote->SetRemoteDescription(std::move(offer), &error)) << error; - EXPECT_THAT(RemoteDescriptionSamples(), - ElementsAre(Pair(kSimulcastApiVersionSpecCompliant, 1))); - - auto transceiver = remote->pc()->GetTransceivers()[0]; - transceiver->SetDirectionWithError(RtpTransceiverDirection::kSendRecv); - EXPECT_TRUE(remote->CreateAnswerAndSetAsLocal()); - EXPECT_THAT(LocalDescriptionSamples(), - ElementsAre(Pair(kSimulcastApiVersionSpecCompliant, 2))); -} - -// Checks that a spec-compliant simulcast offer that is rejected is logged. -TEST_F(PeerConnectionSimulcastMetricsTests, RejectedSimulcastIsLogged) { - auto local = CreatePeerConnectionWrapper(); - auto remote = CreatePeerConnectionWrapper(); - auto layers = CreateLayers({"1", "2", "3"}, true); - AddTransceiver(local.get(), layers); - auto offer = local->CreateOfferAndSetAsLocal(); - EXPECT_THAT(LocalDescriptionSamples(), - ElementsAre(Pair(kSimulcastApiVersionSpecCompliant, 1))); - RemoveSimulcast(offer.get()); - std::string error; - EXPECT_TRUE(remote->SetRemoteDescription(std::move(offer), &error)) << error; - EXPECT_THAT(RemoteDescriptionSamples(), - ElementsAre(Pair(kSimulcastApiVersionNone, 1))); - - auto answer = remote->CreateAnswerAndSetAsLocal(); - EXPECT_THAT(LocalDescriptionSamples(), - ElementsAre(Pair(kSimulcastApiVersionNone, 1), - Pair(kSimulcastApiVersionSpecCompliant, 1))); - EXPECT_TRUE(local->SetRemoteDescription(std::move(answer), &error)) << error; - EXPECT_THAT(RemoteDescriptionSamples(), - ElementsAre(Pair(kSimulcastApiVersionNone, 2))); -} - -// Checks the logged metrics when legacy munging simulcast technique is used. -TEST_F(PeerConnectionSimulcastMetricsTests, LegacySimulcastIsLogged) { - auto local = CreatePeerConnectionWrapper(); - auto remote = CreatePeerConnectionWrapper(); - auto layers = ::CreateLayers(0, true); - AddTransceiver(local.get(), layers); - auto offer = local->CreateOffer(); - // Munge the SDP to set up legacy simulcast. 
- const std::string end_line = "\r\n"; - std::string sdp; - offer->ToString(&sdp); - rtc::StringBuilder builder(sdp); - builder << "a=ssrc:1111 cname:slimshady" << end_line; - builder << "a=ssrc:2222 cname:slimshady" << end_line; - builder << "a=ssrc:3333 cname:slimshady" << end_line; - builder << "a=ssrc-group:SIM 1111 2222 3333" << end_line; - - SdpParseError parse_error; - auto sd = - CreateSessionDescription(SdpType::kOffer, builder.str(), &parse_error); - ASSERT_TRUE(sd) << parse_error.line << parse_error.description; - std::string error; - EXPECT_TRUE(local->SetLocalDescription(std::move(sd), &error)) << error; - EXPECT_THAT(LocalDescriptionSamples(), - ElementsAre(Pair(kSimulcastApiVersionLegacy, 1))); - EXPECT_TRUE(remote->SetRemoteDescription(std::move(offer), &error)) << error; - EXPECT_THAT(RemoteDescriptionSamples(), - ElementsAre(Pair(kSimulcastApiVersionNone, 1))); - auto answer = remote->CreateAnswerAndSetAsLocal(); - EXPECT_THAT(LocalDescriptionSamples(), - ElementsAre(Pair(kSimulcastApiVersionNone, 1), - Pair(kSimulcastApiVersionLegacy, 1))); - // Legacy simulcast is not signaled in remote description. - EXPECT_TRUE(local->SetRemoteDescription(std::move(answer), &error)) << error; - EXPECT_THAT(RemoteDescriptionSamples(), - ElementsAre(Pair(kSimulcastApiVersionNone, 2))); -} - -// Checks that disabling simulcast is logged in the metrics. -TEST_F(PeerConnectionSimulcastMetricsTests, SimulcastDisabledIsLogged) { - auto local = CreatePeerConnectionWrapper(); - auto remote = CreatePeerConnectionWrapper(); - auto layers = CreateLayers({"1", "2", "3"}, true); - AddTransceiver(local.get(), layers); - auto offer = local->CreateOfferAndSetAsLocal(); - RemoveSimulcast(offer.get()); - std::string error; - EXPECT_TRUE(remote->SetRemoteDescription(std::move(offer), &error)) << error; - auto answer = remote->CreateAnswerAndSetAsLocal(); - EXPECT_TRUE(local->SetRemoteDescription(std::move(answer), &error)) << error; - - EXPECT_EQ(1, metrics::NumSamples("WebRTC.PeerConnection.Simulcast.Disabled")); - EXPECT_EQ(1, - metrics::NumEvents("WebRTC.PeerConnection.Simulcast.Disabled", 1)); -} - -// Checks that the disabled metric is not logged if simulcast is not disabled. 
-TEST_F(PeerConnectionSimulcastMetricsTests, SimulcastDisabledIsNotLogged) { - auto local = CreatePeerConnectionWrapper(); - auto remote = CreatePeerConnectionWrapper(); - auto layers = CreateLayers({"1", "2", "3"}, true); - AddTransceiver(local.get(), layers); - ExchangeOfferAnswer(local.get(), remote.get(), layers); - - EXPECT_EQ(0, metrics::NumSamples("WebRTC.PeerConnection.Simulcast.Disabled")); -} const int kMaxLayersInMetricsTest = 8; diff --git a/third_party/libwebrtc/pc/rtc_stats_collector.cc b/third_party/libwebrtc/pc/rtc_stats_collector.cc index 8cba02b5cedd..c191f4d65e38 100644 --- a/third_party/libwebrtc/pc/rtc_stats_collector.cc +++ b/third_party/libwebrtc/pc/rtc_stats_collector.cc @@ -425,6 +425,17 @@ void SetInboundRTPStreamStatsFromMediaReceiverInfo( if (media_receiver_info.nacks_sent.has_value()) { inbound_stats->nack_count = *media_receiver_info.nacks_sent; } + if (media_receiver_info.fec_packets_received.has_value()) { + inbound_stats->fec_packets_received = + *media_receiver_info.fec_packets_received; + } + if (media_receiver_info.fec_packets_discarded.has_value()) { + inbound_stats->fec_packets_discarded = + *media_receiver_info.fec_packets_discarded; + } + if (media_receiver_info.fec_bytes_received.has_value()) { + inbound_stats->fec_bytes_received = *media_receiver_info.fec_bytes_received; + } } std::unique_ptr CreateInboundAudioStreamStats( @@ -483,10 +494,6 @@ std::unique_ptr CreateInboundAudioStreamStats( inbound_audio->estimated_playout_timestamp = static_cast( *voice_receiver_info.estimated_playout_ntp_timestamp_ms); } - inbound_audio->fec_packets_received = - voice_receiver_info.fec_packets_received; - inbound_audio->fec_packets_discarded = - voice_receiver_info.fec_packets_discarded; inbound_audio->packets_discarded = voice_receiver_info.packets_discarded; inbound_audio->jitter_buffer_flushes = voice_receiver_info.jitter_buffer_flushes; @@ -660,16 +667,28 @@ CreateInboundRTPStreamStatsFromVideoReceiverInfo( } // TODO(bugs.webrtc.org/10529): When info's `content_info` is optional // support the "unspecified" value. - if (video_receiver_info.content_type == VideoContentType::SCREENSHARE) + if (videocontenttypehelpers::IsScreenshare(video_receiver_info.content_type)) inbound_video->content_type = "screenshare"; - if (!video_receiver_info.decoder_implementation_name.empty()) { + if (video_receiver_info.decoder_implementation_name.has_value()) { inbound_video->decoder_implementation = - video_receiver_info.decoder_implementation_name; + *video_receiver_info.decoder_implementation_name; } if (video_receiver_info.power_efficient_decoder.has_value()) { inbound_video->power_efficient_decoder = *video_receiver_info.power_efficient_decoder; } + for (const auto& ssrc_group : video_receiver_info.ssrc_groups) { + if (ssrc_group.semantics == cricket::kFidSsrcGroupSemantics && + ssrc_group.ssrcs.size() == 2) { + inbound_video->rtx_ssrc = ssrc_group.ssrcs[1]; + } else if (ssrc_group.semantics == cricket::kFecFrSsrcGroupSemantics && + ssrc_group.ssrcs.size() == 2) { + // TODO(bugs.webrtc.org/15002): the ssrc-group might be >= 2 with + // multistream support. + inbound_video->fec_ssrc = ssrc_group.ssrcs[1]; + } + } + return inbound_video; } @@ -801,11 +820,11 @@ CreateOutboundRTPStreamStatsFromVideoSenderInfo( video_sender_info.quality_limitation_resolution_changes; // TODO(https://crbug.com/webrtc/10529): When info's `content_info` is // optional, support the "unspecified" value. 
- if (video_sender_info.content_type == VideoContentType::SCREENSHARE) + if (videocontenttypehelpers::IsScreenshare(video_sender_info.content_type)) outbound_video->content_type = "screenshare"; - if (!video_sender_info.encoder_implementation_name.empty()) { + if (video_sender_info.encoder_implementation_name.has_value()) { outbound_video->encoder_implementation = - video_sender_info.encoder_implementation_name; + *video_sender_info.encoder_implementation_name; } if (video_sender_info.rid.has_value()) { outbound_video->rid = *video_sender_info.rid; @@ -818,6 +837,12 @@ CreateOutboundRTPStreamStatsFromVideoSenderInfo( outbound_video->scalability_mode = std::string( ScalabilityModeToString(*video_sender_info.scalability_mode)); } + for (const auto& ssrc_group : video_sender_info.ssrc_groups) { + if (ssrc_group.semantics == cricket::kFidSsrcGroupSemantics && + ssrc_group.ssrcs.size() == 2) { + outbound_video->rtx_ssrc = ssrc_group.ssrcs[1]; + } + } return outbound_video; } @@ -1394,7 +1419,11 @@ void RTCStatsCollector::ProduceDataChannelStats_n( "D" + rtc::ToString(stats.internal_id), timestamp); data_channel_stats->label = std::move(stats.label); data_channel_stats->protocol = std::move(stats.protocol); - data_channel_stats->data_channel_identifier = stats.id; + if (stats.id >= 0) { + // Do not set this value before the DTLS handshake is finished + // and filter out the magic value -1. + data_channel_stats->data_channel_identifier = stats.id; + } data_channel_stats->state = DataStateToRTCDataChannelState(stats.state); data_channel_stats->messages_sent = stats.messages_sent; data_channel_stats->bytes_sent = stats.bytes_sent; diff --git a/third_party/libwebrtc/pc/rtc_stats_collector_unittest.cc b/third_party/libwebrtc/pc/rtc_stats_collector_unittest.cc index a9f326f96a54..37821ac82903 100644 --- a/third_party/libwebrtc/pc/rtc_stats_collector_unittest.cc +++ b/third_party/libwebrtc/pc/rtc_stats_collector_unittest.cc @@ -1549,6 +1549,10 @@ TEST_F(RTCStatsCollectorTest, CertificateStatsCache) { } TEST_F(RTCStatsCollectorTest, CollectTwoRTCDataChannelStatsWithPendingId) { + // Note: The test assumes data channel IDs are predictable. + // This is not a safe assumption, but in order to make it work for + // the test, we reset the ID allocator at test start. + SctpDataChannel::ResetInternalIdAllocatorForTesting(-1); pc_->AddSctpDataChannel(rtc::make_ref_counted( data_channel_controller_->weak_ptr(), /*id=*/-1, DataChannelInterface::kConnecting)); @@ -1557,6 +1561,20 @@ TEST_F(RTCStatsCollectorTest, CollectTwoRTCDataChannelStatsWithPendingId) { DataChannelInterface::kConnecting)); rtc::scoped_refptr report = stats_->GetStatsReport(); + RTCDataChannelStats expected_data_channel0("D0", Timestamp::Zero()); + // Default values from MockDataChannel. 
+ expected_data_channel0.label = "MockSctpDataChannel"; + expected_data_channel0.protocol = "someProtocol"; + expected_data_channel0.state = "connecting"; + expected_data_channel0.messages_sent = 0; + expected_data_channel0.bytes_sent = 0; + expected_data_channel0.messages_received = 0; + expected_data_channel0.bytes_received = 0; + + ASSERT_TRUE(report->Get(expected_data_channel0.id())); + EXPECT_EQ( + expected_data_channel0, + report->Get(expected_data_channel0.id())->cast_to()); } TEST_F(RTCStatsCollectorTest, CollectRTCDataChannelStats) { @@ -1566,10 +1584,10 @@ TEST_F(RTCStatsCollectorTest, CollectRTCDataChannelStats) { SctpDataChannel::ResetInternalIdAllocatorForTesting(-1); pc_->AddSctpDataChannel(rtc::make_ref_counted( data_channel_controller_->weak_ptr(), 0, "MockSctpDataChannel0", - DataChannelInterface::kConnecting, "udp", 1, 2, 3, 4)); + DataChannelInterface::kConnecting, "proto1", 1, 2, 3, 4)); RTCDataChannelStats expected_data_channel0("D0", Timestamp::Zero()); expected_data_channel0.label = "MockSctpDataChannel0"; - expected_data_channel0.protocol = "udp"; + expected_data_channel0.protocol = "proto1"; expected_data_channel0.data_channel_identifier = 0; expected_data_channel0.state = "connecting"; expected_data_channel0.messages_sent = 1; @@ -1579,10 +1597,10 @@ TEST_F(RTCStatsCollectorTest, CollectRTCDataChannelStats) { pc_->AddSctpDataChannel(rtc::make_ref_counted( data_channel_controller_->weak_ptr(), 1, "MockSctpDataChannel1", - DataChannelInterface::kOpen, "tcp", 5, 6, 7, 8)); + DataChannelInterface::kOpen, "proto2", 5, 6, 7, 8)); RTCDataChannelStats expected_data_channel1("D1", Timestamp::Zero()); expected_data_channel1.label = "MockSctpDataChannel1"; - expected_data_channel1.protocol = "tcp"; + expected_data_channel1.protocol = "proto2"; expected_data_channel1.data_channel_identifier = 1; expected_data_channel1.state = "open"; expected_data_channel1.messages_sent = 5; @@ -1592,10 +1610,10 @@ TEST_F(RTCStatsCollectorTest, CollectRTCDataChannelStats) { pc_->AddSctpDataChannel(rtc::make_ref_counted( data_channel_controller_->weak_ptr(), 2, "MockSctpDataChannel2", - DataChannelInterface::kClosing, "udp", 9, 10, 11, 12)); + DataChannelInterface::kClosing, "proto1", 9, 10, 11, 12)); RTCDataChannelStats expected_data_channel2("D2", Timestamp::Zero()); expected_data_channel2.label = "MockSctpDataChannel2"; - expected_data_channel2.protocol = "udp"; + expected_data_channel2.protocol = "proto1"; expected_data_channel2.data_channel_identifier = 2; expected_data_channel2.state = "closing"; expected_data_channel2.messages_sent = 9; @@ -1605,10 +1623,10 @@ TEST_F(RTCStatsCollectorTest, CollectRTCDataChannelStats) { pc_->AddSctpDataChannel(rtc::make_ref_counted( data_channel_controller_->weak_ptr(), 3, "MockSctpDataChannel3", - DataChannelInterface::kClosed, "tcp", 13, 14, 15, 16)); + DataChannelInterface::kClosed, "proto3", 13, 14, 15, 16)); RTCDataChannelStats expected_data_channel3("D3", Timestamp::Zero()); expected_data_channel3.label = "MockSctpDataChannel3"; - expected_data_channel3.protocol = "tcp"; + expected_data_channel3.protocol = "proto3"; expected_data_channel3.data_channel_identifier = 3; expected_data_channel3.state = "closed"; expected_data_channel3.messages_sent = 13; @@ -2343,11 +2361,17 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRtpStreamStats_Video) { video_media_info.receivers[0].content_type = VideoContentType::UNSPECIFIED; video_media_info.receivers[0].estimated_playout_ntp_timestamp_ms = absl::nullopt; - 
video_media_info.receivers[0].decoder_implementation_name = ""; + video_media_info.receivers[0].decoder_implementation_name = absl::nullopt; video_media_info.receivers[0].min_playout_delay_ms = 50; video_media_info.receivers[0].power_efficient_decoder = false; video_media_info.receivers[0].retransmitted_packets_received = 17; video_media_info.receivers[0].retransmitted_bytes_received = 62; + video_media_info.receivers[0].fec_packets_received = 32; + video_media_info.receivers[0].fec_bytes_received = 54; + video_media_info.receivers[0].ssrc_groups.push_back( + {cricket::kFidSsrcGroupSemantics, {1, 4404}}); + video_media_info.receivers[0].ssrc_groups.push_back( + {cricket::kFecFrSsrcGroupSemantics, {1, 5505}}); // Note: these two values intentionally differ, // only the decoded one should show up. @@ -2412,6 +2436,10 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRtpStreamStats_Video) { expected_video.power_efficient_decoder = false; expected_video.retransmitted_packets_received = 17; expected_video.retransmitted_bytes_received = 62; + expected_video.fec_packets_received = 32; + expected_video.fec_bytes_received = 54; + expected_video.rtx_ssrc = 4404; + expected_video.fec_ssrc = 5505; ASSERT_TRUE(report->Get(expected_video.id())); EXPECT_EQ( @@ -2598,7 +2626,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRtpStreamStats_Video) { video_media_info.senders[0].quality_limitation_resolution_changes = 56u; video_media_info.senders[0].qp_sum = absl::nullopt; video_media_info.senders[0].content_type = VideoContentType::UNSPECIFIED; - video_media_info.senders[0].encoder_implementation_name = ""; + video_media_info.senders[0].encoder_implementation_name = absl::nullopt; video_media_info.senders[0].power_efficient_encoder = false; video_media_info.senders[0].send_frame_width = 200; video_media_info.senders[0].send_frame_height = 100; @@ -2607,6 +2635,8 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRtpStreamStats_Video) { video_media_info.senders[0].huge_frames_sent = 2; video_media_info.senders[0].active = false; video_media_info.senders[0].scalability_mode = ScalabilityMode::kL3T3_KEY; + video_media_info.senders[0].ssrc_groups.push_back( + {cricket::kFidSsrcGroupSemantics, {1, 4404}}); video_media_info.aggregated_senders.push_back(video_media_info.senders[0]); RtpCodecParameters codec_parameters; codec_parameters.payload_type = 42; @@ -2662,6 +2692,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRtpStreamStats_Video) { expected_video.active = false; expected_video.power_efficient_encoder = false; expected_video.scalability_mode = "L3T3_KEY"; + expected_video.rtx_ssrc = 4404; // `expected_video.content_type` should be undefined. // `expected_video.qp_sum` should be undefined. // `expected_video.encoder_implementation` should be undefined. diff --git a/third_party/libwebrtc/pc/rtc_stats_integrationtest.cc b/third_party/libwebrtc/pc/rtc_stats_integrationtest.cc index 12a0063940db..648efab69a54 100644 --- a/third_party/libwebrtc/pc/rtc_stats_integrationtest.cc +++ b/third_party/libwebrtc/pc/rtc_stats_integrationtest.cc @@ -608,10 +608,16 @@ class RTCStatsReportVerifier { inbound_stream.fec_packets_received); verifier.TestMemberIsNonNegative( inbound_stream.fec_packets_discarded); + verifier.TestMemberIsUndefined(inbound_stream.fec_bytes_received); } else { verifier.TestMemberIsUndefined(inbound_stream.packets_discarded); + // FEC stats are only present when FlexFEC was negotiated which is guarded + // by the WebRTC-FlexFEC-03-Advertised/Enabled/ field trial and off by + // default. 
+ verifier.TestMemberIsUndefined(inbound_stream.fec_bytes_received); verifier.TestMemberIsUndefined(inbound_stream.fec_packets_received); verifier.TestMemberIsUndefined(inbound_stream.fec_packets_discarded); + verifier.TestMemberIsUndefined(inbound_stream.fec_ssrc); } verifier.TestMemberIsNonNegative(inbound_stream.bytes_received); verifier.TestMemberIsNonNegative( @@ -688,11 +694,14 @@ class RTCStatsReportVerifier { inbound_stream.retransmitted_packets_received); verifier.TestMemberIsNonNegative( inbound_stream.retransmitted_bytes_received); + verifier.TestMemberIsNonNegative(inbound_stream.rtx_ssrc); } else { verifier.TestMemberIsUndefined( inbound_stream.retransmitted_packets_received); verifier.TestMemberIsUndefined( inbound_stream.retransmitted_bytes_received); + verifier.TestMemberIsUndefined(inbound_stream.rtx_ssrc); + verifier.TestMemberIsUndefined(inbound_stream.fec_ssrc); } // Test runtime too short to get an estimate (at least two RTCP sender @@ -844,6 +853,7 @@ class RTCStatsReportVerifier { outbound_stream.huge_frames_sent); verifier.MarkMemberTested(outbound_stream.rid, true); verifier.TestMemberIsDefined(outbound_stream.scalability_mode); + verifier.TestMemberIsNonNegative(outbound_stream.rtx_ssrc); } else { verifier.TestMemberIsUndefined(outbound_stream.frames_encoded); verifier.TestMemberIsUndefined(outbound_stream.key_frames_encoded); @@ -866,6 +876,7 @@ class RTCStatsReportVerifier { verifier.TestMemberIsUndefined(outbound_stream.frames_sent); verifier.TestMemberIsUndefined(outbound_stream.huge_frames_sent); verifier.TestMemberIsUndefined(outbound_stream.scalability_mode); + verifier.TestMemberIsUndefined(outbound_stream.rtx_ssrc); } return verifier.ExpectAllMembersSuccessfullyTested(); } diff --git a/third_party/libwebrtc/pc/rtp_sender_receiver_unittest.cc b/third_party/libwebrtc/pc/rtp_sender_receiver_unittest.cc index 15529827a74d..18a5505f39d2 100644 --- a/third_party/libwebrtc/pc/rtp_sender_receiver_unittest.cc +++ b/third_party/libwebrtc/pc/rtp_sender_receiver_unittest.cc @@ -37,7 +37,6 @@ #include "api/video/video_bitrate_allocator_factory.h" #include "api/video/video_codec_constants.h" #include "media/base/codec.h" -#include "media/base/delayable.h" #include "media/base/fake_media_engine.h" #include "media/base/media_channel.h" #include "media/base/media_config.h" @@ -118,39 +117,18 @@ class RtpSenderReceiverTest // Create the channels, discard the result; we get them later. // Fake media channels are owned by the media engine. 
- media_engine_->voice().CreateMediaChannel( - cricket::MediaChannel::Role::kSend, &fake_call_, cricket::MediaConfig(), - cricket::AudioOptions(), webrtc::CryptoOptions(), - webrtc::AudioCodecPairId::Create()); - media_engine_->video().CreateMediaChannel( - cricket::MediaChannel::Role::kSend, &fake_call_, cricket::MediaConfig(), - cricket::VideoOptions(), webrtc::CryptoOptions(), - video_bitrate_allocator_factory_.get()); - media_engine_->voice().CreateMediaChannel( - cricket::MediaChannel::Role::kReceive, &fake_call_, - cricket::MediaConfig(), cricket::AudioOptions(), + voice_media_send_channel_ = media_engine_->voice().CreateSendChannel( + &fake_call_, cricket::MediaConfig(), cricket::AudioOptions(), webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create()); - media_engine_->video().CreateMediaChannel( - cricket::MediaChannel::Role::kReceive, &fake_call_, - cricket::MediaConfig(), cricket::VideoOptions(), + video_media_send_channel_ = media_engine_->video().CreateSendChannel( + &fake_call_, cricket::MediaConfig(), cricket::VideoOptions(), webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get()); - - voice_media_send_channel_ = - absl::WrapUnique(media_engine_->GetVoiceSendChannel(0)); - video_media_send_channel_ = - absl::WrapUnique(media_engine_->GetVideoSendChannel(0)); - voice_media_receive_channel_ = - absl::WrapUnique(media_engine_->GetVoiceReceiveChannel(0)); - video_media_receive_channel_ = - absl::WrapUnique(media_engine_->GetVideoReceiveChannel(0)); - - RTC_CHECK(voice_media_send_channel()); - RTC_CHECK(video_media_send_channel()); - RTC_CHECK(voice_media_receive_channel()); - RTC_CHECK(video_media_receive_channel()); - // Create sender channel objects - voice_send_channel_ = voice_media_send_channel()->AsVoiceSendChannel(); - video_send_channel_ = video_media_send_channel()->AsVideoSendChannel(); + voice_media_receive_channel_ = media_engine_->voice().CreateReceiveChannel( + &fake_call_, cricket::MediaConfig(), cricket::AudioOptions(), + webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create()); + video_media_receive_channel_ = media_engine_->video().CreateReceiveChannel( + &fake_call_, cricket::MediaConfig(), cricket::VideoOptions(), + webrtc::CryptoOptions()); // Create streams for predefined SSRCs. Streams need to exist in order // for the senders and receievers to apply parameters to them. @@ -194,7 +172,7 @@ class RtpSenderReceiverTest // Needed to use DTMF sender. 
void AddDtmfCodec() { - cricket::AudioSendParameters params; + cricket::AudioSenderParameter params; const cricket::AudioCodec kTelephoneEventCodec = cricket::CreateAudioCodec(106, "telephone-event", 8000, 1); params.codecs.push_back(kTelephoneEventCodec); @@ -225,7 +203,7 @@ class RtpSenderReceiverTest ASSERT_TRUE(audio_rtp_sender_->SetTrack(audio_track_.get())); EXPECT_CALL(*set_streams_observer, OnSetStreams()); audio_rtp_sender_->SetStreams({local_stream_->id()}); - audio_rtp_sender_->SetMediaChannel(voice_send_channel_); + audio_rtp_sender_->SetMediaChannel(voice_media_send_channel_.get()); audio_rtp_sender_->SetSsrc(kAudioSsrc); VerifyVoiceChannelInput(); } @@ -233,8 +211,7 @@ class RtpSenderReceiverTest void CreateAudioRtpSenderWithNoTrack() { audio_rtp_sender_ = AudioRtpSender::Create(worker_thread_, /*id=*/"", nullptr, nullptr); - audio_rtp_sender_->SetMediaChannel( - voice_media_send_channel()->AsVoiceSendChannel()); + audio_rtp_sender_->SetMediaChannel(voice_media_send_channel_.get()); } void CreateVideoRtpSender(uint32_t ssrc) { @@ -286,16 +263,14 @@ class RtpSenderReceiverTest ASSERT_TRUE(video_rtp_sender_->SetTrack(video_track_.get())); EXPECT_CALL(*set_streams_observer, OnSetStreams()); video_rtp_sender_->SetStreams({local_stream_->id()}); - video_rtp_sender_->SetMediaChannel( - video_media_send_channel()->AsVideoSendChannel()); + video_rtp_sender_->SetMediaChannel(video_media_send_channel()); video_rtp_sender_->SetSsrc(ssrc); VerifyVideoChannelInput(ssrc); } void CreateVideoRtpSenderWithNoTrack() { video_rtp_sender_ = VideoRtpSender::Create(worker_thread_, /*id=*/"", nullptr); - video_rtp_sender_->SetMediaChannel( - video_media_send_channel()->AsVideoSendChannel()); + video_rtp_sender_->SetMediaChannel(video_media_send_channel()); } void DestroyAudioRtpSender() { @@ -313,8 +288,7 @@ class RtpSenderReceiverTest audio_rtp_receiver_ = rtc::make_ref_counted( rtc::Thread::Current(), kAudioTrackId, streams, /*is_unified_plan=*/true); - audio_rtp_receiver_->SetMediaChannel( - voice_media_receive_channel()->AsVoiceReceiveChannel()); + audio_rtp_receiver_->SetMediaChannel(voice_media_receive_channel()); audio_rtp_receiver_->SetupMediaChannel(kAudioSsrc); audio_track_ = audio_rtp_receiver_->audio_track(); VerifyVoiceChannelOutput(); @@ -324,8 +298,7 @@ class RtpSenderReceiverTest std::vector> streams = {}) { video_rtp_receiver_ = rtc::make_ref_counted( rtc::Thread::Current(), kVideoTrackId, streams); - video_rtp_receiver_->SetMediaChannel( - video_media_receive_channel()->AsVideoReceiveChannel()); + video_rtp_receiver_->SetMediaChannel(video_media_receive_channel()); video_rtp_receiver_->SetupMediaChannel(kVideoSsrc); video_track_ = video_rtp_receiver_->video_track(); VerifyVideoChannelOutput(); @@ -345,8 +318,7 @@ class RtpSenderReceiverTest video_rtp_receiver_ = rtc::make_ref_counted( rtc::Thread::Current(), kVideoTrackId, streams); - video_rtp_receiver_->SetMediaChannel( - video_media_receive_channel()->AsVideoReceiveChannel()); + video_rtp_receiver_->SetMediaChannel(video_media_receive_channel()); video_rtp_receiver_->SetupMediaChannel(primary_ssrc); video_track_ = video_rtp_receiver_->video_track(); } @@ -381,7 +353,7 @@ class RtpSenderReceiverTest void VerifyVideoChannelInput(uint32_t ssrc) { // Verify that the media channel has a video source, - EXPECT_TRUE(video_media_send_channel_->HasSource(ssrc)); + EXPECT_TRUE(video_media_send_channel()->HasSource(ssrc)); } void VerifyVoiceChannelNoInput() { VerifyVoiceChannelNoInput(kAudioSsrc); } @@ -395,7 +367,7 @@ class 
RtpSenderReceiverTest void VerifyVideoChannelNoInput(uint32_t ssrc) { // Verify that the media channel's source is reset. - EXPECT_FALSE(video_media_receive_channel_->HasSource(ssrc)); + EXPECT_FALSE(video_media_receive_channel()->HasSource(ssrc)); } void VerifyVoiceChannelOutput() { @@ -408,7 +380,7 @@ class RtpSenderReceiverTest void VerifyVideoChannelOutput() { // Verify that the media channel has a sink. - EXPECT_TRUE(video_media_receive_channel_->HasSink(kVideoSsrc)); + EXPECT_TRUE(video_media_receive_channel()->HasSink(kVideoSsrc)); } void VerifyVoiceChannelNoOutput() { @@ -421,7 +393,7 @@ class RtpSenderReceiverTest void VerifyVideoChannelNoOutput() { // Verify that the media channel's sink is reset. - EXPECT_FALSE(video_media_receive_channel_->HasSink(kVideoSsrc)); + EXPECT_FALSE(video_media_receive_channel()->HasSink(kVideoSsrc)); } // Verifies that the encoding layers contain the specified RIDs. @@ -512,9 +484,10 @@ class RtpSenderReceiverTest // Check that minimum Jitter Buffer delay is propagated to the underlying // `media_channel`. - void VerifyRtpReceiverDelayBehaviour(cricket::Delayable* media_channel, - RtpReceiverInterface* receiver, - uint32_t ssrc) { + void VerifyRtpReceiverDelayBehaviour( + cricket::MediaReceiveChannelInterface* media_channel, + RtpReceiverInterface* receiver, + uint32_t ssrc) { receiver->SetJitterBufferMinimumDelay(/*delay_seconds=*/0.5); absl::optional delay_ms = media_channel->GetBaseMinimumPlayoutDelayMs(ssrc); // In milliseconds. @@ -522,17 +495,21 @@ class RtpSenderReceiverTest } protected: - cricket::FakeVideoMediaChannel* video_media_send_channel() { - return video_media_send_channel_.get(); + cricket::FakeVideoMediaSendChannel* video_media_send_channel() { + return static_cast( + video_media_send_channel_.get()); } - cricket::FakeVoiceMediaChannel* voice_media_send_channel() { - return voice_media_send_channel_.get(); + cricket::FakeVoiceMediaSendChannel* voice_media_send_channel() { + return static_cast( + voice_media_send_channel_.get()); } - cricket::FakeVideoMediaChannel* video_media_receive_channel() { - return video_media_receive_channel_.get(); + cricket::FakeVideoMediaReceiveChannel* video_media_receive_channel() { + return static_cast( + video_media_receive_channel_.get()); } - cricket::FakeVoiceMediaChannel* voice_media_receive_channel() { - return voice_media_receive_channel_.get(); + cricket::FakeVoiceMediaReceiveChannel* voice_media_receive_channel() { + return static_cast( + voice_media_receive_channel_.get()); } test::RunLoop run_loop_; @@ -548,12 +525,14 @@ class RtpSenderReceiverTest std::unique_ptr media_engine_; rtc::UniqueRandomIdGenerator ssrc_generator_; cricket::FakeCall fake_call_; - std::unique_ptr voice_media_send_channel_; - std::unique_ptr video_media_send_channel_; - std::unique_ptr voice_media_receive_channel_; - std::unique_ptr video_media_receive_channel_; - cricket::VoiceMediaSendChannelInterface* voice_send_channel_; - cricket::VideoMediaSendChannelInterface* video_send_channel_; + std::unique_ptr + voice_media_send_channel_; + std::unique_ptr + video_media_send_channel_; + std::unique_ptr + voice_media_receive_channel_; + std::unique_ptr + video_media_receive_channel_; rtc::scoped_refptr audio_rtp_sender_; rtc::scoped_refptr video_rtp_sender_; rtc::scoped_refptr audio_rtp_receiver_; diff --git a/third_party/libwebrtc/pc/rtp_transceiver.cc b/third_party/libwebrtc/pc/rtp_transceiver.cc index d1bf578db8b5..e705b0f3721a 100644 --- a/third_party/libwebrtc/pc/rtp_transceiver.cc +++ 
b/third_party/libwebrtc/pc/rtp_transceiver.cc @@ -25,6 +25,7 @@ #include "api/rtp_parameters.h" #include "api/sequence_checker.h" #include "media/base/codec.h" +#include "media/base/media_channel.h" #include "media/base/media_channel_impl.h" #include "media/base/media_constants.h" #include "media/base/media_engine.h" @@ -37,10 +38,11 @@ namespace webrtc { namespace { -template -RTCError VerifyCodecPreferences(const std::vector& codecs, - const std::vector& send_codecs, - const std::vector& recv_codecs) { + +RTCError VerifyCodecPreferences( + const std::vector& codecs, + const std::vector& send_codecs, + const std::vector& recv_codecs) { // If the intersection between codecs and // RTCRtpSender.getCapabilities(kind).codecs or the intersection between // codecs and RTCRtpReceiver.getCapabilities(kind).codecs only contains RTX, @@ -52,26 +54,28 @@ RTCError VerifyCodecPreferences(const std::vector& codecs, return codec.name != cricket::kRtxCodecName && codec.name != cricket::kRedCodecName && codec.name != cricket::kFlexfecCodecName && - absl::c_any_of(recv_codecs, [&codec](const T& recv_codec) { - return recv_codec.MatchesCapability(codec); - }); + absl::c_any_of(recv_codecs, + [&codec](const cricket::Codec& recv_codec) { + return recv_codec.MatchesCapability(codec); + }); })) { - return RTCError(RTCErrorType::INVALID_MODIFICATION, - "Invalid codec preferences: Missing codec from recv " - "codec capabilities."); + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION, + "Invalid codec preferences: Missing codec from recv " + "codec capabilities."); } if (!absl::c_any_of(codecs, [&send_codecs](const RtpCodecCapability& codec) { return codec.name != cricket::kRtxCodecName && codec.name != cricket::kRedCodecName && codec.name != cricket::kFlexfecCodecName && - absl::c_any_of(send_codecs, [&codec](const T& send_codec) { - return send_codec.MatchesCapability(codec); - }); + absl::c_any_of(send_codecs, + [&codec](const cricket::Codec& send_codec) { + return send_codec.MatchesCapability(codec); + }); })) { - return RTCError(RTCErrorType::INVALID_MODIFICATION, - "Invalid codec preferences: Missing codec from send " - "codec capabilities."); + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION, + "Invalid codec preferences: Missing codec from send " + "codec capabilities."); } // Let codecCapabilities be the union of @@ -79,18 +83,18 @@ RTCError VerifyCodecPreferences(const std::vector& codecs, // RTCRtpReceiver.getCapabilities(kind).codecs. For each codec in codecs, If // codec is not in codecCapabilities, throw InvalidModificationError. 
for (const auto& codec_preference : codecs) { - bool is_recv_codec = - absl::c_any_of(recv_codecs, [&codec_preference](const T& codec) { + bool is_recv_codec = absl::c_any_of( + recv_codecs, [&codec_preference](const cricket::Codec& codec) { return codec.MatchesCapability(codec_preference); }); - bool is_send_codec = - absl::c_any_of(send_codecs, [&codec_preference](const T& codec) { + bool is_send_codec = absl::c_any_of( + send_codecs, [&codec_preference](const cricket::Codec& codec) { return codec.MatchesCapability(codec_preference); }); if (!is_recv_codec && !is_send_codec) { - return RTCError( + LOG_AND_RETURN_ERROR( RTCErrorType::INVALID_MODIFICATION, std::string("Invalid codec preferences: invalid codec with name \"") + codec_preference.name + "\"."); @@ -103,9 +107,10 @@ RTCError VerifyCodecPreferences(const std::vector& codecs, codec.name == cricket::kRedCodecName || codec.name == cricket::kUlpfecCodecName; })) { - return RTCError(RTCErrorType::INVALID_MODIFICATION, - "Invalid codec preferences: codec list must have a non " - "RTX, RED or FEC entry."); + LOG_AND_RETURN_ERROR( + RTCErrorType::INVALID_MODIFICATION, + "Invalid codec preferences: codec list must have a non " + "RTX, RED or FEC entry."); } return RTCError::OK(); @@ -201,8 +206,6 @@ RTCError RtpTransceiver::CreateChannel( return RTCError(RTCErrorType::INTERNAL_ERROR, "No media engine for mid=" + std::string(mid)); } - bool use_split_media_channel = - !context()->field_trials().IsDisabled("WebRTC-SplitMediaChannel"); std::unique_ptr new_channel; if (media_type() == cricket::MEDIA_TYPE_AUDIO) { // TODO(bugs.webrtc.org/11992): CreateVideoChannel internally switches to @@ -218,49 +221,34 @@ RTCError RtpTransceiver::CreateChannel( AudioCodecPairId codec_pair_id = AudioCodecPairId::Create(); - if (use_split_media_channel) { - std::unique_ptr media_send_channel = - absl::WrapUnique(media_engine()->voice().CreateMediaChannel( - cricket::MediaChannel::Role::kSend, call_ptr, media_config, - audio_options, crypto_options, codec_pair_id)); - if (!media_send_channel) { - // TODO(bugs.webrtc.org/14912): Consider CHECK or reporting failure - return; - } - std::unique_ptr media_receive_channel = - absl::WrapUnique(media_engine()->voice().CreateMediaChannel( - cricket::MediaChannel::Role::kReceive, call_ptr, media_config, - audio_options, crypto_options, codec_pair_id)); - if (!media_receive_channel) { - return; - } - // Note that this is safe because both sending and - // receiving channels will be deleted at the same time. 
- media_send_channel->SetSsrcListChangedCallback( - [receive_channel = media_receive_channel.get()]( - const std::set& choices) { - receive_channel->ChooseReceiverReportSsrc(choices); - }); - - new_channel = std::make_unique( - context()->worker_thread(), context()->network_thread(), - context()->signaling_thread(), std::move(media_send_channel), - std::move(media_receive_channel), mid, srtp_required, - crypto_options, context()->ssrc_generator()); - } else { - cricket::VoiceMediaChannel* media_channel = - media_engine()->voice().CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, call_ptr, media_config, - audio_options, crypto_options, AudioCodecPairId::Create()); - if (!media_channel) { - return; - } - - new_channel = std::make_unique( - context()->worker_thread(), context()->network_thread(), - context()->signaling_thread(), absl::WrapUnique(media_channel), mid, - srtp_required, crypto_options, context()->ssrc_generator()); + std::unique_ptr + media_send_channel = media_engine()->voice().CreateSendChannel( + call_ptr, media_config, audio_options, crypto_options, + codec_pair_id); + if (!media_send_channel) { + // TODO(bugs.webrtc.org/14912): Consider CHECK or reporting failure + return; } + std::unique_ptr + media_receive_channel = media_engine()->voice().CreateReceiveChannel( + call_ptr, media_config, audio_options, crypto_options, + codec_pair_id); + if (!media_receive_channel) { + return; + } + // Note that this is safe because both sending and + // receiving channels will be deleted at the same time. + media_send_channel->SetSsrcListChangedCallback( + [receive_channel = + media_receive_channel.get()](const std::set& choices) { + receive_channel->ChooseReceiverReportSsrc(choices); + }); + + new_channel = std::make_unique( + context()->worker_thread(), context()->network_thread(), + context()->signaling_thread(), std::move(media_send_channel), + std::move(media_receive_channel), mid, srtp_required, crypto_options, + context()->ssrc_generator()); }); } else { RTC_DCHECK_EQ(cricket::MEDIA_TYPE_VIDEO, media_type()); @@ -271,51 +259,33 @@ RTCError RtpTransceiver::CreateChannel( context()->worker_thread()->BlockingCall([&] { RTC_DCHECK_RUN_ON(context()->worker_thread()); - if (use_split_media_channel) { - std::unique_ptr media_send_channel = - absl::WrapUnique(media_engine()->video().CreateMediaChannel( - cricket::MediaChannel::Role::kSend, call_ptr, media_config, - video_options, crypto_options, - video_bitrate_allocator_factory)); - if (!media_send_channel) { - return; - } - - std::unique_ptr media_receive_channel = - absl::WrapUnique(media_engine()->video().CreateMediaChannel( - cricket::MediaChannel::Role::kReceive, call_ptr, media_config, - video_options, crypto_options, - video_bitrate_allocator_factory)); - if (!media_receive_channel) { - return; - } - // Note that this is safe because both sending and - // receiving channels will be deleted at the same time. 
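As the audio branch above shows (the video branch below mirrors it), the WebRTC-SplitMediaChannel field trial is gone: one send channel and one receive channel are always created as separate objects and linked for receiver-report SSRC selection. A condensed sketch of that wiring, based only on calls visible in this hunk:

// Voice path; the video path differs only in the engine used and the
// bitrate-allocator argument passed to CreateSendChannel.
auto media_send_channel = media_engine()->voice().CreateSendChannel(
    call_ptr, media_config, audio_options, crypto_options, codec_pair_id);
auto media_receive_channel = media_engine()->voice().CreateReceiveChannel(
    call_ptr, media_config, audio_options, crypto_options, codec_pair_id);
// Safe because both channels are deleted at the same time (see the comment
// retained in the new code).
media_send_channel->SetSsrcListChangedCallback(
    [receive_channel = media_receive_channel.get()](
        const std::set<uint32_t>& choices) {
      receive_channel->ChooseReceiverReportSsrc(choices);
    });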
- media_send_channel->SetSsrcListChangedCallback( - [receive_channel = media_receive_channel.get()]( - const std::set& choices) { - receive_channel->ChooseReceiverReportSsrc(choices); - }); - - new_channel = std::make_unique( - context()->worker_thread(), context()->network_thread(), - context()->signaling_thread(), std::move(media_send_channel), - std::move(media_receive_channel), mid, srtp_required, - crypto_options, context()->ssrc_generator()); - } else { - cricket::VideoMediaChannel* media_channel = - media_engine()->video().CreateMediaChannel( - cricket::MediaChannel::Role::kBoth, call_ptr, media_config, - video_options, crypto_options, video_bitrate_allocator_factory); - if (!media_channel) { - return; - } - - new_channel = std::make_unique( - context()->worker_thread(), context()->network_thread(), - context()->signaling_thread(), absl::WrapUnique(media_channel), mid, - srtp_required, crypto_options, context()->ssrc_generator()); + std::unique_ptr + media_send_channel = media_engine()->video().CreateSendChannel( + call_ptr, media_config, video_options, crypto_options, + video_bitrate_allocator_factory); + if (!media_send_channel) { + return; } + + std::unique_ptr + media_receive_channel = media_engine()->video().CreateReceiveChannel( + call_ptr, media_config, video_options, crypto_options); + if (!media_receive_channel) { + return; + } + // Note that this is safe because both sending and + // receiving channels will be deleted at the same time. + media_send_channel->SetSsrcListChangedCallback( + [receive_channel = + media_receive_channel.get()](const std::set& choices) { + receive_channel->ChooseReceiverReportSsrc(choices); + }); + + new_channel = std::make_unique( + context()->worker_thread(), context()->network_thread(), + context()->signaling_thread(), std::move(media_send_channel), + std::move(media_receive_channel), mid, srtp_required, crypto_options, + context()->ssrc_generator()); }); } if (!new_channel) { @@ -714,15 +684,15 @@ RTCError RtpTransceiver::SetCodecPreferences( // 6. to 8. 
RTCError result; if (media_type_ == cricket::MEDIA_TYPE_AUDIO) { - std::vector recv_codecs, send_codecs; - send_codecs = media_engine()->voice().send_codecs(); - recv_codecs = media_engine()->voice().recv_codecs(); - result = VerifyCodecPreferences(codecs, send_codecs, recv_codecs); + result = + VerifyCodecPreferences(codecs, media_engine()->voice().send_codecs(), + media_engine()->voice().recv_codecs()); } else if (media_type_ == cricket::MEDIA_TYPE_VIDEO) { - std::vector recv_codecs, send_codecs; - send_codecs = media_engine()->video().send_codecs(context()->use_rtx()); - recv_codecs = media_engine()->video().recv_codecs(context()->use_rtx()); - result = VerifyCodecPreferences(codecs, send_codecs, recv_codecs); + std::vector send_codecs = + media_engine()->video().send_codecs(context()->use_rtx()); + result = VerifyCodecPreferences( + codecs, send_codecs, + media_engine()->video().recv_codecs(context()->use_rtx())); if (result.ok()) { senders_.front()->internal()->SetVideoCodecPreferences( diff --git a/third_party/libwebrtc/pc/rtp_transceiver_unittest.cc b/third_party/libwebrtc/pc/rtp_transceiver_unittest.cc index 9e47fab80fae..8b4a2389ce4e 100644 --- a/third_party/libwebrtc/pc/rtp_transceiver_unittest.cc +++ b/third_party/libwebrtc/pc/rtp_transceiver_unittest.cc @@ -382,7 +382,7 @@ TEST_F(RtpTransceiverTestForHeaderExtensions, EXPECT_CALL(*mock_channel, SetFirstPacketReceivedCallback(_)); EXPECT_CALL(*mock_channel, media_type()) .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO)); - EXPECT_CALL(*mock_channel, media_send_channel()) + EXPECT_CALL(*mock_channel, voice_media_send_channel()) .WillRepeatedly(Return(nullptr)); EXPECT_CALL(*mock_channel, mid()).WillRepeatedly(ReturnRef(content_name)); EXPECT_CALL(*mock_channel, SetRtpTransport(_)).WillRepeatedly(Return(true)); @@ -415,7 +415,7 @@ TEST_F(RtpTransceiverTestForHeaderExtensions, ReturnsNegotiatedHdrExts) { EXPECT_CALL(*mock_channel, SetFirstPacketReceivedCallback(_)); EXPECT_CALL(*mock_channel, media_type()) .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO)); - EXPECT_CALL(*mock_channel, media_send_channel()) + EXPECT_CALL(*mock_channel, voice_media_send_channel()) .WillRepeatedly(Return(nullptr)); EXPECT_CALL(*mock_channel, mid()).WillRepeatedly(ReturnRef(content_name)); EXPECT_CALL(*mock_channel, SetRtpTransport(_)).WillRepeatedly(Return(true)); diff --git a/third_party/libwebrtc/pc/sctp_data_channel.cc b/third_party/libwebrtc/pc/sctp_data_channel.cc index a17008fb3fff..8fdbf4cb9244 100644 --- a/third_party/libwebrtc/pc/sctp_data_channel.cc +++ b/third_party/libwebrtc/pc/sctp_data_channel.cc @@ -396,7 +396,7 @@ void SctpDataChannel::RegisterObserver(DataChannelObserver* observer) { if (network_thread_ == current_thread) { register_observer(); } else { - network_thread_->PostTask(std::move(register_observer)); + network_thread_->BlockingCall(std::move(register_observer)); } } diff --git a/third_party/libwebrtc/pc/sdp_offer_answer.cc b/third_party/libwebrtc/pc/sdp_offer_answer.cc index 4874444eae14..2ae26025684d 100644 --- a/third_party/libwebrtc/pc/sdp_offer_answer.cc +++ b/third_party/libwebrtc/pc/sdp_offer_answer.cc @@ -109,13 +109,6 @@ const char kSdpWithoutSdesCrypto[] = "Called with SDP without SDES crypto."; const char kSessionError[] = "Session error code: "; const char kSessionErrorDesc[] = "Session error description: "; -// UMA metric names. 
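One behavioural change that is easy to miss in the hunk above: SctpDataChannel::RegisterObserver now uses BlockingCall instead of PostTask when called off the network thread, so registration has completed by the time the call returns. The difference, reduced to the two lines involved (sketch, not new code):

// Before: queue the registration and return immediately; the observer may
// not yet be attached when the caller proceeds.
network_thread_->PostTask(std::move(register_observer));

// After: run the registration on the network thread and wait for it, so the
// observer is guaranteed to be attached once RegisterObserver() returns.
network_thread_->BlockingCall(std::move(register_observer));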
-const char kSimulcastVersionApplyLocalDescription[] = - "WebRTC.PeerConnection.Simulcast.ApplyLocalDescription"; -const char kSimulcastVersionApplyRemoteDescription[] = - "WebRTC.PeerConnection.Simulcast.ApplyRemoteDescription"; -const char kSimulcastDisabled[] = "WebRTC.PeerConnection.Simulcast.Disabled"; - // The length of RTCP CNAMEs. static const int kRtcpCnameLength = 16; @@ -201,34 +194,6 @@ std::string GetStreamIdsString(rtc::ArrayView stream_ids) { return output; } -void ReportSimulcastApiVersion(const char* name, - const SessionDescription& session) { - bool has_legacy = false; - bool has_spec_compliant = false; - for (const ContentInfo& content : session.contents()) { - if (!content.media_description()) { - continue; - } - has_spec_compliant |= content.media_description()->HasSimulcast(); - for (const StreamParams& sp : content.media_description()->streams()) { - has_legacy |= sp.has_ssrc_group(cricket::kSimSsrcGroupSemantics); - } - } - - if (has_legacy) { - RTC_HISTOGRAM_ENUMERATION(name, kSimulcastApiVersionLegacy, - kSimulcastApiVersionMax); - } - if (has_spec_compliant) { - RTC_HISTOGRAM_ENUMERATION(name, kSimulcastApiVersionSpecCompliant, - kSimulcastApiVersionMax); - } - if (!has_legacy && !has_spec_compliant) { - RTC_HISTOGRAM_ENUMERATION(name, kSimulcastApiVersionNone, - kSimulcastApiVersionMax); - } -} - const ContentInfo* FindTransceiverMSection( RtpTransceiver* transceiver, const SessionDescriptionInterface* session_description) { @@ -349,17 +314,13 @@ RTCError VerifyCrypto(const SessionDescription* desc, } if (dtls_enabled) { if (!tinfo->description.identity_fingerprint) { - RTC_LOG(LS_WARNING) - << "Session description must have DTLS fingerprint if " - "DTLS enabled."; - return RTCError(RTCErrorType::INVALID_PARAMETER, - kSdpWithoutDtlsFingerprint); + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + kSdpWithoutDtlsFingerprint); } } else { if (media->cryptos().empty()) { - RTC_LOG(LS_WARNING) - << "Session description must have SDES when DTLS disabled."; - return RTCError(RTCErrorType::INVALID_PARAMETER, kSdpWithoutSdesCrypto); + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + kSdpWithoutSdesCrypto); } } } @@ -432,12 +393,12 @@ RTCError FindDuplicateCodecParameters( payload_to_codec_parameters.find(codec_parameters.payload_type); if (existing_codec_parameters != payload_to_codec_parameters.end() && codec_parameters != existing_codec_parameters->second) { - return RTCError(RTCErrorType::INVALID_PARAMETER, - "A BUNDLE group contains a codec collision for " - "payload_type='" + - rtc::ToString(codec_parameters.payload_type) + - ". All codecs must share the same type, " - "encoding name, clock rate and parameters."); + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "A BUNDLE group contains a codec collision for " + "payload_type='" + + rtc::ToString(codec_parameters.payload_type) + + ". 
All codecs must share the same type, " + "encoding name, clock rate and parameters."); } payload_to_codec_parameters.insert( std::make_pair(codec_parameters.payload_type, codec_parameters)); @@ -459,9 +420,9 @@ RTCError ValidateBundledPayloadTypes( const cricket::MediaContentDescription* media_description = description.GetContentDescriptionByName(content_name); if (!media_description) { - return RTCError(RTCErrorType::INVALID_PARAMETER, - "A BUNDLE group contains a MID='" + content_name + - "' matching no m= section."); + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "A BUNDLE group contains a MID='" + content_name + + "' matching no m= section."); } if (!media_description->has_codecs()) { continue; @@ -498,7 +459,7 @@ RTCError FindDuplicateHeaderExtensionIds( if (existing_extension != id_to_extension.end() && !(extension.uri == existing_extension->second.uri && extension.encrypt == existing_extension->second.encrypt)) { - return RTCError( + LOG_AND_RETURN_ERROR( RTCErrorType::INVALID_PARAMETER, "A BUNDLE group contains a codec collision for " "header extension id=" + @@ -522,9 +483,9 @@ RTCError ValidateBundledRtpHeaderExtensions( const cricket::MediaContentDescription* media_description = description.GetContentDescriptionByName(content_name); if (!media_description) { - return RTCError(RTCErrorType::INVALID_PARAMETER, - "A BUNDLE group contains a MID='" + content_name + - "' matching no m= section."); + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "A BUNDLE group contains a MID='" + content_name + + "' matching no m= section."); } for (const auto& extension : media_description->rtp_header_extensions()) { auto error = @@ -553,10 +514,10 @@ RTCError ValidateRtpHeaderExtensionsForSpecSimulcast( return ext.uri == RtpExtension::kRidUri; }); if (it == extensions.end()) { - return RTCError(RTCErrorType::INVALID_PARAMETER, - "The media section with MID='" + content.mid() + - "' negotiates simulcast but does not negotiate " - "the RID RTP header extension."); + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "The media section with MID='" + content.mid() + + "' negotiates simulcast but does not negotiate " + "the RID RTP header extension."); } } return RTCError::OK(); @@ -1671,10 +1632,6 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( // `local_description()`. RTC_DCHECK(local_description()); - // Report statistics about any use of simulcast. - ReportSimulcastApiVersion(kSimulcastVersionApplyLocalDescription, - *local_description()->description()); - if (!is_caller_) { if (remote_description()) { // Remote description was applied first, so this PC is the callee. @@ -1760,7 +1717,7 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( if (type == SdpType::kOffer) { // TODO(bugs.webrtc.org/4676) - Handle CreateChannel failure, as new local // description is applied. Restore back to old description. - RTCError error = CreateChannels(*local_description()->description()); + error = CreateChannels(*local_description()->description()); if (!error.ok()) { RTC_LOG(LS_ERROR) << error.message() << " (" << SdpTypeToString(type) << ")"; @@ -1792,6 +1749,23 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( // SCTP sids. AllocateSctpSids(); + // Validate SSRCs, we do not allow duplicates. 
+ if (ConfiguredForMedia()) { + std::set used_ssrcs; + for (const auto& content : local_description()->description()->contents()) { + for (const auto& stream : content.media_description()->streams()) { + for (uint32_t ssrc : stream.ssrcs) { + auto result = used_ssrcs.insert(ssrc); + if (!result.second) { + LOG_AND_RETURN_ERROR( + RTCErrorType::INVALID_PARAMETER, + "Duplicate ssrc " + rtc::ToString(ssrc) + " is not allowed"); + } + } + } + } + } + if (IsUnifiedPlan()) { if (ConfiguredForMedia()) { // We must use List and not ListInternal here because @@ -1962,10 +1936,6 @@ RTCError SdpOfferAnswerHandler::ReplaceRemoteDescription( const cricket::SessionDescription* session_desc = remote_description()->description(); - // Report statistics about any use of simulcast. - ReportSimulcastApiVersion(kSimulcastVersionApplyRemoteDescription, - *session_desc); - // NOTE: This will perform a BlockingCall() to the network thread. return transport_controller_s()->SetRemoteDescription(sdp_type, session_desc); } @@ -3055,10 +3025,11 @@ RTCError SdpOfferAnswerHandler::Rollback(SdpType desc_type) { auto state = signaling_state(); if (state != PeerConnectionInterface::kHaveLocalOffer && state != PeerConnectionInterface::kHaveRemoteOffer) { - return RTCError(RTCErrorType::INVALID_STATE, - (rtc::StringBuilder("Called in wrong signalingState: ") - << (PeerConnectionInterface::AsString(signaling_state()))) - .Release()); + LOG_AND_RETURN_ERROR( + RTCErrorType::INVALID_STATE, + (rtc::StringBuilder("Called in wrong signalingState: ") + << (PeerConnectionInterface::AsString(signaling_state()))) + .Release()); } RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(IsUnifiedPlan()); @@ -3513,16 +3484,17 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( RTC_DCHECK_EQ(SessionError::kNone, session_error()); if (!sdesc || !sdesc->description()) { - return RTCError(RTCErrorType::INVALID_PARAMETER, kInvalidSdp); + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, kInvalidSdp); } SdpType type = sdesc->GetType(); if ((source == cricket::CS_LOCAL && !ExpectSetLocalDescription(type)) || (source == cricket::CS_REMOTE && !ExpectSetRemoteDescription(type))) { - return RTCError(RTCErrorType::INVALID_STATE, - (rtc::StringBuilder("Called in wrong state: ") - << PeerConnectionInterface::AsString(signaling_state())) - .Release()); + LOG_AND_RETURN_ERROR( + RTCErrorType::INVALID_STATE, + (rtc::StringBuilder("Called in wrong state: ") + << PeerConnectionInterface::AsString(signaling_state())) + .Release()); } RTCError error = ValidateMids(*sdesc->description()); @@ -3543,7 +3515,8 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( // Verify ice-ufrag and ice-pwd. if (!VerifyIceUfragPwdPresent(sdesc->description(), bundle_groups_by_mid)) { - return RTCError(RTCErrorType::INVALID_PARAMETER, kSdpWithoutIceUfragPwd); + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + kSdpWithoutIceUfragPwd); } // Validate that there are no collisions of bundled payload types. 
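The block added above makes ApplyLocalDescription reject a local description in which the same SSRC appears more than once across media sections. The check is a plain set-insert walk; the same idea as a self-contained helper (names are illustrative, not from the patch):

#include <cstdint>
#include <set>
#include <vector>
#include "media/base/stream_params.h"  // cricket::StreamParams

// True if any SSRC occurs twice across the given streams.
bool HasDuplicateSsrc(const std::vector<cricket::StreamParams>& streams) {
  std::set<uint32_t> used_ssrcs;
  for (const cricket::StreamParams& stream : streams) {
    for (uint32_t ssrc : stream.ssrcs) {
      if (!used_ssrcs.insert(ssrc).second) {
        return true;  // insert() reports the value was already present.
      }
    }
  }
  return false;
}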
@@ -3564,7 +3537,8 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( if (!pc_->ValidateBundleSettings(sdesc->description(), bundle_groups_by_mid)) { - return RTCError(RTCErrorType::INVALID_PARAMETER, kBundleWithoutRtcpMux); + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + kBundleWithoutRtcpMux); } // TODO(skvlad): When the local rtcp-mux policy is Require, reject any @@ -3580,7 +3554,8 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( if (!MediaSectionsHaveSameCount(*offer_desc, *sdesc->description()) || !MediaSectionsInSameOrder(*offer_desc, nullptr, *sdesc->description(), type)) { - return RTCError(RTCErrorType::INVALID_PARAMETER, kMlineMismatchInAnswer); + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + kMlineMismatchInAnswer); } } else { // The re-offers should respect the order of m= sections in current @@ -3604,8 +3579,8 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( if (current_desc && !MediaSectionsInSameOrder(*current_desc, secondary_current_desc, *sdesc->description(), type)) { - return RTCError(RTCErrorType::INVALID_PARAMETER, - kMlineMismatchInSubsequentOffer); + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + kMlineMismatchInSubsequentOffer); } } @@ -3620,7 +3595,7 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( if ((desc.type() == cricket::MEDIA_TYPE_AUDIO || desc.type() == cricket::MEDIA_TYPE_VIDEO) && desc.streams().size() > 1u) { - return RTCError( + LOG_AND_RETURN_ERROR( RTCErrorType::INVALID_PARAMETER, "Media section has more than one track specified with a=ssrc lines " "which is not supported with Unified Plan."); @@ -3657,7 +3632,7 @@ RTCError SdpOfferAnswerHandler::UpdateTransceiversAndDataChannels( if (pc_->configuration()->bundle_policy == PeerConnectionInterface::kBundlePolicyMaxBundle && bundle_groups_by_mid.empty()) { - return RTCError( + LOG_AND_RETURN_ERROR( RTCErrorType::INVALID_PARAMETER, "max-bundle configured but session description has no BUNDLE group"); } @@ -3747,7 +3722,8 @@ RTCError SdpOfferAnswerHandler::UpdateTransceiversAndDataChannels( } else if (media_type == cricket::MEDIA_TYPE_UNSUPPORTED) { RTC_LOG(LS_INFO) << "Ignoring unsupported media type"; } else { - return RTCError(RTCErrorType::INTERNAL_ERROR, "Unknown section type."); + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Unknown section type."); } } @@ -3792,8 +3768,8 @@ SdpOfferAnswerHandler::AssociateTransceiver( } if (!transceiver) { // This may happen normally when media sections are rejected. 
- return RTCError(RTCErrorType::INVALID_PARAMETER, - "Transceiver not found based on m-line index"); + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "Transceiver not found based on m-line index"); } } else { RTC_DCHECK_EQ(source, cricket::CS_REMOTE); @@ -3842,7 +3818,6 @@ SdpOfferAnswerHandler::AssociateTransceiver( if (SimulcastIsRejected(old_local_content, *media_desc, pc_->GetCryptoOptions() .srtp.enable_encrypted_rtp_header_extensions)) { - RTC_HISTOGRAM_BOOLEAN(kSimulcastDisabled, true); RTCError error = DisableSimulcastInSender(transceiver->internal()->sender_internal()); if (!error.ok()) { @@ -3853,8 +3828,9 @@ SdpOfferAnswerHandler::AssociateTransceiver( } if (transceiver->media_type() != media_desc->type()) { - return RTCError(RTCErrorType::INVALID_PARAMETER, - "Transceiver type does not match media description type."); + LOG_AND_RETURN_ERROR( + RTCErrorType::INVALID_PARAMETER, + "Transceiver type does not match media description type."); } if (media_desc->HasSimulcast()) { @@ -3935,8 +3911,8 @@ RTCError SdpOfferAnswerHandler::UpdateDataChannel( error.set_error_detail(RTCErrorDetailType::DATA_CHANNEL_FAILURE); DestroyDataChannelTransport(error); } else if (!CreateDataChannel(content.name)) { - return RTCError(RTCErrorType::INTERNAL_ERROR, - "Failed to create data channel."); + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Failed to create data channel."); } return RTCError::OK(); } @@ -4787,8 +4763,8 @@ RTCError SdpOfferAnswerHandler::PushdownMediaDescription( // Note that this is never expected to fail, since RtpDemuxer doesn't // return an error when changing payload type demux criteria, which is all // this does. - return RTCError(RTCErrorType::INTERNAL_ERROR, - "Failed to update payload type demuxing state."); + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Failed to update payload type demuxing state."); } // Push down the new SDP media section for each audio/video transceiver. 
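Most of the remaining sdp_offer_answer.cc and rtp_transceiver.cc hunks are the same mechanical substitution: a bare `return RTCError(type, msg);` becomes `LOG_AND_RETURN_ERROR(type, msg)`, the macro already in use elsewhere in this patch (e.g. in the srtp_transport.cc context further below), which also writes the message to the WebRTC log before returning the error. In effect (behavioural sketch, not the macro's definition):

// Before: the failure is returned but nothing is logged at the call site.
return RTCError(RTCErrorType::INTERNAL_ERROR,
                "Failed to create data channel.");

// After: the same RTCError is returned and the message is also logged,
// which is why several explicit RTC_LOG(LS_WARNING) lines could be dropped
// in VerifyCrypto above.
LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
                     "Failed to create data channel.");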
@@ -4831,7 +4807,7 @@ RTCError SdpOfferAnswerHandler::PushdownMediaDescription( : entry.first->SetRemoteContent(entry.second, type, error); }); if (!success) { - return RTCError(RTCErrorType::INVALID_PARAMETER, error); + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, error); } } } @@ -5068,7 +5044,7 @@ RTCErrorOr SdpOfferAnswerHandler::FindContentInfo( return content_info.mid() == candidate->sdp_mid(); }); if (it == contents.end()) { - return RTCError( + LOG_AND_RETURN_ERROR( RTCErrorType::INVALID_PARAMETER, "Mid " + candidate->sdp_mid() + " specified but no media section with that mid found."); @@ -5082,16 +5058,16 @@ RTCErrorOr SdpOfferAnswerHandler::FindContentInfo( if (mediacontent_index < content_size) { return &description->description()->contents()[mediacontent_index]; } else { - return RTCError(RTCErrorType::INVALID_RANGE, - "Media line index (" + - rtc::ToString(candidate->sdp_mline_index()) + - ") out of range (number of mlines: " + - rtc::ToString(content_size) + ")."); + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_RANGE, + "Media line index (" + + rtc::ToString(candidate->sdp_mline_index()) + + ") out of range (number of mlines: " + + rtc::ToString(content_size) + ")."); } } - return RTCError(RTCErrorType::INVALID_PARAMETER, - "Neither sdp_mline_index nor sdp_mid specified."); + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "Neither sdp_mline_index nor sdp_mid specified."); } RTCError SdpOfferAnswerHandler::CreateChannels(const SessionDescription& desc) { @@ -5136,8 +5112,8 @@ RTCError SdpOfferAnswerHandler::CreateChannels(const SessionDescription& desc) { const cricket::ContentInfo* data = cricket::GetFirstDataContent(&desc); if (data && !data->rejected && !CreateDataChannel(data->name)) { - return RTCError(RTCErrorType::INTERNAL_ERROR, - "Failed to create data channel."); + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Failed to create data channel."); } return RTCError::OK(); diff --git a/third_party/libwebrtc/pc/sdp_offer_answer_unittest.cc b/third_party/libwebrtc/pc/sdp_offer_answer_unittest.cc index e9ec17bf0866..3e250c208aea 100644 --- a/third_party/libwebrtc/pc/sdp_offer_answer_unittest.cc +++ b/third_party/libwebrtc/pc/sdp_offer_answer_unittest.cc @@ -12,6 +12,7 @@ #include #include +#include "absl/strings/str_replace.h" #include "api/audio/audio_mixer.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" @@ -599,4 +600,81 @@ TEST_F(SdpOfferAnswerTest, SimulcastAnswerWithNoRidsIsRejected) { EXPECT_TRUE(pc->SetRemoteDescription(std::move(answer_with_extensions))); } +TEST_F(SdpOfferAnswerTest, ExpectAllSsrcsSpecifiedInSsrcGroupFid) { + auto pc = CreatePeerConnection(); + std::string sdp = + "v=0\r\n" + "o=- 0 3 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=group:BUNDLE 0\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=setup:actpass\r\n" + "a=ice-ufrag:ETEn\r\n" + "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n" + "m=video 9 UDP/TLS/RTP/SAVPF 96 97\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtcp-mux\r\n" + "a=sendonly\r\n" + "a=mid:0\r\n" + "a=rtpmap:96 H264/90000\r\n" + "a=fmtp:96 " + "level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=" + "42e01f\r\n" + "a=rtpmap:97 rtx/90000\r\n" + "a=fmtp:97 apt=96\r\n" + "a=ssrc-group:FID 1 2\r\n" + "a=ssrc:1 cname:test\r\n"; + auto offer = CreateSessionDescription(SdpType::kOffer, sdp); + EXPECT_FALSE(pc->SetRemoteDescription(std::move(offer))); +} + +TEST_F(SdpOfferAnswerTest, 
ExpectAllSsrcsSpecifiedInSsrcGroupFecFr) { + auto pc = CreatePeerConnection(); + std::string sdp = + "v=0\r\n" + "o=- 0 3 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=group:BUNDLE 0\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=setup:actpass\r\n" + "a=ice-ufrag:ETEn\r\n" + "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n" + "m=video 9 UDP/TLS/RTP/SAVPF 96 98\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtcp-mux\r\n" + "a=sendonly\r\n" + "a=mid:0\r\n" + "a=rtpmap:96 H264/90000\r\n" + "a=fmtp:96 " + "level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=" + "42e01f\r\n" + "a=rtpmap:98 flexfec-03/90000\r\n" + "a=fmtp:98 repair-window=10000000\r\n" + "a=ssrc-group:FEC-FR 1 2\r\n" + "a=ssrc:1 cname:test\r\n"; + auto offer = CreateSessionDescription(SdpType::kOffer, sdp); + EXPECT_FALSE(pc->SetRemoteDescription(std::move(offer))); +} + +TEST_F(SdpOfferAnswerTest, DuplicateSsrcsDisallowedInLocalDescription) { + auto pc = CreatePeerConnection(); + pc->AddAudioTrack("audio_track", {}); + pc->AddVideoTrack("video_track", {}); + auto offer = pc->CreateOffer(); + auto& offer_contents = offer->description()->contents(); + ASSERT_EQ(offer_contents.size(), 2u); + uint32_t second_ssrc = offer_contents[1].media_description()->first_ssrc(); + + offer->description() + ->contents()[0] + .media_description() + ->mutable_streams()[0] + .ssrcs[0] = second_ssrc; + EXPECT_FALSE(pc->SetLocalDescription(std::move(offer))); +} + } // namespace webrtc diff --git a/third_party/libwebrtc/pc/sdp_serializer_unittest.cc b/third_party/libwebrtc/pc/sdp_serializer_unittest.cc index 0c31750df452..c907ecbd6c33 100644 --- a/third_party/libwebrtc/pc/sdp_serializer_unittest.cc +++ b/third_party/libwebrtc/pc/sdp_serializer_unittest.cc @@ -63,7 +63,7 @@ void ExpectEqual(const std::map& expected, ASSERT_EQ(expected.size(), actual.size()); // Maps have unique keys, so if size is equal, it is enough to check // that all the keys (and values) from one map exist in the other. - for (const std::pair& pair : expected) { + for (const auto& pair : expected) { const_iterator iter = actual.find(pair.first); EXPECT_NE(iter, actual.end()) << "Key: " << pair.first << " not found"; EXPECT_EQ(pair.second, iter->second); diff --git a/third_party/libwebrtc/pc/session_description.h b/third_party/libwebrtc/pc/session_description.h index f68e044db2a3..31992be0834f 100644 --- a/third_party/libwebrtc/pc/session_description.h +++ b/third_party/libwebrtc/pc/session_description.h @@ -269,46 +269,41 @@ class MediaContentDescriptionImpl : public MediaContentDescription { protocol_ = std::string(protocol); } - typedef C CodecType; - // Codecs should be in preference order (most preferred codec first). 
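The two new SdpOfferAnswerTest cases above pin down the rule that every SSRC referenced from an a=ssrc-group line (FID for RTX, FEC-FR for FlexFEC) must also be described by its own a=ssrc line. Reduced to the lines that matter in the rejected offers (quoted from the test SDP above):

a=ssrc-group:FID 1 2      <- the group references SSRCs 1 and 2
a=ssrc:1 cname:test       <- but only SSRC 1 has an a=ssrc line; SSRC 2 is
                             undeclared, so SetRemoteDescription must fail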
- const std::vector& codecs() const { return codecs_; } - void set_codecs(const std::vector& codecs) { codecs_ = codecs; } + const std::vector& codecs() const { return codecs_; } + void set_codecs(const std::vector& codecs) { codecs_ = codecs; } bool has_codecs() const override { return !codecs_.empty(); } bool HasCodec(int id) { bool found = false; - for (typename std::vector::iterator iter = codecs_.begin(); - iter != codecs_.end(); ++iter) { - if (iter->id == id) { + for (auto it = codecs_.begin(); it != codecs_.end(); ++it) { + if (it->id == id) { found = true; break; } } return found; } - void AddCodec(const C& codec) { codecs_.push_back(codec); } - void AddOrReplaceCodec(const C& codec) { - for (typename std::vector::iterator iter = codecs_.begin(); - iter != codecs_.end(); ++iter) { - if (iter->id == codec.id) { - *iter = codec; + void AddCodec(const Codec& codec) { codecs_.push_back(codec); } + void AddOrReplaceCodec(const Codec& codec) { + for (auto it = codecs_.begin(); it != codecs_.end(); ++it) { + if (it->id == codec.id) { + *it = codec; return; } } AddCodec(codec); } - void AddCodecs(const std::vector& codecs) { - typename std::vector::const_iterator codec; - for (codec = codecs.begin(); codec != codecs.end(); ++codec) { - AddCodec(*codec); + void AddCodecs(const std::vector& codecs) { + for (const auto& codec : codecs) { + AddCodec(codec); } } private: - std::vector codecs_; + std::vector codecs_; }; -class AudioContentDescription : public MediaContentDescriptionImpl { +class AudioContentDescription : public MediaContentDescriptionImpl { public: AudioContentDescription() {} @@ -322,7 +317,7 @@ class AudioContentDescription : public MediaContentDescriptionImpl { } }; -class VideoContentDescription : public MediaContentDescriptionImpl { +class VideoContentDescription : public MediaContentDescriptionImpl { public: virtual MediaType type() const { return MEDIA_TYPE_VIDEO; } virtual VideoContentDescription* as_video() { return this; } diff --git a/third_party/libwebrtc/pc/srtp_filter.cc b/third_party/libwebrtc/pc/srtp_filter.cc index 9d7f39a7a319..b8be63cd2223 100644 --- a/third_party/libwebrtc/pc/srtp_filter.cc +++ b/third_party/libwebrtc/pc/srtp_filter.cc @@ -182,8 +182,8 @@ bool SrtpFilter::ResetParams() { offer_params_.clear(); applied_send_params_ = CryptoParams(); applied_recv_params_ = CryptoParams(); - send_cipher_suite_ = absl::nullopt; - recv_cipher_suite_ = absl::nullopt; + send_crypto_suite_ = absl::nullopt; + recv_crypto_suite_ = absl::nullopt; send_key_.Clear(); recv_key_.Clear(); state_ = ST_INIT; @@ -191,7 +191,7 @@ bool SrtpFilter::ResetParams() { } bool SrtpFilter::ApplySendParams(const CryptoParams& send_params) { - if (applied_send_params_.cipher_suite == send_params.cipher_suite && + if (applied_send_params_.crypto_suite == send_params.crypto_suite && applied_send_params_.key_params == send_params.key_params) { RTC_LOG(LS_INFO) << "Applying the same SRTP send parameters again. 
No-op."; @@ -199,20 +199,20 @@ bool SrtpFilter::ApplySendParams(const CryptoParams& send_params) { return true; } - send_cipher_suite_ = rtc::SrtpCryptoSuiteFromName(send_params.cipher_suite); - if (send_cipher_suite_ == rtc::kSrtpInvalidCryptoSuite) { + send_crypto_suite_ = rtc::SrtpCryptoSuiteFromName(send_params.crypto_suite); + if (send_crypto_suite_ == rtc::kSrtpInvalidCryptoSuite) { RTC_LOG(LS_WARNING) << "Unknown crypto suite(s) received:" - " send cipher_suite " - << send_params.cipher_suite; + " send crypto_suite " + << send_params.crypto_suite; return false; } int send_key_len, send_salt_len; - if (!rtc::GetSrtpKeyAndSaltLengths(*send_cipher_suite_, &send_key_len, + if (!rtc::GetSrtpKeyAndSaltLengths(*send_crypto_suite_, &send_key_len, &send_salt_len)) { RTC_LOG(LS_ERROR) << "Could not get lengths for crypto suite(s):" - " send cipher_suite " - << send_params.cipher_suite; + " send crypto_suite " + << send_params.crypto_suite; return false; } @@ -222,7 +222,7 @@ bool SrtpFilter::ApplySendParams(const CryptoParams& send_params) { } bool SrtpFilter::ApplyRecvParams(const CryptoParams& recv_params) { - if (applied_recv_params_.cipher_suite == recv_params.cipher_suite && + if (applied_recv_params_.crypto_suite == recv_params.crypto_suite && applied_recv_params_.key_params == recv_params.key_params) { RTC_LOG(LS_INFO) << "Applying the same SRTP recv parameters again. No-op."; @@ -230,20 +230,20 @@ bool SrtpFilter::ApplyRecvParams(const CryptoParams& recv_params) { return true; } - recv_cipher_suite_ = rtc::SrtpCryptoSuiteFromName(recv_params.cipher_suite); - if (recv_cipher_suite_ == rtc::kSrtpInvalidCryptoSuite) { + recv_crypto_suite_ = rtc::SrtpCryptoSuiteFromName(recv_params.crypto_suite); + if (recv_crypto_suite_ == rtc::kSrtpInvalidCryptoSuite) { RTC_LOG(LS_WARNING) << "Unknown crypto suite(s) received:" - " recv cipher_suite " - << recv_params.cipher_suite; + " recv crypto_suite " + << recv_params.crypto_suite; return false; } int recv_key_len, recv_salt_len; - if (!rtc::GetSrtpKeyAndSaltLengths(*recv_cipher_suite_, &recv_key_len, + if (!rtc::GetSrtpKeyAndSaltLengths(*recv_crypto_suite_, &recv_key_len, &recv_salt_len)) { RTC_LOG(LS_ERROR) << "Could not get lengths for crypto suite(s):" - " recv cipher_suite " - << recv_params.cipher_suite; + " recv crypto_suite " + << recv_params.crypto_suite; return false; } diff --git a/third_party/libwebrtc/pc/srtp_filter.h b/third_party/libwebrtc/pc/srtp_filter.h index e2848a10902a..59c43f624b9b 100644 --- a/third_party/libwebrtc/pc/srtp_filter.h +++ b/third_party/libwebrtc/pc/srtp_filter.h @@ -83,8 +83,8 @@ class SrtpFilter { uint8_t* key, size_t len); - absl::optional send_cipher_suite() { return send_cipher_suite_; } - absl::optional recv_cipher_suite() { return recv_cipher_suite_; } + absl::optional send_crypto_suite() { return send_crypto_suite_; } + absl::optional recv_crypto_suite() { return recv_crypto_suite_; } rtc::ArrayView send_key() { return send_key_; } rtc::ArrayView recv_key() { return recv_key_; } @@ -136,8 +136,8 @@ class SrtpFilter { std::vector offer_params_; CryptoParams applied_send_params_; CryptoParams applied_recv_params_; - absl::optional send_cipher_suite_; - absl::optional recv_cipher_suite_; + absl::optional send_crypto_suite_; + absl::optional recv_crypto_suite_; rtc::ZeroOnFreeBuffer send_key_; rtc::ZeroOnFreeBuffer recv_key_; }; diff --git a/third_party/libwebrtc/pc/srtp_filter_unittest.cc b/third_party/libwebrtc/pc/srtp_filter_unittest.cc index eadaad68afde..fed023199f2d 100644 --- 
a/third_party/libwebrtc/pc/srtp_filter_unittest.cc +++ b/third_party/libwebrtc/pc/srtp_filter_unittest.cc @@ -91,8 +91,8 @@ class SrtpFilterTest : public ::testing::Test { } void VerifyCryptoParamsMatch(const std::string& cs1, const std::string& cs2) { - EXPECT_EQ(rtc::SrtpCryptoSuiteFromName(cs1), f1_.send_cipher_suite()); - EXPECT_EQ(rtc::SrtpCryptoSuiteFromName(cs2), f2_.send_cipher_suite()); + EXPECT_EQ(rtc::SrtpCryptoSuiteFromName(cs1), f1_.send_crypto_suite()); + EXPECT_EQ(rtc::SrtpCryptoSuiteFromName(cs2), f2_.send_crypto_suite()); VerifyKeysAreEqual(f1_.send_key(), f2_.recv_key()); VerifyKeysAreEqual(f2_.send_key(), f1_.recv_key()); } @@ -102,14 +102,14 @@ class SrtpFilterTest : public ::testing::Test { }; // Test that we can set up the session and keys properly. -TEST_F(SrtpFilterTest, TestGoodSetupOneCipherSuite) { +TEST_F(SrtpFilterTest, TestGoodSetupOneCryptoSuite) { EXPECT_TRUE(f1_.SetOffer(MakeVector(kTestCryptoParams1), CS_LOCAL)); EXPECT_FALSE(f1_.IsActive()); EXPECT_TRUE(f1_.SetAnswer(MakeVector(kTestCryptoParams2), CS_REMOTE)); EXPECT_TRUE(f1_.IsActive()); } -TEST_F(SrtpFilterTest, TestGoodSetupOneCipherSuiteGcm) { +TEST_F(SrtpFilterTest, TestGoodSetupOneCryptoSuiteGcm) { EXPECT_TRUE(f1_.SetOffer(MakeVector(kTestCryptoParamsGcm1), CS_LOCAL)); EXPECT_FALSE(f1_.IsActive()); EXPECT_TRUE(f1_.SetAnswer(MakeVector(kTestCryptoParamsGcm2), CS_REMOTE)); @@ -117,21 +117,21 @@ TEST_F(SrtpFilterTest, TestGoodSetupOneCipherSuiteGcm) { } // Test that we can set up things with multiple params. -TEST_F(SrtpFilterTest, TestGoodSetupMultipleCipherSuites) { +TEST_F(SrtpFilterTest, TestGoodSetupMultipleCryptoSuites) { std::vector offer(MakeVector(kTestCryptoParams1)); std::vector answer(MakeVector(kTestCryptoParams2)); offer.push_back(kTestCryptoParams1); offer[1].tag = 2; - offer[1].cipher_suite = kCsAesCm128HmacSha1_32; + offer[1].crypto_suite = kCsAesCm128HmacSha1_32; answer[0].tag = 2; - answer[0].cipher_suite = kCsAesCm128HmacSha1_32; + answer[0].crypto_suite = kCsAesCm128HmacSha1_32; EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL)); EXPECT_FALSE(f1_.IsActive()); EXPECT_TRUE(f1_.SetAnswer(answer, CS_REMOTE)); EXPECT_TRUE(f1_.IsActive()); } -TEST_F(SrtpFilterTest, TestGoodSetupMultipleCipherSuitesGcm) { +TEST_F(SrtpFilterTest, TestGoodSetupMultipleCryptoSuitesGcm) { std::vector offer(MakeVector(kTestCryptoParamsGcm1)); std::vector answer(MakeVector(kTestCryptoParamsGcm3)); offer.push_back(kTestCryptoParamsGcm4); @@ -144,7 +144,7 @@ TEST_F(SrtpFilterTest, TestGoodSetupMultipleCipherSuitesGcm) { } // Test that we handle the cases where crypto is not desired. -TEST_F(SrtpFilterTest, TestGoodSetupNoCipherSuites) { +TEST_F(SrtpFilterTest, TestGoodSetupNoCryptoSuites) { std::vector offer, answer; EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL)); EXPECT_TRUE(f1_.SetAnswer(answer, CS_REMOTE)); @@ -152,7 +152,7 @@ TEST_F(SrtpFilterTest, TestGoodSetupNoCipherSuites) { } // Test that we handle the cases where crypto is not desired by the remote side. -TEST_F(SrtpFilterTest, TestGoodSetupNoAnswerCipherSuites) { +TEST_F(SrtpFilterTest, TestGoodSetupNoAnswerCryptoSuites) { std::vector answer; EXPECT_TRUE(f1_.SetOffer(MakeVector(kTestCryptoParams1), CS_LOCAL)); EXPECT_TRUE(f1_.SetAnswer(answer, CS_REMOTE)); @@ -212,7 +212,7 @@ TEST_F(SrtpFilterTest, TestBadSetupMultipleOffers) { } // Test that we fail if we have params in the answer when none were offered. 
-TEST_F(SrtpFilterTest, TestNoAnswerCipherSuites) { +TEST_F(SrtpFilterTest, TestNoAnswerCryptoSuites) { std::vector offer; EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL)); EXPECT_FALSE(f1_.SetAnswer(MakeVector(kTestCryptoParams2), CS_REMOTE)); @@ -220,21 +220,21 @@ TEST_F(SrtpFilterTest, TestNoAnswerCipherSuites) { } // Test that we fail if we have too many params in our answer. -TEST_F(SrtpFilterTest, TestMultipleAnswerCipherSuites) { +TEST_F(SrtpFilterTest, TestMultipleAnswerCryptoSuites) { std::vector answer(MakeVector(kTestCryptoParams2)); answer.push_back(kTestCryptoParams2); answer[1].tag = 2; - answer[1].cipher_suite = kCsAesCm128HmacSha1_32; + answer[1].crypto_suite = kCsAesCm128HmacSha1_32; EXPECT_TRUE(f1_.SetOffer(MakeVector(kTestCryptoParams1), CS_LOCAL)); EXPECT_FALSE(f1_.SetAnswer(answer, CS_REMOTE)); EXPECT_FALSE(f1_.IsActive()); } -// Test that we fail if we don't support the cipher-suite. -TEST_F(SrtpFilterTest, TestInvalidCipherSuite) { +// Test that we fail if we don't support the crypto suite. +TEST_F(SrtpFilterTest, TestInvalidCryptoSuite) { std::vector offer(MakeVector(kTestCryptoParams1)); std::vector answer(MakeVector(kTestCryptoParams2)); - offer[0].cipher_suite = answer[0].cipher_suite = "FOO"; + offer[0].crypto_suite = answer[0].crypto_suite = "FOO"; EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL)); EXPECT_FALSE(f1_.SetAnswer(answer, CS_REMOTE)); EXPECT_FALSE(f1_.IsActive()); @@ -250,12 +250,12 @@ TEST_F(SrtpFilterTest, TestNoMatchingTag) { EXPECT_FALSE(f1_.IsActive()); } -// Test that we fail if we can't agree on a cipher-suite. -TEST_F(SrtpFilterTest, TestNoMatchingCipherSuite) { +// Test that we fail if we can't agree on a crypto suite. +TEST_F(SrtpFilterTest, TestNoMatchingCryptoSuite) { std::vector offer(MakeVector(kTestCryptoParams1)); std::vector answer(MakeVector(kTestCryptoParams2)); answer[0].tag = 2; - answer[0].cipher_suite = "FOO"; + answer[0].crypto_suite = "FOO"; EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL)); EXPECT_FALSE(f1_.SetAnswer(answer, CS_REMOTE)); EXPECT_FALSE(f1_.IsActive()); @@ -318,7 +318,7 @@ TEST_F(SrtpFilterTest, TestProtect_AES_CM_128_HMAC_SHA1_80) { std::vector answer(MakeVector(kTestCryptoParams2)); offer.push_back(kTestCryptoParams1); offer[1].tag = 2; - offer[1].cipher_suite = kCsAesCm128HmacSha1_32; + offer[1].crypto_suite = kCsAesCm128HmacSha1_32; TestSetParams(offer, answer); VerifyCryptoParamsMatch(kCsAesCm128HmacSha1_80, kCsAesCm128HmacSha1_80); } @@ -329,9 +329,9 @@ TEST_F(SrtpFilterTest, TestProtect_AES_CM_128_HMAC_SHA1_32) { std::vector answer(MakeVector(kTestCryptoParams2)); offer.push_back(kTestCryptoParams1); offer[1].tag = 2; - offer[1].cipher_suite = kCsAesCm128HmacSha1_32; + offer[1].crypto_suite = kCsAesCm128HmacSha1_32; answer[0].tag = 2; - answer[0].cipher_suite = kCsAesCm128HmacSha1_32; + answer[0].crypto_suite = kCsAesCm128HmacSha1_32; TestSetParams(offer, answer); VerifyCryptoParamsMatch(kCsAesCm128HmacSha1_32, kCsAesCm128HmacSha1_32); } @@ -344,11 +344,11 @@ TEST_F(SrtpFilterTest, TestChangeParameters) { TestSetParams(offer, answer); VerifyCryptoParamsMatch(kCsAesCm128HmacSha1_80, kCsAesCm128HmacSha1_80); - // Change the key parameters and cipher_suite. + // Change the key parameters and crypto_suite. 
offer[0].key_params = kTestKeyParams3; - offer[0].cipher_suite = kCsAesCm128HmacSha1_32; + offer[0].crypto_suite = kCsAesCm128HmacSha1_32; answer[0].key_params = kTestKeyParams4; - answer[0].cipher_suite = kCsAesCm128HmacSha1_32; + answer[0].crypto_suite = kCsAesCm128HmacSha1_32; EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL)); EXPECT_TRUE(f2_.SetOffer(offer, CS_REMOTE)); @@ -371,7 +371,7 @@ TEST_F(SrtpFilterTest, TestProvisionalAnswer) { std::vector offer(MakeVector(kTestCryptoParams1)); offer.push_back(kTestCryptoParams1); offer[1].tag = 2; - offer[1].cipher_suite = kCsAesCm128HmacSha1_32; + offer[1].crypto_suite = kCsAesCm128HmacSha1_32; std::vector answer(MakeVector(kTestCryptoParams2)); EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL)); @@ -386,7 +386,7 @@ TEST_F(SrtpFilterTest, TestProvisionalAnswer) { answer[0].key_params = kTestKeyParams4; answer[0].tag = 2; - answer[0].cipher_suite = kCsAesCm128HmacSha1_32; + answer[0].crypto_suite = kCsAesCm128HmacSha1_32; EXPECT_TRUE(f2_.SetAnswer(answer, CS_LOCAL)); EXPECT_TRUE(f1_.SetAnswer(answer, CS_REMOTE)); EXPECT_TRUE(f1_.IsActive()); diff --git a/third_party/libwebrtc/pc/srtp_session.cc b/third_party/libwebrtc/pc/srtp_session.cc index 7d1aaf2d6528..5408d3e0dab7 100644 --- a/third_party/libwebrtc/pc/srtp_session.cc +++ b/third_party/libwebrtc/pc/srtp_session.cc @@ -133,32 +133,32 @@ SrtpSession::~SrtpSession() { } } -bool SrtpSession::SetSend(int cs, +bool SrtpSession::SetSend(int crypto_suite, const uint8_t* key, size_t len, const std::vector& extension_ids) { - return SetKey(ssrc_any_outbound, cs, key, len, extension_ids); + return SetKey(ssrc_any_outbound, crypto_suite, key, len, extension_ids); } -bool SrtpSession::UpdateSend(int cs, +bool SrtpSession::UpdateSend(int crypto_suite, const uint8_t* key, size_t len, const std::vector& extension_ids) { - return UpdateKey(ssrc_any_outbound, cs, key, len, extension_ids); + return UpdateKey(ssrc_any_outbound, crypto_suite, key, len, extension_ids); } -bool SrtpSession::SetRecv(int cs, +bool SrtpSession::SetRecv(int crypto_suite, const uint8_t* key, size_t len, const std::vector& extension_ids) { - return SetKey(ssrc_any_inbound, cs, key, len, extension_ids); + return SetKey(ssrc_any_inbound, crypto_suite, key, len, extension_ids); } -bool SrtpSession::UpdateRecv(int cs, +bool SrtpSession::UpdateRecv(int crypto_suite, const uint8_t* key, size_t len, const std::vector& extension_ids) { - return UpdateKey(ssrc_any_inbound, cs, key, len, extension_ids); + return UpdateKey(ssrc_any_inbound, crypto_suite, key, len, extension_ids); } bool SrtpSession::ProtectRtp(void* p, int in_len, int max_len, int* out_len) { @@ -349,7 +349,7 @@ bool SrtpSession::GetSendStreamPacketIndex(void* p, } bool SrtpSession::DoSetKey(int type, - int cs, + int crypto_suite, const uint8_t* key, size_t len, const std::vector& extension_ids) { @@ -358,11 +358,13 @@ bool SrtpSession::DoSetKey(int type, srtp_policy_t policy; memset(&policy, 0, sizeof(policy)); if (!(srtp_crypto_policy_set_from_profile_for_rtp( - &policy.rtp, (srtp_profile_t)cs) == srtp_err_status_ok && + &policy.rtp, (srtp_profile_t)crypto_suite) == srtp_err_status_ok && srtp_crypto_policy_set_from_profile_for_rtcp( - &policy.rtcp, (srtp_profile_t)cs) == srtp_err_status_ok)) { + &policy.rtcp, (srtp_profile_t)crypto_suite) == + srtp_err_status_ok)) { RTC_LOG(LS_ERROR) << "Failed to " << (session_ ? 
"update" : "create") - << " SRTP session: unsupported cipher_suite " << cs; + << " SRTP session: unsupported cipher_suite " + << crypto_suite; return false; } @@ -385,7 +387,7 @@ bool SrtpSession::DoSetKey(int type, // Enable external HMAC authentication only for outgoing streams and only // for cipher suites that support it (i.e. only non-GCM cipher suites). if (type == ssrc_any_outbound && IsExternalAuthEnabled() && - !rtc::IsGcmCryptoSuite(cs)) { + !rtc::IsGcmCryptoSuite(crypto_suite)) { policy.rtp.auth_type = EXTERNAL_HMAC_SHA1; } if (!extension_ids.empty()) { @@ -417,7 +419,7 @@ bool SrtpSession::DoSetKey(int type, } bool SrtpSession::SetKey(int type, - int cs, + int crypto_suite, const uint8_t* key, size_t len, const std::vector& extension_ids) { @@ -437,11 +439,11 @@ bool SrtpSession::SetKey(int type, return false; } - return DoSetKey(type, cs, key, len, extension_ids); + return DoSetKey(type, crypto_suite, key, len, extension_ids); } bool SrtpSession::UpdateKey(int type, - int cs, + int crypto_suite, const uint8_t* key, size_t len, const std::vector& extension_ids) { @@ -451,7 +453,7 @@ bool SrtpSession::UpdateKey(int type, return false; } - return DoSetKey(type, cs, key, len, extension_ids); + return DoSetKey(type, crypto_suite, key, len, extension_ids); } void ProhibitLibsrtpInitialization() { diff --git a/third_party/libwebrtc/pc/srtp_session.h b/third_party/libwebrtc/pc/srtp_session.h index 048e6656444e..60f1860ada10 100644 --- a/third_party/libwebrtc/pc/srtp_session.h +++ b/third_party/libwebrtc/pc/srtp_session.h @@ -43,23 +43,23 @@ class SrtpSession { SrtpSession& operator=(const SrtpSession&) = delete; // Configures the session for sending data using the specified - // cipher-suite and key. Receiving must be done by a separate session. - bool SetSend(int cs, + // crypto suite and key. Receiving must be done by a separate session. + bool SetSend(int crypto_suite, const uint8_t* key, size_t len, const std::vector& extension_ids); - bool UpdateSend(int cs, + bool UpdateSend(int crypto_suite, const uint8_t* key, size_t len, const std::vector& extension_ids); // Configures the session for receiving data using the specified - // cipher-suite and key. Sending must be done by a separate session. - bool SetRecv(int cs, + // crypto suite and key. Sending must be done by a separate session. 
+ bool SetRecv(int crypto_suite, const uint8_t* key, size_t len, const std::vector& extension_ids); - bool UpdateRecv(int cs, + bool UpdateRecv(int crypto_suite, const uint8_t* key, size_t len, const std::vector& extension_ids); @@ -99,17 +99,17 @@ class SrtpSession { private: bool DoSetKey(int type, - int cs, + int crypto_suite, const uint8_t* key, size_t len, const std::vector& extension_ids); bool SetKey(int type, - int cs, + int crypto_suite, const uint8_t* key, size_t len, const std::vector& extension_ids); bool UpdateKey(int type, - int cs, + int crypto_suite, const uint8_t* key, size_t len, const std::vector& extension_ids); diff --git a/third_party/libwebrtc/pc/srtp_transport.cc b/third_party/libwebrtc/pc/srtp_transport.cc index 838040876c4d..1698b2128f84 100644 --- a/third_party/libwebrtc/pc/srtp_transport.cc +++ b/third_party/libwebrtc/pc/srtp_transport.cc @@ -43,24 +43,24 @@ RTCError SrtpTransport::SetSrtpSendKey(const cricket::CryptoParams& params) { webrtc::RTCErrorType::UNSUPPORTED_OPERATION, "Setting the SRTP send key twice is currently unsupported."); } - if (recv_params_ && recv_params_->cipher_suite != params.cipher_suite) { + if (recv_params_ && recv_params_->crypto_suite != params.crypto_suite) { LOG_AND_RETURN_ERROR( webrtc::RTCErrorType::UNSUPPORTED_OPERATION, "The send key and receive key must have the same cipher suite."); } - send_cipher_suite_ = rtc::SrtpCryptoSuiteFromName(params.cipher_suite); - if (*send_cipher_suite_ == rtc::kSrtpInvalidCryptoSuite) { + send_crypto_suite_ = rtc::SrtpCryptoSuiteFromName(params.crypto_suite); + if (*send_crypto_suite_ == rtc::kSrtpInvalidCryptoSuite) { return RTCError(RTCErrorType::INVALID_PARAMETER, "Invalid SRTP crypto suite"); } int send_key_len, send_salt_len; - if (!rtc::GetSrtpKeyAndSaltLengths(*send_cipher_suite_, &send_key_len, + if (!rtc::GetSrtpKeyAndSaltLengths(*send_crypto_suite_, &send_key_len, &send_salt_len)) { return RTCError(RTCErrorType::INVALID_PARAMETER, "Could not get lengths for crypto suite(s):" - " send cipher_suite "); + " send crypto_suite "); } send_key_ = rtc::ZeroOnFreeBuffer(send_key_len + send_salt_len); @@ -83,24 +83,24 @@ RTCError SrtpTransport::SetSrtpReceiveKey(const cricket::CryptoParams& params) { webrtc::RTCErrorType::UNSUPPORTED_OPERATION, "Setting the SRTP send key twice is currently unsupported."); } - if (send_params_ && send_params_->cipher_suite != params.cipher_suite) { + if (send_params_ && send_params_->crypto_suite != params.crypto_suite) { LOG_AND_RETURN_ERROR( webrtc::RTCErrorType::UNSUPPORTED_OPERATION, "The send key and receive key must have the same cipher suite."); } - recv_cipher_suite_ = rtc::SrtpCryptoSuiteFromName(params.cipher_suite); - if (*recv_cipher_suite_ == rtc::kSrtpInvalidCryptoSuite) { + recv_crypto_suite_ = rtc::SrtpCryptoSuiteFromName(params.crypto_suite); + if (*recv_crypto_suite_ == rtc::kSrtpInvalidCryptoSuite) { return RTCError(RTCErrorType::INVALID_PARAMETER, "Invalid SRTP crypto suite"); } int recv_key_len, recv_salt_len; - if (!rtc::GetSrtpKeyAndSaltLengths(*recv_cipher_suite_, &recv_key_len, + if (!rtc::GetSrtpKeyAndSaltLengths(*recv_crypto_suite_, &recv_key_len, &recv_salt_len)) { return RTCError(RTCErrorType::INVALID_PARAMETER, "Could not get lengths for crypto suite(s):" - " recv cipher_suite "); + " recv crypto_suite "); } recv_key_ = rtc::ZeroOnFreeBuffer(recv_key_len + recv_salt_len); @@ -265,11 +265,11 @@ void SrtpTransport::OnWritableState( SignalWritableState(IsWritable(/*rtcp=*/false) && IsWritable(/*rtcp=*/true)); } -bool 
SrtpTransport::SetRtpParams(int send_cs, +bool SrtpTransport::SetRtpParams(int send_crypto_suite, const uint8_t* send_key, int send_key_len, const std::vector& send_extension_ids, - int recv_cs, + int recv_crypto_suite, const uint8_t* recv_key, int recv_key_len, const std::vector& recv_extension_ids) { @@ -284,36 +284,38 @@ bool SrtpTransport::SetRtpParams(int send_cs, new_sessions = true; } bool ret = new_sessions - ? send_session_->SetSend(send_cs, send_key, send_key_len, - send_extension_ids) - : send_session_->UpdateSend(send_cs, send_key, send_key_len, - send_extension_ids); + ? send_session_->SetSend(send_crypto_suite, send_key, + send_key_len, send_extension_ids) + : send_session_->UpdateSend(send_crypto_suite, send_key, + send_key_len, send_extension_ids); if (!ret) { ResetParams(); return false; } - ret = new_sessions ? recv_session_->SetRecv(recv_cs, recv_key, recv_key_len, - recv_extension_ids) - : recv_session_->UpdateRecv( - recv_cs, recv_key, recv_key_len, recv_extension_ids); + ret = new_sessions + ? recv_session_->SetRecv(recv_crypto_suite, recv_key, recv_key_len, + recv_extension_ids) + : recv_session_->UpdateRecv(recv_crypto_suite, recv_key, + recv_key_len, recv_extension_ids); if (!ret) { ResetParams(); return false; } RTC_LOG(LS_INFO) << "SRTP " << (new_sessions ? "activated" : "updated") - << " with negotiated parameters: send cipher_suite " - << send_cs << " recv cipher_suite " << recv_cs; + << " with negotiated parameters: send crypto_suite " + << send_crypto_suite << " recv crypto_suite " + << recv_crypto_suite; MaybeUpdateWritableState(); return true; } -bool SrtpTransport::SetRtcpParams(int send_cs, +bool SrtpTransport::SetRtcpParams(int send_crypto_suite, const uint8_t* send_key, int send_key_len, const std::vector& send_extension_ids, - int recv_cs, + int recv_crypto_suite, const uint8_t* recv_key, int recv_key_len, const std::vector& recv_extension_ids) { @@ -325,20 +327,21 @@ bool SrtpTransport::SetRtcpParams(int send_cs, } send_rtcp_session_.reset(new cricket::SrtpSession(field_trials_)); - if (!send_rtcp_session_->SetSend(send_cs, send_key, send_key_len, + if (!send_rtcp_session_->SetSend(send_crypto_suite, send_key, send_key_len, send_extension_ids)) { return false; } recv_rtcp_session_.reset(new cricket::SrtpSession(field_trials_)); - if (!recv_rtcp_session_->SetRecv(recv_cs, recv_key, recv_key_len, + if (!recv_rtcp_session_->SetRecv(recv_crypto_suite, recv_key, recv_key_len, recv_extension_ids)) { return false; } RTC_LOG(LS_INFO) << "SRTCP activated with negotiated parameters:" - " send cipher_suite " - << send_cs << " recv cipher_suite " << recv_cs; + " send crypto_suite " + << send_crypto_suite << " recv crypto_suite " + << recv_crypto_suite; MaybeUpdateWritableState(); return true; } @@ -472,13 +475,13 @@ bool SrtpTransport::IsExternalAuthActive() const { } bool SrtpTransport::MaybeSetKeyParams() { - if (!send_cipher_suite_ || !recv_cipher_suite_) { + if (!send_crypto_suite_ || !recv_crypto_suite_) { return true; } - return SetRtpParams(*send_cipher_suite_, send_key_.data(), + return SetRtpParams(*send_crypto_suite_, send_key_.data(), static_cast(send_key_.size()), std::vector(), - *recv_cipher_suite_, recv_key_.data(), + *recv_crypto_suite_, recv_key_.data(), static_cast(recv_key_.size()), std::vector()); } diff --git a/third_party/libwebrtc/pc/srtp_transport.h b/third_party/libwebrtc/pc/srtp_transport.h index ae62d5b780b8..46c11ed56def 100644 --- a/third_party/libwebrtc/pc/srtp_transport.h +++ b/third_party/libwebrtc/pc/srtp_transport.h @@ -61,11 
+61,11 @@ class SrtpTransport : public RtpTransport { // Create new send/recv sessions and set the negotiated crypto keys for RTP // packet encryption. The keys can either come from SDES negotiation or DTLS // handshake. - bool SetRtpParams(int send_cs, + bool SetRtpParams(int send_crypto_suite, const uint8_t* send_key, int send_key_len, const std::vector& send_extension_ids, - int recv_cs, + int recv_crypto_suite, const uint8_t* recv_key, int recv_key_len, const std::vector& recv_extension_ids); @@ -73,11 +73,11 @@ class SrtpTransport : public RtpTransport { // Create new send/recv sessions and set the negotiated crypto keys for RTCP // packet encryption. The keys can either come from SDES negotiation or DTLS // handshake. - bool SetRtcpParams(int send_cs, + bool SetRtcpParams(int send_crypto_suite, const uint8_t* send_key, int send_key_len, const std::vector& send_extension_ids, - int recv_cs, + int recv_crypto_suite, const uint8_t* recv_key, int recv_key_len, const std::vector& recv_extension_ids); @@ -86,7 +86,7 @@ class SrtpTransport : public RtpTransport { // If external auth is enabled, SRTP will write a dummy auth tag that then // later must get replaced before the packet is sent out. Only supported for - // non-GCM cipher suites and can be checked through "IsExternalAuthActive" + // non-GCM crypto suites and can be checked through "IsExternalAuthActive" // if it is actually used. This method is only valid before the RTP params // have been set. void EnableExternalAuth(); @@ -155,8 +155,8 @@ class SrtpTransport : public RtpTransport { absl::optional send_params_; absl::optional recv_params_; - absl::optional send_cipher_suite_; - absl::optional recv_cipher_suite_; + absl::optional send_crypto_suite_; + absl::optional recv_crypto_suite_; rtc::ZeroOnFreeBuffer send_key_; rtc::ZeroOnFreeBuffer recv_key_; diff --git a/third_party/libwebrtc/pc/test/fake_peer_connection_for_stats.h b/third_party/libwebrtc/pc/test/fake_peer_connection_for_stats.h index 79be6a04036c..730218291234 100644 --- a/third_party/libwebrtc/pc/test/fake_peer_connection_for_stats.h +++ b/third_party/libwebrtc/pc/test/fake_peer_connection_for_stats.h @@ -19,6 +19,7 @@ #include #include "media/base/fake_media_engine.h" +#include "media/base/media_channel.h" #include "pc/channel.h" #include "pc/stream_collection.h" #include "pc/test/fake_data_channel_controller.h" @@ -27,89 +28,49 @@ namespace webrtc { // Fake VoiceMediaChannel where the result of GetStats can be configured. -class FakeVoiceMediaChannelForStats : public cricket::FakeVoiceMediaChannel { +class FakeVoiceMediaSendChannelForStats + : public cricket::FakeVoiceMediaSendChannel { public: - explicit FakeVoiceMediaChannelForStats(MediaChannel::Role role, - TaskQueueBase* network_thread) - : cricket::FakeVoiceMediaChannel(role, - nullptr, - cricket::AudioOptions(), - network_thread) {} + explicit FakeVoiceMediaSendChannelForStats(TaskQueueBase* network_thread) + : cricket::FakeVoiceMediaSendChannel(cricket::AudioOptions(), + network_thread) {} void SetStats(const cricket::VoiceMediaInfo& voice_info) { send_stats_ = cricket::VoiceMediaSendInfo(); send_stats_->senders = voice_info.senders; send_stats_->send_codecs = voice_info.send_codecs; - receive_stats_ = cricket::VoiceMediaReceiveInfo(); - receive_stats_->receivers = voice_info.receivers; - receive_stats_->receive_codecs = voice_info.receive_codecs; - receive_stats_->device_underrun_count = voice_info.device_underrun_count; } // VoiceMediaChannel overrides. 
- bool GetSendStats(cricket::VoiceMediaSendInfo* info) override { - RTC_DCHECK(role() == MediaChannel::Role::kSend); + bool GetStats(cricket::VoiceMediaSendInfo* info) override { if (send_stats_) { *info = *send_stats_; return true; } return false; } - bool GetReceiveStats(cricket::VoiceMediaReceiveInfo* info, - bool get_and_clear_legacy_stats) override { - RTC_DCHECK(role() == MediaChannel::Role::kReceive); - if (receive_stats_) { - *info = *receive_stats_; - return true; - } - return false; - } private: absl::optional send_stats_; - absl::optional receive_stats_; }; -// Fake VideoMediaChannel where the result of GetStats can be configured. -class FakeVideoMediaChannelForStats : public cricket::FakeVideoMediaChannel { +class FakeVoiceMediaReceiveChannelForStats + : public cricket::FakeVoiceMediaReceiveChannel { public: - explicit FakeVideoMediaChannelForStats(cricket::MediaChannel::Role role, - TaskQueueBase* network_thread) - : cricket::FakeVideoMediaChannel(role, - nullptr, - cricket::VideoOptions(), - network_thread) {} + explicit FakeVoiceMediaReceiveChannelForStats(TaskQueueBase* network_thread) + : cricket::FakeVoiceMediaReceiveChannel(cricket::AudioOptions(), + network_thread) {} - void SetStats(const cricket::VideoMediaInfo& video_info) { - switch (role()) { - case MediaChannel::Role::kSend: - send_stats_ = cricket::VideoMediaSendInfo(); - send_stats_->senders = video_info.senders; - send_stats_->aggregated_senders = video_info.aggregated_senders; - send_stats_->send_codecs = video_info.send_codecs; - break; - case MediaChannel::Role::kReceive: - receive_stats_ = cricket::VideoMediaReceiveInfo(); - receive_stats_->receivers = video_info.receivers; - receive_stats_->receive_codecs = video_info.receive_codecs; - break; - default: - RTC_CHECK_NOTREACHED(); - } + void SetStats(const cricket::VoiceMediaInfo& voice_info) { + receive_stats_ = cricket::VoiceMediaReceiveInfo(); + receive_stats_->receivers = voice_info.receivers; + receive_stats_->receive_codecs = voice_info.receive_codecs; + receive_stats_->device_underrun_count = voice_info.device_underrun_count; } - // VideoMediaChannel overrides. - bool GetSendStats(cricket::VideoMediaSendInfo* info) override { - RTC_DCHECK(role() == MediaChannel::Role::kSend); - - if (send_stats_) { - *info = *send_stats_; - return true; - } - return false; - } - bool GetReceiveStats(cricket::VideoMediaReceiveInfo* info) override { - RTC_DCHECK(role() == MediaChannel::Role::kReceive); + // VoiceMediaChannel overrides. + bool GetStats(cricket::VoiceMediaReceiveInfo* info, + bool get_and_clear_legacy_stats) override { if (receive_stats_) { *info = *receive_stats_; return true; @@ -117,8 +78,61 @@ class FakeVideoMediaChannelForStats : public cricket::FakeVideoMediaChannel { return false; } + private: + absl::optional receive_stats_; +}; + +// Fake VideoMediaChannel where the result of GetStats can be configured. +class FakeVideoMediaSendChannelForStats + : public cricket::FakeVideoMediaSendChannel { + public: + explicit FakeVideoMediaSendChannelForStats(TaskQueueBase* network_thread) + : cricket::FakeVideoMediaSendChannel(cricket::VideoOptions(), + network_thread) {} + + void SetStats(const cricket::VideoMediaInfo& video_info) { + send_stats_ = cricket::VideoMediaSendInfo(); + send_stats_->senders = video_info.senders; + send_stats_->aggregated_senders = video_info.aggregated_senders; + send_stats_->send_codecs = video_info.send_codecs; + } + + // VideoMediaChannel overrides. 
+ bool GetStats(cricket::VideoMediaSendInfo* info) override { + if (send_stats_) { + *info = *send_stats_; + return true; + } + return false; + } + private: absl::optional send_stats_; +}; + +class FakeVideoMediaReceiveChannelForStats + : public cricket::FakeVideoMediaReceiveChannel { + public: + explicit FakeVideoMediaReceiveChannelForStats(TaskQueueBase* network_thread) + : cricket::FakeVideoMediaReceiveChannel(cricket::VideoOptions(), + network_thread) {} + + void SetStats(const cricket::VideoMediaInfo& video_info) { + receive_stats_ = cricket::VideoMediaReceiveInfo(); + receive_stats_->receivers = video_info.receivers; + receive_stats_->receive_codecs = video_info.receive_codecs; + } + + // VideoMediaChannel overrides. + bool GetStats(cricket::VideoMediaReceiveInfo* info) override { + if (receive_stats_) { + *info = *receive_stats_; + return true; + } + return false; + } + + private: absl::optional receive_stats_; }; @@ -131,8 +145,9 @@ class VoiceChannelForTesting : public cricket::VoiceChannel { rtc::Thread* worker_thread, rtc::Thread* network_thread, rtc::Thread* signaling_thread, - std::unique_ptr send_channel, - std::unique_ptr receive_channel, + std::unique_ptr send_channel, + std::unique_ptr + receive_channel, const std::string& content_name, bool srtp_required, webrtc::CryptoOptions crypto_options, @@ -163,8 +178,9 @@ class VideoChannelForTesting : public cricket::VideoChannel { rtc::Thread* worker_thread, rtc::Thread* network_thread, rtc::Thread* signaling_thread, - std::unique_ptr send_channel, - std::unique_ptr receive_channel, + std::unique_ptr send_channel, + std::unique_ptr + receive_channel, const std::string& content_name, bool srtp_required, webrtc::CryptoOptions crypto_options, @@ -266,17 +282,16 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase { ->RemoveReceiver(receiver.get()); } - std::pair + std::pair AddVoiceChannel( const std::string& mid, const std::string& transport_name, cricket::VoiceMediaInfo initial_stats = cricket::VoiceMediaInfo()) { auto voice_media_send_channel = - std::make_unique( - cricket::MediaChannel::Role::kSend, network_thread_); + std::make_unique(network_thread_); auto voice_media_receive_channel = - std::make_unique( - cricket::MediaChannel::Role::kReceive, network_thread_); + std::make_unique(network_thread_); auto* voice_media_send_channel_ptr = voice_media_send_channel.get(); auto* voice_media_receive_channel_ptr = voice_media_receive_channel.get(); auto voice_channel = std::make_unique( @@ -301,17 +316,16 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase { voice_media_receive_channel_ptr); } - std::pair + std::pair AddVideoChannel( const std::string& mid, const std::string& transport_name, cricket::VideoMediaInfo initial_stats = cricket::VideoMediaInfo()) { auto video_media_send_channel = - std::make_unique( - cricket::MediaChannel::Role::kSend, network_thread_); + std::make_unique(network_thread_); auto video_media_receive_channel = - std::make_unique( - cricket::MediaChannel::Role::kReceive, network_thread_); + std::make_unique(network_thread_); auto video_media_send_channel_ptr = video_media_send_channel.get(); auto video_media_receive_channel_ptr = video_media_receive_channel.get(); auto video_channel = std::make_unique( diff --git a/third_party/libwebrtc/pc/test/frame_generator_capturer_video_track_source.h b/third_party/libwebrtc/pc/test/frame_generator_capturer_video_track_source.h index 50a3d26c2e48..79a5b3474a30 100644 --- 
a/third_party/libwebrtc/pc/test/frame_generator_capturer_video_track_source.h +++ b/third_party/libwebrtc/pc/test/frame_generator_capturer_video_track_source.h @@ -64,9 +64,15 @@ class FrameGeneratorCapturerVideoTrackSource : public VideoTrackSource { ~FrameGeneratorCapturerVideoTrackSource() = default; - void Start() { SetState(kLive); } + void Start() { + SetState(kLive); + video_capturer_->Start(); + } - void Stop() { SetState(kMuted); } + void Stop() { + SetState(kMuted); + video_capturer_->Stop(); + } bool is_screencast() const override { return is_screencast_; } diff --git a/third_party/libwebrtc/pc/test/integration_test_helpers.h b/third_party/libwebrtc/pc/test/integration_test_helpers.h index 24491e1a9d09..889161e79719 100644 --- a/third_party/libwebrtc/pc/test/integration_test_helpers.h +++ b/third_party/libwebrtc/pc/test/integration_test_helpers.h @@ -706,9 +706,13 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, // Worst bots: // Nondebug: Linux32 Release at conceal rate 0.606597 (CI run) // Debug: linux_x86_dbg bot at conceal rate 0.854 + // internal bot at conceal rate 0.967 (b/294020344) + // TODO(https://crbug.com/webrtc/15393): Improve audio quality during + // renegotiation so that we can reduce these thresholds, 99% is not even + // close to the 20% deemed unacceptable above or the 0% that would be ideal. if (delta_samples > 0) { #if !defined(NDEBUG) - EXPECT_LT(1.0 * delta_concealed / delta_samples, 0.95) + EXPECT_LT(1.0 * delta_concealed / delta_samples, 0.99) << "Concealed " << delta_concealed << " of " << delta_samples << " samples"; #else diff --git a/third_party/libwebrtc/pc/test/mock_channel_interface.h b/third_party/libwebrtc/pc/test/mock_channel_interface.h index 3d82beb28741..6b85ed8d110a 100644 --- a/third_party/libwebrtc/pc/test/mock_channel_interface.h +++ b/third_party/libwebrtc/pc/test/mock_channel_interface.h @@ -14,6 +14,7 @@ #include #include +#include "media/base/media_channel.h" #include "pc/channel_interface.h" #include "test/gmock.h" @@ -27,12 +28,27 @@ class MockChannelInterface : public cricket::ChannelInterface { MOCK_METHOD(cricket::MediaType, media_type, (), (const, override)); MOCK_METHOD(VideoChannel*, AsVideoChannel, (), (override)); MOCK_METHOD(VoiceChannel*, AsVoiceChannel, (), (override)); - MOCK_METHOD(MediaChannel*, media_send_channel, (), (override)); - MOCK_METHOD(VoiceMediaChannel*, voice_media_send_channel, (), (override)); - MOCK_METHOD(VideoMediaChannel*, video_media_send_channel, (), (override)); - MOCK_METHOD(MediaChannel*, media_receive_channel, (), (override)); - MOCK_METHOD(VoiceMediaChannel*, voice_media_receive_channel, (), (override)); - MOCK_METHOD(VideoMediaChannel*, video_media_receive_channel, (), (override)); + MOCK_METHOD(MediaSendChannelInterface*, media_send_channel, (), (override)); + MOCK_METHOD(VoiceMediaSendChannelInterface*, + voice_media_send_channel, + (), + (override)); + MOCK_METHOD(VideoMediaSendChannelInterface*, + video_media_send_channel, + (), + (override)); + MOCK_METHOD(MediaReceiveChannelInterface*, + media_receive_channel, + (), + (override)); + MOCK_METHOD(VoiceMediaReceiveChannelInterface*, + voice_media_receive_channel, + (), + (override)); + MOCK_METHOD(VideoMediaReceiveChannelInterface*, + video_media_receive_channel, + (), + (override)); MOCK_METHOD(absl::string_view, transport_name, (), (const, override)); MOCK_METHOD(const std::string&, mid, (), (const, override)); MOCK_METHOD(void, Enable, (bool), (override)); diff --git 
a/third_party/libwebrtc/pc/test/mock_data_channel.h b/third_party/libwebrtc/pc/test/mock_data_channel.h index a9d08d2fcab2..ef781fe8ae46 100644 --- a/third_party/libwebrtc/pc/test/mock_data_channel.h +++ b/third_party/libwebrtc/pc/test/mock_data_channel.h @@ -29,7 +29,7 @@ class MockSctpDataChannel : public SctpDataChannel { id, "MockSctpDataChannel", state, - "udp", + "someProtocol", 0, 0, 0, diff --git a/third_party/libwebrtc/pc/test/mock_voice_media_channel.h b/third_party/libwebrtc/pc/test/mock_voice_media_receive_channel_interface.h similarity index 51% rename from third_party/libwebrtc/pc/test/mock_voice_media_channel.h rename to third_party/libwebrtc/pc/test/mock_voice_media_receive_channel_interface.h index e3eb42d15761..98723bcc360a 100644 --- a/third_party/libwebrtc/pc/test/mock_voice_media_channel.h +++ b/third_party/libwebrtc/pc/test/mock_voice_media_receive_channel_interface.h @@ -1,5 +1,5 @@ /* - * Copyright 2021 The WebRTC project authors. All Rights Reserved. + * Copyright 2023 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -7,8 +7,8 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ -#ifndef PC_TEST_MOCK_VOICE_MEDIA_CHANNEL_H_ -#define PC_TEST_MOCK_VOICE_MEDIA_CHANNEL_H_ +#ifndef PC_TEST_MOCK_VOICE_MEDIA_RECEIVE_CHANNEL_INTERFACE_H_ +#define PC_TEST_MOCK_VOICE_MEDIA_RECEIVE_CHANNEL_INTERFACE_H_ #include #include @@ -23,137 +23,38 @@ #include "test/gmock.h" #include "test/gtest.h" -using ::testing::InvokeWithoutArgs; -using ::testing::Mock; - namespace cricket { -class MockVoiceMediaChannel : public VoiceMediaChannel { + +class MockVoiceMediaReceiveChannelInterface + : public VoiceMediaReceiveChannelInterface { public: - MockVoiceMediaChannel(MediaChannel::Role role, - webrtc::TaskQueueBase* network_thread) - : VoiceMediaChannel(role, network_thread) {} + MockVoiceMediaReceiveChannelInterface() { + ON_CALL(*this, AsVoiceReceiveChannel).WillByDefault(testing::Return(this)); + } - MOCK_METHOD(void, - SetInterface, - (MediaChannelNetworkInterface * iface), - (override)); - MOCK_METHOD(void, - OnPacketReceived, - (const webrtc::RtpPacketReceived& packet), - (override)); - MOCK_METHOD(void, - OnPacketSent, - (const rtc::SentPacket& sent_packet), - (override)); - MOCK_METHOD(void, OnReadyToSend, (bool ready), (override)); - MOCK_METHOD(void, - OnNetworkRouteChanged, - (absl::string_view transport_name, - const rtc::NetworkRoute& network_route), - (override)); - MOCK_METHOD(void, SetExtmapAllowMixed, (bool extmap_allow_mixed), (override)); - MOCK_METHOD(bool, ExtmapAllowMixed, (), (const, override)); - MOCK_METHOD(bool, HasNetworkInterface, (), (const, override)); - MOCK_METHOD(bool, AddSendStream, (const StreamParams& sp), (override)); - MOCK_METHOD(bool, RemoveSendStream, (uint32_t ssrc), (override)); - MOCK_METHOD(bool, AddRecvStream, (const StreamParams& sp), (override)); - MOCK_METHOD(bool, RemoveRecvStream, (uint32_t ssrc), (override)); - MOCK_METHOD(void, ResetUnsignaledRecvStream, (), (override)); - MOCK_METHOD(absl::optional, - GetUnsignaledSsrc, - (), - (const, override)); - MOCK_METHOD(void, - ChooseReceiverReportSsrc, - (const std::set&), - (override)); - MOCK_METHOD(bool, SendCodecHasNack, (), (const, override)); - MOCK_METHOD(void, - SetSsrcListChangedCallback, - (absl::AnyInvocable&)>), - (override)); - MOCK_METHOD(void, - SetSendCodecChangedCallback, - 
(absl::AnyInvocable), - (override)); - MOCK_METHOD(void, OnDemuxerCriteriaUpdatePending, (), (override)); - MOCK_METHOD(void, OnDemuxerCriteriaUpdateComplete, (), (override)); - MOCK_METHOD(int, GetRtpSendTimeExtnId, (), (const, override)); - MOCK_METHOD( - void, - SetFrameEncryptor, - (uint32_t ssrc, - rtc::scoped_refptr frame_encryptor), - (override)); - MOCK_METHOD( - void, - SetFrameDecryptor, - (uint32_t ssrc, - rtc::scoped_refptr frame_decryptor), - (override)); - MOCK_METHOD(webrtc::RtpParameters, - GetRtpSendParameters, - (uint32_t ssrc), - (const, override)); - MOCK_METHOD(webrtc::RTCError, - SetRtpSendParameters, - (uint32_t ssrc, - const webrtc::RtpParameters& parameters, - webrtc::SetParametersCallback callback), - (override)); - MOCK_METHOD( - void, - SetEncoderToPacketizerFrameTransformer, - (uint32_t ssrc, - rtc::scoped_refptr frame_transformer), - (override)); - MOCK_METHOD( - void, - SetDepacketizerToDecoderFrameTransformer, - (uint32_t ssrc, - rtc::scoped_refptr frame_transformer), - (override)); - - MOCK_METHOD(bool, - SetSendParameters, - (const AudioSendParameters& params), - (override)); + // VoiceMediaReceiveChannelInterface MOCK_METHOD(bool, SetRecvParameters, - (const AudioRecvParameters& params), + (const AudioReceiverParameters& params), (override)); MOCK_METHOD(webrtc::RtpParameters, GetRtpReceiveParameters, (uint32_t ssrc), (const, override)); + MOCK_METHOD(std::vector, + GetSources, + (uint32_t ssrc), + (const, override)); MOCK_METHOD(webrtc::RtpParameters, GetDefaultRtpReceiveParameters, (), (const, override)); MOCK_METHOD(void, SetPlayout, (bool playout), (override)); - MOCK_METHOD(void, SetSend, (bool send), (override)); - MOCK_METHOD(bool, - SetAudioSend, - (uint32_t ssrc, - bool enable, - const AudioOptions* options, - AudioSource* source), - (override)); MOCK_METHOD(bool, SetOutputVolume, (uint32_t ssrc, double volume), (override)); MOCK_METHOD(bool, SetDefaultOutputVolume, (double volume), (override)); - MOCK_METHOD(bool, CanInsertDtmf, (), (override)); - MOCK_METHOD(bool, - InsertDtmf, - (uint32_t ssrc, int event, int duration), - (override)); - MOCK_METHOD(bool, GetSendStats, (VoiceMediaSendInfo * info), (override)); - MOCK_METHOD(bool, - GetReceiveStats, - (VoiceMediaReceiveInfo * info, bool get_and_clear_legacy_stats), - (override)); MOCK_METHOD(void, SetRawAudioSink, (uint32_t ssrc, std::unique_ptr sink), @@ -162,11 +63,56 @@ class MockVoiceMediaChannel : public VoiceMediaChannel { SetDefaultRawAudioSink, (std::unique_ptr sink), (override)); - MOCK_METHOD(std::vector, - GetSources, - (uint32_t ssrc), - (const, override)); + MOCK_METHOD(bool, + GetStats, + (VoiceMediaReceiveInfo * stats, bool reset_legacy), + (override)); + MOCK_METHOD(void, SetReceiveNackEnabled, (bool enabled), (override)); + MOCK_METHOD(void, SetReceiveNonSenderRttEnabled, (bool enabled), (override)); + // MediaReceiveChannelInterface + MOCK_METHOD(VideoMediaReceiveChannelInterface*, + AsVideoReceiveChannel, + (), + (override)); + MOCK_METHOD(VoiceMediaReceiveChannelInterface*, + AsVoiceReceiveChannel, + (), + (override)); + MOCK_METHOD(cricket::MediaType, media_type, (), (const, override)); + MOCK_METHOD(bool, AddRecvStream, (const StreamParams& sp), (override)); + MOCK_METHOD(bool, RemoveRecvStream, (uint32_t ssrc), (override)); + MOCK_METHOD(void, ResetUnsignaledRecvStream, (), (override)); + MOCK_METHOD(void, + SetInterface, + (MediaChannelNetworkInterface * iface), + (override)); + MOCK_METHOD(void, + OnPacketReceived, + (const webrtc::RtpPacketReceived& packet), + 
(override)); + MOCK_METHOD(absl::optional, + GetUnsignaledSsrc, + (), + (const, override)); + MOCK_METHOD(void, + ChooseReceiverReportSsrc, + (const std::set& choices), + (override)); + MOCK_METHOD(void, OnDemuxerCriteriaUpdatePending, (), (override)); + MOCK_METHOD(void, OnDemuxerCriteriaUpdateComplete, (), (override)); + MOCK_METHOD( + void, + SetFrameDecryptor, + (uint32_t ssrc, + rtc::scoped_refptr frame_decryptor), + (override)); + MOCK_METHOD( + void, + SetDepacketizerToDecoderFrameTransformer, + (uint32_t ssrc, + rtc::scoped_refptr frame_transformer), + (override)); MOCK_METHOD(bool, SetBaseMinimumPlayoutDelayMs, (uint32_t ssrc, int delay_ms), @@ -175,11 +121,10 @@ class MockVoiceMediaChannel : public VoiceMediaChannel { GetBaseMinimumPlayoutDelayMs, (uint32_t ssrc), (const, override)); - MOCK_METHOD(bool, SenderNackEnabled, (), (const, override)); - MOCK_METHOD(bool, SenderNonSenderRttEnabled, (), (const, override)); - MOCK_METHOD(void, SetReceiveNackEnabled, (bool enabled), (override)); - MOCK_METHOD(void, SetReceiveNonSenderRttEnabled, (bool enabled), (override)); }; + +static_assert(!std::is_abstract_v, ""); + } // namespace cricket -#endif // PC_TEST_MOCK_VOICE_MEDIA_CHANNEL_H_ +#endif // PC_TEST_MOCK_VOICE_MEDIA_RECEIVE_CHANNEL_INTERFACE_H_ diff --git a/third_party/libwebrtc/pc/video_rtp_receiver_unittest.cc b/third_party/libwebrtc/pc/video_rtp_receiver_unittest.cc index 5efd6df47103..5ff736084f70 100644 --- a/third_party/libwebrtc/pc/video_rtp_receiver_unittest.cc +++ b/third_party/libwebrtc/pc/video_rtp_receiver_unittest.cc @@ -17,6 +17,7 @@ #include "api/video/recordable_encoded_frame.h" #include "api/video/test/mock_recordable_encoded_frame.h" #include "media/base/fake_media_engine.h" +#include "media/base/media_channel.h" #include "rtc_base/task_queue_for_test.h" #include "test/gmock.h" #include "test/gtest.h" @@ -34,16 +35,25 @@ namespace { class VideoRtpReceiverTest : public testing::Test { protected: - class MockVideoMediaChannel : public cricket::FakeVideoMediaChannel { + class MockVideoMediaSendChannel : public cricket::FakeVideoMediaSendChannel { public: - MockVideoMediaChannel( - cricket::FakeVideoEngine* engine, + MockVideoMediaSendChannel( const cricket::VideoOptions& options, TaskQueueBase* network_thread = rtc::Thread::Current()) - : FakeVideoMediaChannel(cricket::MediaChannel::Role::kBoth, - engine, - options, - network_thread) {} + : FakeVideoMediaSendChannel(options, network_thread) {} + MOCK_METHOD(void, + GenerateSendKeyFrame, + (uint32_t, const std::vector&), + (override)); + }; + + class MockVideoMediaReceiveChannel + : public cricket::FakeVideoMediaReceiveChannel { + public: + MockVideoMediaReceiveChannel( + const cricket::VideoOptions& options, + TaskQueueBase* network_thread = rtc::Thread::Current()) + : FakeVideoMediaReceiveChannel(options, network_thread) {} MOCK_METHOD(void, SetRecordableEncodedFrameCallback, (uint32_t, std::function), @@ -53,10 +63,6 @@ class VideoRtpReceiverTest : public testing::Test { (uint32_t), (override)); MOCK_METHOD(void, RequestRecvKeyFrame, (uint32_t), (override)); - MOCK_METHOD(void, - GenerateSendKeyFrame, - (uint32_t, const std::vector&), - (override)); }; class MockVideoSink : public rtc::VideoSinkInterface { @@ -66,7 +72,7 @@ class VideoRtpReceiverTest : public testing::Test { VideoRtpReceiverTest() : worker_thread_(rtc::Thread::Create()), - channel_(nullptr, cricket::VideoOptions()), + channel_(cricket::VideoOptions()), receiver_(rtc::make_ref_counted( worker_thread_.get(), std::string("receiver"), @@ -83,7 +89,7 
@@ class VideoRtpReceiverTest : public testing::Test { SetMediaChannel(nullptr); } - void SetMediaChannel(cricket::MediaChannel* media_channel) { + void SetMediaChannel(cricket::MediaReceiveChannelInterface* media_channel) { SendTask(worker_thread_.get(), [&]() { receiver_->SetMediaChannel(media_channel); }); } @@ -94,7 +100,7 @@ class VideoRtpReceiverTest : public testing::Test { rtc::AutoThread main_thread_; std::unique_ptr worker_thread_; - NiceMock channel_; + NiceMock channel_; rtc::scoped_refptr receiver_; }; @@ -111,7 +117,7 @@ TEST_F(VideoRtpReceiverTest, GenerateKeyFrameOnChannelSwitchUnlessGenerateKeyframeCalled) { // A channel switch without previous call to GenerateKeyFrame shouldn't // cause a call to happen on the new channel. - MockVideoMediaChannel channel2(nullptr, cricket::VideoOptions()); + MockVideoMediaReceiveChannel channel2{cricket::VideoOptions()}; EXPECT_CALL(channel_, RequestRecvKeyFrame).Times(0); EXPECT_CALL(channel2, RequestRecvKeyFrame).Times(0); SetMediaChannel(&channel2); @@ -121,12 +127,12 @@ TEST_F(VideoRtpReceiverTest, // re-generate it as we don't know if it was eventually received EXPECT_CALL(channel2, RequestRecvKeyFrame).Times(1); Source()->GenerateKeyFrame(); - MockVideoMediaChannel channel3(nullptr, cricket::VideoOptions()); + MockVideoMediaReceiveChannel channel3{cricket::VideoOptions()}; EXPECT_CALL(channel3, RequestRecvKeyFrame); SetMediaChannel(&channel3); // Switching to a new channel should now not cause calls to GenerateKeyFrame. - StrictMock channel4(nullptr, cricket::VideoOptions()); + StrictMock channel4{cricket::VideoOptions()}; SetMediaChannel(&channel4); // We must call SetMediaChannel(nullptr) here since the mock media channels @@ -154,7 +160,7 @@ TEST_F(VideoRtpReceiverTest, DisablesEnablesEncodedOutputOnChannelSwitch) { EXPECT_CALL(channel_, ClearRecordableEncodedFrameCallback); MockVideoSink sink; Source()->AddEncodedSink(&sink); - MockVideoMediaChannel channel2(nullptr, cricket::VideoOptions()); + MockVideoMediaReceiveChannel channel2{cricket::VideoOptions()}; EXPECT_CALL(channel2, SetRecordableEncodedFrameCallback); SetMediaChannel(&channel2); Mock::VerifyAndClearExpectations(&channel2); @@ -163,7 +169,7 @@ TEST_F(VideoRtpReceiverTest, DisablesEnablesEncodedOutputOnChannelSwitch) { // to NOT set the callback again. 
EXPECT_CALL(channel2, ClearRecordableEncodedFrameCallback); Source()->RemoveEncodedSink(&sink); - StrictMock channel3(nullptr, cricket::VideoOptions()); + StrictMock channel3{cricket::VideoOptions()}; SetMediaChannel(&channel3); // We must call SetMediaChannel(nullptr) here since the mock media channels diff --git a/third_party/libwebrtc/pc/webrtc_sdp.cc b/third_party/libwebrtc/pc/webrtc_sdp.cc index 850b82835bdf..ecba22a85ae1 100644 --- a/third_party/libwebrtc/pc/webrtc_sdp.cc +++ b/third_party/libwebrtc/pc/webrtc_sdp.cc @@ -1695,7 +1695,7 @@ void BuildRtpContentAttributes(const MediaContentDescription* media_desc, for (const CryptoParams& crypto_params : media_desc->cryptos()) { InitAttrLine(kAttributeCrypto, &os); os << kSdpDelimiterColon << crypto_params.tag << " " - << crypto_params.cipher_suite << " " << crypto_params.key_params; + << crypto_params.crypto_suite << " " << crypto_params.key_params; if (!crypto_params.session_params.empty()) { os << " " << crypto_params.session_params; } @@ -2654,12 +2654,11 @@ static std::unique_ptr ParseContentDescription( for (int pt : payload_types) { payload_type_preferences[pt] = preference--; } - std::vector codecs = media_desc->codecs(); - absl::c_sort( - codecs, [&payload_type_preferences](const typename C::CodecType& a, - const typename C::CodecType& b) { - return payload_type_preferences[a.id] > payload_type_preferences[b.id]; - }); + std::vector codecs = media_desc->codecs(); + absl::c_sort(codecs, [&payload_type_preferences](const cricket::Codec& a, + const cricket::Codec& b) { + return payload_type_preferences[a.id] > payload_type_preferences[b.id]; + }); media_desc->set_codecs(codecs); return media_desc; } diff --git a/third_party/libwebrtc/rtc_base/BUILD.gn b/third_party/libwebrtc/rtc_base/BUILD.gn index 033f6e0e6461..84b2f2ff32ed 100644 --- a/third_party/libwebrtc/rtc_base/BUILD.gn +++ b/third_party/libwebrtc/rtc_base/BUILD.gn @@ -248,7 +248,42 @@ rtc_library("random") { ] } +rtc_library("bitrate_tracker") { + visibility = [ "*" ] + sources = [ + "bitrate_tracker.cc", + "bitrate_tracker.h", + ] + deps = [ + ":rate_statistics", + "../api/units:data_rate", + "../api/units:data_size", + "../api/units:time_delta", + "../api/units:timestamp", + "system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_library("frequency_tracker") { + visibility = [ "*" ] + sources = [ + "frequency_tracker.cc", + "frequency_tracker.h", + ] + deps = [ + ":rate_statistics", + "../api/units:frequency", + "../api/units:time_delta", + "../api/units:timestamp", + "system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + rtc_library("rate_statistics") { + # TODO(bugs.webrtc.org/13756): Restrict visibility to private when all usage + # of the RateStatistics is migrated to BitrateTracker and FrequencyTracker. visibility = [ "*" ] sources = [ "rate_statistics.cc", @@ -559,6 +594,7 @@ rtc_library("timeutils") { ":stringutils", "system:rtc_export", ] + if (rtc_exclude_system_time) { defines = [ "WEBRTC_EXCLUDE_SYSTEM_TIME" ] } @@ -566,6 +602,7 @@ rtc_library("timeutils") { libs = [] if (is_win) { libs += [ "winmm.lib" ] + deps += [ ":win32" ] } } @@ -869,6 +906,26 @@ rtc_library("net_helpers") { absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } +rtc_library("net_test_helpers") { + # TODO(mbonadei): Enable once net_helpers don't depend + # on this target anymore. 
+ # testonly = true + sources = [ + "net_test_helpers.cc", + "net_test_helpers.h", + ] + deps = [ "system:rtc_export" ] + if (is_android) { + deps += [ ":ifaddrs_android" ] + } + if (is_win) { + deps += [ + ":win32", + "win:windows_version", + ] + } +} + rtc_library("async_resolver_interface") { visibility = [ "*" ] sources = [ @@ -1084,7 +1141,6 @@ if (is_win) { deps = [ ":byte_order", ":checks", - ":logging", ":macromagic", ":stringutils", ] @@ -1780,6 +1836,7 @@ if (rtc_include_tests) { ":logging", ":macromagic", ":net_helpers", + ":net_test_helpers", ":null_socket_server", ":platform_thread", ":rtc_base_tests_utils", @@ -1809,6 +1866,7 @@ if (rtc_include_tests) { sources = [ "base64_unittest.cc", "bit_buffer_unittest.cc", + "bitrate_tracker_unittest.cc", "bitstream_reader_unittest.cc", "bounded_inline_vector_unittest.cc", "buffer_queue_unittest.cc", @@ -1820,6 +1878,7 @@ if (rtc_include_tests) { "deprecated/recursive_critical_section_unittest.cc", "event_tracer_unittest.cc", "event_unittest.cc", + "frequency_tracker_unittest.cc", "logging_unittest.cc", "numerics/divide_round_unittest.cc", "numerics/histogram_percentile_counter_unittest.cc", @@ -1853,6 +1912,7 @@ if (rtc_include_tests) { ":async_packet_socket", ":async_udp_socket", ":bit_buffer", + ":bitrate_tracker", ":bitstream_reader", ":bounded_inline_vector", ":buffer", @@ -1864,6 +1924,7 @@ if (rtc_include_tests) { ":criticalsection", ":divide_round", ":event_tracer", + ":frequency_tracker", ":gunit_helpers", ":histogram_percentile_counter", ":ip_address", @@ -1903,7 +1964,11 @@ if (rtc_include_tests) { "../api:make_ref_counted", "../api:scoped_refptr", "../api/numerics", + "../api/units:data_rate", + "../api/units:data_size", + "../api/units:frequency", "../api/units:time_delta", + "../api/units:timestamp", "../system_wrappers", "../test:field_trial", "../test:fileutils", @@ -2037,6 +2102,7 @@ if (rtc_include_tests) { ":logging", ":macromagic", ":net_helpers", + ":net_test_helpers", ":network", ":network_route", ":null_socket_server", diff --git a/third_party/libwebrtc/rtc_base/audio_format_to_string_gn/moz.build b/third_party/libwebrtc/rtc_base/audio_format_to_string_gn/moz.build index f3d20adc3c2a..479e90eafd1c 100644 --- a/third_party/libwebrtc/rtc_base/audio_format_to_string_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/audio_format_to_string_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/rtc_base/bitrate_tracker.cc b/third_party/libwebrtc/rtc_base/bitrate_tracker.cc new file mode 100644 index 000000000000..340e444f24f4 --- /dev/null +++ b/third_party/libwebrtc/rtc_base/bitrate_tracker.cc @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "rtc_base/bitrate_tracker.h" + +#include "absl/types/optional.h" +#include "api/units/data_rate.h" +#include "api/units/timestamp.h" +#include "rtc_base/rate_statistics.h" + +namespace webrtc { + +BitrateTracker::BitrateTracker(TimeDelta max_window_size) + : impl_(max_window_size.ms(), RateStatistics::kBpsScale) {} + +absl::optional BitrateTracker::Rate(Timestamp now) const { + if (absl::optional rate = impl_.Rate(now.ms())) { + return DataRate::BitsPerSec(*rate); + } + return absl::nullopt; +} + +bool BitrateTracker::SetWindowSize(TimeDelta window_size, Timestamp now) { + return impl_.SetWindowSize(window_size.ms(), now.ms()); +} + +void BitrateTracker::Update(int64_t bytes, Timestamp now) { + impl_.Update(bytes, now.ms()); +} + +} // namespace webrtc diff --git a/third_party/libwebrtc/rtc_base/bitrate_tracker.h b/third_party/libwebrtc/rtc_base/bitrate_tracker.h new file mode 100644 index 000000000000..a54bd9a56154 --- /dev/null +++ b/third_party/libwebrtc/rtc_base/bitrate_tracker.h @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_BITRATE_TRACKER_H_ +#define RTC_BASE_BITRATE_TRACKER_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "rtc_base/rate_statistics.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { +// Class to estimate bitrates over running window. +// Timestamps used in Update(), Rate() and SetWindowSize() must never +// decrease for two consecutive calls. +// This class is thread unsafe. +class RTC_EXPORT BitrateTracker { + public: + // max_window_sizes = Maximum window size for the rate estimation. + // Initial window size is set to this, but may be changed + // to something lower by calling SetWindowSize(). + explicit BitrateTracker(TimeDelta max_window_size); + + BitrateTracker(const BitrateTracker&) = default; + BitrateTracker(BitrateTracker&&) = default; + BitrateTracker& operator=(const BitrateTracker&) = delete; + BitrateTracker& operator=(BitrateTracker&&) = delete; + + ~BitrateTracker() = default; + + // Resets instance to original state. + void Reset() { impl_.Reset(); } + + // Updates bitrate with a new data point, moving averaging window as needed. + void Update(int64_t bytes, Timestamp now); + void Update(DataSize size, Timestamp now) { Update(size.bytes(), now); } + + // Returns bitrate, moving averaging window as needed. + // Returns nullopt when bitrate can't be measured. + absl::optional Rate(Timestamp now) const; + + // Update the size of the averaging window. The maximum allowed value for + // `window_size` is `max_window_size` as supplied in the constructor. 
+ bool SetWindowSize(TimeDelta window_size, Timestamp now); + + private: + RateStatistics impl_; +}; +} // namespace webrtc + +#endif // RTC_BASE_BITRATE_TRACKER_H_ diff --git a/third_party/libwebrtc/media/delayable_gn/moz.build b/third_party/libwebrtc/rtc_base/bitrate_tracker_gn/moz.build similarity index 93% rename from third_party/libwebrtc/media/delayable_gn/moz.build rename to third_party/libwebrtc/rtc_base/bitrate_tracker_gn/moz.build index a7495471d729..80d1803068ff 100644 --- a/third_party/libwebrtc/media/delayable_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/bitrate_tracker_gn/moz.build @@ -30,6 +30,10 @@ LOCAL_INCLUDES += [ "/tools/profiler/public" ] +UNIFIED_SOURCES += [ + "/third_party/libwebrtc/rtc_base/bitrate_tracker.cc" +] + if not CONFIG["MOZ_DEBUG"]: DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0" @@ -53,6 +57,10 @@ if CONFIG["OS_TARGET"] == "Android": DEFINES["__STDC_CONSTANT_MACROS"] = True DEFINES["__STDC_FORMAT_MACROS"] = True + OS_LIBS += [ + "log" + ] + if CONFIG["OS_TARGET"] == "Darwin": DEFINES["WEBRTC_MAC"] = True @@ -117,6 +125,13 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["_WINDOWS"] = True DEFINES["__STD_C"] = True + OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", + "winmm" + ] + if CONFIG["CPU_ARCH"] == "aarch64": DEFINES["WEBRTC_ARCH_ARM64"] = True @@ -124,6 +139,10 @@ if CONFIG["CPU_ARCH"] == "aarch64": if CONFIG["CPU_ARCH"] == "arm": + CXXFLAGS += [ + "-mfpu=neon" + ] + DEFINES["WEBRTC_ARCH_ARM"] = True DEFINES["WEBRTC_ARCH_ARM_V7"] = True DEFINES["WEBRTC_HAS_NEON"] = True @@ -179,6 +198,10 @@ if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android": if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android": + CXXFLAGS += [ + "-msse2" + ] + OS_LIBS += [ "android_support" ] @@ -193,10 +216,14 @@ if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux": if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux": + CXXFLAGS += [ + "-msse2" + ] + DEFINES["_GNU_SOURCE"] = True if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux": DEFINES["_GNU_SOURCE"] = True -Library("delayable_gn") +Library("bitrate_tracker_gn") diff --git a/third_party/libwebrtc/rtc_base/bitrate_tracker_unittest.cc b/third_party/libwebrtc/rtc_base/bitrate_tracker_unittest.cc new file mode 100644 index 000000000000..2129aebfdd2d --- /dev/null +++ b/third_party/libwebrtc/rtc_base/bitrate_tracker_unittest.cc @@ -0,0 +1,267 @@ +/* + * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "rtc_base/bitrate_tracker.h" + +#include +#include + +#include "absl/types/optional.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::AllOf; +using ::testing::Ge; +using ::testing::Le; + +constexpr TimeDelta kWindow = TimeDelta::Millis(500); +constexpr TimeDelta kEpsilon = TimeDelta::Millis(1); + +TEST(BitrateTrackerTest, ReturnsNulloptInitially) { + Timestamp now = Timestamp::Seconds(12'345); + BitrateTracker stats(kWindow); + + EXPECT_EQ(stats.Rate(now), absl::nullopt); +} + +TEST(BitrateTrackerTest, ReturnsNulloptAfterSingleDataPoint) { + Timestamp now = Timestamp::Seconds(12'345); + BitrateTracker stats(kWindow); + + stats.Update(1'500, now); + now += TimeDelta::Millis(10); + + EXPECT_EQ(stats.Rate(now), absl::nullopt); +} + +TEST(BitrateTrackerTest, ReturnsRateAfterTwoMeasurements) { + Timestamp now = Timestamp::Seconds(12'345); + BitrateTracker stats(kWindow); + + stats.Update(1'500, now); + now += TimeDelta::Millis(10); + stats.Update(1'500, now); + + // One packet every 10ms would result in 1.2 Mbps, but until window is full, + // it could be treated as two packets in ~10ms window, measuring twice that + // bitrate. + EXPECT_THAT(stats.Rate(now), AllOf(Ge(DataRate::BitsPerSec(1'200'000)), + Le(DataRate::BitsPerSec(2'400'000)))); +} + +TEST(BitrateTrackerTest, MeasuresConstantRate) { + const Timestamp start = Timestamp::Seconds(12'345); + const TimeDelta kInterval = TimeDelta::Millis(10); + const DataSize kPacketSize = DataSize::Bytes(1'500); + const DataRate kConstantRate = kPacketSize / kInterval; + + Timestamp now = start; + BitrateTracker stats(kWindow); + + stats.Update(kPacketSize, now); + DataSize total_size = kPacketSize; + DataRate last_error = DataRate::PlusInfinity(); + for (TimeDelta i = TimeDelta::Zero(); i < kWindow; i += kInterval) { + SCOPED_TRACE(i); + now += kInterval; + total_size += kPacketSize; + stats.Update(kPacketSize, now); + + // Until window is full, bitrate is measured over a smaller window and might + // look larger than the constant rate. + absl::optional bitrate = stats.Rate(now); + ASSERT_THAT(bitrate, + AllOf(Ge(kConstantRate), Le(total_size / (now - start)))); + + // Expect the estimation error to decrease as the window is extended. + DataRate error = *bitrate - kConstantRate; + EXPECT_LE(error, last_error); + last_error = error; + } + + // Once window is full, bitrate measurment should be stable. + for (TimeDelta i = TimeDelta::Zero(); i < kInterval; + i += TimeDelta::Millis(1)) { + SCOPED_TRACE(i); + EXPECT_EQ(stats.Rate(now + i), kConstantRate); + } +} + +TEST(BitrateTrackerTest, IncreasingThenDecreasingBitrate) { + const DataSize kLargePacketSize = DataSize::Bytes(1'500); + const DataSize kSmallPacketSize = DataSize::Bytes(300); + const TimeDelta kLargeInterval = TimeDelta::Millis(10); + const TimeDelta kSmallInterval = TimeDelta::Millis(2); + + Timestamp now = Timestamp::Seconds(12'345); + BitrateTracker stats(kWindow); + + stats.Update(kLargePacketSize, now); + for (TimeDelta i = TimeDelta::Zero(); i < kWindow; i += kLargeInterval) { + SCOPED_TRACE(i); + now += kLargeInterval; + stats.Update(kLargePacketSize, now); + } + absl::optional last_bitrate = stats.Rate(now); + EXPECT_EQ(last_bitrate, kLargePacketSize / kLargeInterval); + + // Decrease bitrate with smaller measurments. 
+ for (TimeDelta i = TimeDelta::Zero(); i < kWindow; i += kLargeInterval) { + SCOPED_TRACE(i); + now += kLargeInterval; + stats.Update(kSmallPacketSize, now); + + absl::optional bitrate = stats.Rate(now); + EXPECT_LT(bitrate, last_bitrate); + + last_bitrate = bitrate; + } + EXPECT_EQ(last_bitrate, kSmallPacketSize / kLargeInterval); + + // Increase bitrate with more frequent measurments. + for (TimeDelta i = TimeDelta::Zero(); i < kWindow; i += kSmallInterval) { + SCOPED_TRACE(i); + now += kSmallInterval; + stats.Update(kSmallPacketSize, now); + + absl::optional bitrate = stats.Rate(now); + EXPECT_GE(bitrate, last_bitrate); + + last_bitrate = bitrate; + } + EXPECT_EQ(last_bitrate, kSmallPacketSize / kSmallInterval); +} + +TEST(BitrateTrackerTest, ResetAfterSilence) { + const TimeDelta kInterval = TimeDelta::Millis(10); + const DataSize kPacketSize = DataSize::Bytes(1'500); + + Timestamp now = Timestamp::Seconds(12'345); + BitrateTracker stats(kWindow); + + // Feed data until window has been filled. + stats.Update(kPacketSize, now); + for (TimeDelta i = TimeDelta::Zero(); i < kWindow; i += kInterval) { + now += kInterval; + stats.Update(kPacketSize, now); + } + ASSERT_GT(stats.Rate(now), DataRate::Zero()); + + now += kWindow + kEpsilon; + // Silence over window size should trigger auto reset for coming sample. + EXPECT_EQ(stats.Rate(now), absl::nullopt); + stats.Update(kPacketSize, now); + // Single measurment after reset is not enough to estimate the rate. + EXPECT_EQ(stats.Rate(now), absl::nullopt); + + // Manual reset, add the same check again. + stats.Reset(); + EXPECT_EQ(stats.Rate(now), absl::nullopt); + now += kInterval; + stats.Update(kPacketSize, now); + EXPECT_EQ(stats.Rate(now), absl::nullopt); +} + +TEST(BitrateTrackerTest, HandlesChangingWindowSize) { + Timestamp now = Timestamp::Seconds(12'345); + BitrateTracker stats(kWindow); + + // Check window size is validated. + EXPECT_TRUE(stats.SetWindowSize(kWindow, now)); + EXPECT_FALSE(stats.SetWindowSize(kWindow + kEpsilon, now)); + EXPECT_FALSE(stats.SetWindowSize(TimeDelta::Zero(), now)); + EXPECT_TRUE(stats.SetWindowSize(kEpsilon, now)); + EXPECT_TRUE(stats.SetWindowSize(kWindow, now)); + + // Fill the buffer at a rate of 10 bytes per 10 ms (8 kbps). + const DataSize kValue = DataSize::Bytes(10); + const TimeDelta kInterval = TimeDelta::Millis(10); + for (TimeDelta i = TimeDelta::Zero(); i < kWindow; i += kInterval) { + now += kInterval; + stats.Update(kValue, now); + } + ASSERT_GT(stats.Rate(now), DataRate::BitsPerSec(8'000)); + + // Halve the window size, rate should stay the same. + EXPECT_TRUE(stats.SetWindowSize(kWindow / 2, now)); + EXPECT_EQ(stats.Rate(now), DataRate::BitsPerSec(8'000)); + + // Double the window size again, rate should stay the same. + // The window won't actually expand until new calls to the `Update`. + EXPECT_TRUE(stats.SetWindowSize(kWindow, now)); + EXPECT_EQ(stats.Rate(now), DataRate::BitsPerSec(8'000)); + + // Fill the now empty window half at twice the rate. + for (TimeDelta i = TimeDelta::Zero(); i < kWindow / 2; i += kInterval) { + now += kInterval; + stats.Update(2 * kValue, now); + } + + // Rate should have increased by 50%. 
+ EXPECT_EQ(stats.Rate(now), DataRate::BitsPerSec(12'000)); +} + +TEST(BitrateTrackerTest, HandlesZeroCounts) { + const DataSize kPacketSize = DataSize::Bytes(1'500); + const TimeDelta kInterval = TimeDelta::Millis(10); + const Timestamp start = Timestamp::Seconds(12'345); + + Timestamp now = start; + BitrateTracker stats(kWindow); + + stats.Update(kPacketSize, now); + ASSERT_EQ(stats.Rate(now), absl::nullopt); + now += kInterval; + stats.Update(0, now); + absl::optional last_bitrate = stats.Rate(now); + EXPECT_GT(last_bitrate, DataRate::Zero()); + now += kInterval; + while (now < start + kWindow) { + SCOPED_TRACE(now - start); + stats.Update(0, now); + + absl::optional bitrate = stats.Rate(now); + EXPECT_GT(bitrate, DataRate::Zero()); + // As window expands, average bitrate decreases. + EXPECT_LT(bitrate, last_bitrate); + + last_bitrate = bitrate; + now += kInterval; + } + + // Initial kPacketSize should be outside the window now, so overall bitrate + // should be zero + EXPECT_EQ(stats.Rate(now), DataRate::Zero()); + + // Single measurment should be enough to get non zero rate. + stats.Update(kPacketSize, now); + EXPECT_EQ(stats.Rate(now), kPacketSize / kWindow); +} + +TEST(BitrateTrackerTest, ReturnsNulloptWhenOverflows) { + Timestamp now = Timestamp::Seconds(12'345); + BitrateTracker stats(kWindow); + + int64_t very_large_number = std::numeric_limits::max(); + stats.Update(very_large_number, now); + now += kEpsilon; + stats.Update(very_large_number, now); + + EXPECT_EQ(stats.Rate(now), absl::nullopt); +} + +} // namespace +} // namespace webrtc diff --git a/third_party/libwebrtc/rtc_base/copy_on_write_buffer.cc b/third_party/libwebrtc/rtc_base/copy_on_write_buffer.cc index 850327b0887c..d8ab53cb24ed 100644 --- a/third_party/libwebrtc/rtc_base/copy_on_write_buffer.cc +++ b/third_party/libwebrtc/rtc_base/copy_on_write_buffer.cc @@ -23,7 +23,7 @@ CopyOnWriteBuffer::CopyOnWriteBuffer() : offset_(0), size_(0) { CopyOnWriteBuffer::CopyOnWriteBuffer(const CopyOnWriteBuffer& buf) : buffer_(buf.buffer_), offset_(buf.offset_), size_(buf.size_) {} -CopyOnWriteBuffer::CopyOnWriteBuffer(CopyOnWriteBuffer&& buf) +CopyOnWriteBuffer::CopyOnWriteBuffer(CopyOnWriteBuffer&& buf) noexcept : buffer_(std::move(buf.buffer_)), offset_(buf.offset_), size_(buf.size_) { buf.offset_ = 0; buf.size_ = 0; diff --git a/third_party/libwebrtc/rtc_base/copy_on_write_buffer.h b/third_party/libwebrtc/rtc_base/copy_on_write_buffer.h index 849f5f5df22b..8332ee6f62c7 100644 --- a/third_party/libwebrtc/rtc_base/copy_on_write_buffer.h +++ b/third_party/libwebrtc/rtc_base/copy_on_write_buffer.h @@ -36,7 +36,7 @@ class RTC_EXPORT CopyOnWriteBuffer { // Share the data with an existing buffer. CopyOnWriteBuffer(const CopyOnWriteBuffer& buf); // Move contents from an existing buffer. - CopyOnWriteBuffer(CopyOnWriteBuffer&& buf); + CopyOnWriteBuffer(CopyOnWriteBuffer&& buf) noexcept; // Construct a buffer from a string, convenient for unittests. explicit CopyOnWriteBuffer(absl::string_view s); @@ -83,6 +83,17 @@ class RTC_EXPORT CopyOnWriteBuffer { explicit CopyOnWriteBuffer(const VecT& v) : CopyOnWriteBuffer(v.data(), v.size()) {} + // Construct a buffer from a vector like type and a capacity argument + template ().data())>, + typename std::enable_if_t< + !std::is_same::value && + HasDataAndSize::value && + internal::BufferCompat::value>* = nullptr> + explicit CopyOnWriteBuffer(const VecT& v, size_t capacity) + : CopyOnWriteBuffer(v.data(), v.size(), capacity) {} + ~CopyOnWriteBuffer(); // Get a pointer to the data. 
Just .data() will give you a (const) uint8_t*, diff --git a/third_party/libwebrtc/rtc_base/copy_on_write_buffer_unittest.cc b/third_party/libwebrtc/rtc_base/copy_on_write_buffer_unittest.cc index ad2c4e0fba3e..8a9fc4e2f44b 100644 --- a/third_party/libwebrtc/rtc_base/copy_on_write_buffer_unittest.cc +++ b/third_party/libwebrtc/rtc_base/copy_on_write_buffer_unittest.cc @@ -52,6 +52,8 @@ TEST(CopyOnWriteBufferTest, TestCreateEmptyData) { } TEST(CopyOnWriteBufferTest, TestMoveConstruct) { + EXPECT_TRUE(std::is_nothrow_move_constructible_v); + CopyOnWriteBuffer buf1(kTestData, 3, 10); size_t buf1_size = buf1.size(); size_t buf1_capacity = buf1.capacity(); diff --git a/third_party/libwebrtc/rtc_base/event_tracer_gn/moz.build b/third_party/libwebrtc/rtc_base/event_tracer_gn/moz.build index 0d2f54cb6630..32fe6402f408 100644 --- a/third_party/libwebrtc/rtc_base/event_tracer_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/event_tracer_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/rtc_base/experiments/alr_experiment_gn/moz.build b/third_party/libwebrtc/rtc_base/experiments/alr_experiment_gn/moz.build index 873ea744002a..37836938a526 100644 --- a/third_party/libwebrtc/rtc_base/experiments/alr_experiment_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/experiments/alr_experiment_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/rtc_base/experiments/balanced_degradation_settings_gn/moz.build b/third_party/libwebrtc/rtc_base/experiments/balanced_degradation_settings_gn/moz.build index e70ad11c923f..132fd4e0b8c6 100644 --- a/third_party/libwebrtc/rtc_base/experiments/balanced_degradation_settings_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/experiments/balanced_degradation_settings_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/rtc_base/experiments/bandwidth_quality_scaler_settings_gn/moz.build b/third_party/libwebrtc/rtc_base/experiments/bandwidth_quality_scaler_settings_gn/moz.build index cc93c29837ba..248549a9a84a 100644 --- a/third_party/libwebrtc/rtc_base/experiments/bandwidth_quality_scaler_settings_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/experiments/bandwidth_quality_scaler_settings_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/rtc_base/experiments/cpu_speed_experiment_gn/moz.build b/third_party/libwebrtc/rtc_base/experiments/cpu_speed_experiment_gn/moz.build index 012c461ffe1d..7a887a15c955 100644 --- a/third_party/libwebrtc/rtc_base/experiments/cpu_speed_experiment_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/experiments/cpu_speed_experiment_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/rtc_base/experiments/encoder_info_settings_gn/moz.build b/third_party/libwebrtc/rtc_base/experiments/encoder_info_settings_gn/moz.build index 31eaaa7d9a55..378f603cc939 100644 --- a/third_party/libwebrtc/rtc_base/experiments/encoder_info_settings_gn/moz.build +++ 
b/third_party/libwebrtc/rtc_base/experiments/encoder_info_settings_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/rtc_base/experiments/field_trial_parser_gn/moz.build b/third_party/libwebrtc/rtc_base/experiments/field_trial_parser_gn/moz.build index bfa8be6e6f17..c4312e47f41d 100644 --- a/third_party/libwebrtc/rtc_base/experiments/field_trial_parser_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/experiments/field_trial_parser_gn/moz.build @@ -129,6 +129,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/rtc_base/experiments/keyframe_interval_settings_experiment_gn/moz.build b/third_party/libwebrtc/rtc_base/experiments/keyframe_interval_settings_experiment_gn/moz.build index 0169a46babde..ff903e16902a 100644 --- a/third_party/libwebrtc/rtc_base/experiments/keyframe_interval_settings_experiment_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/experiments/keyframe_interval_settings_experiment_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/rtc_base/experiments/min_video_bitrate_experiment_gn/moz.build b/third_party/libwebrtc/rtc_base/experiments/min_video_bitrate_experiment_gn/moz.build index bb394fe2975f..89aac493eb74 100644 --- a/third_party/libwebrtc/rtc_base/experiments/min_video_bitrate_experiment_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/experiments/min_video_bitrate_experiment_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/rtc_base/experiments/normalize_simulcast_size_experiment_gn/moz.build b/third_party/libwebrtc/rtc_base/experiments/normalize_simulcast_size_experiment_gn/moz.build index 20e3713eb1d8..0c4cfa8921a5 100644 --- a/third_party/libwebrtc/rtc_base/experiments/normalize_simulcast_size_experiment_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/experiments/normalize_simulcast_size_experiment_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/rtc_base/experiments/quality_rampup_experiment_gn/moz.build b/third_party/libwebrtc/rtc_base/experiments/quality_rampup_experiment_gn/moz.build index ef8fd46914c9..643df8eb0b9e 100644 --- a/third_party/libwebrtc/rtc_base/experiments/quality_rampup_experiment_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/experiments/quality_rampup_experiment_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/rtc_base/experiments/quality_scaler_settings_gn/moz.build b/third_party/libwebrtc/rtc_base/experiments/quality_scaler_settings_gn/moz.build index 6cc2035a2d7c..0687e641f803 100644 --- a/third_party/libwebrtc/rtc_base/experiments/quality_scaler_settings_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/experiments/quality_scaler_settings_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git 
a/third_party/libwebrtc/rtc_base/experiments/quality_scaling_experiment_gn/moz.build b/third_party/libwebrtc/rtc_base/experiments/quality_scaling_experiment_gn/moz.build index 2ecca976ab20..0add76907ab3 100644 --- a/third_party/libwebrtc/rtc_base/experiments/quality_scaling_experiment_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/experiments/quality_scaling_experiment_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/rtc_base/experiments/rate_control_settings_gn/moz.build b/third_party/libwebrtc/rtc_base/experiments/rate_control_settings_gn/moz.build index cbf9703b88bd..80f3f2699922 100644 --- a/third_party/libwebrtc/rtc_base/experiments/rate_control_settings_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/experiments/rate_control_settings_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/rtc_base/experiments/rtt_mult_experiment.cc b/third_party/libwebrtc/rtc_base/experiments/rtt_mult_experiment.cc index b4c9c31d9960..e15b92893716 100644 --- a/third_party/libwebrtc/rtc_base/experiments/rtt_mult_experiment.cc +++ b/third_party/libwebrtc/rtc_base/experiments/rtt_mult_experiment.cc @@ -21,45 +21,19 @@ namespace webrtc { namespace { const char kRttMultExperiment[] = "WebRTC-RttMult"; -const float max_rtt_mult_setting = 1.0; -const float min_rtt_mult_setting = 0.0; -const float max_rtt_mult_add_cap_ms = 2000.0; -const float min_rtt_mult_add_cap_ms = 0.0; } // namespace bool RttMultExperiment::RttMultEnabled() { - return field_trial::IsEnabled(kRttMultExperiment); + return !field_trial::IsDisabled(kRttMultExperiment); } absl::optional RttMultExperiment::GetRttMultValue() { - if (!RttMultExperiment::RttMultEnabled()) - return absl::nullopt; - const std::string group = - webrtc::field_trial::FindFullName(kRttMultExperiment); - if (group.empty()) { - RTC_LOG(LS_WARNING) << "Could not find rtt_mult_experiment."; + if (!RttMultExperiment::RttMultEnabled()) { return absl::nullopt; } - - Settings s; - if (sscanf(group.c_str(), "Enabled-%f,%f", &s.rtt_mult_setting, - &s.rtt_mult_add_cap_ms) != 2) { - RTC_LOG(LS_WARNING) << "Invalid number of parameters provided."; - return absl::nullopt; - } - // Bounds check rtt_mult_setting and rtt_mult_add_cap_ms values. 
- s.rtt_mult_setting = std::min(s.rtt_mult_setting, max_rtt_mult_setting); - s.rtt_mult_setting = std::max(s.rtt_mult_setting, min_rtt_mult_setting); - s.rtt_mult_add_cap_ms = - std::min(s.rtt_mult_add_cap_ms, max_rtt_mult_add_cap_ms); - s.rtt_mult_add_cap_ms = - std::max(s.rtt_mult_add_cap_ms, min_rtt_mult_add_cap_ms); - RTC_LOG(LS_INFO) << "rtt_mult experiment: rtt_mult value = " - << s.rtt_mult_setting - << " rtt_mult addition cap = " << s.rtt_mult_add_cap_ms - << " ms."; - return s; + return RttMultExperiment::Settings{.rtt_mult_setting = 0.9, + .rtt_mult_add_cap_ms = 200.0}; } } // namespace webrtc diff --git a/third_party/libwebrtc/rtc_base/experiments/rtt_mult_experiment_gn/moz.build b/third_party/libwebrtc/rtc_base/experiments/rtt_mult_experiment_gn/moz.build index 8dfc5abd92e1..6c8fc29aa462 100644 --- a/third_party/libwebrtc/rtc_base/experiments/rtt_mult_experiment_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/experiments/rtt_mult_experiment_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/rtc_base/experiments/rtt_mult_experiment_unittest.cc b/third_party/libwebrtc/rtc_base/experiments/rtt_mult_experiment_unittest.cc index bb00f5ec345c..a6798a14114e 100644 --- a/third_party/libwebrtc/rtc_base/experiments/rtt_mult_experiment_unittest.cc +++ b/third_party/libwebrtc/rtc_base/experiments/rtt_mult_experiment_unittest.cc @@ -15,48 +15,17 @@ namespace webrtc { -TEST(RttMultExperimentTest, RttMultDisabledByDefault) { - EXPECT_FALSE(RttMultExperiment::RttMultEnabled()); -} - -TEST(RttMultExperimentTest, RttMultEnabledByFieldTrial) { - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-RttMult/Enabled-0.60,100.0/"); +TEST(RttMultExperimentTest, RttMultEnabledByDefault) { EXPECT_TRUE(RttMultExperiment::RttMultEnabled()); + ASSERT_TRUE(RttMultExperiment::GetRttMultValue()); + EXPECT_EQ(0.9f, RttMultExperiment::GetRttMultValue()->rtt_mult_setting); + EXPECT_EQ(200.0f, RttMultExperiment::GetRttMultValue()->rtt_mult_add_cap_ms); } -TEST(RttMultExperimentTest, RttMultTestValue) { - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-RttMult/Enabled-0.60,100.0/"); - EXPECT_EQ(0.6f, RttMultExperiment::GetRttMultValue()->rtt_mult_setting); - EXPECT_EQ(100.0f, RttMultExperiment::GetRttMultValue()->rtt_mult_add_cap_ms); -} - -TEST(RttMultExperimentTest, RttMultTestMalformedEnabled) { - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-RttMult/Enable-0.60,100.0/"); +TEST(RttMultExperimentTest, RttMultDisabledByFieldTrial) { + webrtc::test::ScopedFieldTrials field_trials("WebRTC-RttMult/Disabled/"); EXPECT_FALSE(RttMultExperiment::RttMultEnabled()); EXPECT_FALSE(RttMultExperiment::GetRttMultValue()); } -TEST(RttMultExperimentTest, RttMultTestValueOutOfBoundsPositive) { - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-RttMult/Enabled-1.5,2100.0/"); - EXPECT_EQ(1.0f, RttMultExperiment::GetRttMultValue()->rtt_mult_setting); - EXPECT_EQ(2000.0f, RttMultExperiment::GetRttMultValue()->rtt_mult_add_cap_ms); -} - -TEST(RttMultExperimentTest, RttMultTestValueOutOfBoundsNegative) { - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-RttMult/Enabled--0.5,-100.0/"); - EXPECT_EQ(0.0f, RttMultExperiment::GetRttMultValue()->rtt_mult_setting); - EXPECT_EQ(0.0f, RttMultExperiment::GetRttMultValue()->rtt_mult_add_cap_ms); -} - -TEST(RttMultExperimentTest, RttMultTestMalformedValue) { - webrtc::test::ScopedFieldTrials field_trials( - 
"WebRTC-RttMult/Enabled-0.25,10a0.0/"); - EXPECT_NE(100.0f, RttMultExperiment::GetRttMultValue()->rtt_mult_add_cap_ms); -} - } // namespace webrtc diff --git a/third_party/libwebrtc/rtc_base/experiments/stable_target_rate_experiment_gn/moz.build b/third_party/libwebrtc/rtc_base/experiments/stable_target_rate_experiment_gn/moz.build index 0948e2a3b9fc..35ab472dd649 100644 --- a/third_party/libwebrtc/rtc_base/experiments/stable_target_rate_experiment_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/experiments/stable_target_rate_experiment_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/rtc_base/frequency_tracker.cc b/third_party/libwebrtc/rtc_base/frequency_tracker.cc new file mode 100644 index 000000000000..c3be30e3b866 --- /dev/null +++ b/third_party/libwebrtc/rtc_base/frequency_tracker.cc @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_base/frequency_tracker.h" + +#include "absl/types/optional.h" +#include "api/units/frequency.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "rtc_base/rate_statistics.h" + +namespace webrtc { + +FrequencyTracker::FrequencyTracker(TimeDelta max_window_size) + : impl_(max_window_size.ms(), 1'000'000) {} + +absl::optional FrequencyTracker::Rate(Timestamp now) const { + if (absl::optional rate = impl_.Rate(now.ms())) { + return Frequency::MilliHertz(*rate); + } + return absl::nullopt; +} + +void FrequencyTracker::Update(int64_t count, Timestamp now) { + impl_.Update(count, now.ms()); +} + +} // namespace webrtc diff --git a/third_party/libwebrtc/rtc_base/frequency_tracker.h b/third_party/libwebrtc/rtc_base/frequency_tracker.h new file mode 100644 index 000000000000..3ee2ab0e1afd --- /dev/null +++ b/third_party/libwebrtc/rtc_base/frequency_tracker.h @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_FREQUENCY_TRACKER_H_ +#define RTC_BASE_FREQUENCY_TRACKER_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/units/frequency.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "rtc_base/rate_statistics.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { +// Class to estimate frequency (e.g. frame rate) over running window. +// Timestamps used in Update() and Rate() must never decrease for two +// consecutive calls. +// This class is thread unsafe. 
+class RTC_EXPORT FrequencyTracker { + public: + explicit FrequencyTracker(TimeDelta window_size); + + FrequencyTracker(const FrequencyTracker&) = default; + FrequencyTracker(FrequencyTracker&&) = default; + FrequencyTracker& operator=(const FrequencyTracker&) = delete; + FrequencyTracker& operator=(FrequencyTracker&&) = delete; + + ~FrequencyTracker() = default; + + // Reset instance to original state. + void Reset() { impl_.Reset(); } + + // Update rate with a new data point, moving averaging window as needed. + void Update(int64_t count, Timestamp now); + void Update(Timestamp now) { Update(1, now); } + + // Returns rate, moving averaging window as needed. + // Returns nullopt when rate can't be measured. + absl::optional Rate(Timestamp now) const; + + private: + RateStatistics impl_; +}; +} // namespace webrtc + +#endif // RTC_BASE_FREQUENCY_TRACKER_H_ diff --git a/third_party/libwebrtc/rtc_base/frequency_tracker_gn/moz.build b/third_party/libwebrtc/rtc_base/frequency_tracker_gn/moz.build new file mode 100644 index 000000000000..b546fdccb098 --- /dev/null +++ b/third_party/libwebrtc/rtc_base/frequency_tracker_gn/moz.build @@ -0,0 +1,229 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + + + ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ### + ### DO NOT edit it by hand. ### + +COMPILE_FLAGS["OS_INCLUDES"] = [] +AllowCompilerWarnings() + +DEFINES["ABSL_ALLOCATOR_NOTHROW"] = "1" +DEFINES["RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY"] = True +DEFINES["RTC_ENABLE_VP9"] = True +DEFINES["WEBRTC_ENABLE_PROTOBUF"] = "0" +DEFINES["WEBRTC_LIBRARY_IMPL"] = True +DEFINES["WEBRTC_MOZILLA_BUILD"] = True +DEFINES["WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS"] = "0" +DEFINES["WEBRTC_STRICT_FIELD_TRIALS"] = "0" + +FINAL_LIBRARY = "webrtc" + + +LOCAL_INCLUDES += [ + "!/ipc/ipdl/_ipdlheaders", + "!/third_party/libwebrtc/gen", + "/ipc/chromium/src", + "/third_party/libwebrtc/", + "/third_party/libwebrtc/third_party/abseil-cpp/", + "/tools/profiler/public" +] + +UNIFIED_SOURCES += [ + "/third_party/libwebrtc/rtc_base/frequency_tracker.cc" +] + +if not CONFIG["MOZ_DEBUG"]: + + DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0" + DEFINES["NDEBUG"] = True + DEFINES["NVALGRIND"] = True + +if CONFIG["MOZ_DEBUG"] == "1": + + DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1" + +if CONFIG["OS_TARGET"] == "Android": + + DEFINES["ANDROID"] = True + DEFINES["ANDROID_NDK_VERSION_ROLL"] = "r22_1" + DEFINES["HAVE_SYS_UIO_H"] = True + DEFINES["WEBRTC_ANDROID"] = True + DEFINES["WEBRTC_ANDROID_OPENSLES"] = True + DEFINES["WEBRTC_LINUX"] = True + DEFINES["WEBRTC_POSIX"] = True + DEFINES["_GNU_SOURCE"] = True + DEFINES["__STDC_CONSTANT_MACROS"] = True + DEFINES["__STDC_FORMAT_MACROS"] = True + + OS_LIBS += [ + "log" + ] + +if CONFIG["OS_TARGET"] == "Darwin": + + DEFINES["WEBRTC_MAC"] = True + DEFINES["WEBRTC_POSIX"] = True + DEFINES["_LIBCPP_HAS_NO_ALIGNED_ALLOCATION"] = True + DEFINES["__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES"] = "0" + DEFINES["__STDC_CONSTANT_MACROS"] = True + DEFINES["__STDC_FORMAT_MACROS"] = True + +if CONFIG["OS_TARGET"] == "Linux": + + DEFINES["USE_AURA"] = "1" + DEFINES["USE_GLIB"] = "1" + DEFINES["USE_NSS_CERTS"] = "1" + DEFINES["USE_OZONE"] = "1" + DEFINES["USE_UDEV"] = True + DEFINES["WEBRTC_LINUX"] = True + DEFINES["WEBRTC_POSIX"] = True + DEFINES["_FILE_OFFSET_BITS"] = "64" + DEFINES["_LARGEFILE64_SOURCE"] = True + 
DEFINES["_LARGEFILE_SOURCE"] = True + DEFINES["__STDC_CONSTANT_MACROS"] = True + DEFINES["__STDC_FORMAT_MACROS"] = True + +if CONFIG["OS_TARGET"] == "OpenBSD": + + DEFINES["USE_GLIB"] = "1" + DEFINES["USE_OZONE"] = "1" + DEFINES["USE_X11"] = "1" + DEFINES["WEBRTC_BSD"] = True + DEFINES["WEBRTC_POSIX"] = True + DEFINES["_FILE_OFFSET_BITS"] = "64" + DEFINES["_LARGEFILE64_SOURCE"] = True + DEFINES["_LARGEFILE_SOURCE"] = True + DEFINES["__STDC_CONSTANT_MACROS"] = True + DEFINES["__STDC_FORMAT_MACROS"] = True + +if CONFIG["OS_TARGET"] == "WINNT": + + DEFINES["CERT_CHAIN_PARA_HAS_EXTRA_FIELDS"] = True + DEFINES["NOMINMAX"] = True + DEFINES["NTDDI_VERSION"] = "0x0A000000" + DEFINES["PSAPI_VERSION"] = "2" + DEFINES["RTC_ENABLE_WIN_WGC"] = True + DEFINES["UNICODE"] = True + DEFINES["USE_AURA"] = "1" + DEFINES["WEBRTC_WIN"] = True + DEFINES["WIN32"] = True + DEFINES["WIN32_LEAN_AND_MEAN"] = True + DEFINES["WINAPI_FAMILY"] = "WINAPI_FAMILY_DESKTOP_APP" + DEFINES["WINVER"] = "0x0A00" + DEFINES["_ATL_NO_OPENGL"] = True + DEFINES["_CRT_RAND_S"] = True + DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True + DEFINES["_ENABLE_EXTENDED_ALIGNED_STORAGE"] = True + DEFINES["_HAS_EXCEPTIONS"] = "0" + DEFINES["_HAS_NODISCARD"] = True + DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True + DEFINES["_SECURE_ATL"] = True + DEFINES["_UNICODE"] = True + DEFINES["_WIN32_WINNT"] = "0x0A00" + DEFINES["_WINDOWS"] = True + DEFINES["__STD_C"] = True + + OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", + "winmm" + ] + +if CONFIG["CPU_ARCH"] == "aarch64": + + DEFINES["WEBRTC_ARCH_ARM64"] = True + DEFINES["WEBRTC_HAS_NEON"] = True + +if CONFIG["CPU_ARCH"] == "arm": + + CXXFLAGS += [ + "-mfpu=neon" + ] + + DEFINES["WEBRTC_ARCH_ARM"] = True + DEFINES["WEBRTC_ARCH_ARM_V7"] = True + DEFINES["WEBRTC_HAS_NEON"] = True + +if CONFIG["CPU_ARCH"] == "mips32": + + DEFINES["MIPS32_LE"] = True + DEFINES["MIPS_FPU_LE"] = True + DEFINES["_GNU_SOURCE"] = True + +if CONFIG["CPU_ARCH"] == "mips64": + + DEFINES["_GNU_SOURCE"] = True + +if CONFIG["CPU_ARCH"] == "x86": + + DEFINES["WEBRTC_ENABLE_AVX2"] = True + +if CONFIG["CPU_ARCH"] == "x86_64": + + DEFINES["WEBRTC_ENABLE_AVX2"] = True + +if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Android": + + DEFINES["_DEBUG"] = True + +if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Darwin": + + DEFINES["_DEBUG"] = True + +if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "Linux": + + DEFINES["_DEBUG"] = True + +if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "OpenBSD": + + DEFINES["_DEBUG"] = True + +if CONFIG["MOZ_DEBUG"] == "1" and CONFIG["OS_TARGET"] == "WINNT": + + DEFINES["_HAS_ITERATOR_DEBUGGING"] = "0" + +if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux": + + DEFINES["USE_X11"] = "1" + +if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Android": + + OS_LIBS += [ + "android_support", + "unwind" + ] + +if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Android": + + CXXFLAGS += [ + "-msse2" + ] + + OS_LIBS += [ + "android_support" + ] + +if CONFIG["CPU_ARCH"] == "aarch64" and CONFIG["OS_TARGET"] == "Linux": + + DEFINES["_GNU_SOURCE"] = True + +if CONFIG["CPU_ARCH"] == "arm" and CONFIG["OS_TARGET"] == "Linux": + + DEFINES["_GNU_SOURCE"] = True + +if CONFIG["CPU_ARCH"] == "x86" and CONFIG["OS_TARGET"] == "Linux": + + CXXFLAGS += [ + "-msse2" + ] + + DEFINES["_GNU_SOURCE"] = True + +if CONFIG["CPU_ARCH"] == "x86_64" and CONFIG["OS_TARGET"] == "Linux": + + DEFINES["_GNU_SOURCE"] = True + +Library("frequency_tracker_gn") diff --git 
a/third_party/libwebrtc/rtc_base/frequency_tracker_unittest.cc b/third_party/libwebrtc/rtc_base/frequency_tracker_unittest.cc new file mode 100644 index 000000000000..00788c3ee8d6 --- /dev/null +++ b/third_party/libwebrtc/rtc_base/frequency_tracker_unittest.cc @@ -0,0 +1,203 @@ +/* + * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_base/frequency_tracker.h" + +#include +#include + +#include "absl/types/optional.h" +#include "api/units/frequency.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::AllOf; +using ::testing::Gt; +using ::testing::Lt; + +constexpr TimeDelta kWindow = TimeDelta::Millis(500); +constexpr TimeDelta kEpsilon = TimeDelta::Millis(1); + +TEST(FrequencyTrackerTest, ReturnsNulloptInitially) { + Timestamp now = Timestamp::Seconds(12'345); + FrequencyTracker stats(kWindow); + + EXPECT_EQ(stats.Rate(now), absl::nullopt); +} + +TEST(FrequencyTrackerTest, ReturnsNulloptAfterSingleDataPoint) { + Timestamp now = Timestamp::Seconds(12'345); + FrequencyTracker stats(kWindow); + + stats.Update(now); + now += TimeDelta::Millis(10); + + EXPECT_EQ(stats.Rate(now), absl::nullopt); +} + +TEST(FrequencyTrackerTest, ReturnsRateAfterTwoMeasurements) { + Timestamp now = Timestamp::Seconds(12'345); + FrequencyTracker stats(kWindow); + + stats.Update(now); + now += TimeDelta::Millis(1); + stats.Update(now); + + // 1 event per 1 ms ~= 1'000 events per second. + EXPECT_EQ(stats.Rate(now), Frequency::Hertz(1'000)); +} + +TEST(FrequencyTrackerTest, MeasuresConstantRate) { + const Timestamp start = Timestamp::Seconds(12'345); + const TimeDelta kInterval = TimeDelta::Millis(10); + const Frequency kConstantRate = 1 / kInterval; + + Timestamp now = start; + FrequencyTracker stats(kWindow); + + stats.Update(now); + Frequency last_error = Frequency::PlusInfinity(); + for (TimeDelta i = TimeDelta::Zero(); i < kWindow; i += kInterval) { + SCOPED_TRACE(i); + now += kInterval; + stats.Update(now); + + // Until window is full, rate is measured over a smaller window and might + // look larger than the constant rate. + absl::optional rate = stats.Rate(now); + ASSERT_GE(rate, kConstantRate); + + // Expect the estimation error to decrease as the window is extended. + Frequency error = *rate - kConstantRate; + EXPECT_LE(error, last_error); + last_error = error; + } + + // Once window is full, rate measurment should be stable. + for (TimeDelta i = TimeDelta::Zero(); i < kInterval; + i += TimeDelta::Millis(1)) { + SCOPED_TRACE(i); + EXPECT_EQ(stats.Rate(now + i), kConstantRate); + } +} + +TEST(FrequencyTrackerTest, CanMeasureFractionalRate) { + const TimeDelta kInterval = TimeDelta::Millis(134); + Timestamp now = Timestamp::Seconds(12'345); + // FrequencyTracker counts number of events in the window, thus when window is + // fraction of 1 second, number of events per second would always be integer. 
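+ // Illustrative arithmetic (not part of the vendored test): with updates every
+ // 134 ms, roughly 15 events land inside the 2 s window used below, and
+ // 15 events / 2 s ≈ 7.5 Hz, hence the Gt(7 Hz) / Lt(8 Hz) bracket at the end
+ // of this test.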
+ const TimeDelta window = TimeDelta::Seconds(2); + + FrequencyTracker framerate(window); + framerate.Update(now); + for (TimeDelta i = TimeDelta::Zero(); i < window; i += kInterval) { + now += kInterval; + framerate.Update(now); + } + + // Should be aproximitly 7.5 fps + EXPECT_THAT(framerate.Rate(now), + AllOf(Gt(Frequency::Hertz(7)), Lt(Frequency::Hertz(8)))); +} + +TEST(FrequencyTrackerTest, IncreasingThenDecreasingRate) { + const int64_t kLargeSize = 1'500; + const int64_t kSmallSize = 300; + const TimeDelta kLargeInterval = TimeDelta::Millis(10); + const TimeDelta kSmallInterval = TimeDelta::Millis(2); + + Timestamp now = Timestamp::Seconds(12'345); + FrequencyTracker stats(kWindow); + + stats.Update(kLargeSize, now); + for (TimeDelta i = TimeDelta::Zero(); i < kWindow; i += kLargeInterval) { + SCOPED_TRACE(i); + now += kLargeInterval; + stats.Update(kLargeSize, now); + } + absl::optional last_rate = stats.Rate(now); + EXPECT_EQ(last_rate, kLargeSize / kLargeInterval); + + // Decrease rate with smaller measurments. + for (TimeDelta i = TimeDelta::Zero(); i < kWindow; i += kLargeInterval) { + SCOPED_TRACE(i); + now += kLargeInterval; + stats.Update(kSmallSize, now); + + absl::optional rate = stats.Rate(now); + EXPECT_LT(rate, last_rate); + + last_rate = rate; + } + EXPECT_EQ(last_rate, kSmallSize / kLargeInterval); + + // Increase rate with more frequent measurments. + for (TimeDelta i = TimeDelta::Zero(); i < kWindow; i += kSmallInterval) { + SCOPED_TRACE(i); + now += kSmallInterval; + stats.Update(kSmallSize, now); + + absl::optional rate = stats.Rate(now); + EXPECT_GE(rate, last_rate); + + last_rate = rate; + } + EXPECT_EQ(last_rate, kSmallSize / kSmallInterval); +} + +TEST(FrequencyTrackerTest, ResetAfterSilence) { + const TimeDelta kInterval = TimeDelta::Millis(10); + const int64_t kPixels = 640 * 360; + + Timestamp now = Timestamp::Seconds(12'345); + FrequencyTracker pixel_rate(kWindow); + + // Feed data until window has been filled. + pixel_rate.Update(kPixels, now); + for (TimeDelta i = TimeDelta::Zero(); i < kWindow; i += kInterval) { + now += kInterval; + pixel_rate.Update(kPixels, now); + } + ASSERT_GT(pixel_rate.Rate(now), Frequency::Zero()); + + now += kWindow + kEpsilon; + // Silence over window size should trigger auto reset for coming sample. + EXPECT_EQ(pixel_rate.Rate(now), absl::nullopt); + pixel_rate.Update(kPixels, now); + // Single measurment after reset is not enough to estimate the rate. + EXPECT_EQ(pixel_rate.Rate(now), absl::nullopt); + + // Manual reset, add the same check again. 
+ pixel_rate.Reset(); + EXPECT_EQ(pixel_rate.Rate(now), absl::nullopt); + now += kInterval; + pixel_rate.Update(kPixels, now); + EXPECT_EQ(pixel_rate.Rate(now), absl::nullopt); +} + +TEST(FrequencyTrackerTest, ReturnsNulloptWhenOverflows) { + Timestamp now = Timestamp::Seconds(12'345); + FrequencyTracker stats(kWindow); + + int64_t very_large_number = std::numeric_limits::max(); + stats.Update(very_large_number, now); + now += kEpsilon; + stats.Update(very_large_number, now); + + EXPECT_EQ(stats.Rate(now), absl::nullopt); +} + +} // namespace +} // namespace webrtc diff --git a/third_party/libwebrtc/rtc_base/ip_address_gn/moz.build b/third_party/libwebrtc/rtc_base/ip_address_gn/moz.build index 55c55c7b2b41..d8f6355c7ccf 100644 --- a/third_party/libwebrtc/rtc_base/ip_address_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/ip_address_gn/moz.build @@ -129,8 +129,7 @@ if CONFIG["OS_TARGET"] == "WINNT": OS_LIBS += [ "crypt32", "iphlpapi", - "secur32", - "winmm" + "secur32" ] if CONFIG["CPU_ARCH"] == "aarch64": diff --git a/third_party/libwebrtc/rtc_base/logging.h b/third_party/libwebrtc/rtc_base/logging.h index a980fb4ad3d4..a3733d7543bf 100644 --- a/third_party/libwebrtc/rtc_base/logging.h +++ b/third_party/libwebrtc/rtc_base/logging.h @@ -664,17 +664,17 @@ class LogMessage { ::rtc::webrtc_logging_impl::LogStreamer<>() \ << ::rtc::webrtc_logging_impl::LogMetadata(file, line, sev) -#define RTC_LOG(sev) \ - !rtc::LogMessage::IsNoop<::rtc::sev>() && \ +#define RTC_LOG(sev) \ + !::rtc::LogMessage::IsNoop<::rtc::sev>() && \ RTC_LOG_FILE_LINE(::rtc::sev, __FILE__, __LINE__) -#define RTC_LOG_IF(sev, condition) \ - !rtc::LogMessage::IsNoop<::rtc::sev>() && (condition) && \ +#define RTC_LOG_IF(sev, condition) \ + !::rtc::LogMessage::IsNoop<::rtc::sev>() && (condition) && \ RTC_LOG_FILE_LINE(::rtc::sev, __FILE__, __LINE__) // The _V version is for when a variable is passed in. #define RTC_LOG_V(sev) \ - !rtc::LogMessage::IsNoop(sev) && RTC_LOG_FILE_LINE(sev, __FILE__, __LINE__) + !::rtc::LogMessage::IsNoop(sev) && RTC_LOG_FILE_LINE(sev, __FILE__, __LINE__) // The _F version prefixes the message with the current function name. 
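The logging.h hunks in this block only add a leading "::" so that the macros spell out ::rtc::LogMessage. A sketch of the failure mode this guards against (illustrative, not taken from the diff):

#include "rtc_base/logging.h"

namespace my_app {
namespace rtc {}  // any local `rtc` name would hijack the unqualified lookup

void F() {
  // Before the change, RTC_LOG expanded to `!rtc::LogMessage::IsNoop<...>`,
  // which from inside my_app resolved to my_app::rtc and failed to compile.
  // With the fully qualified `::rtc::`, the expansion works from any namespace.
  RTC_LOG(LS_INFO) << "still compiles";
}
}  // namespace my_app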
#if (defined(__GNUC__) && !defined(NDEBUG)) || defined(WANT_PRETTY_LOG_F) @@ -698,7 +698,7 @@ inline bool LogCheckLevel(LoggingSeverity sev) { } #define RTC_LOG_E(sev, ctx, err) \ - !rtc::LogMessage::IsNoop<::rtc::sev>() && \ + !::rtc::LogMessage::IsNoop<::rtc::sev>() && \ ::rtc::webrtc_logging_impl::LogCall() & \ ::rtc::webrtc_logging_impl::LogStreamer<>() \ << ::rtc::webrtc_logging_impl::LogMetadataErr { \ @@ -736,7 +736,7 @@ inline const char* AdaptString(const std::string& str) { } // namespace webrtc_logging_impl #define RTC_LOG_TAG(sev, tag) \ - !rtc::LogMessage::IsNoop(sev) && \ + !::rtc::LogMessage::IsNoop(sev) && \ ::rtc::webrtc_logging_impl::LogCall() & \ ::rtc::webrtc_logging_impl::LogStreamer<>() \ << ::rtc::webrtc_logging_impl::LogMetadataTag { \ diff --git a/third_party/libwebrtc/rtc_base/logging_gn/moz.build b/third_party/libwebrtc/rtc_base/logging_gn/moz.build index c58a306730f8..5512024097ba 100644 --- a/third_party/libwebrtc/rtc_base/logging_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/logging_gn/moz.build @@ -127,6 +127,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/rtc_base/nat_unittest.cc b/third_party/libwebrtc/rtc_base/nat_unittest.cc index f6dd83cadba6..19e53543ba24 100644 --- a/third_party/libwebrtc/rtc_base/nat_unittest.cc +++ b/third_party/libwebrtc/rtc_base/nat_unittest.cc @@ -26,6 +26,7 @@ #include "rtc_base/nat_socket_factory.h" #include "rtc_base/nat_types.h" #include "rtc_base/net_helpers.h" +#include "rtc_base/net_test_helpers.h" #include "rtc_base/network.h" #include "rtc_base/physical_socket_server.h" #include "rtc_base/socket.h" diff --git a/third_party/libwebrtc/rtc_base/net_helpers.cc b/third_party/libwebrtc/rtc_base/net_helpers.cc index 73fe86231307..00cd434a5807 100644 --- a/third_party/libwebrtc/rtc_base/net_helpers.cc +++ b/third_party/libwebrtc/rtc_base/net_helpers.cc @@ -19,15 +19,9 @@ #include #include -#include "rtc_base/win/windows_version.h" #endif #if defined(WEBRTC_POSIX) && !defined(__native_client__) #include -#if defined(WEBRTC_ANDROID) -#include "rtc_base/ifaddrs_android.h" -#else -#include -#endif #endif // defined(WEBRTC_POSIX) && !defined(__native_client__) namespace rtc { @@ -48,83 +42,4 @@ int inet_pton(int af, absl::string_view src, void* dst) { return ::inet_pton(af, src_str.c_str(), dst); #endif } - -bool HasIPv4Enabled() { -#if defined(WEBRTC_POSIX) && !defined(__native_client__) - bool has_ipv4 = false; - struct ifaddrs* ifa; - if (getifaddrs(&ifa) < 0) { - return false; - } - for (struct ifaddrs* cur = ifa; cur != nullptr; cur = cur->ifa_next) { - if (cur->ifa_addr != nullptr && cur->ifa_addr->sa_family == AF_INET) { - has_ipv4 = true; - break; - } - } - freeifaddrs(ifa); - return has_ipv4; -#else - return true; -#endif -} - -bool HasIPv6Enabled() { -#if defined(WINUWP) - // WinUWP always has IPv6 capability. - return true; -#elif defined(WEBRTC_WIN) - if (rtc::rtc_win::GetVersion() >= rtc::rtc_win::Version::VERSION_VISTA) { - return true; - } - if (rtc::rtc_win::GetVersion() < rtc::rtc_win::Version::VERSION_XP) { - return false; - } - DWORD protbuff_size = 4096; - std::unique_ptr protocols; - LPWSAPROTOCOL_INFOW protocol_infos = nullptr; - int requested_protocols[2] = {AF_INET6, 0}; - - int err = 0; - int ret = 0; - // Check for protocols in a do-while loop until we provide a buffer large - // enough. (WSCEnumProtocols sets protbuff_size to its desired value). 
- // It is extremely unlikely that this will loop more than once. - do { - protocols.reset(new char[protbuff_size]); - protocol_infos = reinterpret_cast(protocols.get()); - ret = WSCEnumProtocols(requested_protocols, protocol_infos, &protbuff_size, - &err); - } while (ret == SOCKET_ERROR && err == WSAENOBUFS); - - if (ret == SOCKET_ERROR) { - return false; - } - - // Even if ret is positive, check specifically for IPv6. - // Non-IPv6 enabled WinXP will still return a RAW protocol. - for (int i = 0; i < ret; ++i) { - if (protocol_infos[i].iAddressFamily == AF_INET6) { - return true; - } - } - return false; -#elif defined(WEBRTC_POSIX) && !defined(__native_client__) - bool has_ipv6 = false; - struct ifaddrs* ifa; - if (getifaddrs(&ifa) < 0) { - return false; - } - for (struct ifaddrs* cur = ifa; cur != nullptr; cur = cur->ifa_next) { - if (cur->ifa_addr != nullptr && cur->ifa_addr->sa_family == AF_INET6) { - has_ipv6 = true; - break; - } - } - freeifaddrs(ifa); - return has_ipv6; -#else - return true; -#endif -} } // namespace rtc diff --git a/third_party/libwebrtc/rtc_base/net_helpers.h b/third_party/libwebrtc/rtc_base/net_helpers.h index 478cec3eedcd..a529f4f03f4d 100644 --- a/third_party/libwebrtc/rtc_base/net_helpers.h +++ b/third_party/libwebrtc/rtc_base/net_helpers.h @@ -29,9 +29,6 @@ namespace rtc { const char* inet_ntop(int af, const void* src, char* dst, socklen_t size); int inet_pton(int af, absl::string_view src, void* dst); -RTC_EXPORT bool HasIPv4Enabled(); -RTC_EXPORT bool HasIPv6Enabled(); - } // namespace rtc #endif // RTC_BASE_NET_HELPERS_H_ diff --git a/third_party/libwebrtc/rtc_base/net_helpers_gn/moz.build b/third_party/libwebrtc/rtc_base/net_helpers_gn/moz.build index 1cb24c3d75c9..cf8da5ce3ebb 100644 --- a/third_party/libwebrtc/rtc_base/net_helpers_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/net_helpers_gn/moz.build @@ -129,8 +129,7 @@ if CONFIG["OS_TARGET"] == "WINNT": OS_LIBS += [ "crypt32", "iphlpapi", - "secur32", - "winmm" + "secur32" ] if CONFIG["CPU_ARCH"] == "aarch64": diff --git a/third_party/libwebrtc/rtc_base/net_test_helpers.cc b/third_party/libwebrtc/rtc_base/net_test_helpers.cc new file mode 100644 index 000000000000..806d7dee60c0 --- /dev/null +++ b/third_party/libwebrtc/rtc_base/net_test_helpers.cc @@ -0,0 +1,111 @@ +/* + * Copyright 2023 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "rtc_base/net_test_helpers.h" + +#include +#include + +#if defined(WEBRTC_WIN) +#include +#include + +#include "rtc_base/win/windows_version.h" +#endif +#if defined(WEBRTC_POSIX) && !defined(__native_client__) +#include +#if defined(WEBRTC_ANDROID) +#include "rtc_base/ifaddrs_android.h" +#else +#include +#endif +#endif // defined(WEBRTC_POSIX) && !defined(__native_client__) + +namespace rtc { + +bool HasIPv4Enabled() { +#if defined(WEBRTC_POSIX) && !defined(__native_client__) + bool has_ipv4 = false; + struct ifaddrs* ifa; + if (getifaddrs(&ifa) < 0) { + return false; + } + for (struct ifaddrs* cur = ifa; cur != nullptr; cur = cur->ifa_next) { + if (cur->ifa_addr != nullptr && cur->ifa_addr->sa_family == AF_INET) { + has_ipv4 = true; + break; + } + } + freeifaddrs(ifa); + return has_ipv4; +#else + return true; +#endif +} + +bool HasIPv6Enabled() { +#if defined(WINUWP) + // WinUWP always has IPv6 capability. + return true; +#elif defined(WEBRTC_WIN) + if (rtc::rtc_win::GetVersion() >= rtc::rtc_win::Version::VERSION_VISTA) { + return true; + } + if (rtc::rtc_win::GetVersion() < rtc::rtc_win::Version::VERSION_XP) { + return false; + } + DWORD protbuff_size = 4096; + std::unique_ptr protocols; + LPWSAPROTOCOL_INFOW protocol_infos = nullptr; + int requested_protocols[2] = {AF_INET6, 0}; + + int err = 0; + int ret = 0; + // Check for protocols in a do-while loop until we provide a buffer large + // enough. (WSCEnumProtocols sets protbuff_size to its desired value). + // It is extremely unlikely that this will loop more than once. + do { + protocols.reset(new char[protbuff_size]); + protocol_infos = reinterpret_cast(protocols.get()); + ret = WSCEnumProtocols(requested_protocols, protocol_infos, &protbuff_size, + &err); + } while (ret == SOCKET_ERROR && err == WSAENOBUFS); + + if (ret == SOCKET_ERROR) { + return false; + } + + // Even if ret is positive, check specifically for IPv6. + // Non-IPv6 enabled WinXP will still return a RAW protocol. + for (int i = 0; i < ret; ++i) { + if (protocol_infos[i].iAddressFamily == AF_INET6) { + return true; + } + } + return false; +#elif defined(WEBRTC_POSIX) && !defined(__native_client__) + bool has_ipv6 = false; + struct ifaddrs* ifa; + if (getifaddrs(&ifa) < 0) { + return false; + } + for (struct ifaddrs* cur = ifa; cur != nullptr; cur = cur->ifa_next) { + if (cur->ifa_addr != nullptr && cur->ifa_addr->sa_family == AF_INET6) { + has_ipv6 = true; + break; + } + } + freeifaddrs(ifa); + return has_ipv6; +#else + return true; +#endif +} +} // namespace rtc diff --git a/third_party/libwebrtc/modules/audio_device/android/ensure_initialized.h b/third_party/libwebrtc/rtc_base/net_test_helpers.h similarity index 51% rename from third_party/libwebrtc/modules/audio_device/android/ensure_initialized.h rename to third_party/libwebrtc/rtc_base/net_test_helpers.h index c1997b4acdf1..2e8320c4d860 100644 --- a/third_party/libwebrtc/modules/audio_device/android/ensure_initialized.h +++ b/third_party/libwebrtc/rtc_base/net_test_helpers.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * Copyright 2023 The WebRTC Project Authors. All rights reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,10 +8,16 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -namespace webrtc { -namespace audiodevicemodule { +#ifndef RTC_BASE_NET_TEST_HELPERS_H_ +#define RTC_BASE_NET_TEST_HELPERS_H_ -void EnsureInitialized(); +#include "rtc_base/system/rtc_export.h" -} // namespace audiodevicemodule -} // namespace webrtc +namespace rtc { + +RTC_EXPORT bool HasIPv4Enabled(); +RTC_EXPORT bool HasIPv6Enabled(); + +} // namespace rtc + +#endif // RTC_BASE_NET_TEST_HELPERS_H_ diff --git a/third_party/libwebrtc/rtc_base/network_unittest.cc b/third_party/libwebrtc/rtc_base/network_unittest.cc index 1579f800b6dc..d1999f349c79 100644 --- a/third_party/libwebrtc/rtc_base/network_unittest.cc +++ b/third_party/libwebrtc/rtc_base/network_unittest.cc @@ -21,6 +21,7 @@ #include "absl/strings/string_view.h" #include "rtc_base/checks.h" #include "rtc_base/net_helpers.h" +#include "rtc_base/net_test_helpers.h" #include "rtc_base/network_monitor.h" #include "rtc_base/network_monitor_factory.h" #include "rtc_base/physical_socket_server.h" @@ -45,6 +46,12 @@ using ::testing::UnorderedElementsAreArray; namespace rtc { +#define MAYBE_SKIP_IPV4 \ + if (!HasIPv4Enabled()) { \ + RTC_LOG(LS_INFO) << "No IPv4... skipping"; \ + return; \ + } + namespace { IPAddress IPFromString(absl::string_view str) { @@ -1263,6 +1270,7 @@ TEST_F(NetworkTest, TestNetworkMonitoring) { #define MAYBE_DefaultLocalAddress DefaultLocalAddress #endif TEST_F(NetworkTest, MAYBE_DefaultLocalAddress) { + MAYBE_SKIP_IPV4; IPAddress ip; FakeNetworkMonitorFactory factory; PhysicalSocketServer socket_server; diff --git a/third_party/libwebrtc/rtc_base/physical_socket_server_unittest.cc b/third_party/libwebrtc/rtc_base/physical_socket_server_unittest.cc index f24c7e46cec1..de64a3181294 100644 --- a/third_party/libwebrtc/rtc_base/physical_socket_server_unittest.cc +++ b/third_party/libwebrtc/rtc_base/physical_socket_server_unittest.cc @@ -19,6 +19,7 @@ #include "rtc_base/ip_address.h" #include "rtc_base/logging.h" #include "rtc_base/net_helpers.h" +#include "rtc_base/net_test_helpers.h" #include "rtc_base/network_monitor.h" #include "rtc_base/socket_unittest.h" #include "rtc_base/test_utils.h" diff --git a/third_party/libwebrtc/rtc_base/platform_thread_gn/moz.build b/third_party/libwebrtc/rtc_base/platform_thread_gn/moz.build index 877e4decb1be..8e8e257d918f 100644 --- a/third_party/libwebrtc/rtc_base/platform_thread_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/platform_thread_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/rtc_base/protobuf_utils.h b/third_party/libwebrtc/rtc_base/protobuf_utils.h index 786365db1a67..497b7f488fbc 100644 --- a/third_party/libwebrtc/rtc_base/protobuf_utils.h +++ b/third_party/libwebrtc/rtc_base/protobuf_utils.h @@ -15,8 +15,8 @@ #if WEBRTC_ENABLE_PROTOBUF -#include "third_party/protobuf/src/google/protobuf/message_lite.h" -#include "third_party/protobuf/src/google/protobuf/repeated_field.h" +#include "third_party/protobuf/src/google/protobuf/message_lite.h" // nogncheck +#include "third_party/protobuf/src/google/protobuf/repeated_field.h" // nogncheck namespace webrtc { diff --git a/third_party/libwebrtc/rtc_base/rate_statistics_gn/moz.build b/third_party/libwebrtc/rtc_base/rate_statistics_gn/moz.build index 5b7ef1d09da3..3a0113d12296 100644 --- a/third_party/libwebrtc/rtc_base/rate_statistics_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/rate_statistics_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": 
DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/rtc_base/rate_tracker_gn/moz.build b/third_party/libwebrtc/rtc_base/rate_tracker_gn/moz.build index 7849033fd901..90c9c25aec52 100644 --- a/third_party/libwebrtc/rtc_base/rate_tracker_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/rate_tracker_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/rtc_base/rtc_event_gn/moz.build b/third_party/libwebrtc/rtc_base/rtc_event_gn/moz.build index 6fde36031cdc..f8dd4dd5984a 100644 --- a/third_party/libwebrtc/rtc_base/rtc_event_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/rtc_event_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/rtc_base/socket_unittest.cc b/third_party/libwebrtc/rtc_base/socket_unittest.cc index 40da8c26cf24..0a41a776aca6 100644 --- a/third_party/libwebrtc/rtc_base/socket_unittest.cc +++ b/third_party/libwebrtc/rtc_base/socket_unittest.cc @@ -27,6 +27,7 @@ #include "rtc_base/gunit.h" #include "rtc_base/logging.h" #include "rtc_base/net_helpers.h" +#include "rtc_base/net_test_helpers.h" #include "rtc_base/socket_address.h" #include "rtc_base/socket_server.h" #include "rtc_base/socket_unittest.h" diff --git a/third_party/libwebrtc/rtc_base/system/no_unique_address.h b/third_party/libwebrtc/rtc_base/system/no_unique_address.h index 6bede2c6b6d8..a40db3453b19 100644 --- a/third_party/libwebrtc/rtc_base/system/no_unique_address.h +++ b/third_party/libwebrtc/rtc_base/system/no_unique_address.h @@ -25,7 +25,7 @@ // clang-cl doesn't support it yet and support is unclear also when the target // platform is iOS. 
#ifndef __has_cpp_attribute -#define __has_cpp_attribute(__x) 0 +#define __has_cpp_attribute(x) 0 #endif #if __has_cpp_attribute(no_unique_address) // NOLINTNEXTLINE(whitespace/braces) diff --git a/third_party/libwebrtc/rtc_base/test_client_unittest.cc b/third_party/libwebrtc/rtc_base/test_client_unittest.cc index a0c8b88e626b..b2866a2d349f 100644 --- a/third_party/libwebrtc/rtc_base/test_client_unittest.cc +++ b/third_party/libwebrtc/rtc_base/test_client_unittest.cc @@ -17,6 +17,7 @@ #include "rtc_base/async_udp_socket.h" #include "rtc_base/logging.h" #include "rtc_base/net_helpers.h" +#include "rtc_base/net_test_helpers.h" #include "rtc_base/physical_socket_server.h" #include "rtc_base/socket.h" #include "rtc_base/test_echo_server.h" diff --git a/third_party/libwebrtc/rtc_base/third_party/base64/README.chromium b/third_party/libwebrtc/rtc_base/third_party/base64/README.chromium index 92ba0d36d7fe..1f01511feef9 100644 --- a/third_party/libwebrtc/rtc_base/third_party/base64/README.chromium +++ b/third_party/libwebrtc/rtc_base/third_party/base64/README.chromium @@ -6,6 +6,7 @@ Date: 2018-06-20 License: Custom license License File: LICENSE Security Critical: yes +Shipped: yes Description: A simple base64 encoder and decoder diff --git a/third_party/libwebrtc/rtc_base/third_party/sigslot/README.chromium b/third_party/libwebrtc/rtc_base/third_party/sigslot/README.chromium index e0575cf16ad2..9867dd032066 100644 --- a/third_party/libwebrtc/rtc_base/third_party/sigslot/README.chromium +++ b/third_party/libwebrtc/rtc_base/third_party/sigslot/README.chromium @@ -6,6 +6,7 @@ Date: 2018-07-09 License: Custom license License File: LICENSE Security Critical: yes +Shipped: yes Description: C++ Signal/Slot Library diff --git a/third_party/libwebrtc/rtc_base/time_utils.cc b/third_party/libwebrtc/rtc_base/time_utils.cc index 78434536e1c9..9f112e49c189 100644 --- a/third_party/libwebrtc/rtc_base/time_utils.cc +++ b/third_party/libwebrtc/rtc_base/time_utils.cc @@ -14,17 +14,27 @@ #include #endif -#if defined(WEBRTC_WIN) -#include -#endif - #include "rtc_base/checks.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/system_time.h" #include "rtc_base/time_utils.h" +#if defined(WEBRTC_WIN) +#include "rtc_base/win32.h" +#endif +#if defined(WEBRTC_WIN) +#include +#endif namespace rtc { +#if defined(WEBRTC_WIN) || defined(WINUWP) +// FileTime (January 1st 1601) to Unix time (January 1st 1970) +// offset in units of 100ns. +static constexpr uint64_t kFileTimeToUnixTimeEpochOffset = + 116444736000000000ULL; +static constexpr uint64_t kFileTimeToMicroSeconds = 10LL; +#endif + ClockInterface* g_clock = nullptr; ClockInterface* SetClockForTesting(ClockInterface* clock) { @@ -115,8 +125,6 @@ class TimeHelper final { } private: - static constexpr uint64_t kFileTimeToUnixTimeEpochOffset = - 116444736000000000ULL; static constexpr uint64_t kNTPTimeToUnixTimeEpochOffset = 2208988800000L; // The number of nanoseconds since unix system epoch @@ -231,13 +239,15 @@ int64_t TimeUTCMicros() { // Convert from second (1.0) and microsecond (1e-6). return (static_cast(time.tv_sec) * rtc::kNumMicrosecsPerSec + time.tv_usec); - #elif defined(WEBRTC_WIN) - struct _timeb time; - _ftime(&time); - // Convert from second (1.0) and milliseconds (1e-3). - return (static_cast(time.time) * rtc::kNumMicrosecsPerSec + - static_cast(time.millitm) * rtc::kNumMicrosecsPerMillisec); + FILETIME ft; + // This will give us system file in UTC format in multiples of 100ns. 
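  // Illustrative sanity check (not part of the vendored change): the epoch
  // offset used below covers 1601-01-01..1970-01-01, i.e. 369 years containing
  // 89 leap days, expressed in 100 ns ticks.
  static_assert((369ULL * 365 + 89) * 86400 * 10000000 == 116444736000000000ULL,
                "FILETIME->Unix epoch offset in 100 ns ticks");
  // Ten 100 ns ticks make one microsecond, hence kFileTimeToMicroSeconds = 10.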
+ GetSystemTimeAsFileTime(&ft); + LARGE_INTEGER li; + li.HighPart = ft.dwHighDateTime; + li.LowPart = ft.dwLowDateTime; + return (li.QuadPart - kFileTimeToUnixTimeEpochOffset) / + kFileTimeToMicroSeconds; #endif } diff --git a/third_party/libwebrtc/rtc_base/timeutils_gn/moz.build b/third_party/libwebrtc/rtc_base/timeutils_gn/moz.build index a0ade7c241f5..92fd2c323b5f 100644 --- a/third_party/libwebrtc/rtc_base/timeutils_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/timeutils_gn/moz.build @@ -128,6 +128,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/rtc_base/win32.cc b/third_party/libwebrtc/rtc_base/win32.cc index b44e5130261a..9ce0523413b1 100644 --- a/third_party/libwebrtc/rtc_base/win32.cc +++ b/third_party/libwebrtc/rtc_base/win32.cc @@ -18,7 +18,6 @@ #include "rtc_base/arraysize.h" #include "rtc_base/byte_order.h" #include "rtc_base/checks.h" -#include "rtc_base/logging.h" #include "rtc_base/string_utils.h" namespace rtc { diff --git a/third_party/libwebrtc/rtc_base/win32_gn/moz.build b/third_party/libwebrtc/rtc_base/win32_gn/moz.build index b134f3004ef5..3d9223f39c05 100644 --- a/third_party/libwebrtc/rtc_base/win32_gn/moz.build +++ b/third_party/libwebrtc/rtc_base/win32_gn/moz.build @@ -58,8 +58,7 @@ LOCAL_INCLUDES += [ OS_LIBS += [ "crypt32", "iphlpapi", - "secur32", - "winmm" + "secur32" ] UNIFIED_SOURCES += [ diff --git a/third_party/libwebrtc/rtc_tools/BUILD.gn b/third_party/libwebrtc/rtc_tools/BUILD.gn index b324438c2df6..5ead8a0506ff 100644 --- a/third_party/libwebrtc/rtc_tools/BUILD.gn +++ b/third_party/libwebrtc/rtc_tools/BUILD.gn @@ -428,7 +428,11 @@ if (!build_with_chromium) { rtc_executable("video_encoder") { visibility = [ "*" ] testonly = true - sources = [ "video_encoder/video_encoder.cc" ] + sources = [ + "video_encoder/encoded_image_file_writer.cc", + "video_encoder/encoded_image_file_writer.h", + "video_encoder/video_encoder.cc", + ] deps = [ "//api:create_frame_generator", "//api:frame_generator_api", @@ -440,6 +444,7 @@ if (!build_with_chromium) { "//modules/video_coding/codecs/av1:av1_svc_config", "//modules/video_coding/svc:scalability_mode_util", "//rtc_base:logging", + "//test:y4m_frame_generator", "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/flags:parse", "//third_party/abseil-cpp/absl/flags:usage", diff --git a/third_party/libwebrtc/rtc_tools/DEPS b/third_party/libwebrtc/rtc_tools/DEPS index 2a06bf043f48..f62653d3ae77 100644 --- a/third_party/libwebrtc/rtc_tools/DEPS +++ b/third_party/libwebrtc/rtc_tools/DEPS @@ -37,6 +37,10 @@ specific_include_rules = { "+modules/video_coding/codecs/av1/av1_svc_config.h", "+modules/video_coding/include/video_codec_interface.h", "+modules/video_coding/svc/scalability_mode_util.h", + ], + ".*encoded_image_file_writer\.(cc|h)": [ + "+modules/video_coding/include/video_codec_interface.h", + "+modules/video_coding/svc/scalability_mode_util.h", "+modules/video_coding/utility/ivf_file_writer.h", ], } diff --git a/third_party/libwebrtc/rtc_tools/rtc_event_log_to_text/converter.cc b/third_party/libwebrtc/rtc_tools/rtc_event_log_to_text/converter.cc index 6bd3458a6f1c..f171260a4ecc 100644 --- a/third_party/libwebrtc/rtc_tools/rtc_event_log_to_text/converter.cc +++ b/third_party/libwebrtc/rtc_tools/rtc_event_log_to_text/converter.cc @@ -175,7 +175,8 @@ bool Convert(std::string inputfile, auto bwe_probe_failure_handler = [&](const LoggedBweProbeFailureEvent& event) { fprintf(output, 
"BWE_PROBE_FAILURE %" PRId64 " id=%d reason=%d\n", - event.log_time_ms(), event.id, event.failure_reason); + event.log_time_ms(), event.id, + static_cast(event.failure_reason)); }; auto bwe_probe_success_handler = @@ -209,7 +210,8 @@ bool Convert(std::string inputfile, auto dtls_transport_state_handler = [&](const LoggedDtlsTransportState& event) { fprintf(output, "DTLS_TRANSPORT_STATE %" PRId64 " state=%d\n", - event.log_time_ms(), event.dtls_transport_state); + event.log_time_ms(), + static_cast(event.dtls_transport_state)); }; auto dtls_transport_writable_handler = diff --git a/third_party/libwebrtc/rtc_tools/video_encoder/encoded_image_file_writer.cc b/third_party/libwebrtc/rtc_tools/video_encoder/encoded_image_file_writer.cc new file mode 100644 index 000000000000..624bce3643a2 --- /dev/null +++ b/third_party/libwebrtc/rtc_tools/video_encoder/encoded_image_file_writer.cc @@ -0,0 +1,120 @@ +/* + * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "rtc_tools/video_encoder/encoded_image_file_writer.h" + +#include "modules/video_coding/svc/scalability_mode_util.h" +#include "rtc_base/logging.h" + +namespace webrtc { +namespace test { + +EncodedImageFileWriter::EncodedImageFileWriter( + const VideoCodec& video_codec_setting) + : video_codec_setting_(video_codec_setting) { + const char* codec_string = + CodecTypeToPayloadString(video_codec_setting.codecType); + + // Retrieve scalability mode information. + absl::optional scalability_mode = + video_codec_setting.GetScalabilityMode(); + RTC_CHECK(scalability_mode); + spatial_layers_ = ScalabilityModeToNumSpatialLayers(*scalability_mode); + temporal_layers_ = ScalabilityModeToNumTemporalLayers(*scalability_mode); + inter_layer_pred_mode_ = + ScalabilityModeToInterLayerPredMode(*scalability_mode); + + RTC_CHECK_GT(spatial_layers_, 0); + RTC_CHECK_GT(temporal_layers_, 0); + // Create writer for every decode target. + for (int i = 0; i < spatial_layers_; ++i) { + for (int j = 0; j < temporal_layers_; ++j) { + char buffer[256]; + rtc::SimpleStringBuilder name(buffer); + name << "output-" << codec_string << "-" + << ScalabilityModeToString(*scalability_mode) << "-L" << i << "T" + << j << ".ivf"; + + decode_target_writers_.emplace_back(std::make_pair( + IvfFileWriter::Wrap(FileWrapper::OpenWriteOnly(name.str()), 0), + name.str())); + } + } +} + +EncodedImageFileWriter::~EncodedImageFileWriter() { + for (size_t i = 0; i < decode_target_writers_.size(); ++i) { + decode_target_writers_[i].first->Close(); + RTC_LOG(LS_INFO) << "Written: " << decode_target_writers_[i].second; + } +} + +int EncodedImageFileWriter::Write(const EncodedImage& encoded_image) { + // L1T1 does not set `SpatialIndex` and `TemporalIndex` in `EncodedImage`. + const int spatial_index = encoded_image.SpatialIndex().value_or(0); + const int temporal_index = encoded_image.TemporalIndex().value_or(0); + RTC_CHECK_LT(spatial_index, spatial_layers_); + RTC_CHECK_LT(temporal_index, temporal_layers_); + + if (spatial_index == 0) { + is_base_layer_key_frame = + (encoded_image._frameType == VideoFrameType::kVideoFrameKey); + } + + switch (inter_layer_pred_mode_) { + case InterLayerPredMode::kOff: { + // Write to this spatial layer. 
+ for (int j = temporal_index; j < temporal_layers_; ++j) { + const int index = spatial_index * temporal_layers_ + j; + RTC_CHECK_LT(index, decode_target_writers_.size()); + + decode_target_writers_[index].first->WriteFrame( + encoded_image, video_codec_setting_.codecType); + } + break; + } + + case InterLayerPredMode::kOn: { + // Write to this and higher spatial layers. + for (int i = spatial_index; i < spatial_layers_; ++i) { + for (int j = temporal_index; j < temporal_layers_; ++j) { + const int index = i * temporal_layers_ + j; + RTC_CHECK_LT(index, decode_target_writers_.size()); + + decode_target_writers_[index].first->WriteFrame( + encoded_image, video_codec_setting_.codecType); + } + } + break; + } + + case InterLayerPredMode::kOnKeyPic: { + for (int i = spatial_index; i < spatial_layers_; ++i) { + for (int j = temporal_index; j < temporal_layers_; ++j) { + const int index = i * temporal_layers_ + j; + RTC_CHECK_LT(index, decode_target_writers_.size()); + + decode_target_writers_[index].first->WriteFrame( + encoded_image, video_codec_setting_.codecType); + } + + // Write to higher spatial layers only if key frame. + if (!is_base_layer_key_frame) { + break; + } + } + break; + } + } + + return 0; +} + +} // namespace test +} // namespace webrtc diff --git a/third_party/libwebrtc/rtc_tools/video_encoder/encoded_image_file_writer.h b/third_party/libwebrtc/rtc_tools/video_encoder/encoded_image_file_writer.h new file mode 100644 index 000000000000..abe01b61b286 --- /dev/null +++ b/third_party/libwebrtc/rtc_tools/video_encoder/encoded_image_file_writer.h @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef RTC_TOOLS_VIDEO_ENCODER_ENCODED_IMAGE_FILE_WRITER_H_ +#define RTC_TOOLS_VIDEO_ENCODER_ENCODED_IMAGE_FILE_WRITER_H_ + +#include +#include +#include +#include + +#include "modules/video_coding/include/video_codec_interface.h" +#include "modules/video_coding/utility/ivf_file_writer.h" + +namespace webrtc { +namespace test { + +// The `EncodedImageFileWriter` writes the `EncodedImage` into ivf output. It +// supports SVC to output ivf for all decode targets. +class EncodedImageFileWriter final { + // The pair of writer and output file name. + using IvfWriterPair = std::pair, std::string>; + + public: + explicit EncodedImageFileWriter(const VideoCodec& video_codec_setting); + + ~EncodedImageFileWriter(); + + int Write(const EncodedImage& encoded_image); + + private: + VideoCodec video_codec_setting_; + + int spatial_layers_ = 0; + int temporal_layers_ = 0; + InterLayerPredMode inter_layer_pred_mode_ = InterLayerPredMode::kOff; + + bool is_base_layer_key_frame = false; + std::vector decode_target_writers_; +}; + +} // namespace test +} // namespace webrtc + +#endif // RTC_TOOLS_VIDEO_ENCODER_ENCODED_IMAGE_FILE_WRITER_H_ diff --git a/third_party/libwebrtc/rtc_tools/video_encoder/video_encoder.cc b/third_party/libwebrtc/rtc_tools/video_encoder/video_encoder.cc index 9f0100e2d025..a2eeef8d5fae 100644 --- a/third_party/libwebrtc/rtc_tools/video_encoder/video_encoder.cc +++ b/third_party/libwebrtc/rtc_tools/video_encoder/video_encoder.cc @@ -7,9 +7,6 @@ * in the file PATENTS. 
All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ - -#include - #include #include "absl/flags/flag.h" @@ -22,26 +19,32 @@ #include "modules/video_coding/codecs/av1/av1_svc_config.h" #include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/svc/scalability_mode_util.h" -#include "modules/video_coding/utility/ivf_file_writer.h" #include "rtc_base/logging.h" +#include "rtc_tools/video_encoder/encoded_image_file_writer.h" +#include "test/testsupport/y4m_frame_generator.h" ABSL_FLAG(std::string, video_codec, "", - "Sepcify codec of video encoder: vp8, vp9, h264, av1"); + "Specify codec of video encoder: vp8, vp9, h264, av1"); ABSL_FLAG(std::string, scalability_mode, "L1T1", - "Sepcify scalability mode of video encoder"); + "Specify scalability mode of video encoder"); ABSL_FLAG(uint32_t, raw_frame_generator, 0, - "Sepcify SquareFrameGenerator or SlideGenerator.\n" + "Specify SquareFrameGenerator or SlideGenerator.\n" "0: SquareFrameGenerator, 1: SlideGenerator"); -ABSL_FLAG(uint32_t, width, 1280, "Sepcify width of video encoder"); +ABSL_FLAG(uint32_t, width, 1280, "Specify width of video encoder"); ABSL_FLAG(uint32_t, height, 720, "Specify height of video encoder"); +ABSL_FLAG(std::string, + y4m_input_file, + "", + "Specify y4m input file of Y4mFrameGenerator"); + ABSL_FLAG(std::string, ivf_input_file, "", @@ -72,19 +75,6 @@ namespace { // See `WebRtcVideoSendChannel::kDefaultQpMax`. constexpr unsigned int kDefaultQpMax = 56; -const char* FrameTypeToString(const VideoFrameType& frame_type) { - switch (frame_type) { - case VideoFrameType::kEmptyFrame: - return "empty"; - case VideoFrameType::kVideoFrameKey: - return "key"; - case VideoFrameType::kVideoFrameDelta: - return "delta"; - } - RTC_CHECK_NOTREACHED(); - return ""; -} - [[maybe_unused]] const char* InterLayerPredModeToString( const InterLayerPredMode& inter_layer_pred_mode) { switch (inter_layer_pred_mode) { @@ -103,7 +93,7 @@ std::string ToString(const EncodedImage& encoded_image) { char buffer[1024]; rtc::SimpleStringBuilder ss(buffer); - ss << FrameTypeToString(encoded_image._frameType) + ss << VideoFrameTypeToString(encoded_image._frameType) << ", size=" << encoded_image.size() << ", qp=" << encoded_image.qp_ << ", timestamp=" << encoded_image.Timestamp(); @@ -153,110 +143,42 @@ std::string ToString(const EncodedImage& encoded_image) { } // Wrapper of `EncodedImageCallback` that writes all encoded images into ivf -// output. Each spatial layer has separated output including all its dependant -// layers. -class EncodedImageFileWriter : public EncodedImageCallback { - using TestIvfWriter = std::pair, std::string>; - +// files through `test::EncodedImageFileWriter`. +class TestEncodedImageCallback final : public EncodedImageCallback { public: - explicit EncodedImageFileWriter(const VideoCodec& video_codec_setting) + explicit TestEncodedImageCallback(const VideoCodec& video_codec_setting) : video_codec_setting_(video_codec_setting) { - const char* codec_string = - CodecTypeToPayloadString(video_codec_setting.codecType); - - // Retrieve scalability mode information. - absl::optional scalability_mode = - video_codec_setting.GetScalabilityMode(); - RTC_CHECK(scalability_mode); - spatial_layers_ = ScalabilityModeToNumSpatialLayers(*scalability_mode); - inter_layer_pred_mode_ = - ScalabilityModeToInterLayerPredMode(*scalability_mode); - - RTC_CHECK_GT(spatial_layers_, 0); - // Create writer for every spatial layer with the "-Lx" postfix. 
- for (int i = 0; i < spatial_layers_; ++i) { - char buffer[256]; - rtc::SimpleStringBuilder name(buffer); - name << "output-" << codec_string << "-" - << ScalabilityModeToString(*scalability_mode) << "-L" << i << ".ivf"; - - writers_.emplace_back(std::make_pair( - IvfFileWriter::Wrap(FileWrapper::OpenWriteOnly(name.str()), 0), - name.str())); - } + writer_ = + std::make_unique(video_codec_setting); } - ~EncodedImageFileWriter() override { - for (size_t i = 0; i < writers_.size(); ++i) { - writers_[i].first->Close(); - RTC_LOG(LS_INFO) << "Written: " << writers_[i].second; - } - } + ~TestEncodedImageCallback() = default; private: Result OnEncodedImage(const EncodedImage& encoded_image, const CodecSpecificInfo* codec_specific_info) override { - RTC_CHECK(codec_specific_info); - - ++frames_; RTC_LOG(LS_VERBOSE) << "frame " << frames_ << ": {" << ToString(encoded_image) << "}, codec_specific_info: {" << ToString(*codec_specific_info) << "}"; - if (spatial_layers_ == 1) { - // Single spatial layer stream. - RTC_CHECK_EQ(writers_.size(), 1); - RTC_CHECK(!encoded_image.SpatialIndex() || - *encoded_image.SpatialIndex() == 0); - writers_[0].first->WriteFrame(encoded_image, - video_codec_setting_.codecType); - } else { - // Multiple spatial layers stream. - RTC_CHECK_GT(spatial_layers_, 1); - RTC_CHECK_GT(writers_.size(), 1); - RTC_CHECK(encoded_image.SpatialIndex()); - int index = *encoded_image.SpatialIndex(); + RTC_CHECK(writer_); + writer_->Write(encoded_image); - RTC_CHECK_LT(index, writers_.size()); - switch (inter_layer_pred_mode_) { - case InterLayerPredMode::kOff: - writers_[index].first->WriteFrame(encoded_image, - video_codec_setting_.codecType); - break; - - case InterLayerPredMode::kOn: - // Write the encoded image into this layer and higher spatial layers. - for (size_t i = index; i < writers_.size(); ++i) { - writers_[i].first->WriteFrame(encoded_image, - video_codec_setting_.codecType); - } - break; - - case InterLayerPredMode::kOnKeyPic: - // Write the encoded image into this layer. - writers_[index].first->WriteFrame(encoded_image, - video_codec_setting_.codecType); - // If this is key frame, write to higher spatial layers as well. - if (encoded_image._frameType == VideoFrameType::kVideoFrameKey) { - for (size_t i = index + 1; i < writers_.size(); ++i) { - writers_[i].first->WriteFrame(encoded_image, - video_codec_setting_.codecType); - } - } - break; - } + RTC_CHECK(codec_specific_info); + // For SVC, every picture generates multiple encoded images of different + // spatial layers. + if (codec_specific_info->end_of_picture) { + ++frames_; } return Result(Result::Error::OK); } - VideoCodec video_codec_setting_ = {}; - int spatial_layers_ = 0; - InterLayerPredMode inter_layer_pred_mode_ = InterLayerPredMode::kOff; - - std::vector writers_; + VideoCodec video_codec_setting_; int32_t frames_ = 0; + + std::unique_ptr writer_; }; // Wrapper of `BuiltinVideoEncoderFactory`. @@ -267,7 +189,7 @@ class TestVideoEncoderFactoryWrapper final { RTC_CHECK(builtin_video_encoder_factory_); } - ~TestVideoEncoderFactoryWrapper() {} + ~TestVideoEncoderFactoryWrapper() = default; void ListSupportedFormats() const { // Log all supported formats. @@ -430,9 +352,9 @@ class TestVideoEncoderFactoryWrapper final { // A video encode tool supports to specify video codec, scalability mode, // resolution, frame rate, bitrate, key frame interval and maximum number of // frames. 
The video encoder supports multiple `FrameGeneratorInterface` -// implementations: `SquareFrameGenerator`, `SlideFrameGenerator` and -// `IvfFileFrameGenerator`. All the encoded bitstreams are wrote into ivf output -// files. +// implementations: `SquareFrameGenerator`, `SlideFrameGenerator`, +// `Y4mFrameGenerator` and `IvfFileFrameGenerator`. All the encoded bitstreams +// are wrote into ivf output files. int main(int argc, char* argv[]) { absl::SetProgramUsageMessage( "A video encode tool.\n" @@ -448,6 +370,9 @@ int main(int argc, char* argv[]) { "--frame_rate_fps=30 " "--bitrate_kbps=500\n" "\n" + "./video_encoder --y4m_input_file=input.y4m --video_codec=av1 " + "--scalability_mode=L1T3\n" + "\n" "./video_encoder --ivf_input_file=input.ivf --video_codec=av1 " "--scalability_mode=L1T3\n"); absl::ParseCommandLine(argc, argv); @@ -472,6 +397,7 @@ int main(int argc, char* argv[]) { uint32_t raw_frame_generator = absl::GetFlag(FLAGS_raw_frame_generator); + const std::string y4m_input_file = absl::GetFlag(FLAGS_y4m_input_file); const std::string ivf_input_file = absl::GetFlag(FLAGS_ivf_input_file); const uint32_t frame_rate_fps = absl::GetFlag(FLAGS_frame_rate_fps); @@ -510,8 +436,26 @@ int main(int argc, char* argv[]) { } // Create `FrameGeneratorInterface`. + if (!y4m_input_file.empty() && !ivf_input_file.empty()) { + RTC_LOG(LS_ERROR) + << "Can not specify both '--y4m_input_file' and '--ivf_input_file'"; + return EXIT_FAILURE; + } + std::unique_ptr frame_buffer_generator; - if (!ivf_input_file.empty()) { + if (!y4m_input_file.empty()) { + // Use `Y4mFrameGenerator` if specify `--y4m_input_file`. + frame_buffer_generator = std::make_unique( + y4m_input_file, webrtc::test::Y4mFrameGenerator::RepeatMode::kLoop); + + webrtc::test::FrameGeneratorInterface::Resolution resolution = + frame_buffer_generator->GetResolution(); + if (resolution.width != width || resolution.height != height) { + frame_buffer_generator->ChangeResolution(width, height); + } + + RTC_LOG(LS_INFO) << "Create Y4mFrameGenerator: " << width << "x" << height; + } else if (!ivf_input_file.empty()) { // Use `IvfFileFrameGenerator` if specify `--ivf_input_file`. frame_buffer_generator = webrtc::test::CreateFromIvfFileFrameGenerator(ivf_input_file); @@ -570,12 +514,14 @@ int main(int argc, char* argv[]) { video_codec_setting); RTC_CHECK(video_encoder); - // Create `EncodedImageFileWriter`. - std::unique_ptr encoded_image_file_writer = - std::make_unique(video_codec_setting); - RTC_CHECK(encoded_image_file_writer); + // Create `TestEncodedImageCallback`. + std::unique_ptr + test_encoded_image_callback = + std::make_unique( + video_codec_setting); + RTC_CHECK(test_encoded_image_callback); int ret = video_encoder->RegisterEncodeCompleteCallback( - encoded_image_file_writer.get()); + test_encoded_image_callback.get()); RTC_CHECK_EQ(ret, WEBRTC_VIDEO_CODEC_OK); // Start to encode frames. diff --git a/third_party/libwebrtc/sdk/BUILD.gn b/third_party/libwebrtc/sdk/BUILD.gn index bef4521dc688..eea26dc31d7e 100644 --- a/third_party/libwebrtc/sdk/BUILD.gn +++ b/third_party/libwebrtc/sdk/BUILD.gn @@ -230,9 +230,6 @@ if (is_ios || is_mac) { "objc/native/api/audio_device_module.h", "objc/native/api/audio_device_module.mm", ] - if (is_mac) { - frameworks = [ "AudioUnit.framework" ] - } deps = [ ":audio_device", @@ -253,6 +250,33 @@ if (is_ios || is_mac) { deps = [ "../rtc_base:threading" ] } + rtc_library("opengl_ui_objc") { + visibility = [ "*" ] + allow_poison = [ + "audio_codecs", # TODO(bugs.webrtc.org/8396): Remove. 
+ "default_task_queue", + ] + sources = [ + "objc/components/renderer/opengl/RTCDisplayLinkTimer.h", + "objc/components/renderer/opengl/RTCDisplayLinkTimer.m", + "objc/components/renderer/opengl/RTCEAGLVideoView.h", + "objc/components/renderer/opengl/RTCEAGLVideoView.m", + ] + + # TODO(bugs.webrtc.org/12937): Remove OpenGL deprecation warning + # workaround. + defines = [ "GLES_SILENCE_DEPRECATION" ] + configs += [ "..:common_objc" ] + deps = [ + ":base_objc", + ":helpers_objc", + ":metal_objc", + ":opengl_objc", + ":videocapture_objc", + ":videoframebuffer_objc", + ] + } + rtc_library("audio_device") { visibility = [ "*" ] @@ -279,6 +303,7 @@ if (is_ios || is_mac) { "../api/task_queue:pending_task_safety_flag", "../modules/audio_device:audio_device_api", "../modules/audio_device:audio_device_buffer", + "../modules/audio_device:audio_device_config", "../modules/audio_device:audio_device_generic", "../rtc_base:buffer", "../rtc_base:checks", @@ -394,6 +419,53 @@ if (is_ios || is_mac) { "../rtc_base/system:gcd_helpers", ] } + + rtc_library("opengl_objc") { + sources = [ + "objc/components/renderer/opengl/RTCDefaultShader.h", + "objc/components/renderer/opengl/RTCDefaultShader.mm", + "objc/components/renderer/opengl/RTCI420TextureCache.h", + "objc/components/renderer/opengl/RTCI420TextureCache.mm", + "objc/components/renderer/opengl/RTCNV12TextureCache.h", + "objc/components/renderer/opengl/RTCNV12TextureCache.m", + "objc/components/renderer/opengl/RTCOpenGLDefines.h", + "objc/components/renderer/opengl/RTCShader.h", + "objc/components/renderer/opengl/RTCShader.mm", + "objc/components/renderer/opengl/RTCVideoViewShading.h", + ] + frameworks = [ + "CoreVideo.framework", + "GLKit.framework", + "OpenGLES.framework", + "QuartzCore.framework", + ] + + # TODO(bugs.webrtc.org/12937): Remove OpenGL deprecation warning + # workaround. + defines = [ "GLES_SILENCE_DEPRECATION" ] + + deps = [ + ":base_objc", + ":helpers_objc", + ":mediaconstraints_objc", + ":native_video", + ":videoframebuffer_objc", + ":videosource_objc", + "../api:libjingle_peerconnection_api", + "../api/video:video_frame", + "../api/video:video_rtp_headers", + "../common_video", + "../media:rtc_media_base", + "../rtc_base:checks", + "../rtc_base:logging", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + + configs += [ + "..:common_objc", + ":used_from_extension", + ] + } } rtc_source_set("audio_device_api_objc") { @@ -525,97 +597,6 @@ if (is_ios || is_mac) { } if (!build_with_mozilla) { - rtc_library("opengl_objc") { - sources = [ - "objc/components/renderer/opengl/RTCDefaultShader.h", - "objc/components/renderer/opengl/RTCDefaultShader.mm", - "objc/components/renderer/opengl/RTCI420TextureCache.h", - "objc/components/renderer/opengl/RTCI420TextureCache.mm", - "objc/components/renderer/opengl/RTCOpenGLDefines.h", - "objc/components/renderer/opengl/RTCShader.h", - "objc/components/renderer/opengl/RTCShader.mm", - "objc/components/renderer/opengl/RTCVideoViewShading.h", - ] - frameworks = [ "CoreVideo.framework" ] - if (is_ios) { - sources += [ - "objc/components/renderer/opengl/RTCNV12TextureCache.h", - "objc/components/renderer/opengl/RTCNV12TextureCache.m", - ] - frameworks += [ - "GLKit.framework", - "OpenGLES.framework", - "QuartzCore.framework", - ] - } else if (is_mac) { - frameworks += [ - "CoreMedia.framework", - "OpenGL.framework", - ] - } - - # TODO(bugs.webrtc.org/12937): Remove OpenGL deprecation warning - # workaround. 
- defines = [ "GLES_SILENCE_DEPRECATION" ] - - deps = [ - ":base_objc", - ":helpers_objc", - ":mediaconstraints_objc", - ":native_video", - ":videoframebuffer_objc", - ":videosource_objc", - "../api:libjingle_peerconnection_api", - "../api/video:video_frame", - "../api/video:video_rtp_headers", - "../common_video", - "../media:rtc_media_base", - "../rtc_base:checks", - "../rtc_base:logging", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - - configs += [ - "..:common_objc", - ":used_from_extension", - ] - } - - rtc_library("opengl_ui_objc") { - visibility = [ "*" ] - allow_poison = [ - "audio_codecs", # TODO(bugs.webrtc.org/8396): Remove. - "default_task_queue", - ] - if (is_ios) { - sources = [ - "objc/components/renderer/opengl/RTCDisplayLinkTimer.h", - "objc/components/renderer/opengl/RTCDisplayLinkTimer.m", - "objc/components/renderer/opengl/RTCEAGLVideoView.h", - "objc/components/renderer/opengl/RTCEAGLVideoView.m", - ] - - # TODO(bugs.webrtc.org/12937): Remove OpenGL deprecation warning - # workaround. - defines = [ "GLES_SILENCE_DEPRECATION" ] - } - if (is_mac) { - sources = [ - "objc/components/renderer/opengl/RTCNSGLVideoView.h", - "objc/components/renderer/opengl/RTCNSGLVideoView.m", - ] - } - configs += [ "..:common_objc" ] - deps = [ - ":base_objc", - ":helpers_objc", - ":metal_objc", - ":opengl_objc", - ":videocapture_objc", - ":videoframebuffer_objc", - ] - } - rtc_library("metal_objc") { visibility = [ "*" ] allow_poison = [ @@ -1207,7 +1188,7 @@ if (is_ios || is_mac) { "//third_party/libyuv", ] - if (rtc_ios_macos_use_opengl_rendering) { + if (rtc_ios_use_opengl_rendering) { deps += [ ":opengl_objc" ] } @@ -1388,9 +1369,6 @@ if (is_ios || is_mac) { ":videocodec_objc", ":videotoolbox_objc", ] - if (rtc_ios_macos_use_opengl_rendering) { - deps += [ ":opengl_ui_objc" ] - } if (!build_with_chromium) { deps += [ ":callback_logger_objc", @@ -1494,7 +1472,6 @@ if (is_ios || is_mac) { "objc/components/capturer/RTCCameraVideoCapturer.h", "objc/components/capturer/RTCFileVideoCapturer.h", "objc/components/renderer/metal/RTCMTLNSVideoView.h", - "objc/components/renderer/opengl/RTCNSGLVideoView.h", "objc/components/renderer/opengl/RTCVideoViewShading.h", "objc/components/video_codec/RTCCodecSpecificInfoH264.h", "objc/components/video_codec/RTCDefaultVideoDecoderFactory.h", @@ -1519,7 +1496,6 @@ if (is_ios || is_mac) { ":default_codec_factory_objc", ":native_api", ":native_video", - ":opengl_ui_objc", ":peerconnectionfactory_base_objc", ":videocapture_objc", ":videocodec_objc", diff --git a/third_party/libwebrtc/sdk/android/api/org/webrtc/audio/LegacyAudioDeviceModule.java b/third_party/libwebrtc/sdk/android/api/org/webrtc/audio/LegacyAudioDeviceModule.java deleted file mode 100644 index de0d0d61f942..000000000000 --- a/third_party/libwebrtc/sdk/android/api/org/webrtc/audio/LegacyAudioDeviceModule.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -package org.webrtc.audio; - -import org.webrtc.voiceengine.WebRtcAudioRecord; -import org.webrtc.voiceengine.WebRtcAudioTrack; - -/** - * This class represents the legacy AudioDeviceModule that is currently hardcoded into C++ WebRTC. - * It will return a null native AudioDeviceModule pointer, leading to an internal object being - * created inside WebRTC that is controlled by static calls to the classes under the voiceengine - * package. Please use the new JavaAudioDeviceModule instead of this class. - */ -@Deprecated -public class LegacyAudioDeviceModule implements AudioDeviceModule { - @Override - public long getNativeAudioDeviceModulePointer() { - // Returning a null pointer will make WebRTC construct the built-in legacy AudioDeviceModule for - // Android internally. - return 0; - } - - @Override - public void release() { - // All control for this ADM goes through static global methods and the C++ object is owned - // internally by WebRTC. - } - - @Override - public void setSpeakerMute(boolean mute) { - WebRtcAudioTrack.setSpeakerMute(mute); - } - - @Override - public void setMicrophoneMute(boolean mute) { - WebRtcAudioRecord.setMicrophoneMute(mute); - } -} diff --git a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java index f398602a2828..506e33ffe429 100644 --- a/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java +++ b/third_party/libwebrtc/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java @@ -55,11 +55,13 @@ class WebRtcAudioManager { : getMinInputFrameSize(sampleRate, numberOfInputChannels); } - private static boolean isLowLatencyOutputSupported(Context context) { + @CalledByNative + static boolean isLowLatencyOutputSupported(Context context) { return context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_AUDIO_LOW_LATENCY); } - private static boolean isLowLatencyInputSupported(Context context) { + @CalledByNative + static boolean isLowLatencyInputSupported(Context context) { // TODO(henrika): investigate if some sort of device list is needed here // as well. The NDK doc states that: "As of API level 21, lower latency // audio input is supported on select devices. To take advantage of this diff --git a/third_party/libwebrtc/sdk/objc/base/RTCMacros.h b/third_party/libwebrtc/sdk/objc/base/RTCMacros.h index 469e3c93bd0b..114ced0ea6ab 100644 --- a/third_party/libwebrtc/sdk/objc/base/RTCMacros.h +++ b/third_party/libwebrtc/sdk/objc/base/RTCMacros.h @@ -36,9 +36,10 @@ // WebRTC.framework with their own prefix in case symbol clashing is a // problem. // -// This macro must only be defined here and not on via compiler flag to -// ensure it has a unique value. +// This macro must be defined uniformily across all the translation units. 
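The `#ifndef` guard added just below lets a build predefine RTC_OBJC_TYPE_PREFIX instead of always forcing the empty default; the new `rtc_objc_test_prefix` GN mixin later in this patch (`rtc_objc_prefix="RTC_TESTING"`) relies on exactly that. A rough illustration of the mechanism, assuming the usual token-pasting definition of RTC_OBJC_TYPE from the same header (the concat helpers below are paraphrased, not quoted):

// Paraphrase of the prefixing machinery in RTCMacros.h (helper names are
// assumptions); the point is that the prefix is pasted onto every exported
// Objective-C class name.
#define RTC_SYMBOL_CONCAT_HELPER(a, b) a##b
#define RTC_SYMBOL_CONCAT(a, b) RTC_SYMBOL_CONCAT_HELPER(a, b)

#ifndef RTC_OBJC_TYPE_PREFIX  // may now come from the compiler command line
#define RTC_OBJC_TYPE_PREFIX  // default: no prefix
#endif

#define RTC_OBJC_TYPE(type_name) RTC_SYMBOL_CONCAT(RTC_OBJC_TYPE_PREFIX, type_name)

// With -DRTC_OBJC_TYPE_PREFIX=RTC_TESTING, RTC_OBJC_TYPE(RTCVideoFrame)
// expands to RTC_TESTINGRTCVideoFrame; with the default it stays RTCVideoFrame.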
+#ifndef RTC_OBJC_TYPE_PREFIX #define RTC_OBJC_TYPE_PREFIX +#endif // RCT_OBJC_TYPE // diff --git a/third_party/libwebrtc/sdk/objc/components/capturer/RTCCameraVideoCapturer.m b/third_party/libwebrtc/sdk/objc/components/capturer/RTCCameraVideoCapturer.m index 98d3cf9f455b..e7c47b4e9963 100644 --- a/third_party/libwebrtc/sdk/objc/components/capturer/RTCCameraVideoCapturer.m +++ b/third_party/libwebrtc/sdk/objc/components/capturer/RTCCameraVideoCapturer.m @@ -118,16 +118,11 @@ const int64_t kNanosecondsPerSecond = 1000000000; } + (NSArray *)captureDevices { -#if defined(WEBRTC_IOS) && defined(__IPHONE_10_0) && \ - __IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_10_0 AVCaptureDeviceDiscoverySession *session = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ] mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionUnspecified]; return session.devices; -#else - return [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; -#endif } + (NSArray *)supportedFormatsForDevice:(AVCaptureDevice *)device { diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h index f70e2ad5ee4b..5a2e7d380f7c 100644 --- a/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h +++ b/third_party/libwebrtc/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h @@ -12,8 +12,6 @@ #import "RTCVideoRenderer.h" -NS_AVAILABLE_MAC(10.11) - RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCMTLNSVideoView) : NSView diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm index 51dca3223dcc..9d686f625c5f 100644 --- a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm +++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm @@ -10,11 +10,7 @@ #import "RTCDefaultShader.h" -#if TARGET_OS_IPHONE #import -#else -#import -#endif #import "RTCOpenGLDefines.h" #import "RTCShader.h" @@ -139,9 +135,7 @@ static const char kNV12FragmentShaderSource[] = RTCLog(@"Failed to setup vertex buffer"); return NO; } -#if !TARGET_OS_IPHONE - glBindVertexArray(_vertexArray); -#endif + glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer); if (!_currentRotation || rotation != *_currentRotation) { _currentRotation = absl::optional(rotation); diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm index 5dccd4bf6a2e..a91e927cb4cb 100644 --- a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm +++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm @@ -10,11 +10,7 @@ #import "RTCI420TextureCache.h" -#if TARGET_OS_IPHONE #import -#else -#import -#endif #import "base/RTCI420Buffer.h" #import "base/RTCVideoFrameBuffer.h" @@ -51,11 +47,7 @@ static const GLsizei kNumTextures = kNumTexturesPerSet * kNumTextureSets; - (instancetype)initWithContext:(GlContextType *)context { if (self = [super init]) { -#if TARGET_OS_IPHONE _hasUnpackRowLength = (context.API == kEAGLRenderingAPIOpenGLES3); -#else - _hasUnpackRowLength = YES; -#endif glPixelStorei(GL_UNPACK_ALIGNMENT, 1); [self setupTextures]; diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h 
b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h deleted file mode 100644 index c9ee986f88d0..000000000000 --- a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#if !TARGET_OS_IPHONE - -#import - -#import "RTCVideoRenderer.h" -#import "RTCVideoViewShading.h" - -NS_ASSUME_NONNULL_BEGIN - -@class RTC_OBJC_TYPE(RTCNSGLVideoView); - -RTC_OBJC_EXPORT -@protocol RTC_OBJC_TYPE -(RTCNSGLVideoViewDelegate) @end - -RTC_OBJC_EXPORT -@interface RTC_OBJC_TYPE (RTCNSGLVideoView) : NSOpenGLView - -@property(nonatomic, weak) id delegate; - -- (instancetype)initWithFrame:(NSRect)frameRect - pixelFormat:(NSOpenGLPixelFormat *)format - shader:(id)shader - NS_DESIGNATED_INITIALIZER; - -@end - -NS_ASSUME_NONNULL_END - -#endif diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m deleted file mode 100644 index 168c73126fc3..000000000000 --- a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m +++ /dev/null @@ -1,199 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#if !TARGET_OS_IPHONE - -#import "RTCNSGLVideoView.h" - -#import -#import -#import - -#import "RTCDefaultShader.h" -#import "RTCI420TextureCache.h" -#import "base/RTCLogging.h" -#import "base/RTCVideoFrame.h" - -@interface RTC_OBJC_TYPE (RTCNSGLVideoView) -() - // `videoFrame` is set when we receive a frame from a worker thread and is read - // from the display link callback so atomicity is required. 
- @property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * - videoFrame; -@property(atomic, strong) RTCI420TextureCache *i420TextureCache; - -- (void)drawFrame; -@end - -static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink, - const CVTimeStamp *now, - const CVTimeStamp *outputTime, - CVOptionFlags flagsIn, - CVOptionFlags *flagsOut, - void *displayLinkContext) { - RTC_OBJC_TYPE(RTCNSGLVideoView) *view = - (__bridge RTC_OBJC_TYPE(RTCNSGLVideoView) *)displayLinkContext; - [view drawFrame]; - return kCVReturnSuccess; -} - -@implementation RTC_OBJC_TYPE (RTCNSGLVideoView) { - CVDisplayLinkRef _displayLink; - RTC_OBJC_TYPE(RTCVideoFrame) * _lastDrawnFrame; - id _shader; -} - -@synthesize delegate = _delegate; -@synthesize videoFrame = _videoFrame; -@synthesize i420TextureCache = _i420TextureCache; - -- (instancetype)initWithFrame:(NSRect)frame pixelFormat:(NSOpenGLPixelFormat *)format { - return [self initWithFrame:frame pixelFormat:format shader:[[RTCDefaultShader alloc] init]]; -} - -- (instancetype)initWithFrame:(NSRect)frame - pixelFormat:(NSOpenGLPixelFormat *)format - shader:(id)shader { - if (self = [super initWithFrame:frame pixelFormat:format]) { - _shader = shader; - } - return self; -} - -- (void)dealloc { - [self teardownDisplayLink]; -} - -- (void)drawRect:(NSRect)rect { - [self drawFrame]; -} - -- (void)reshape { - [super reshape]; - NSRect frame = [self frame]; - [self ensureGLContext]; - CGLLockContext([[self openGLContext] CGLContextObj]); - glViewport(0, 0, frame.size.width, frame.size.height); - CGLUnlockContext([[self openGLContext] CGLContextObj]); -} - -- (void)lockFocus { - NSOpenGLContext *context = [self openGLContext]; - [super lockFocus]; - if ([context view] != self) { - [context setView:self]; - } - [context makeCurrentContext]; -} - -- (void)prepareOpenGL { - [super prepareOpenGL]; - [self ensureGLContext]; - glDisable(GL_DITHER); - [self setupDisplayLink]; -} - -- (void)clearGLContext { - [self ensureGLContext]; - self.i420TextureCache = nil; - [super clearGLContext]; -} - -#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer) - -// These methods may be called on non-main thread. -- (void)setSize:(CGSize)size { - dispatch_async(dispatch_get_main_queue(), ^{ - [self.delegate videoView:self didChangeVideoSize:size]; - }); -} - -- (void)renderFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { - self.videoFrame = frame; -} - -#pragma mark - Private - -- (void)drawFrame { - RTC_OBJC_TYPE(RTCVideoFrame) *frame = self.videoFrame; - if (!frame || frame == _lastDrawnFrame) { - return; - } - // This method may be called from CVDisplayLink callback which isn't on the - // main thread so we have to lock the GL context before drawing. - NSOpenGLContext *context = [self openGLContext]; - CGLLockContext([context CGLContextObj]); - - [self ensureGLContext]; - glClear(GL_COLOR_BUFFER_BIT); - - // Rendering native CVPixelBuffer is not supported on OS X. - // TODO(magjed): Add support for NV12 texture cache on OS X. 
- frame = [frame newI420VideoFrame]; - if (!self.i420TextureCache) { - self.i420TextureCache = [[RTCI420TextureCache alloc] initWithContext:context]; - } - RTCI420TextureCache *i420TextureCache = self.i420TextureCache; - if (i420TextureCache) { - [i420TextureCache uploadFrameToTextures:frame]; - [_shader applyShadingForFrameWithWidth:frame.width - height:frame.height - rotation:frame.rotation - yPlane:i420TextureCache.yTexture - uPlane:i420TextureCache.uTexture - vPlane:i420TextureCache.vTexture]; - [context flushBuffer]; - _lastDrawnFrame = frame; - } - CGLUnlockContext([context CGLContextObj]); -} - -- (void)setupDisplayLink { - if (_displayLink) { - return; - } - // Synchronize buffer swaps with vertical refresh rate. - GLint swapInt = 1; - [[self openGLContext] setValues:&swapInt forParameter:NSOpenGLCPSwapInterval]; - - // Create display link. - CVDisplayLinkCreateWithActiveCGDisplays(&_displayLink); - CVDisplayLinkSetOutputCallback(_displayLink, - &OnDisplayLinkFired, - (__bridge void *)self); - // Set the display link for the current renderer. - CGLContextObj cglContext = [[self openGLContext] CGLContextObj]; - CGLPixelFormatObj cglPixelFormat = [[self pixelFormat] CGLPixelFormatObj]; - CVDisplayLinkSetCurrentCGDisplayFromOpenGLContext( - _displayLink, cglContext, cglPixelFormat); - CVDisplayLinkStart(_displayLink); -} - -- (void)teardownDisplayLink { - if (!_displayLink) { - return; - } - CVDisplayLinkRelease(_displayLink); - _displayLink = NULL; -} - -- (void)ensureGLContext { - NSOpenGLContext* context = [self openGLContext]; - NSAssert(context, @"context shouldn't be nil"); - if ([NSOpenGLContext currentContext] != context) { - [context makeCurrentContext]; - } -} - -@end - -#endif // !TARGET_OS_IPHONE diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCOpenGLDefines.h b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCOpenGLDefines.h index 408853586115..d84d99227844 100644 --- a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCOpenGLDefines.h +++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCOpenGLDefines.h @@ -10,7 +10,6 @@ #import -#if TARGET_OS_IPHONE #define RTC_PIXEL_FORMAT GL_LUMINANCE #define SHADER_VERSION #define VERTEX_SHADER_IN "attribute" @@ -22,16 +21,3 @@ @class EAGLContext; typedef EAGLContext GlContextType; -#else -#define RTC_PIXEL_FORMAT GL_RED -#define SHADER_VERSION "#version 150\n" -#define VERTEX_SHADER_IN "in" -#define VERTEX_SHADER_OUT "out" -#define FRAGMENT_SHADER_IN "in" -#define FRAGMENT_SHADER_OUT "out vec4 fragColor;\n" -#define FRAGMENT_SHADER_COLOR "fragColor" -#define FRAGMENT_SHADER_TEXTURE "texture" - -@class NSOpenGLContext; -typedef NSOpenGLContext GlContextType; -#endif diff --git a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.mm b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.mm index 8eccd7fbec60..25f6eee34e66 100644 --- a/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.mm +++ b/third_party/libwebrtc/sdk/objc/components/renderer/opengl/RTCShader.mm @@ -10,11 +10,7 @@ #import "RTCShader.h" -#if TARGET_OS_IPHONE #import -#else -#import -#endif #include #include @@ -125,13 +121,6 @@ GLuint RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]) { } BOOL RTCCreateVertexBuffer(GLuint *vertexBuffer, GLuint *vertexArray) { -#if !TARGET_OS_IPHONE - glGenVertexArrays(1, vertexArray); - if (*vertexArray == 0) { - return NO; - } - glBindVertexArray(*vertexArray); -#endif glGenBuffers(1, vertexBuffer); if 
(*vertexBuffer == 0) { glDeleteVertexArrays(1, vertexArray); diff --git a/third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m b/third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m index 28743a6fc109..56c74971b692 100644 --- a/third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m +++ b/third_party/libwebrtc/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m @@ -287,8 +287,8 @@ __block RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory; __block RTC_OBJC_TYPE(RTCPeerConnection) * pc1; - RTCSessionDescription *rollback = [[RTCSessionDescription alloc] initWithType:RTCSdpTypeRollback - sdp:@""]; + RTC_OBJC_TYPE(RTCSessionDescription) *rollback = + [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:RTCSdpTypeRollback sdp:@""]; @autoreleasepool { factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; diff --git a/third_party/libwebrtc/stats/rtcstats_objects.cc b/third_party/libwebrtc/stats/rtcstats_objects.cc index 31720ad33f5f..77feaf87ba46 100644 --- a/third_party/libwebrtc/stats/rtcstats_objects.cc +++ b/third_party/libwebrtc/stats/rtcstats_objects.cc @@ -307,11 +307,14 @@ WEBRTC_RTCSTATS_IMPL( &packets_received, &packets_discarded, &fec_packets_received, + &fec_bytes_received, &fec_packets_discarded, + &fec_ssrc, &bytes_received, &header_bytes_received, &retransmitted_packets_received, &retransmitted_bytes_received, + &rtx_ssrc, &last_packet_received_timestamp, &jitter_buffer_delay, &jitter_buffer_target_delay, @@ -371,11 +374,14 @@ RTCInboundRtpStreamStats::RTCInboundRtpStreamStats(std::string id, packets_received("packetsReceived"), packets_discarded("packetsDiscarded"), fec_packets_received("fecPacketsReceived"), + fec_bytes_received("fecBytesReceived"), fec_packets_discarded("fecPacketsDiscarded"), + fec_ssrc("fecSsrc"), bytes_received("bytesReceived"), header_bytes_received("headerBytesReceived"), retransmitted_packets_received("retransmittedPacketsReceived"), retransmitted_bytes_received("retransmittedBytesReceived"), + rtx_ssrc("rtxSsrc"), last_packet_received_timestamp("lastPacketReceivedTimestamp"), jitter_buffer_delay("jitterBufferDelay"), jitter_buffer_target_delay("jitterBufferTargetDelay"), @@ -460,7 +466,8 @@ WEBRTC_RTCSTATS_IMPL( &qp_sum, &active, &power_efficient_encoder, - &scalability_mode) + &scalability_mode, + &rtx_ssrc) // clang-format on RTCOutboundRtpStreamStats::RTCOutboundRtpStreamStats(std::string id, @@ -496,7 +503,8 @@ RTCOutboundRtpStreamStats::RTCOutboundRtpStreamStats(std::string id, qp_sum("qpSum"), active("active"), power_efficient_encoder("powerEfficientEncoder"), - scalability_mode("scalabilityMode") {} + scalability_mode("scalabilityMode"), + rtx_ssrc("rtxSsrc") {} RTCOutboundRtpStreamStats::RTCOutboundRtpStreamStats( const RTCOutboundRtpStreamStats& other) = default; diff --git a/third_party/libwebrtc/system_wrappers/field_trial_gn/moz.build b/third_party/libwebrtc/system_wrappers/field_trial_gn/moz.build index 780a6fdef2f1..f447ddb77228 100644 --- a/third_party/libwebrtc/system_wrappers/field_trial_gn/moz.build +++ b/third_party/libwebrtc/system_wrappers/field_trial_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/test/BUILD.gn b/third_party/libwebrtc/test/BUILD.gn index f687a9b212b4..860ae3a089c6 100644 --- a/third_party/libwebrtc/test/BUILD.gn +++ b/third_party/libwebrtc/test/BUILD.gn @@ -66,6 +66,7 @@ 
rtc_library("frame_generator_impl") { "../modules/video_coding:webrtc_h264", "../modules/video_coding:webrtc_vp8", "../modules/video_coding:webrtc_vp9", + "../modules/video_coding/codecs/av1:dav1d_decoder", "../rtc_base:checks", "../rtc_base:criticalsection", "../rtc_base:logging", @@ -256,6 +257,7 @@ if (!build_with_chromium) { "../api:media_stream_interface", "../api:scoped_refptr", "../modules/video_capture:video_capture_module", + "../rtc_base:logging", "../rtc_base:threading", "../sdk:base_objc", "../sdk:native_api", @@ -418,11 +420,11 @@ if (is_ios) { sources = [ "ios/coverage_util_ios.h", "ios/coverage_util_ios.mm", - "ios/google_test_runner_delegate.h", "ios/test_support.h", "ios/test_support.mm", ] deps = [ + ":google_test_runner_delegate", ":perf_test", "../api/test/metrics:chrome_perf_dashboard_metrics_exporter", "../api/test/metrics:global_metrics_logger_and_exporter", @@ -436,13 +438,17 @@ if (is_ios) { configs += [ ":test_support_objc_config" ] } + rtc_library("google_test_runner_delegate") { + sources = [ "ios/google_test_runner_delegate.h" ] + } + rtc_library("google_test_runner_objc") { testonly = true visibility = [ "*" ] sources = [ "ios/google_test_runner.mm" ] - deps = [ ":test_support_objc" ] configs += [ "//build/config/ios:xctest_config" ] frameworks = [ "UIKit.framework" ] + deps = [ ":google_test_runner_delegate" ] } config("test_support_objc_config") { @@ -1267,6 +1273,7 @@ if (!build_with_chromium) { ":run_loop", ":scoped_key_value_config", ":test_support", + ":test_video_capturer", ":video_test_common", ":video_test_constants", "../api:array_view", diff --git a/third_party/libwebrtc/test/call_test.cc b/third_party/libwebrtc/test/call_test.cc index 7a1bbd296994..b8a1cd76b83d 100644 --- a/third_party/libwebrtc/test/call_test.cc +++ b/third_party/libwebrtc/test/call_test.cc @@ -634,7 +634,14 @@ void CallTest::Start() { audio_recv_stream->Start(); } +void CallTest::StartVideoSources() { + for (size_t i = 0; i < video_sources_.size(); ++i) { + video_sources_[i]->Start(); + } +} + void CallTest::StartVideoStreams() { + StartVideoSources(); for (size_t i = 0; i < video_send_streams_.size(); ++i) { std::vector active_rtp_streams( video_send_configs_[i].rtp.ssrcs.size(), true); diff --git a/third_party/libwebrtc/test/call_test.h b/third_party/libwebrtc/test/call_test.h index 41db9cefd178..08d0e49a685d 100644 --- a/third_party/libwebrtc/test/call_test.h +++ b/third_party/libwebrtc/test/call_test.h @@ -36,6 +36,7 @@ #include "test/rtp_rtcp_observer.h" #include "test/run_loop.h" #include "test/scoped_key_value_config.h" +#include "test/test_video_capturer.h" #include "test/video_test_constants.h" namespace webrtc { @@ -162,6 +163,7 @@ class CallTest : public ::testing::Test, public RtpPacketSinkInterface { void ConnectVideoSourcesToStreams(); void Start(); + void StartVideoSources(); void StartVideoStreams(); void Stop(); void StopVideoStreams(); @@ -210,8 +212,7 @@ class CallTest : public ::testing::Test, public RtpPacketSinkInterface { std::vector flexfec_receive_streams_; test::FrameGeneratorCapturer* frame_generator_capturer_; - std::vector>> - video_sources_; + std::vector> video_sources_; DegradationPreference degradation_preference_ = DegradationPreference::MAINTAIN_FRAMERATE; diff --git a/third_party/libwebrtc/test/frame_generator_capturer.h b/third_party/libwebrtc/test/frame_generator_capturer.h index b1fd7ebfc737..6824ba681e14 100644 --- a/third_party/libwebrtc/test/frame_generator_capturer.h +++ b/third_party/libwebrtc/test/frame_generator_capturer.h @@ -52,8 
+52,8 @@ class FrameGeneratorCapturer : public TestVideoCapturer { TaskQueueFactory& task_queue_factory); virtual ~FrameGeneratorCapturer(); - void Start(); - void Stop(); + void Start() override; + void Stop() override; void ChangeResolution(size_t width, size_t height); void ChangeFramerate(int target_framerate); diff --git a/third_party/libwebrtc/test/fuzzers/utils/rtp_replayer.cc b/third_party/libwebrtc/test/fuzzers/utils/rtp_replayer.cc index 12743d89d9d4..83f894dc28db 100644 --- a/third_party/libwebrtc/test/fuzzers/utils/rtp_replayer.cc +++ b/third_party/libwebrtc/test/fuzzers/utils/rtp_replayer.cc @@ -187,6 +187,8 @@ void RtpReplayer::ReplayPackets( RTC_LOG(LS_ERROR) << "Packet error, corrupt packets or incorrect setup?"; break; } + // Set the clock rate - always 90K for video + received_packet.set_payload_type_frequency(kVideoPayloadTypeFrequency); call->Receiver()->DeliverRtpPacket( MediaType::VIDEO, std::move(received_packet), diff --git a/third_party/libwebrtc/test/mac_capturer.h b/third_party/libwebrtc/test/mac_capturer.h index 35cd1ccd1e14..58ccfc0675e2 100644 --- a/third_party/libwebrtc/test/mac_capturer.h +++ b/third_party/libwebrtc/test/mac_capturer.h @@ -17,6 +17,7 @@ #include "api/media_stream_interface.h" #include "api/scoped_refptr.h" #include "modules/video_capture/video_capture.h" +#include "rtc_base/logging.h" #include "rtc_base/thread.h" #include "test/test_video_capturer.h" @@ -32,6 +33,15 @@ class MacCapturer : public TestVideoCapturer, size_t capture_device_index); ~MacCapturer() override; + void Start() override { + RTC_LOG(LS_WARNING) << "Capturer doesn't support resume/pause and always " + "produces the video"; + } + void Stop() override { + RTC_LOG(LS_WARNING) << "Capturer doesn't support resume/pause and always " + "produces the video"; + } + void OnFrame(const VideoFrame& frame) override; int GetFrameWidth() const override { return static_cast(width_); } diff --git a/third_party/libwebrtc/test/mock_transformable_frame.h b/third_party/libwebrtc/test/mock_transformable_frame.h index 039013f218a8..9361aaeb0603 100644 --- a/third_party/libwebrtc/test/mock_transformable_frame.h +++ b/third_party/libwebrtc/test/mock_transformable_frame.h @@ -23,6 +23,7 @@ class MockTransformableFrame : public TransformableFrameInterface { MOCK_METHOD(uint8_t, GetPayloadType, (), (const, override)); MOCK_METHOD(uint32_t, GetSsrc, (), (const, override)); MOCK_METHOD(uint32_t, GetTimestamp, (), (const, override)); + MOCK_METHOD(void, SetRTPTimestamp, (uint32_t), (override)); }; } // namespace webrtc diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc index 09fd7c8a82e8..bca52d9bfcab 100644 --- a/third_party/libwebrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc +++ b/third_party/libwebrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc @@ -62,6 +62,9 @@ void DefaultAudioQualityAnalyzer::OnStatsReports( TimeDelta::Seconds(stat->jitter_buffer_target_delay.ValueOrDefault(0.)); sample.jitter_buffer_emitted_count = stat->jitter_buffer_emitted_count.ValueOrDefault(0ul); + sample.total_samples_duration = + stat->total_samples_duration.ValueOrDefault(0.); + sample.total_audio_energy = stat->total_audio_energy.ValueOrDefault(0.); TrackIdStreamInfoMap::StreamInfo stream_info = analyzer_helper_->GetStreamInfoFromTrackId(*stat->track_identifier); @@ -115,6 +118,9 @@ void DefaultAudioQualityAnalyzer::OnStatsReports( 
jitter_buffer_target_delay_diff.ms() / jitter_buffer_emitted_count_diff); } + audio_stream_stats.energy.AddSample(sqrt( + (sample.total_audio_energy - prev_sample.total_audio_energy) / + (sample.total_samples_duration - prev_sample.total_samples_duration))); last_stats_sample_[stream_info.stream_label] = sample; } @@ -161,6 +167,10 @@ void DefaultAudioQualityAnalyzer::Stop() { "preferred_buffer_size_ms", GetTestCaseName(item.first), item.second.preferred_buffer_size_ms, Unit::kMilliseconds, ImprovementDirection::kNeitherIsBetter, metric_metadata); + metrics_logger_->LogMetric("energy", GetTestCaseName(item.first), + item.second.energy, Unit::kUnitless, + ImprovementDirection::kNeitherIsBetter, + metric_metadata); } } diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h b/third_party/libwebrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h index 9e427afed801..c59f72742260 100644 --- a/third_party/libwebrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h +++ b/third_party/libwebrtc/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h @@ -32,6 +32,7 @@ struct AudioStreamStats { SamplesStatsCounter speech_expand_rate; SamplesStatsCounter average_jitter_buffer_delay_ms; SamplesStatsCounter preferred_buffer_size_ms; + SamplesStatsCounter energy; }; class DefaultAudioQualityAnalyzer : public AudioQualityAnalyzerInterface { @@ -59,6 +60,8 @@ class DefaultAudioQualityAnalyzer : public AudioQualityAnalyzerInterface { TimeDelta jitter_buffer_delay = TimeDelta::Zero(); TimeDelta jitter_buffer_target_delay = TimeDelta::Zero(); uint64_t jitter_buffer_emitted_count = 0; + double total_samples_duration = 0.0; + double total_audio_energy = 0.0; }; std::string GetTestCaseName(const std::string& stream_label) const; diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/BUILD.gn b/third_party/libwebrtc/test/pc/e2e/analyzer/video/BUILD.gn index 6ce0c4968b63..17876e54be68 100644 --- a/third_party/libwebrtc/test/pc/e2e/analyzer/video/BUILD.gn +++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/BUILD.gn @@ -345,10 +345,13 @@ rtc_library("analyzing_video_sink") { ":analyzing_video_sinks_helper", ":simulcast_dummy_buffer_helper", ":video_dumping", + "../..:metric_metadata_keys", "../../../..:fixed_fps_video_frame_writer_adapter", "../../../..:test_renderer", "../../../../../api:video_quality_analyzer_api", "../../../../../api/numerics", + "../../../../../api/test/metrics:metric", + "../../../../../api/test/metrics:metrics_logger", "../../../../../api/test/pclf:media_configuration", "../../../../../api/test/video:video_frame_writer", "../../../../../api/units:timestamp", diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sink.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sink.cc index fb221e6797f3..2392483b1ace 100644 --- a/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sink.cc +++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sink.cc @@ -16,6 +16,8 @@ #include "absl/memory/memory.h" #include "absl/strings/string_view.h" #include "absl/types/optional.h" +#include "api/test/metrics/metric.h" +#include "api/test/metrics/metrics_logger.h" #include "api/test/pclf/media_configuration.h" #include "api/test/video/video_frame_writer.h" #include "api/units/timestamp.h" @@ -26,6 +28,7 @@ #include "rtc_base/synchronization/mutex.h" #include "test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.h" #include "test/pc/e2e/analyzer/video/video_dumping.h" 
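The new `energy` metric added above is a per-interval RMS audio level: it takes the difference in the cumulative `totalAudioEnergy` and `totalSamplesDuration` stats between consecutive reports and samples sqrt(delta energy / delta duration). A standalone sketch of that arithmetic; the function and variable names are illustrative.

#include <cmath>

// RMS level over one stats interval, mirroring the AddSample expression in
// the analyzer hunk above. Inputs are the cumulative counters from two
// consecutive stats reports; assumes the interval is non-empty.
double IntervalAudioLevel(double total_energy_now, double total_energy_prev,
                          double total_duration_now_s,
                          double total_duration_prev_s) {
  const double energy = total_energy_now - total_energy_prev;
  const double duration_s = total_duration_now_s - total_duration_prev_s;
  return std::sqrt(energy / duration_s);  // ~0.0 (silence) .. ~1.0 (full scale)
}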
+#include "test/pc/e2e/metric_metadata_keys.h" #include "test/testsupport/fixed_fps_video_frame_writer_adapter.h" #include "test/video_renderer.h" @@ -108,6 +111,26 @@ void AnalyzingVideoSink::OnFrame(const VideoFrame& frame) { } } +void AnalyzingVideoSink::LogMetrics(webrtc::test::MetricsLogger& metrics_logger, + absl::string_view test_case_name) const { + if (report_infra_stats_) { + MutexLock lock(&mutex_); + const std::string test_case(test_case_name); + // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. + std::map metadata = { + {MetricMetadataKey::kPeerMetadataKey, peer_name_}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case}}; + metrics_logger.LogMetric( + "analyzing_sink_processing_time_ms", test_case + "/" + peer_name_, + stats_.analyzing_sink_processing_time_ms, test::Unit::kMilliseconds, + test::ImprovementDirection::kSmallerIsBetter, metadata); + metrics_logger.LogMetric("scaling_tims_ms", test_case + "/" + peer_name_, + stats_.scaling_tims_ms, test::Unit::kMilliseconds, + test::ImprovementDirection::kSmallerIsBetter, + metadata); + } +} + AnalyzingVideoSink::Stats AnalyzingVideoSink::stats() const { MutexLock lock(&mutex_); return stats_; diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sink.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sink.h index 1834bbe46977..4c7fbebe1825 100644 --- a/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sink.h +++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/analyzing_video_sink.h @@ -18,6 +18,7 @@ #include "absl/strings/string_view.h" #include "api/numerics/samples_stats_counter.h" +#include "api/test/metrics/metrics_logger.h" #include "api/test/pclf/media_configuration.h" #include "api/test/video/video_frame_writer.h" #include "api/test/video_quality_analyzer_interface.h" @@ -56,6 +57,9 @@ class AnalyzingVideoSink : public rtc::VideoSinkInterface { void OnFrame(const VideoFrame& frame) override; + void LogMetrics(webrtc::test::MetricsLogger& metrics_logger, + absl::string_view test_case_name) const; + Stats stats() const; private: diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc index 2c81c5dc05dc..473461c3ba4f 100644 --- a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc +++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc @@ -1335,7 +1335,7 @@ std::string DefaultVideoQualityAnalyzer::GetStreamLabelInternal( RTC_CHECK(false) << "Unknown frame_id=" << frame_id; } -double DefaultVideoQualityAnalyzer::GetCpuUsagePercent() { +double DefaultVideoQualityAnalyzer::GetCpuUsagePercent() const { return cpu_measurer_.GetCpuUsagePercent(); } diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h index 060a836ccfd9..0fff92a40a0c 100644 --- a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h +++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h @@ -107,7 +107,7 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface { // obtained by calling GetKnownVideoStreams() std::map GetStats() const; AnalyzerStats GetAnalyzerStats() const; - double GetCpuUsagePercent(); + double GetCpuUsagePercent() const; // Returns mapping from the stream label to 
the history of frames that were // met in this stream in the order as they were captured. diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_cpu_measurer.cc b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_cpu_measurer.cc index 847c9f09a6dc..0cee4b4b3b64 100644 --- a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_cpu_measurer.cc +++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_cpu_measurer.cc @@ -37,7 +37,7 @@ void DefaultVideoQualityAnalyzerCpuMeasurer::StopExcludingCpuThreadTime() { cpu_time_ -= rtc::GetThreadCpuTimeNanos(); } -double DefaultVideoQualityAnalyzerCpuMeasurer::GetCpuUsagePercent() { +double DefaultVideoQualityAnalyzerCpuMeasurer::GetCpuUsagePercent() const { MutexLock lock(&mutex_); return static_cast(cpu_time_) / wallclock_time_ * 100.0; } diff --git a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_cpu_measurer.h b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_cpu_measurer.h index dd9fa07af268..28ca48a72531 100644 --- a/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_cpu_measurer.h +++ b/third_party/libwebrtc/test/pc/e2e/analyzer/video/default_video_quality_analyzer_cpu_measurer.h @@ -18,7 +18,7 @@ namespace webrtc { // This class is thread safe. class DefaultVideoQualityAnalyzerCpuMeasurer { public: - double GetCpuUsagePercent(); + double GetCpuUsagePercent() const; void StartMeasuringCpuProcessTime(); void StopMeasuringCpuProcessTime(); @@ -26,7 +26,7 @@ class DefaultVideoQualityAnalyzerCpuMeasurer { void StopExcludingCpuThreadTime(); private: - Mutex mutex_; + mutable Mutex mutex_; int64_t cpu_time_ RTC_GUARDED_BY(mutex_) = 0; int64_t wallclock_time_ RTC_GUARDED_BY(mutex_) = 0; }; diff --git a/third_party/libwebrtc/test/pc/e2e/media/test_video_capturer_video_track_source.h b/third_party/libwebrtc/test/pc/e2e/media/test_video_capturer_video_track_source.h index 70db07b31c3c..846721d8642b 100644 --- a/third_party/libwebrtc/test/pc/e2e/media/test_video_capturer_video_track_source.h +++ b/third_party/libwebrtc/test/pc/e2e/media/test_video_capturer_video_track_source.h @@ -45,9 +45,15 @@ class TestVideoCapturerVideoTrackSource : public test::TestVideoTrackSource { ~TestVideoCapturerVideoTrackSource() = default; - void Start() override { SetState(kLive); } + void Start() override { + SetState(kLive); + video_capturer_->Start(); + } - void Stop() override { SetState(kMuted); } + void Stop() override { + SetState(kMuted); + video_capturer_->Stop(); + } int GetFrameWidth() const override { return video_capturer_->GetFrameWidth(); diff --git a/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test_metric_names_test.cc b/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test_metric_names_test.cc index a2830af72d3b..46f7e93243a8 100644 --- a/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test_metric_names_test.cc +++ b/third_party/libwebrtc/test/pc/e2e/peer_connection_quality_test_metric_names_test.cc @@ -1090,10 +1090,32 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, .name = "video_ahead_ms", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, + .metadata = + {{MetricMetadataKey::kAudioStreamMetadataKey, "bob_video"}, + {MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kPeerSyncGroupMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, 
"alice"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, + MetricValidationInfo{ + .test_case = "test_case/alice_audio", + .name = "energy", + .unit = Unit::kUnitless, + .improvement_direction = ImprovementDirection::kNeitherIsBetter, + .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, + "alice_audio"}, + {MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}, + {MetricMetadataKey::kExperimentalTestNameMetadataKey, + "test_case"}}}, + MetricValidationInfo{ + .test_case = "test_case/bob_audio", + .name = "energy", + .unit = Unit::kUnitless, + .improvement_direction = ImprovementDirection::kNeitherIsBetter, .metadata = { - {MetricMetadataKey::kAudioStreamMetadataKey, "bob_video"}, + {MetricMetadataKey::kAudioStreamMetadataKey, "bob_audio"}, {MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kPeerSyncGroupMetadataKey, "bob"}, {MetricMetadataKey::kReceiverMetadataKey, "alice"}, {MetricMetadataKey::kExperimentalTestNameMetadataKey, "test_case"}}})); diff --git a/third_party/libwebrtc/test/peer_scenario/peer_scenario_client.cc b/third_party/libwebrtc/test/peer_scenario/peer_scenario_client.cc index 3419b9e0693c..697bf055a7f7 100644 --- a/third_party/libwebrtc/test/peer_scenario/peer_scenario_client.cc +++ b/third_party/libwebrtc/test/peer_scenario/peer_scenario_client.cc @@ -338,6 +338,7 @@ PeerScenarioClient::VideoSendTrack PeerScenarioClient::CreateVideo( capturer->Init(); res.source = rtc::make_ref_counted( std::move(capturer), config.screencast); + res.source->Start(); auto track = pc_factory_->CreateVideoTrack(res.source, track_id); res.track = track.get(); res.sender = diff --git a/third_party/libwebrtc/test/scenario/video_stream.cc b/third_party/libwebrtc/test/scenario/video_stream.cc index ec139763c8bf..e082aa37c68c 100644 --- a/third_party/libwebrtc/test/scenario/video_stream.cc +++ b/third_party/libwebrtc/test/scenario/video_stream.cc @@ -419,6 +419,7 @@ SendVideoStream::SendVideoStream(CallClient* sender, send_config.suspend_below_min_bitrate = config.encoder.suspend_below_min_bitrate; + video_capturer_->Start(); sender_->SendTask([&] { if (config.stream.fec_controller_factory) { send_stream_ = sender_->call_->CreateVideoSendStream( diff --git a/third_party/libwebrtc/test/test_video_capturer.cc b/third_party/libwebrtc/test/test_video_capturer.cc index 3098731eb3b0..385af12b8051 100644 --- a/third_party/libwebrtc/test/test_video_capturer.cc +++ b/third_party/libwebrtc/test/test_video_capturer.cc @@ -46,8 +46,9 @@ void TestVideoCapturer::OnFrame(const VideoFrame& original_frame) { MutexLock lock(&lock_); enable_adaptation = enable_adaptation_; } - if (enable_adaptation) { + if (!enable_adaptation) { broadcaster_.OnFrame(frame); + return; } if (!video_adapter_.AdaptFrameResolution( diff --git a/third_party/libwebrtc/test/test_video_capturer.h b/third_party/libwebrtc/test/test_video_capturer.h index 48b7f7a7f84d..49660d897240 100644 --- a/third_party/libwebrtc/test/test_video_capturer.h +++ b/third_party/libwebrtc/test/test_video_capturer.h @@ -49,6 +49,13 @@ class TestVideoCapturer : public rtc::VideoSourceInterface { int height, const absl::optional& max_fps); + // Starts or resumes video capturing. Can be called multiple times during + // lifetime of this object. + virtual void Start() = 0; + // Stops or pauses video capturing. Can be called multiple times during + // lifetime of this object. 
+ virtual void Stop() = 0; + virtual int GetFrameWidth() const = 0; virtual int GetFrameHeight() const = 0; @@ -62,7 +69,7 @@ class TestVideoCapturer : public rtc::VideoSourceInterface { Mutex lock_; std::unique_ptr preprocessor_ RTC_GUARDED_BY(lock_); - bool enable_adaptation_ RTC_GUARDED_BY(lock_) = false; + bool enable_adaptation_ RTC_GUARDED_BY(lock_) = true; rtc::VideoBroadcaster broadcaster_; cricket::VideoAdapter video_adapter_; }; diff --git a/third_party/libwebrtc/test/testsupport/ivf_video_frame_generator.cc b/third_party/libwebrtc/test/testsupport/ivf_video_frame_generator.cc index ae87fb8b0454..0c7504b25a06 100644 --- a/third_party/libwebrtc/test/testsupport/ivf_video_frame_generator.cc +++ b/third_party/libwebrtc/test/testsupport/ivf_video_frame_generator.cc @@ -16,6 +16,7 @@ #include "api/video/i420_buffer.h" #include "api/video_codecs/video_codec.h" #include "media/base/media_constants.h" +#include "modules/video_coding/codecs/av1/dav1d_decoder.h" #include "modules/video_coding/codecs/h264/include/h264.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" @@ -145,6 +146,9 @@ std::unique_ptr IvfVideoFrameGenerator::CreateVideoDecoder( if (codec_type == VideoCodecType::kVideoCodecH264) { return H264Decoder::Create(); } + if (codec_type == VideoCodecType::kVideoCodecAV1) { + return CreateDav1dDecoder(); + } return nullptr; } diff --git a/third_party/libwebrtc/test/testsupport/y4m_frame_generator.cc b/third_party/libwebrtc/test/testsupport/y4m_frame_generator.cc index 39a5ad97aa45..f1ecbf9b41ca 100644 --- a/third_party/libwebrtc/test/testsupport/y4m_frame_generator.cc +++ b/third_party/libwebrtc/test/testsupport/y4m_frame_generator.cc @@ -59,7 +59,26 @@ Y4mFrameGenerator::VideoFrameData Y4mFrameGenerator::NextFrame() { static_cast(height_)}; rtc::scoped_refptr next_frame_buffer = frame_reader_->PullFrame(); - return VideoFrameData(next_frame_buffer, update_rect); + + if (!next_frame_buffer || + (static_cast(next_frame_buffer->width()) == width_ && + static_cast(next_frame_buffer->height()) == height_)) { + return VideoFrameData(next_frame_buffer, update_rect); + } + + // Allocate a new buffer and return scaled version. 
+ rtc::scoped_refptr scaled_buffer( + I420Buffer::Create(width_, height_)); + webrtc::I420Buffer::SetBlack(scaled_buffer.get()); + scaled_buffer->ScaleFrom(*next_frame_buffer->ToI420()); + return VideoFrameData(scaled_buffer, update_rect); +} + +void Y4mFrameGenerator::ChangeResolution(size_t width, size_t height) { + width_ = width; + height_ = height; + RTC_CHECK_GT(width_, 0); + RTC_CHECK_GT(height_, 0); } FrameGeneratorInterface::Resolution Y4mFrameGenerator::GetResolution() const { diff --git a/third_party/libwebrtc/test/testsupport/y4m_frame_generator.h b/third_party/libwebrtc/test/testsupport/y4m_frame_generator.h index bccd109318d8..4ff64be7dc53 100644 --- a/third_party/libwebrtc/test/testsupport/y4m_frame_generator.h +++ b/third_party/libwebrtc/test/testsupport/y4m_frame_generator.h @@ -45,9 +45,7 @@ class Y4mFrameGenerator : public FrameGeneratorInterface { VideoFrameData NextFrame() override; - void ChangeResolution(size_t width, size_t height) override { - RTC_CHECK_NOTREACHED(); - } + void ChangeResolution(size_t width, size_t height) override; Resolution GetResolution() const override; diff --git a/third_party/libwebrtc/test/testsupport/y4m_frame_generator_test.cc b/third_party/libwebrtc/test/testsupport/y4m_frame_generator_test.cc index 4341c3e3eb6e..24d10c89923c 100644 --- a/third_party/libwebrtc/test/testsupport/y4m_frame_generator_test.cc +++ b/third_party/libwebrtc/test/testsupport/y4m_frame_generator_test.cc @@ -82,6 +82,29 @@ TEST_F(Y4mFrameGeneratorTest, CanReadFPSFromFileWhenRoundingIsNeeded) { remove(input_filepath.c_str()); } +TEST_F(Y4mFrameGeneratorTest, CanChangeResolution) { + constexpr int kNewWidth = 4; + constexpr int kNewHeight = 6; + constexpr int kFrameCount = 10; + + Y4mFrameGenerator generator(input_filepath_, + Y4mFrameGenerator::RepeatMode::kLoop); + FrameGeneratorInterface::Resolution res = generator.GetResolution(); + EXPECT_EQ(res.width, 2u); + EXPECT_EQ(res.height, 2u); + + generator.ChangeResolution(kNewWidth, kNewHeight); + res = generator.GetResolution(); + EXPECT_EQ(static_cast(res.width), kNewWidth); + EXPECT_EQ(static_cast(res.height), kNewHeight); + + for (int i = 0; i < kFrameCount; ++i) { + FrameGeneratorInterface::VideoFrameData frame = generator.NextFrame(); + EXPECT_EQ(frame.buffer->width(), kNewWidth); + EXPECT_EQ(frame.buffer->height(), kNewHeight); + } +} + TEST_F(Y4mFrameGeneratorTest, SingleRepeatMode) { Y4mFrameGenerator generator(input_filepath_, Y4mFrameGenerator::RepeatMode::kSingle); diff --git a/third_party/libwebrtc/test/vcm_capturer.h b/third_party/libwebrtc/test/vcm_capturer.h index da2b948fe0d1..1deea212292c 100644 --- a/third_party/libwebrtc/test/vcm_capturer.h +++ b/third_party/libwebrtc/test/vcm_capturer.h @@ -15,6 +15,7 @@ #include "api/scoped_refptr.h" #include "modules/video_capture/video_capture.h" +#include "rtc_base/logging.h" #include "test/test_video_capturer.h" namespace webrtc { @@ -29,6 +30,15 @@ class VcmCapturer : public TestVideoCapturer, size_t capture_device_index); virtual ~VcmCapturer(); + void Start() override { + RTC_LOG(LS_WARNING) << "Capturer doesn't support resume/pause and always " + "produces the video"; + } + void Stop() override { + RTC_LOG(LS_WARNING) << "Capturer doesn't support resume/pause and always " + "produces the video"; + } + void OnFrame(const VideoFrame& frame) override; int GetFrameWidth() const override { return static_cast(width_); } diff --git a/third_party/libwebrtc/tools_webrtc/ios/build_ios_libs.py b/third_party/libwebrtc/tools_webrtc/ios/build_ios_libs.py index 
764c378c7b1d..3ab0cbe95328 100755 --- a/third_party/libwebrtc/tools_webrtc/ios/build_ios_libs.py +++ b/third_party/libwebrtc/tools_webrtc/ios/build_ios_libs.py @@ -39,7 +39,7 @@ ENABLED_ARCHS = [ DEFAULT_ARCHS = [ 'device:arm64', 'simulator:arm64', 'simulator:x64' ] -IOS_DEPLOYMENT_TARGET = { +IOS_MINIMUM_DEPLOYMENT_TARGET = { 'device': '12.0', 'simulator': '12.0', 'catalyst': '14.0' @@ -99,6 +99,11 @@ def _ParseArgs(): action='store_true', default=False, help='Use RBE to build.') + parser.add_argument('--deployment-target', + default=IOS_MINIMUM_DEPLOYMENT_TARGET['device'], + help='Raise the minimum deployment target to build for. ' + 'Cannot be lowered below 12.0 for iOS/iPadOS ' + 'and 14.0 for Catalyst.') parser.add_argument( '--extra-gn-args', default=[], @@ -151,6 +156,12 @@ def _ParseArchitecture(architectures): return result +def _VersionMax(*versions): + return max( + *versions, + key=lambda version: [int(component) for component in version.split('.')]) + + def BuildWebRTC(output_dir, target_environment, target_arch, flavor, gn_target_name, ios_deployment_target, libvpx_build_vp9, use_goma, use_remoteexec, extra_gn_args): @@ -234,6 +245,8 @@ def main(): framework_paths = [] all_lib_paths = [] for (environment, archs) in list(architectures.items()): + ios_deployment_target = _VersionMax( + args.deployment_target, IOS_MINIMUM_DEPLOYMENT_TARGET[environment]) framework_path = os.path.join(args.output_dir, environment) framework_paths.append(framework_path) lib_paths = [] @@ -241,8 +254,8 @@ def main(): lib_path = os.path.join(framework_path, arch + '_libs') lib_paths.append(lib_path) BuildWebRTC(lib_path, environment, arch, args.build_config, - gn_target_name, IOS_DEPLOYMENT_TARGET[environment], - LIBVPX_BUILD_VP9, args.use_goma, args.use_remoteexec, gn_args) + gn_target_name, ios_deployment_target, LIBVPX_BUILD_VP9, + args.use_goma, args.use_remoteexec, gn_args) all_lib_paths.extend(lib_paths) # Combine the slices. diff --git a/third_party/libwebrtc/tools_webrtc/mb/mb_config.pyl b/third_party/libwebrtc/tools_webrtc/mb/mb_config.pyl index 19c11748c7fa..9fabf0044ed0 100644 --- a/third_party/libwebrtc/tools_webrtc/mb/mb_config.pyl +++ b/third_party/libwebrtc/tools_webrtc/mb/mb_config.pyl @@ -122,9 +122,7 @@ # build anything). # TODO(http://crbug.com/1029452): Nuke these and isolate on builder # instead? 
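The new `--deployment-target` flag in build_ios_libs.py above is clamped per environment with `_VersionMax`, which compares dotted versions by numeric components rather than as strings (so "9.2" sorts below "12.0", which a plain string comparison would get backwards). The same idea expressed in C++ purely for illustration; the helper names are made up.

#include <sstream>
#include <string>
#include <vector>

// Split "12.0.1" into {12, 0, 1} so versions compare numerically, not
// lexicographically on characters.
std::vector<int> VersionComponents(const std::string& version) {
  std::vector<int> parts;
  std::stringstream ss(version);
  std::string item;
  while (std::getline(ss, item, '.')) {
    parts.push_back(std::stoi(item));
  }
  return parts;
}

std::string VersionMax(const std::string& a, const std::string& b) {
  // std::vector compares element-wise, giving the numeric ordering we want.
  return VersionComponents(a) < VersionComponents(b) ? b : a;
}

// VersionMax("9.2", "12.0") == "12.0"; a string max would have picked "9.2".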
- 'Perf Android32 (O Pixel2)': 'release_bot_x64', 'Perf Android32 (R Pixel5)': 'release_bot_x64', - 'Perf Android64 (O Pixel2)': 'release_bot_x64', 'Perf Android64 (R Pixel5)': 'release_bot_x64', 'Perf Fuchsia': 'release_bot_x64_fuchsia', 'Perf Linux Bionic': 'release_bot_x64', @@ -271,7 +269,8 @@ ], 'ios_internal_pure_release_bot_arm64': [ 'ios', 'pure_release_bot', 'arm64', - 'ios_code_signing_identity_description', 'xctest' + 'ios_code_signing_identity_description', 'xctest', + 'rtc_objc_test_prefix', ], 'ios_internal_release_bot_arm64': [ 'ios', 'release_bot', 'arm64', 'ios_code_signing_identity_description', @@ -456,6 +455,9 @@ 'release_bot': { 'mixins': ['pure_release_bot', 'dcheck_always_on'], }, + 'rtc_objc_test_prefix': { + 'gn_args': 'rtc_objc_prefix="RTC_TESTING"', + }, 'rtti': { 'gn_args': 'use_rtti=true', }, diff --git a/third_party/libwebrtc/video/config/encoder_config_gn/moz.build b/third_party/libwebrtc/video/config/encoder_config_gn/moz.build index daf1d0d43453..9eb6abf8c116 100644 --- a/third_party/libwebrtc/video/config/encoder_config_gn/moz.build +++ b/third_party/libwebrtc/video/config/encoder_config_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/video/config/encoder_stream_factory.cc b/third_party/libwebrtc/video/config/encoder_stream_factory.cc index de475b90ebd5..c955602cfce0 100644 --- a/third_party/libwebrtc/video/config/encoder_stream_factory.cc +++ b/third_party/libwebrtc/video/config/encoder_stream_factory.cc @@ -174,10 +174,27 @@ EncoderStreamFactory::CreateDefaultVideoStreams( const absl::optional& experimental_min_bitrate) const { std::vector layers; + // The max bitrate specified by the API. + // - `encoder_config.simulcast_layers[0].max_bitrate_bps` comes from the first + // RtpEncodingParamters, which is the encoding of this stream. + // - `encoder_config.max_bitrate_bps` comes from SDP; "b=AS" or conditionally + // "x-google-max-bitrate". + // If `api_max_bitrate_bps` has a value then it is positive. + absl::optional api_max_bitrate_bps; + if (encoder_config.simulcast_layers[0].max_bitrate_bps > 0) { + api_max_bitrate_bps = encoder_config.simulcast_layers[0].max_bitrate_bps; + } + if (encoder_config.max_bitrate_bps > 0) { + api_max_bitrate_bps = + api_max_bitrate_bps.has_value() + ? std::min(encoder_config.max_bitrate_bps, *api_max_bitrate_bps) + : encoder_config.max_bitrate_bps; + } + // For unset max bitrates set default bitrate for non-simulcast. int max_bitrate_bps = - (encoder_config.max_bitrate_bps > 0) - ? encoder_config.max_bitrate_bps + api_max_bitrate_bps.has_value() + ? *api_max_bitrate_bps : GetMaxDefaultVideoBitrateKbps(width, height, is_screenshare_) * 1000; @@ -189,7 +206,7 @@ EncoderStreamFactory::CreateDefaultVideoStreams( // Use set min bitrate. min_bitrate_bps = encoder_config.simulcast_layers[0].min_bitrate_bps; // If only min bitrate is configured, make sure max is above min. 
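The `api_max_bitrate_bps` introduced above merges the two API-level caps: the first encoding's `max_bitrate_bps` from RtpEncodingParameters and the SDP-derived `encoder_config.max_bitrate_bps` (b=AS or, conditionally, x-google-max-bitrate), taking the minimum when both are set and leaving the value unset otherwise so the resolution-based default applies (the hunk continues below). A condensed sketch of that selection; the function name and parameters are illustrative.

#include <algorithm>

#include "absl/types/optional.h"

// Combine the per-encoding cap and the SDP cap; zero or negative means
// "unset". An empty result tells the caller to fall back to the default
// computed from resolution (GetMaxDefaultVideoBitrateKbps in the hunk above).
absl::optional<int> ApiMaxBitrateBps(int encoding_max_bps, int sdp_max_bps) {
  absl::optional<int> result;
  if (encoding_max_bps > 0)
    result = encoding_max_bps;
  if (sdp_max_bps > 0)
    result = result.has_value() ? std::min(sdp_max_bps, *result) : sdp_max_bps;
  return result;
}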
- if (encoder_config.max_bitrate_bps <= 0) + if (!api_max_bitrate_bps.has_value()) max_bitrate_bps = std::max(min_bitrate_bps, max_bitrate_bps); } int max_framerate = (encoder_config.simulcast_layers[0].max_framerate > 0) @@ -253,7 +270,7 @@ EncoderStreamFactory::CreateDefaultVideoStreams( sum_max_bitrates_kbps += spatial_layer.maxBitrate; } RTC_DCHECK_GE(sum_max_bitrates_kbps, 0); - if (encoder_config.max_bitrate_bps <= 0) { + if (!api_max_bitrate_bps.has_value()) { max_bitrate_bps = sum_max_bitrates_kbps * 1000; } else { max_bitrate_bps = diff --git a/third_party/libwebrtc/video/end_to_end_tests/histogram_tests.cc b/third_party/libwebrtc/video/end_to_end_tests/histogram_tests.cc index fd35b0fbd4e8..85d36dc48d86 100644 --- a/third_party/libwebrtc/video/end_to_end_tests/histogram_tests.cc +++ b/third_party/libwebrtc/video/end_to_end_tests/histogram_tests.cc @@ -157,9 +157,6 @@ void HistogramTest::VerifyHistogramStats(bool use_rtx, const std::string video_prefix = screenshare ? "WebRTC.Video.Screenshare." : "WebRTC.Video."; - // The content type extension is disabled in non screenshare test, - // therefore no slicing on simulcast id should be present. - const std::string video_suffix = screenshare ? ".S0" : ""; // Verify that stats have been updated once. EXPECT_METRIC_EQ(2, metrics::NumSamples("WebRTC.Call.LifetimeInSeconds")); @@ -248,17 +245,13 @@ void HistogramTest::VerifyHistogramStats(bool use_rtx, EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.CurrentDelayInMs")); EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.OnewayDelayInMs")); - EXPECT_METRIC_EQ(1, metrics::NumSamples(video_prefix + "EndToEndDelayInMs" + - video_suffix)); + EXPECT_METRIC_EQ(1, metrics::NumSamples(video_prefix + "EndToEndDelayInMs")); EXPECT_METRIC_EQ(1, - metrics::NumSamples(video_prefix + "EndToEndDelayMaxInMs" + - video_suffix)); - EXPECT_METRIC_EQ(1, metrics::NumSamples(video_prefix + "InterframeDelayInMs" + - video_suffix)); + metrics::NumSamples(video_prefix + "EndToEndDelayMaxInMs")); EXPECT_METRIC_EQ(1, - metrics::NumSamples(video_prefix + "InterframeDelayMaxInMs" + - video_suffix)); - + metrics::NumSamples(video_prefix + "InterframeDelayInMs")); + EXPECT_METRIC_EQ( + 1, metrics::NumSamples(video_prefix + "InterframeDelayMaxInMs")); EXPECT_METRIC_EQ( 1, metrics::NumSamples("WebRTC.Video.RenderSqrtPixelsPerSecond")); diff --git a/third_party/libwebrtc/video/end_to_end_tests/multi_codec_receive_tests.cc b/third_party/libwebrtc/video/end_to_end_tests/multi_codec_receive_tests.cc index 377209b8a58b..2d8bf0586d33 100644 --- a/third_party/libwebrtc/video/end_to_end_tests/multi_codec_receive_tests.cc +++ b/third_party/libwebrtc/video/end_to_end_tests/multi_codec_receive_tests.cc @@ -254,6 +254,7 @@ void MultiCodecReceiveTest::RunTestWithCodecs( GetVideoSendStream()->Start(); CreateFrameGeneratorCapturer(kFps, kWidth / 2, kHeight / 2); ConnectVideoSourcesToStreams(); + StartVideoSources(); }); EXPECT_TRUE(observer_.Wait()) << "Timed out waiting for frames."; } diff --git a/third_party/libwebrtc/video/end_to_end_tests/retransmission_tests.cc b/third_party/libwebrtc/video/end_to_end_tests/retransmission_tests.cc index 08b1bc85e47e..246b3cf523f8 100644 --- a/third_party/libwebrtc/video/end_to_end_tests/retransmission_tests.cc +++ b/third_party/libwebrtc/video/end_to_end_tests/retransmission_tests.cc @@ -166,7 +166,7 @@ TEST_F(RetransmissionEndToEndTest, ReceivesNackAndRetransmitsAudio) { nack.SetPacketIds(nack_list, 1); rtc::Buffer buffer = nack.Build(); - 
EXPECT_TRUE(receive_transport_->SendRtcp(buffer.data(), buffer.size())); + EXPECT_TRUE(receive_transport_->SendRtcp(buffer)); } return SEND_PACKET; diff --git a/third_party/libwebrtc/video/end_to_end_tests/rtp_rtcp_tests.cc b/third_party/libwebrtc/video/end_to_end_tests/rtp_rtcp_tests.cc index 009a3878fbbf..fde2e9ca5c35 100644 --- a/third_party/libwebrtc/video/end_to_end_tests/rtp_rtcp_tests.cc +++ b/third_party/libwebrtc/video/end_to_end_tests/rtp_rtcp_tests.cc @@ -336,9 +336,10 @@ void RtpRtcpEndToEndTest::TestRtpStatePreservation( rtcp::RapidResyncRequest force_send_sr_back_request; rtc::Buffer packet = force_send_sr_back_request.Build(); static_cast(receive_transport_.get()) - ->SendRtcp(packet.data(), packet.size()); + ->SendRtcp(packet); } CreateFrameGeneratorCapturer(30, 1280, 720); + StartVideoSources(); }); observer.ResetExpectedSsrcs(1); diff --git a/third_party/libwebrtc/video/receive_statistics_proxy.cc b/third_party/libwebrtc/video/receive_statistics_proxy.cc index 98eb94786fee..8d00af91ff6c 100644 --- a/third_party/libwebrtc/video/receive_statistics_proxy.cc +++ b/third_party/libwebrtc/video/receive_statistics_proxy.cc @@ -50,20 +50,6 @@ const char* UmaPrefixForContentType(VideoContentType content_type) { return "WebRTC.Video"; } -std::string UmaSuffixForContentType(VideoContentType content_type) { - char ss_buf[1024]; - rtc::SimpleStringBuilder ss(ss_buf); - int simulcast_id = videocontenttypehelpers::GetSimulcastId(content_type); - if (simulcast_id > 0) { - ss << ".S" << simulcast_id - 1; - } - int experiment_id = videocontenttypehelpers::GetExperimentId(content_type); - if (experiment_id > 0) { - ss << ".ExperimentGroup" << experiment_id - 1; - } - return ss.str(); -} - // TODO(https://bugs.webrtc.org/11572): Workaround for an issue with some // rtc::Thread instances and/or implementations that don't register as the // current task queue. @@ -255,22 +241,8 @@ void ReceiveStatisticsProxy::UpdateHistograms( for (const auto& it : content_specific_stats_) { // Calculate simulcast specific metrics (".S0" ... ".S2" suffixes). VideoContentType content_type = it.first; - if (videocontenttypehelpers::GetSimulcastId(content_type) > 0) { - // Aggregate on experiment id. - videocontenttypehelpers::SetExperimentId(&content_type, 0); - aggregated_stats[content_type].Add(it.second); - } - // Calculate experiment specific metrics (".ExperimentGroup[0-7]" suffixes). - content_type = it.first; - if (videocontenttypehelpers::GetExperimentId(content_type) > 0) { - // Aggregate on simulcast id. - videocontenttypehelpers::SetSimulcastId(&content_type, 0); - aggregated_stats[content_type].Add(it.second); - } // Calculate aggregated metrics (no suffixes. Aggregated on everything). content_type = it.first; - videocontenttypehelpers::SetSimulcastId(&content_type, 0); - videocontenttypehelpers::SetExperimentId(&content_type, 0); aggregated_stats[content_type].Add(it.second); } @@ -278,77 +250,66 @@ void ReceiveStatisticsProxy::UpdateHistograms( // For the metric Foo we report the following slices: // WebRTC.Video.Foo, // WebRTC.Video.Screenshare.Foo, - // WebRTC.Video.Foo.S[0-3], - // WebRTC.Video.Foo.ExperimentGroup[0-7], - // WebRTC.Video.Screenshare.Foo.S[0-3], - // WebRTC.Video.Screenshare.Foo.ExperimentGroup[0-7]. auto content_type = it.first; auto stats = it.second; std::string uma_prefix = UmaPrefixForContentType(content_type); - std::string uma_suffix = UmaSuffixForContentType(content_type); - // Metrics can be sliced on either simulcast id or experiment id but not - // both. 
- RTC_DCHECK(videocontenttypehelpers::GetExperimentId(content_type) == 0 || - videocontenttypehelpers::GetSimulcastId(content_type) == 0); absl::optional e2e_delay_ms = stats.e2e_delay_counter.Avg(kMinRequiredSamples); if (e2e_delay_ms) { - RTC_HISTOGRAM_COUNTS_SPARSE_10000( - uma_prefix + ".EndToEndDelayInMs" + uma_suffix, *e2e_delay_ms); - log_stream << uma_prefix << ".EndToEndDelayInMs" << uma_suffix << " " - << *e2e_delay_ms << '\n'; + RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix + ".EndToEndDelayInMs", + *e2e_delay_ms); + log_stream << uma_prefix << ".EndToEndDelayInMs" + << " " << *e2e_delay_ms << '\n'; } absl::optional e2e_delay_max_ms = stats.e2e_delay_counter.Max(); if (e2e_delay_max_ms && e2e_delay_ms) { - RTC_HISTOGRAM_COUNTS_SPARSE_100000( - uma_prefix + ".EndToEndDelayMaxInMs" + uma_suffix, *e2e_delay_max_ms); - log_stream << uma_prefix << ".EndToEndDelayMaxInMs" << uma_suffix << " " - << *e2e_delay_max_ms << '\n'; + RTC_HISTOGRAM_COUNTS_SPARSE_100000(uma_prefix + ".EndToEndDelayMaxInMs", + *e2e_delay_max_ms); + log_stream << uma_prefix << ".EndToEndDelayMaxInMs" + << " " << *e2e_delay_max_ms << '\n'; } absl::optional interframe_delay_ms = stats.interframe_delay_counter.Avg(kMinRequiredSamples); if (interframe_delay_ms) { - RTC_HISTOGRAM_COUNTS_SPARSE_10000( - uma_prefix + ".InterframeDelayInMs" + uma_suffix, - *interframe_delay_ms); - log_stream << uma_prefix << ".InterframeDelayInMs" << uma_suffix << " " - << *interframe_delay_ms << '\n'; + RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix + ".InterframeDelayInMs", + *interframe_delay_ms); + log_stream << uma_prefix << ".InterframeDelayInMs" + << " " << *interframe_delay_ms << '\n'; } absl::optional interframe_delay_max_ms = stats.interframe_delay_counter.Max(); if (interframe_delay_max_ms && interframe_delay_ms) { - RTC_HISTOGRAM_COUNTS_SPARSE_10000( - uma_prefix + ".InterframeDelayMaxInMs" + uma_suffix, - *interframe_delay_max_ms); - log_stream << uma_prefix << ".InterframeDelayMaxInMs" << uma_suffix << " " - << *interframe_delay_max_ms << '\n'; + RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix + ".InterframeDelayMaxInMs", + *interframe_delay_max_ms); + log_stream << uma_prefix << ".InterframeDelayMaxInMs" + << " " << *interframe_delay_max_ms << '\n'; } absl::optional interframe_delay_95p_ms = stats.interframe_delay_percentiles.GetPercentile(0.95f); if (interframe_delay_95p_ms && interframe_delay_ms != -1) { RTC_HISTOGRAM_COUNTS_SPARSE_10000( - uma_prefix + ".InterframeDelay95PercentileInMs" + uma_suffix, + uma_prefix + ".InterframeDelay95PercentileInMs", *interframe_delay_95p_ms); log_stream << uma_prefix << ".InterframeDelay95PercentileInMs" - << uma_suffix << " " << *interframe_delay_95p_ms << '\n'; + << " " << *interframe_delay_95p_ms << '\n'; } absl::optional width = stats.received_width.Avg(kMinRequiredSamples); if (width) { - RTC_HISTOGRAM_COUNTS_SPARSE_10000( - uma_prefix + ".ReceivedWidthInPixels" + uma_suffix, *width); - log_stream << uma_prefix << ".ReceivedWidthInPixels" << uma_suffix << " " - << *width << '\n'; + RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix + ".ReceivedWidthInPixels", + *width); + log_stream << uma_prefix << ".ReceivedWidthInPixels" + << " " << *width << '\n'; } absl::optional height = stats.received_height.Avg(kMinRequiredSamples); if (height) { - RTC_HISTOGRAM_COUNTS_SPARSE_10000( - uma_prefix + ".ReceivedHeightInPixels" + uma_suffix, *height); - log_stream << uma_prefix << ".ReceivedHeightInPixels" << uma_suffix << " " - << *height << '\n'; + RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix + 
".ReceivedHeightInPixels", + *height); + log_stream << uma_prefix << ".ReceivedHeightInPixels" + << " " << *height << '\n'; } if (content_type != VideoContentType::UNSPECIFIED) { @@ -359,9 +320,8 @@ void ReceiveStatisticsProxy::UpdateHistograms( int media_bitrate_kbps = static_cast(stats.total_media_bytes * 8 / flow_duration_sec / 1000); RTC_HISTOGRAM_COUNTS_SPARSE_10000( - uma_prefix + ".MediaBitrateReceivedInKbps" + uma_suffix, - media_bitrate_kbps); - log_stream << uma_prefix << ".MediaBitrateReceivedInKbps" << uma_suffix + uma_prefix + ".MediaBitrateReceivedInKbps", media_bitrate_kbps); + log_stream << uma_prefix << ".MediaBitrateReceivedInKbps" << " " << media_bitrate_kbps << '\n'; } @@ -372,18 +332,16 @@ void ReceiveStatisticsProxy::UpdateHistograms( int key_frames_permille = (num_key_frames * 1000 + num_total_frames / 2) / num_total_frames; RTC_HISTOGRAM_COUNTS_SPARSE_1000( - uma_prefix + ".KeyFramesReceivedInPermille" + uma_suffix, - key_frames_permille); - log_stream << uma_prefix << ".KeyFramesReceivedInPermille" << uma_suffix + uma_prefix + ".KeyFramesReceivedInPermille", key_frames_permille); + log_stream << uma_prefix << ".KeyFramesReceivedInPermille" << " " << key_frames_permille << '\n'; } absl::optional qp = stats.qp_counter.Avg(kMinRequiredSamples); if (qp) { - RTC_HISTOGRAM_COUNTS_SPARSE_200( - uma_prefix + ".Decoded.Vp8.Qp" + uma_suffix, *qp); - log_stream << uma_prefix << ".Decoded.Vp8.Qp" << uma_suffix << " " - << *qp << '\n'; + RTC_HISTOGRAM_COUNTS_SPARSE_200(uma_prefix + ".Decoded.Vp8.Qp", *qp); + log_stream << uma_prefix << ".Decoded.Vp8.Qp" + << " " << *qp << '\n'; } } } diff --git a/third_party/libwebrtc/video/receive_statistics_proxy_unittest.cc b/third_party/libwebrtc/video/receive_statistics_proxy_unittest.cc index ec6dd0c42002..bc11efefd06a 100644 --- a/third_party/libwebrtc/video/receive_statistics_proxy_unittest.cc +++ b/third_party/libwebrtc/video/receive_statistics_proxy_unittest.cc @@ -524,7 +524,7 @@ TEST_F(ReceiveStatisticsProxyTest, GetStatsReportsIncomingPayloadType) { TEST_F(ReceiveStatisticsProxyTest, GetStatsReportsDecoderInfo) { auto init_stats = statistics_proxy_->GetStats(); - EXPECT_EQ(init_stats.decoder_implementation_name, "unknown"); + EXPECT_EQ(init_stats.decoder_implementation_name, absl::nullopt); EXPECT_EQ(init_stats.power_efficient_decoder, absl::nullopt); const VideoDecoder::DecoderInfo decoder_info{ @@ -1708,98 +1708,5 @@ TEST_P(ReceiveStatisticsProxyTestWithContent, DecodeTimeReported) { 1, metrics::NumEvents("WebRTC.Video.DecodeTimeInMs", kDecodeTime.ms())); } -TEST_P(ReceiveStatisticsProxyTestWithContent, - StatsAreSlicedOnSimulcastAndExperiment) { - const uint8_t experiment_id = 1; - webrtc::VideoContentType content_type = content_type_; - videocontenttypehelpers::SetExperimentId(&content_type, experiment_id); - const TimeDelta kInterFrameDelay1 = TimeDelta::Millis(30); - const TimeDelta kInterFrameDelay2 = TimeDelta::Millis(50); - webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); - - videocontenttypehelpers::SetSimulcastId(&content_type, 1); - for (int i = 0; i <= kMinRequiredSamples; ++i) { - time_controller_.AdvanceTime(kInterFrameDelay1); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), - content_type); - } - - videocontenttypehelpers::SetSimulcastId(&content_type, 2); - for (int i = 0; i <= kMinRequiredSamples; ++i) { - time_controller_.AdvanceTime(kInterFrameDelay2); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), - content_type); - } - 
FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); - - if (videocontenttypehelpers::IsScreenshare(content_type)) { - EXPECT_METRIC_EQ( - 1, metrics::NumSamples("WebRTC.Video.Screenshare.InterframeDelayInMs")); - EXPECT_METRIC_EQ(1, metrics::NumSamples( - "WebRTC.Video.Screenshare.InterframeDelayMaxInMs")); - EXPECT_METRIC_EQ(1, metrics::NumSamples( - "WebRTC.Video.Screenshare.InterframeDelayInMs.S0")); - EXPECT_METRIC_EQ(1, - metrics::NumSamples( - "WebRTC.Video.Screenshare.InterframeDelayMaxInMs.S0")); - EXPECT_METRIC_EQ(1, metrics::NumSamples( - "WebRTC.Video.Screenshare.InterframeDelayInMs.S1")); - EXPECT_METRIC_EQ(1, - metrics::NumSamples( - "WebRTC.Video.Screenshare.InterframeDelayMaxInMs.S1")); - EXPECT_METRIC_EQ( - 1, metrics::NumSamples("WebRTC.Video.Screenshare.InterframeDelayInMs" - ".ExperimentGroup0")); - EXPECT_METRIC_EQ( - 1, metrics::NumSamples("WebRTC.Video.Screenshare.InterframeDelayMaxInMs" - ".ExperimentGroup0")); - EXPECT_METRIC_EQ( - kInterFrameDelay1.ms(), - metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayInMs.S0")); - EXPECT_METRIC_EQ( - kInterFrameDelay2.ms(), - metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayInMs.S1")); - EXPECT_METRIC_EQ( - ((kInterFrameDelay1 + kInterFrameDelay2) / 2).ms(), - metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayInMs")); - EXPECT_METRIC_EQ( - kInterFrameDelay2.ms(), - metrics::MinSample("WebRTC.Video.Screenshare.InterframeDelayMaxInMs")); - EXPECT_METRIC_EQ( - ((kInterFrameDelay1 + kInterFrameDelay2) / 2).ms(), - metrics::MinSample( - "WebRTC.Video.Screenshare.InterframeDelayInMs.ExperimentGroup0")); - } else { - EXPECT_METRIC_EQ(1, - metrics::NumSamples("WebRTC.Video.InterframeDelayInMs")); - EXPECT_METRIC_EQ( - 1, metrics::NumSamples("WebRTC.Video.InterframeDelayMaxInMs")); - EXPECT_METRIC_EQ( - 1, metrics::NumSamples("WebRTC.Video.InterframeDelayInMs.S0")); - EXPECT_METRIC_EQ( - 1, metrics::NumSamples("WebRTC.Video.InterframeDelayMaxInMs.S0")); - EXPECT_METRIC_EQ( - 1, metrics::NumSamples("WebRTC.Video.InterframeDelayInMs.S1")); - EXPECT_METRIC_EQ( - 1, metrics::NumSamples("WebRTC.Video.InterframeDelayMaxInMs.S1")); - EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.InterframeDelayInMs" - ".ExperimentGroup0")); - EXPECT_METRIC_EQ(1, - metrics::NumSamples("WebRTC.Video.InterframeDelayMaxInMs" - ".ExperimentGroup0")); - EXPECT_METRIC_EQ(kInterFrameDelay1.ms(), - metrics::MinSample("WebRTC.Video.InterframeDelayInMs.S0")); - EXPECT_METRIC_EQ(kInterFrameDelay2.ms(), - metrics::MinSample("WebRTC.Video.InterframeDelayInMs.S1")); - EXPECT_METRIC_EQ((kInterFrameDelay1 + kInterFrameDelay2).ms() / 2, - metrics::MinSample("WebRTC.Video.InterframeDelayInMs")); - EXPECT_METRIC_EQ(kInterFrameDelay2.ms(), - metrics::MinSample("WebRTC.Video.InterframeDelayMaxInMs")); - EXPECT_METRIC_EQ((kInterFrameDelay1 + kInterFrameDelay2).ms() / 2, - metrics::MinSample( - "WebRTC.Video.InterframeDelayInMs.ExperimentGroup0")); - } -} - } // namespace internal } // namespace webrtc diff --git a/third_party/libwebrtc/video/render/incoming_video_stream_gn/moz.build b/third_party/libwebrtc/video/render/incoming_video_stream_gn/moz.build index 1dca41a24e5a..f450adac03fd 100644 --- a/third_party/libwebrtc/video/render/incoming_video_stream_gn/moz.build +++ b/third_party/libwebrtc/video/render/incoming_video_stream_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git 
a/third_party/libwebrtc/video/render/video_render_frames_gn/moz.build b/third_party/libwebrtc/video/render/video_render_frames_gn/moz.build index df90ccd36237..0b715f217776 100644 --- a/third_party/libwebrtc/video/render/video_render_frames_gn/moz.build +++ b/third_party/libwebrtc/video/render/video_render_frames_gn/moz.build @@ -126,6 +126,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/video/rtp_video_stream_receiver2.cc b/third_party/libwebrtc/video/rtp_video_stream_receiver2.cc index 6daa18ae30fa..faf5b3f45e1e 100644 --- a/third_party/libwebrtc/video/rtp_video_stream_receiver2.cc +++ b/third_party/libwebrtc/video/rtp_video_stream_receiver2.cc @@ -1050,6 +1050,12 @@ absl::optional RtpVideoStreamReceiver2::LastReceivedPacketMs() const { return absl::nullopt; } +absl::optional +RtpVideoStreamReceiver2::LastReceivedFrameRtpTimestamp() const { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + return last_received_rtp_timestamp_; +} + absl::optional RtpVideoStreamReceiver2::LastReceivedKeyframePacketMs() const { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); diff --git a/third_party/libwebrtc/video/rtp_video_stream_receiver2.h b/third_party/libwebrtc/video/rtp_video_stream_receiver2.h index a39e7bd006fc..10329005babc 100644 --- a/third_party/libwebrtc/video/rtp_video_stream_receiver2.h +++ b/third_party/libwebrtc/video/rtp_video_stream_receiver2.h @@ -206,6 +206,7 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, void SetProtectionPayloadTypes(int red_payload_type, int ulpfec_payload_type); absl::optional LastReceivedPacketMs() const; + absl::optional LastReceivedFrameRtpTimestamp() const; absl::optional LastReceivedKeyframePacketMs() const; // Mozilla modification: VideoReceiveStream2 and friends do not surface RTCP diff --git a/third_party/libwebrtc/video/send_delay_stats.cc b/third_party/libwebrtc/video/send_delay_stats.cc index 56c416442492..184636d4fe56 100644 --- a/third_party/libwebrtc/video/send_delay_stats.cc +++ b/third_party/libwebrtc/video/send_delay_stats.cc @@ -18,8 +18,8 @@ namespace webrtc { namespace { // Packet with a larger delay are removed and excluded from the delay stats. -// Set to larger than max histogram delay which is 10000. -const int64_t kMaxSentPacketDelayMs = 11000; +// Set to larger than max histogram delay which is 10 seconds. +constexpr TimeDelta kMaxSentPacketDelay = TimeDelta::Seconds(11); const size_t kMaxPacketMapSize = 2000; // Limit for the maximum number of streams to calculate stats for. @@ -70,25 +70,24 @@ AvgCounter* SendDelayStats::GetSendDelayCounter(uint32_t ssrc) { } void SendDelayStats::OnSendPacket(uint16_t packet_id, - int64_t capture_time_ms, + Timestamp capture_time, uint32_t ssrc) { // Packet sent to transport. MutexLock lock(&mutex_); if (ssrcs_.find(ssrc) == ssrcs_.end()) return; - int64_t now = clock_->TimeInMilliseconds(); + Timestamp now = clock_->CurrentTime(); RemoveOld(now, &packets_); if (packets_.size() > kMaxPacketMapSize) { ++num_skipped_packets_; return; } - packets_.insert( - std::make_pair(packet_id, Packet(ssrc, capture_time_ms, now))); + packets_.insert(std::make_pair(packet_id, Packet(ssrc, capture_time, now))); } -bool SendDelayStats::OnSentPacket(int packet_id, int64_t time_ms) { +bool SendDelayStats::OnSentPacket(int packet_id, Timestamp time) { // Packet leaving socket. 
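A minimal sketch, not part of the patch: the send_delay_stats.cc/.h hunks above migrate raw int64_t millisecond values to the webrtc::Timestamp and webrtc::TimeDelta unit types, so subtracting two timestamps yields a typed duration instead of a bare integer. The arithmetic that migration relies on, with made-up values (IsOlderThanCutoff/Example are hypothetical names):

// Sketch only: Timestamp - Timestamp yields a TimeDelta, which compares
// directly against the cutoff without mixing raw millisecond integers.
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"

bool IsOlderThanCutoff(webrtc::Timestamp now,
                       webrtc::Timestamp capture_time,
                       webrtc::TimeDelta max_delay) {
  return now - capture_time >= max_delay;
}

void Example() {
  constexpr webrtc::TimeDelta kMaxSentPacketDelay =
      webrtc::TimeDelta::Seconds(11);
  const webrtc::Timestamp capture = webrtc::Timestamp::Millis(1000);
  const webrtc::Timestamp now = webrtc::Timestamp::Millis(12500);
  // 11.5 s elapsed >= the 11 s cutoff, so this packet would be dropped
  // from the delay stats rather than counted.
  const bool drop = IsOlderThanCutoff(now, capture, kMaxSentPacketDelay);
  (void)drop;
}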
if (packet_id == -1) return false; @@ -100,16 +99,16 @@ bool SendDelayStats::OnSentPacket(int packet_id, int64_t time_ms) { // TODO(asapersson): Remove SendSideDelayUpdated(), use capture -> sent. // Elapsed time from send (to transport) -> sent (leaving socket). - int diff_ms = time_ms - it->second.send_time_ms; - GetSendDelayCounter(it->second.ssrc)->Add(diff_ms); + TimeDelta diff = time - it->second.send_time; + GetSendDelayCounter(it->second.ssrc)->Add(diff.ms()); packets_.erase(it); return true; } -void SendDelayStats::RemoveOld(int64_t now, PacketMap* packets) { +void SendDelayStats::RemoveOld(Timestamp now, PacketMap* packets) { while (!packets->empty()) { auto it = packets->begin(); - if (now - it->second.capture_time_ms < kMaxSentPacketDelayMs) + if (now - it->second.capture_time < kMaxSentPacketDelay) break; packets->erase(it); diff --git a/third_party/libwebrtc/video/send_delay_stats.h b/third_party/libwebrtc/video/send_delay_stats.h index fa76a1e39c71..50effe825f92 100644 --- a/third_party/libwebrtc/video/send_delay_stats.h +++ b/third_party/libwebrtc/video/send_delay_stats.h @@ -18,6 +18,7 @@ #include #include +#include "api/units/timestamp.h" #include "call/video_send_stream.h" #include "modules/include/module_common_types_public.h" #include "rtc_base/synchronization/mutex.h" @@ -43,13 +44,13 @@ class SendDelayStats : public SendPacketObserver { void AddSsrcs(const VideoSendStream::Config& config); // Called when a packet is sent (leaving socket). - bool OnSentPacket(int packet_id, int64_t time_ms); + bool OnSentPacket(int packet_id, Timestamp time); protected: // From SendPacketObserver. // Called when a packet is sent to the transport. void OnSendPacket(uint16_t packet_id, - int64_t capture_time_ms, + Timestamp capture_time, uint32_t ssrc) override; private: @@ -60,18 +61,16 @@ class SendDelayStats : public SendPacketObserver { } }; struct Packet { - Packet(uint32_t ssrc, int64_t capture_time_ms, int64_t send_time_ms) - : ssrc(ssrc), - capture_time_ms(capture_time_ms), - send_time_ms(send_time_ms) {} + Packet(uint32_t ssrc, Timestamp capture_time, Timestamp send_time) + : ssrc(ssrc), capture_time(capture_time), send_time(send_time) {} uint32_t ssrc; - int64_t capture_time_ms; - int64_t send_time_ms; + Timestamp capture_time; + Timestamp send_time; }; typedef std::map PacketMap; void UpdateHistograms(); - void RemoveOld(int64_t now, PacketMap* packets) + void RemoveOld(Timestamp now, PacketMap* packets) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); AvgCounter* GetSendDelayCounter(uint32_t ssrc) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); diff --git a/third_party/libwebrtc/video/send_delay_stats_unittest.cc b/third_party/libwebrtc/video/send_delay_stats_unittest.cc index e7481f929eb7..288bc5fd8dd5 100644 --- a/third_party/libwebrtc/video/send_delay_stats_unittest.cc +++ b/third_party/libwebrtc/video/send_delay_stats_unittest.cc @@ -24,7 +24,7 @@ const uint32_t kSsrc2 = 42; const uint32_t kRtxSsrc1 = 18; const uint32_t kRtxSsrc2 = 43; const uint16_t kPacketId = 2345; -const int64_t kMaxPacketDelayMs = 11000; +const TimeDelta kMaxPacketDelay = TimeDelta::Seconds(11); const int kMinRequiredPeriodicSamples = 5; const int kProcessIntervalMs = 2000; } // namespace @@ -50,16 +50,16 @@ class SendDelayStatsTest : public ::testing::Test { } void OnSendPacket(uint16_t id, uint32_t ssrc) { - OnSendPacket(id, ssrc, clock_.TimeInMilliseconds()); + OnSendPacket(id, ssrc, clock_.CurrentTime()); } - void OnSendPacket(uint16_t id, uint32_t ssrc, int64_t capture_ms) { + void OnSendPacket(uint16_t id, 
uint32_t ssrc, Timestamp capture) { SendPacketObserver* observer = stats_.get(); - observer->OnSendPacket(id, capture_ms, ssrc); + observer->OnSendPacket(id, capture, ssrc); } bool OnSentPacket(uint16_t id) { - return stats_->OnSentPacket(id, clock_.TimeInMilliseconds()); + return stats_->OnSentPacket(id, clock_.CurrentTime()); } SimulatedClock clock_; @@ -85,19 +85,19 @@ TEST_F(SendDelayStatsTest, SentPacketNotFoundForNonRegisteredSsrc) { TEST_F(SendDelayStatsTest, SentPacketFoundWithMaxSendDelay) { OnSendPacket(kPacketId, kSsrc1); - clock_.AdvanceTimeMilliseconds(kMaxPacketDelayMs - 1); + clock_.AdvanceTime(kMaxPacketDelay - TimeDelta::Millis(1)); OnSendPacket(kPacketId + 1, kSsrc1); // kPacketId -> not old/removed. EXPECT_TRUE(OnSentPacket(kPacketId)); // Packet found. EXPECT_TRUE(OnSentPacket(kPacketId + 1)); // Packet found. } TEST_F(SendDelayStatsTest, OldPacketsRemoved) { - const int64_t kCaptureTimeMs = clock_.TimeInMilliseconds(); - OnSendPacket(0xffffu, kSsrc1, kCaptureTimeMs); - OnSendPacket(0u, kSsrc1, kCaptureTimeMs); - OnSendPacket(1u, kSsrc1, kCaptureTimeMs + 1); - clock_.AdvanceTimeMilliseconds(kMaxPacketDelayMs); // 0xffff, 0 -> old. - OnSendPacket(2u, kSsrc1, kCaptureTimeMs + 2); + const Timestamp kCaptureTime = clock_.CurrentTime(); + OnSendPacket(0xffffu, kSsrc1, kCaptureTime); + OnSendPacket(0u, kSsrc1, kCaptureTime); + OnSendPacket(1u, kSsrc1, kCaptureTime + TimeDelta::Millis(1)); + clock_.AdvanceTime(kMaxPacketDelay); // 0xffff, 0 -> old. + OnSendPacket(2u, kSsrc1, kCaptureTime + TimeDelta::Millis(2)); EXPECT_FALSE(OnSentPacket(0xffffu)); // Old removed. EXPECT_FALSE(OnSentPacket(0u)); // Old removed. diff --git a/third_party/libwebrtc/video/send_statistics_proxy.cc b/third_party/libwebrtc/video/send_statistics_proxy.cc index fd576bb38b63..b857c0535b63 100644 --- a/third_party/libwebrtc/video/send_statistics_proxy.cc +++ b/third_party/libwebrtc/video/send_statistics_proxy.cc @@ -977,8 +977,8 @@ void SendStatisticsProxy::OnSendEncodedImage( stats->frames_encoded++; stats->total_encode_time_ms += encoded_image.timing_.encode_finish_ms - encoded_image.timing_.encode_start_ms; - if (codec_info) - stats->scalability_mode = codec_info->scalability_mode; + stats->scalability_mode = + codec_info ? codec_info->scalability_mode : absl::nullopt; // Report resolution of the top spatial layer. bool is_top_spatial_layer = codec_info == nullptr || codec_info->end_of_picture; @@ -1054,10 +1054,16 @@ void SendStatisticsProxy::OnSendEncodedImage( void SendStatisticsProxy::OnEncoderImplementationChanged( EncoderImplementation implementation) { MutexLock lock(&mutex_); - encoder_changed_ = EncoderChangeEvent{stats_.encoder_implementation_name, - implementation.name}; + encoder_changed_ = + EncoderChangeEvent{stats_.encoder_implementation_name.value_or("unknown"), + implementation.name}; stats_.encoder_implementation_name = implementation.name; stats_.power_efficient_encoder = implementation.is_hardware_accelerated; + // Clear cached scalability mode values, they may no longer be accurate. 
+ for (auto& pair : stats_.substreams) { + VideoSendStream::StreamStats& stream_stats = pair.second; + stream_stats.scalability_mode = absl::nullopt; + } } int SendStatisticsProxy::GetInputFrameRate() const { diff --git a/third_party/libwebrtc/video/transport_adapter.cc b/third_party/libwebrtc/video/transport_adapter.cc index 5d6ccc881944..8222f7abb2e9 100644 --- a/third_party/libwebrtc/video/transport_adapter.cc +++ b/third_party/libwebrtc/video/transport_adapter.cc @@ -22,20 +22,19 @@ TransportAdapter::TransportAdapter(Transport* transport) TransportAdapter::~TransportAdapter() = default; -bool TransportAdapter::SendRtp(const uint8_t* packet, - size_t length, +bool TransportAdapter::SendRtp(rtc::ArrayView packet, const PacketOptions& options) { if (!enabled_.load()) return false; - return transport_->SendRtp(packet, length, options); + return transport_->SendRtp(packet, options); } -bool TransportAdapter::SendRtcp(const uint8_t* packet, size_t length) { +bool TransportAdapter::SendRtcp(rtc::ArrayView packet) { if (!enabled_.load()) return false; - return transport_->SendRtcp(packet, length); + return transport_->SendRtcp(packet); } void TransportAdapter::Enable() { diff --git a/third_party/libwebrtc/video/transport_adapter.h b/third_party/libwebrtc/video/transport_adapter.h index 95dd308601d2..a1b6995ee534 100644 --- a/third_party/libwebrtc/video/transport_adapter.h +++ b/third_party/libwebrtc/video/transport_adapter.h @@ -25,10 +25,9 @@ class TransportAdapter : public Transport { explicit TransportAdapter(Transport* transport); ~TransportAdapter() override; - bool SendRtp(const uint8_t* packet, - size_t length, + bool SendRtp(rtc::ArrayView packet, const PacketOptions& options) override; - bool SendRtcp(const uint8_t* packet, size_t length) override; + bool SendRtcp(rtc::ArrayView packet) override; void Enable(); void Disable(); diff --git a/third_party/libwebrtc/video/video_receive_stream2.cc b/third_party/libwebrtc/video/video_receive_stream2.cc index c6e8252135bd..808cb932fe25 100644 --- a/third_party/libwebrtc/video/video_receive_stream2.cc +++ b/third_party/libwebrtc/video/video_receive_stream2.cc @@ -832,8 +832,13 @@ void VideoReceiveStream2::OnDecodableFrameTimeout(TimeDelta wait) { if (stream_is_active && !IsReceivingKeyFrame(now) && (!config_.crypto_options.sframe.require_frame_encryption || rtp_video_stream_receiver_.IsDecryptable())) { + absl::optional last_timestamp = + rtp_video_stream_receiver_.LastReceivedFrameRtpTimestamp(); RTC_LOG(LS_WARNING) << "No decodable frame in " << wait - << ", requesting keyframe."; + << " requesting keyframe. Last RTP timestamp " + << (last_timestamp ? rtc::ToString(*last_timestamp) + : "") + << "."; RequestKeyFrame(now); } diff --git a/third_party/libwebrtc/video/video_stream_encoder.cc b/third_party/libwebrtc/video/video_stream_encoder.cc index eee99f41de18..0782a5f5f27d 100644 --- a/third_party/libwebrtc/video/video_stream_encoder.cc +++ b/third_party/libwebrtc/video/video_stream_encoder.cc @@ -1106,8 +1106,7 @@ void VideoStreamEncoder::ReconfigureEncoder() { // or/and can be provided by encoder. In presence of both set of // limits, the final set is derived as their intersection. 
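A minimal sketch, not part of the patch: the ReconfigureEncoder comment above describes intersecting the encoder-recommended bitrate limits with the application-provided ones, keeping the tighter bound on each side, which is what the hunk below implements via api_max_bitrate_bps. The same idea as a standalone helper (BitrateRangeBps/IntersectLimits are hypothetical names):

// Sketch only: intersect the encoder's recommended range with optional
// app-provided bounds; a non-overlapping result (min > max) is left for
// the caller to detect, as in the diff.
#include <algorithm>
#include "absl/types/optional.h"

struct BitrateRangeBps {
  int min_bps = 0;
  int max_bps = 0;
};

BitrateRangeBps IntersectLimits(BitrateRangeBps encoder_limits,
                                absl::optional<int> app_min_bps,
                                absl::optional<int> app_max_bps) {
  BitrateRangeBps result = encoder_limits;
  if (app_min_bps.has_value()) {
    result.min_bps = std::max(result.min_bps, *app_min_bps);
  }
  if (app_max_bps.has_value()) {
    result.max_bps = std::min(result.max_bps, *app_max_bps);
  }
  return result;
}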
int min_bitrate_bps; - if (encoder_config_.simulcast_layers.empty() || - encoder_config_.simulcast_layers[0].min_bitrate_bps <= 0) { + if (encoder_config_.simulcast_layers[0].min_bitrate_bps <= 0) { min_bitrate_bps = encoder_bitrate_limits->min_bitrate_bps; } else { min_bitrate_bps = std::max(encoder_bitrate_limits->min_bitrate_bps, @@ -1115,10 +1114,20 @@ void VideoStreamEncoder::ReconfigureEncoder() { } int max_bitrate_bps; - // We don't check encoder_config_.simulcast_layers[0].max_bitrate_bps - // here since encoder_config_.max_bitrate_bps is derived from it (as - // well as from other inputs). - if (encoder_config_.max_bitrate_bps <= 0) { + // The API max bitrate comes from both `encoder_config_.max_bitrate_bps` + // and `encoder_config_.simulcast_layers[0].max_bitrate_bps`. + absl::optional api_max_bitrate_bps; + if (encoder_config_.simulcast_layers[0].max_bitrate_bps > 0) { + api_max_bitrate_bps = + encoder_config_.simulcast_layers[0].max_bitrate_bps; + } + if (encoder_config_.max_bitrate_bps > 0) { + api_max_bitrate_bps = api_max_bitrate_bps.has_value() + ? std::min(encoder_config_.max_bitrate_bps, + *api_max_bitrate_bps) + : encoder_config_.max_bitrate_bps; + } + if (!api_max_bitrate_bps.has_value()) { max_bitrate_bps = encoder_bitrate_limits->max_bitrate_bps; } else { max_bitrate_bps = std::min(encoder_bitrate_limits->max_bitrate_bps, @@ -1138,7 +1147,7 @@ void VideoStreamEncoder::ReconfigureEncoder() { << ", max=" << encoder_bitrate_limits->max_bitrate_bps << ") do not intersect with limits set by app" << " (min=" << streams.back().min_bitrate_bps - << ", max=" << encoder_config_.max_bitrate_bps + << ", max=" << api_max_bitrate_bps.value_or(-1) << "). The app bitrate limits will be used."; } } @@ -2125,27 +2134,12 @@ EncodedImage VideoStreamEncoder::AugmentEncodedImage( .Parse(codec_type, stream_idx, image_copy.data(), image_copy.size()) .value_or(-1); } - RTC_LOG(LS_VERBOSE) << __func__ << " stream_idx " << stream_idx << " qp " + RTC_LOG(LS_VERBOSE) << __func__ << " ntp time " << encoded_image.NtpTimeMs() + << " stream_idx " << stream_idx << " qp " << image_copy.qp_; image_copy.SetAtTargetQuality(codec_type == kVideoCodecVP8 && image_copy.qp_ <= kVp8SteadyStateQpThreshold); - // Piggyback ALR experiment group id and simulcast id into the content type. - const uint8_t experiment_id = - experiment_groups_[videocontenttypehelpers::IsScreenshare( - image_copy.content_type_)]; - - // TODO(ilnik): This will force content type extension to be present even - // for realtime video. At the expense of miniscule overhead we will get - // sliced receive statistics. - RTC_CHECK(videocontenttypehelpers::SetExperimentId(&image_copy.content_type_, - experiment_id)); - // We count simulcast streams from 1 on the wire. That's why we set simulcast - // id in content type to +1 of that is actual simulcast index. This is because - // value 0 on the wire is reserved for 'no simulcast stream specified'. 
- RTC_CHECK(videocontenttypehelpers::SetSimulcastId( - &image_copy.content_type_, static_cast(stream_idx + 1))); - return image_copy; } diff --git a/third_party/libwebrtc/video/video_stream_encoder_interface_gn/moz.build b/third_party/libwebrtc/video/video_stream_encoder_interface_gn/moz.build index 9fd417f96be9..28d66f99d8dc 100644 --- a/third_party/libwebrtc/video/video_stream_encoder_interface_gn/moz.build +++ b/third_party/libwebrtc/video/video_stream_encoder_interface_gn/moz.build @@ -122,6 +122,9 @@ if CONFIG["OS_TARGET"] == "WINNT": DEFINES["__STD_C"] = True OS_LIBS += [ + "crypt32", + "iphlpapi", + "secur32", "winmm" ] diff --git a/third_party/libwebrtc/webrtc.gni b/third_party/libwebrtc/webrtc.gni index e30bc6b9bbda..0a7b56a3033e 100644 --- a/third_party/libwebrtc/webrtc.gni +++ b/third_party/libwebrtc/webrtc.gni @@ -59,6 +59,10 @@ declare_args() { } declare_args() { + # If set to true, C++ code will refer to the new JNI Generator symbols. + # If set to false the old ones will be used (to provide a nice update path). + rtc_jni_generator_legacy_symbols = false + # Setting this to true, will make RTC_DLOG() expand to log statements instead # of being removed by the preprocessor. # This is useful for example to be able to get RTC_DLOGs on a release build. @@ -211,9 +215,8 @@ declare_args() { rtc_apprtcmobile_broadcast_extension = false } - # Determines whether OpenGL is available on iOS/macOS. - rtc_ios_macos_use_opengl_rendering = - !(is_ios && target_environment == "catalyst") + # Determines whether OpenGL is available on iOS. + rtc_ios_use_opengl_rendering = is_ios && target_environment != "catalyst" # When set to false, builtin audio encoder/decoder factories and all the # audio codecs they depend on will not be included in libwebrtc.{a|lib} @@ -247,6 +250,11 @@ declare_args() { # "warn": RTC_LOGs a message with LS_WARNING severity if the field trial # hasn't been registered. rtc_strict_field_trials = "" + + # If different from "", symbols exported with RTC_OBJC_EXPORT will be prefixed + # with this string. + # See the definition of RTC_OBJC_TYPE_PREFIX in the code. + rtc_objc_prefix = "" } if (!build_with_mozilla) { @@ -362,7 +370,10 @@ webrtc_root = get_path_info(".", "abspath") rtc_common_configs = [ webrtc_root + ":common_config" ] if (is_mac || is_ios) { - rtc_common_configs += [ "//build/config/compiler:enable_arc" ] + if (filter_include(default_compiler_configs, + [ "//build/config/compiler:enable_arc" ]) == []) { + rtc_common_configs += [ "//build/config/compiler:enable_arc" ] + } } # Global public configuration that should be applied to all WebRTC targets. You